def _find_cache_data(self, shots, steps=None, tasks=None, assets=None,
                     hide_omitted=None, stale_only=None, progress=False,
                     force=False, dialog=None, verbose=0):
    """Search cache data.

    Args:
        shots (TTShotRoot list): shots to check
        steps (str list): return only data with these steps
        tasks (str list): return only data with these tasks
        assets (TTAssetOutputName list): return only data with these
            asset names
        hide_omitted (bool): ignore omitted caches
        stale_only (bool): ignore caches that used the latest rig/asset
        progress (bool): show progress on read shots
        force (bool): force reread data from shotgun
        dialog (QDialog): parent dialog (for progress bars)
        verbose (int): print process data

    Returns:
        (dict list): filtered cache data
    """
    _hide_omitted = (
        self.hide_omitted if hide_omitted is None else hide_omitted)
    _stale_only = (
        self.stale_only if stale_only is None else stale_only)
    if verbose:
        print 'READING CACHE DATA'
        print 'TASKS', tasks
        print 'ASSETS', assets
        print 'STALE ONLY', _stale_only
        print 'HIDE OMITTED', _hide_omitted

    _cache_data = []
    _pos = dialog.ui.get_c() if dialog else None
    for _shot in qt.ProgressBar(
            shots, 'Reading {:d} shots', col='SeaGreen', show=progress,
            pos=_pos):

        for _data in _shot.read_cache_data(force=force):

            _cache = _data['cache']
            _asset_ver = _data['asset_ver']
            _asset = _asset_ver.get_name()

            if _hide_omitted and _data['sg_status_list'] == 'omt':
                lprint(' - OMITTED REJECT', _cache, verbose=verbose)
                continue
            if tasks is not None and _cache.task not in tasks:
                lprint(' - TASK REJECT', _cache, verbose=verbose)
                continue
            if steps is not None and _cache.step not in steps:
                lprint(' - STEP REJECT', _cache, verbose=verbose)
                continue
            if assets is not None and _asset not in assets:
                lprint(' - ASSET REJECT', _cache, verbose=verbose)
                continue
            if _stale_only and _asset_ver.is_latest():
                lprint(' - NOT STALE REJECT', _cache, verbose=verbose)
                continue

            lprint(' - ACCEPTED', _cache, _data['origin_scene'],
                   verbose=verbose)
            _cache_data.append(_data)

    return _cache_data
def _clean_leftover_modules(force=False, verbose=0):
    """Clean unused tk modules from sys.modules dict.

    Args:
        force (bool): remove leftover libs with no confirmation
        verbose (int): print process data
    """
    _engine = tank.platform.current_engine()

    # Find leftover modules
    _to_delete = []
    for _app_name in _engine.apps:

        _other_name = _get_app_other_name(_app_name)
        _app = _engine.apps[_app_name]
        _id = _app._TankBundle__module_uid
        if not _id:
            lprint('MISSING ID', _app_name, verbose=verbose > 1)
            continue
        lprint(_app_name, verbose=verbose)
        lprint(' -', _other_name, verbose=verbose > 1)
        lprint(' -', _id, verbose=verbose > 1)

        for _mod in refresh.find_mods():

            if (
                    _app_name not in _mod.__file__ and
                    _other_name not in _mod.__file__):
                continue
            if not _mod.__name__.startswith('tkimp'):
                continue
            if not _mod.__name__.startswith(_id):
                lprint(' - DELETE', _mod, verbose=verbose > 1)
                _to_delete.append(_mod.__name__)
                continue

            _name = '.'.join(_mod.__name__.split('.')[1:])
            lprint(
                ' - {:90} {}'.format(_name, abs_path(_mod.__file__)),
                verbose=verbose)

        lprint(verbose=verbose)

    # Remove modules
    if _to_delete:
        if not force:
            qt.ok_cancel(
                'Delete {:d} leftover modules?'.format(len(_to_delete)))
        for _mod_name in _to_delete:
            del sys.modules[_mod_name]
    else:
        print 'Nothing to clean'
def prepareForDraw(self, obj, cam, frame_context, data, verbose=0):
    """Retrieve data cache (create if does not exist).

    Args:
        obj (MDagPath): path to object being drawn
        cam (MDagPath): path to viewport camera
        frame_context (MFrameContext): frame context
        data (MeshXRayerData): previous data
        verbose (int): print process data

    Returns:
        (MeshXRayerData): node data
    """
    lprint('PREPARE FOR DRAW', verbose=verbose)

    _data = data
    if not isinstance(_data, MeshXRayerData):
        _data = MeshXRayerData()
    else:
        lprint(' - USING EXISTING DATA', _data, verbose=verbose)
    lprint(' - DATA', _data, verbose=verbose)

    # Read in_mesh plug
    lprint(' - OBJ', obj, verbose=verbose)
    _node = obj.node()
    _in_mesh_plug = om.MPlug(_node, MeshXRayer.in_mesh)
    lprint(' - IN MESH PLUG', _in_mesh_plug, verbose=verbose)
    _data.mesh_tris.clear()
    if _in_mesh_plug.isNull:
        return None
    if _in_mesh_plug.asMDataHandle().type() != om.MFnData.kMesh:
        return None
    _in_mesh_handle = _in_mesh_plug.asMDataHandle().asMesh()
    _in_mesh = om.MFnMesh(_in_mesh_handle)

    # Read mesh triangles
    _mesh_pts = _in_mesh.getPoints()
    for _poly_id in range(_in_mesh.numPolygons):
        _vtx_ids = _in_mesh.getPolygonVertices(_poly_id)
        for _vtx_id in _vtx_ids[:3]:
            _data.mesh_tris.append(_mesh_pts[_vtx_id])
    lprint(' - IN MESH', _in_mesh, len(_in_mesh.getPoints()),
           verbose=verbose)

    # Read col/hide_angle + draw toggles
    _col_plug = om.MPlug(_node, MeshXRayer.color)
    _data.color = om.MColor(
        [_col_plug.child(_idx).asFloat() for _idx in range(3)])
    _data.hide_angle = om.MPlug(_node, MeshXRayer.hide_angle).asFloat()
    _data.draw_control = om.MPlug(_node, MeshXRayer.draw_control).asBool()
    _data.draw_mesh = om.MPlug(_node, MeshXRayer.draw_mesh).asBool()

    _obj_pos = hom.HMatrix(obj.inclusiveMatrix()).pos()
    _cam_pos = hom.HMatrix(cam.inclusiveMatrix()).pos()
    _data.mesh_to_cam = _cam_pos - _obj_pos

    return _data
def apply_fk_to_ik(self, pole_vect_depth=30.0, apply_=True,
                   build_tmp_geo=False, verbose=0):
    """Apply fk to ik.

    First the pole vector position is calculated by extending a line from
    the elbow/knee control (fk_ctrls[1]) in the direction of the cross
    product of the limb vector (fk_ctrls[0] to fk_ctrls[2]) and the limb
    bend.

    The ik control is then moved to the position of the fk_ctrls[2]
    control.

    The arm/knee offset is reset on apply.

    Args:
        pole_vect_depth (float): distance of pole vector from fk_ctrls[1]
        apply_ (bool): apply the update to gimbal ctrl
        build_tmp_geo (bool): build tmp geo
        verbose (int): print process data
    """
    lprint('APPLYING FK -> IK', verbose=verbose)

    # Reset offset
    for _offs in self.ik_offs:
        cmds.setAttr(_offs, 0)

    # Calculate pole pos
    _limb_v = hom.get_p(self.fk_ctrls[2]) - hom.get_p(self.fk_ctrls[0])
    if self.limb is Limb.ARM:
        _limb_bend = -hom.get_m(self.fk_ctrls[1]).lz_().normalized()
    elif self.limb is Limb.LEG:
        _limb_bend = hom.get_m(self.fk_ctrls[1]).lz_().normalized()
    else:
        raise ValueError(self.limb)
    _pole_dir = -(_limb_v ^ _limb_bend).normalized()
    _pole_p = hom.get_p(self.fk_ctrls[1]) + _pole_dir * pole_vect_depth
    lprint(' - APPLYING POLE', self.ik_pole, verbose=verbose)

    # Read fk3 mtx
    _ik_mtx = hom.get_m(self.fk_ctrls[2])
    _diff = None
    if self.side is Side.LEFT and self.limb is Limb.ARM:
        _offs = hom.HEulerRotation(math.pi / 2, math.pi, 0)
    elif self.side is Side.LEFT and self.limb is Limb.LEG:
        _offs = hom.HEulerRotation(0, math.pi / 2, -math.pi / 2)
    elif self.side is Side.RIGHT and self.limb is Limb.ARM:
        _offs = hom.HEulerRotation(-math.pi / 2, math.pi, 0)
    elif self.side is Side.RIGHT and self.limb is Limb.LEG:
        _offs = hom.HEulerRotation(0, math.pi / 2, math.pi / 2)
    else:
        raise ValueError(self.side, self.limb)
    _ik_mtx = _offs.as_mtx() * _ik_mtx

    # Apply vals to ik ctrls
    if apply_:
        _ik_mtx.apply_to(self.ik_)
        if _diff:
            print 'APPLY DIFF', _diff
            _diff.apply_to(self.ik_, relative=True)
        _pole_p.apply_to(self.ik_pole, use_constraint=True)
        self.set_to_ik()
        lprint('SET', self.ik_, 'TO IK', verbose=verbose)

    if build_tmp_geo:
        set_namespace(":tmp", clean=True)
        _limb_v.build_crv(hom.get_p(self.fk_ctrls[0]), name='limb_v')
        _limb_bend.build_crv(hom.get_p(self.fk_ctrls[1]), name='limb_bend')
        _pole_dir.build_crv(hom.get_p(self.fk_ctrls[1]), name='pole_dir')
        _pole_p.build_loc(name='pole')
        _ik_mtx.build_geo(name='trg_ik')
        hom.get_m(self.ik_).build_geo(name='cur_ik')
        set_namespace(":")
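# A minimal standalone sketch of the pole vector maths described above,
# using plain tuples rather than the hom vector classes. The positions and
# depth value are illustrative assumptions - only the
# cross-product/negate/normalise/extend recipe mirrors apply_fk_to_ik.
def _example_cross(vec_a, vec_b):
    """Cross product of two 3d vectors (as tuples)."""
    return (vec_a[1] * vec_b[2] - vec_a[2] * vec_b[1],
            vec_a[2] * vec_b[0] - vec_a[0] * vec_b[2],
            vec_a[0] * vec_b[1] - vec_a[1] * vec_b[0])


def _example_normalized(vec):
    """Normalise a 3d vector (as a tuple)."""
    _mag = (vec[0] ** 2 + vec[1] ** 2 + vec[2] ** 2) ** 0.5
    return (vec[0] / _mag, vec[1] / _mag, vec[2] / _mag)


def _example_calc_pole_pos(fk1, fk2, fk3, limb_bend, depth=30.0):
    """Calculate a pole vector position from fk control positions.

    Args:
        fk1 (tuple): shoulder/hip position
        fk2 (tuple): elbow/knee position
        fk3 (tuple): wrist/ankle position
        limb_bend (tuple): bend direction read from the elbow/knee ctrl
        depth (float): distance to push the pole away from fk2

    Returns:
        (tuple): pole vector position
    """
    _limb_v = tuple(_c - _a for _a, _c in zip(fk1, fk3))
    _cross = _example_cross(_limb_v, limb_bend)
    _pole_dir = _example_normalized(tuple(-_val for _val in _cross))
    return tuple(_p + _d * depth for _p, _d in zip(fk2, _pole_dir))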
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): driven shade geo (HFnMesh list), nodes to clean up
    """
    from psyhive import tk2

    # Get anim rig
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print 'CACHE SET', _cache_set
    _rig = ref.find_ref(_cache_set.namespace().strip(':'))
    print 'RIG', _rig
    print 'RIG PATH', _rig.path

    # Find/import tmp shade asset
    _rig_out = tk2.TTOutputName(_rig.path)
    print 'RIG OUT', _rig_out.path
    _shade_out = _rig_out.map_to(
        Step='shade', output_type='shadegeo', Task='shade').find_latest()
    print 'SHADE OUT', _shade_out.path
    if not _shade_out.exists():
        raise RuntimeError("Missing shade file " + _shade_out.path)
    _shade_file = _shade_out.find_file(extn='mb', format_='maya')
    print 'SHADE FILE', _shade_file
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _cleanup = []
    set_namespace(':tmp_{}'.format(_rig.namespace), clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)
        _cleanup.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)

    return _bake_geo, _cleanup
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (HFnMesh list): list of driven shade geo
    """
    print 'DRIVE SHADE GEO FROM RIG'

    # Get anim rig
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print ' - CACHE SET', _cache_set
    if not _cache_set.referenceFile():
        print ' - NO CORRESPONDING RIG'
        raise RuntimeError("No rig found for {}".format(_cache_set))
    _rig = ref.find_ref(_cache_set.referenceFile().namespace)
    print ' - RIG', _rig
    print ' - RIG PATH', _rig.path

    # Find/import tmp shade asset
    _shade_file = get_shade_mb_for_rig(_rig)
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _tmp_ns = ':tmp_{}'.format(_rig.namespace)
    set_namespace(_tmp_ns, clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)
    cmds.namespace(set=":")

    if not _bake_geo:
        del_namespace(_tmp_ns)
        raise RuntimeError('No geo was attached - this means none of the '
                           'shade geo matched the rig bakeSet geo.')

    return _bake_geo, _bake_geo
def check_current_scene(show_dialog=True, verbose=1):
    """Check current scene for ingestion issues.

    Args:
        show_dialog (bool): show status dialog on completion
        verbose (int): print process data

    Returns:
        (str list): list of issues with current file
    """
    _file = File(host.cur_scene())
    _issues = []
    lprint('FILE', _file, verbose=verbose)
    lprint(' - BASENAME', _file.basename, verbose=verbose)

    # Check current scene filename
    _issues += _find_scene_name_issues(_file)

    # Check maya version
    _ver = int(cmds.about(version=True))
    if _ver != 2018:
        _issues.append('Bad maya version {:d}'.format(_ver))

    # Check for unwanted node types
    for _type in ['displayLayer', 'renderLayer']:
        _lyrs = [
            _lyr for _lyr in cmds.ls(type=_type)
            if _lyr not in DEFAULT_NODES
            if not cmds.referenceQuery(_lyr, isNodeReferenced=True)]
        if _lyrs:
            _issues.append('Scene has {} layers: {}'.format(
                _type.replace("Layer", ""), ', '.join(_lyrs)))
    for _type in ['unknown']:
        _nodes = [
            _node for _node in cmds.ls(type=_type)
            if _node not in DEFAULT_NODES
            if not cmds.referenceQuery(_node, isNodeReferenced=True)]
        if _nodes:
            _issues.append('Scene has {} nodes: {}'.format(
                _type, ', '.join(_nodes)))

    # Check references
    _refs = ref.find_refs(unloaded=False)
    lprint('CHECKING {:d} REFS'.format(len(_refs)), verbose=verbose)
    for _ref in _refs:
        lprint(' - CHECKING', _ref, verbose=verbose)
        _issues += _find_ref_issues(_ref)

    # Print summary
    if verbose:
        print '\nSUMMARY: FOUND {:d} ISSUE{}'.format(
            len(_issues), get_plural(_issues).upper())
        for _idx, _issue in enumerate(_issues):
            print ' {:5} {}'.format('[{:d}]'.format(_idx + 1), _issue)
        print

    if not show_dialog:
        pass
    elif not _issues:
        qt.notify(
            'No issues found.\n\nFile is ready to send to psyop.',
            verbose=0)
    else:
        qt.notify_warning(
            'This file has {:d} issue{}.\n\nCheck the script editor for '
            'details.'.format(len(_issues), get_plural(_issues)),
            verbose=0)

    return _issues
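def _example_check_scene():
    """Hedged usage sketch (not part of the original module).

    Shows the intended batch-mode call pattern for the check above,
    reading the summary from the return value rather than popping up a
    dialog. Assumes a running maya session with a scene open.
    """
    _issues = check_current_scene(show_dialog=False, verbose=0)
    if _issues:
        print 'SCENE NOT READY TO INGEST - {:d} issue(s) found'.format(
            len(_issues))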
def ingest_ma_files_to_pipeline(
        src_dir=('P:/projects/frasier_38732V/production/vendor_in/'
                 'Motion Burner/Delivery_2020-02-12'),
        ma_filter='', work_filter='', replace=False, blast_=False,
        legs_to_ik=False, reverse=False, limit=0, verbose=0):
    """Copy ma files from vendor_in to the psyop pipeline.

    This creates a work file for each ma file and also generates face/body
    blasts.

    Args:
        src_dir (str): vendor in directory to search for ma files
        ma_filter (str): apply filter to ma file path
        work_filter (str): apply filter to work file path
        replace (bool): overwrite existing files
        blast_ (bool): execute blasts
        legs_to_ik (bool): execute legs ik switch (slow)
        reverse (bool): reverse the list (for parallel processing)
        limit (int): limit the number of files to be processed
        verbose (int): print process data
    """
    _src_dir = abs_path(src_dir)
    print 'SRC DIR', _src_dir
    assert os.path.exists(_src_dir)
    _mas = _find_ma_files_to_check(_src_dir, ma_filter, work_filter, limit)

    # Check which mas need processing
    _to_process = []
    _overwrites = []
    _replacing = []
    for _idx, _ma in qt.progress_bar(
            enumerate(_mas), 'Checking {:d} ma files'):

        lprint('PROCESSING MA {:d}/{:d} {}\n - MA {}'.format(
            _idx + 1, len(_mas), _ma.filename, _ma.path), verbose=verbose)

        # Check for overwrite
        _work = _ma.get_work()
        lprint(' - WORK', _work.path, verbose=verbose)
        if _work.exists():
            _vendor_file = _work.get_vendor_file()
            if replace or _ma.path != _vendor_file:
                _overwrites.append((_ma, _work))
                if len(_work.find_vers()) > 1:
                    _replacing.append((_ma, _work))
            elif (_work.blast_comp.exists() and
                  _work.get_export_fbx().exists() and
                  _work.get_export_fbx(dated=True).exists()):
                print ' - COMP BLAST', _work.blast_comp.path
                print ' - FBX', _work.get_export_fbx().path
                print ' - NO PROCESSING NEEDED'
                print
                continue

        _to_process.append([_ma, _work])

    print
    print 'FOUND {:d} FILES TO PROCESS'.format(len(_to_process))
    print

    # Remove any data to be replaced
    if _replacing:
        _text = 'Replacing {:d} files:\n\n'.format(len(_replacing))
        for _ma, _work in _replacing:
            _text += '\n - MA {}\n - WORK {}\n\n'.format(_ma.path, _work.path)
        qt.ok_cancel(_text)
    if _overwrites:
        _remove_existing_data(_overwrites)

    # Execute the ingestion
    if not _to_process:
        return
    qt.ok_cancel(
        'Ingest {:d} files?'.format(len(_to_process)), icon=ICON)
    if reverse:
        _to_process = reversed(_to_process)
    for _ma, _work in qt.progress_bar(
            _to_process, 'Ingesting {:d} ma{}', col='LightSkyBlue'):
        _ingest_vendor_ma(
            ma_=_ma, work=_work, blast_=blast_, legs_to_ik=legs_to_ik)
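def _example_ingest_batch():
    """Hedged usage sketch (not part of the original module).

    Shows the intended call pattern for ingesting a filtered subset of a
    delivery; the filter string and limit are illustrative assumptions and
    the default src_dir is used.
    """
    ingest_ma_files_to_pipeline(
        ma_filter='Char01', blast_=True, limit=5, verbose=1)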
def _get_abc_range_from_sg(abc, mode='shot', verbose=0):
    """Read abc frame range from shotgun.

    Args:
        abc (str): path to abc file
        mode (str): where to get range from
            abc - read bake range of abc
            shot - read cut in/out range of shot
        verbose (int): print process data

    Returns:
        (tuple|None): frame range (if any)
    """
    _out = tk2.get_output(abc)
    if not _out:
        lprint('NO OUTPUT', abc, verbose=verbose)
        return None

    _shotgun = tank.platform.current_engine().shotgun
    _project = pipe.cur_project()

    if mode == 'abc':
        _sg_data = get_single(
            _shotgun.find(
                "PublishedFile",
                filters=[
                    ["project", "is", [tk2.get_project_sg_data(_project)]],
                    ["entity", "is", [tk2.get_shot_sg_data(_out.shot)]],
                    ["sg_format", "is", 'alembic'],
                    ["sg_component_name", "is", _out.output_name],
                    ["version_number", "is", _out.version],
                ],
                fields=["code", "name", "sg_status_list", "sg_metadata",
                        "path"]))
        _data = eval(_sg_data['sg_metadata'])
        _result = _data['start_frame'], _data['end_frame']

    elif mode == 'shot':
        _shot = tk2.get_shot(_out.path)
        if not _shot:
            return None
        _data = get_single(
            tank.platform.current_engine().shotgun.find(
                'Shot',
                filters=[
                    ["project", "is", [tk2.get_project_sg_data(_project)]],
                    ["code", "is", [_shot.get_sg_data()['name']]]],
                fields=["sg_cut_in", "sg_cut_out"]),
            catch=True)
        if verbose:
            print 'SHOT DATA', _shot.get_sg_data()
        if (_data and
                _data.get('sg_cut_in') is not None and
                _data.get('sg_cut_out') is not None):
            _result = _data['sg_cut_in'], _data['sg_cut_out']
        else:
            _result = None

    else:
        raise ValueError(mode)

    if verbose:
        pprint.pprint(_data)

    return _result
def submit(self, local=None, submit=True, modules=None, verbose=1):
    """Submit this job to qube.

    Args:
        local (bool): prepare job for local execute
        submit (bool): submit to qube
        modules (mod list): modules to add to sys.path in local mode
        verbose (int): print process data
    """
    _local = local or os.environ.get('PSYHIVE_FARM_LOCAL_SUBMIT')
    _uid = self.uid or _get_uid()
    _tmp_dir = _get_tmp_dir(uid=_uid)
    _tmp_fmt = '{}/task.{{}}.py'.format(_tmp_dir)
    _work = tk.cur_work()

    # Create job
    _label = '{}: {}'.format(pipe.cur_project().name, self.label)
    _job = Job(label=_label)
    _job.worker = "psyhive_mayapy"
    _job.fixture.environ = _get_job_environ(local=_local)
    _job.payload = {
        'app_version': _get_app_version(),
        'py_dir': _tmp_dir}
    _job.extra['qube.cluster'] = "/3D/{}".format(pipe.cur_project().name)

    # Setup job for local execute
    if _local:
        _job.extra['qube.reservations'] = (
            "global_host.qube=1,host.processors={procs:d}".format(
                procs=self.procs))
        _job.extra['qube.hosts'] = os.getenv('COMPUTERNAME')
        _job.extra['qube.groups'] = ""
        _job.extra['qube.restrictions'] = ""
        _job.extra['qube.cluster'] = ""
        _mods = (modules or []) + [psyhive]
        for _mod in _mods:
            _dir = os.path.dirname(os.path.dirname(_mod.__file__))
            _path = abs_path(_dir).replace('/', u'\\')
            _job.fixture.environ['PYTHONPATH'] += ';{}'.format(_path)

    # Add tasks
    lprint('TMP FMT', _tmp_fmt, verbose=verbose)
    for _idx, _task in enumerate(self.tasks):

        # Write file to disk
        _n_str = '{:04d}'.format(_idx + 1)
        _tmp_py = _tmp_fmt.format(_n_str)
        write_file(file_=_tmp_py, text=_task.get_py(tmp_py=_tmp_py))
        lprint(' -', _tmp_py, verbose=verbose)
        _payload = {'pyfile': _tmp_py}

        # Create work item
        _work_item = WorkItem(label=_task.label, payload=_payload)
        _job.work_items.append(_work_item)

    # Submit
    _job_graph = JobGraph()
    _job_graph.add_job(_job)
    _submitter = QubeSubmitter()
    if submit:
        _result = _submitter.submit(_job_graph)
        lprint('RESULT', _result, verbose=verbose > 1)
def _cache_yetis(yetis, apply_on_complete=False, samples=3, verbose=0):
    """Cache a list of yeti nodes.

    Args:
        yetis (HFnDependencyNode list): nodes to cache
        apply_on_complete (bool): apply cache on completion
        samples (int): samples per frame
        verbose (int): print process data
    """
    from . import yeti_ui

    print 'CACHE YETIS', yetis
    _work = tk2.cur_work()
    _yetis, _outs, _namespaces = _prepare_yetis_and_outputs(
        yetis=yetis, work=_work)

    # Get cache path - if there are multiple namespaces, cache to tmp
    _tmp_fmt = abs_path('{}/yetiTmp/<NAME>.%04d.cache'.format(
        tempfile.gettempdir()))
    if len(_yetis) > 1:
        _cache_path = _tmp_fmt
        _tmp_dir = Dir(os.path.dirname(_tmp_fmt))
        _tmp_dir.delete(force=True)
        _tmp_dir.test_path()
    else:
        assert len(_outs) == 1
        _cache_path = _outs[0].path
    print "CACHE PATH", _cache_path

    # Generate caches
    dprint('GENERATING CACHES', _cache_path)
    print ' - SAMPLES', samples
    for _yeti in _yetis:
        _yeti.plug('cacheFileName').set_val('')
        _yeti.plug('fileMode').set_val(0)
        _yeti.plug('overrideCacheWithInputs').set_val(False)
    cmds.select(_yetis)
    cmds.pgYetiCommand(
        writeCache=_cache_path, range=host.t_range(), samples=samples)
    dprint('GENERATED CACHES', _cache_path)

    # Move tmp caches to outputs
    if len(_yetis) > 1:
        dprint('MOVING CACHES FROM TMP')
        for _yeti, _out in safe_zip(_yetis, _outs):
            print ' - MOVING', _out.path
            _name = str(_yeti).replace(":", "_")
            _tmp_seq = Seq(_tmp_fmt.replace('<NAME>', _name))
            for _frame, _tmp_path in safe_zip(
                    _tmp_seq.get_frames(), _tmp_seq.get_paths()):
                lprint(' -', _frame, _tmp_path, verbose=verbose)
                shutil.move(_tmp_path, _out[_frame])

    # Apply cache to yeti nodes
    if apply_on_complete:
        dprint('APPLYING CACHES TO YETIS')
        for _yeti, _cache in safe_zip(_yetis, _outs):
            apply_cache(cache=_cache, yeti=_yeti)

    qt.notify(
        'Cached {:d} yeti node{}.\n\nSee script editor for details.'.format(
            len(_yetis), get_plural(_yetis)),
        title='Cache complete', icon=yeti_ui.ICON, parent=yeti_ui.DIALOG)

    return _outs
def blast(seq, range_=None, res=None, force=False, cam=None, view=False,
          verbose=0):
    """Execute a playblast.

    Args:
        seq (Seq): output sequence
        range_ (tuple): start/end frame
        res (tuple): override image resolution
        force (bool): overwrite existing images without confirmation
        cam (str): override camera
        view (bool): view blast on complete
        verbose (int): print process data
    """
    from psyhive import host
    from maya_psyhive import ui

    # Get res
    if res:
        _width, _height = res
        cmds.setAttr('defaultResolution.width', _width)
        cmds.setAttr('defaultResolution.height', _height)
    else:
        _width = cmds.getAttr('defaultResolution.width')
        _height = cmds.getAttr('defaultResolution.height')
    lprint('RES', _width, _height, verbose=verbose)

    # Get range
    _rng = range_ or host.t_range()
    _start, _end = _rng

    if cam:
        _panel = ui.get_active_model_panel()
        cmds.modelEditor(_panel, edit=True, camera=cam)

    seq.delete(wording='Replace', force=force)
    seq.test_dir()

    # Set image format
    _fmt_mgr = createImageFormats.ImageFormats()
    _fmt_mgr.pushRenderGlobalsForDesc({
        'jpg': "JPEG",
        'exr': "EXR",
    }[seq.extn])

    _filename = '{}/{}'.format(seq.dir, seq.basename)
    lprint('BLAST FILENAME', _filename, verbose=verbose)
    cmds.playblast(
        startTime=_start, endTime=_end, format='image', filename=_filename,
        viewer=False, width=_width, height=_height, offScreen=True,
        percent=100)
    assert seq.get_frames(force=True)

    _fmt_mgr.popRenderGlobals()

    if view:
        seq.view()
def create_attr(attr, value, keyable=True, update=True, locked=False,
                verbose=0):
    """Add an attribute.

    Args:
        attr (str): attr name (eg. persp1.blah)
        value (any): attribute value to apply
        keyable (bool): keyable state of attribute
        update (bool): update attribute to value provided (default is true)
        locked (bool): create attr as locked
        verbose (int): print process data

    Returns:
        (str): full attribute name (eg. persp.blah)
    """
    _node, _attr = attr.split('.')

    # Create attr
    _type = _class = None
    _created = False
    if not cmds.attributeQuery(_attr, node=_node, exists=True):

        if isinstance(value, qt.HColor):
            cmds.addAttr(
                _node, longName=_attr, attributeType='float3',
                usedAsColor=True)
            for _chan in 'RGB':
                print 'ADDING', _attr + _chan
                cmds.addAttr(
                    _node, longName=_attr + _chan, attributeType='float',
                    parent=_attr)
            _class = qt.HColor
        else:
            _kwargs = {
                'longName': _attr,
                'keyable': keyable,
            }
            if isinstance(value, six.string_types):
                _kwargs['dataType'] = 'string'
                _type = 'string'
            elif isinstance(value, float):
                _kwargs['attributeType'] = 'float'
                _kwargs['defaultValue'] = value
            elif isinstance(value, int):
                _kwargs['attributeType'] = 'long'
                _kwargs['defaultValue'] = value
            else:
                raise ValueError(value)
            lprint("ADDING ATTR", _node, _kwargs, verbose=verbose)
            cmds.addAttr(_node, **_kwargs)

        _created = True

    # Apply value
    _cur_val = get_val(attr, type_=_type, class_=_class)
    if not _cur_val == value and (_created or update):
        set_val(attr, value)

    if locked:
        cmds.setAttr(attr, lock=True)

    return attr
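def _example_create_attrs():
    """Hedged usage sketch (not part of the original module).

    Shows create_attr inferring the attribute type from the python value
    passed - the node/attr names are illustrative assumptions, and a
    running maya session is assumed.
    """
    create_attr('persp.myLabel', 'hello')         # string attr
    create_attr('persp.myWeight', 0.5)            # float attr
    create_attr('persp.myCount', 3, locked=True)  # long attr, locked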
def add_text(self, text, pos=(0, 0), anchor='TL', col='white', font=None,
             size=None, verbose=0):
    """Write text to the image.

    Args:
        text (str): text to add
        pos (tuple|QPoint): text position
        anchor (str): text anchor
        col (str|QColor): text colour
        font (QFont): text font
        size (int): apply font size
        verbose (int): print process data
    """
    from psyhive.qt import get_p, get_col

    lprint("Adding text", text, verbose=verbose)
    _window = self.window()
    _pos = get_p(pos)
    _x, _y = _pos.x(), _pos.y()
    _w, _h = _window.width(), _window.height()

    if anchor == 'BL':
        _rect = QtCore.QRect(_x, 0, _w - _x, _y)
        _align = Qt.AlignLeft | Qt.AlignBottom
    elif anchor == 'BR':
        _rect = QtCore.QRect(0, 0, _x, _y)
        _align = Qt.AlignRight | Qt.AlignBottom
    elif anchor == 'B':
        _rect = QtCore.QRect(0, 0, 2 * _x, _y)
        _align = Qt.AlignHCenter | Qt.AlignBottom
    elif anchor == 'C':
        _rect = QtCore.QRect(0, 0, 2 * _x, 2 * _y)
        _align = Qt.AlignHCenter | Qt.AlignVCenter
    elif anchor == 'L':
        _rect = QtCore.QRect(_x, 0, _w, 2 * _y)
        _align = Qt.AlignVCenter | Qt.AlignLeft
    elif anchor == 'R':
        _rect = QtCore.QRect(0, 0, _x, 2 * _y)
        _align = Qt.AlignRight | Qt.AlignVCenter
    elif anchor in ('T', 'TC'):
        _rect = QtCore.QRect(0, _y, 2 * _x, _h)
        _align = Qt.AlignHCenter | Qt.AlignTop
    elif anchor == 'TL':
        _rect = QtCore.QRect(_x, _y, _w, _h)
        _align = Qt.AlignLeft | Qt.AlignTop
    elif anchor == 'TR':
        _rect = QtCore.QRect(0, _y, _x, _h - _y)
        _align = Qt.AlignRight | Qt.AlignTop
    else:
        raise ValueError('Unhandled anchor: %s' % anchor)

    if font:
        self.setFont(font)
    elif size is not None:
        _font = QtGui.QFont()
        _font.setPointSize(size)
        self.setFont(_font)

    # Draw text
    self.setPen(get_col(col or 'white'))
    self.drawText(_rect, _align, text)
def get_work_icon(
        work, mode='full', size=50, overlay_size=25, force=False,
        verbose=0):
    """Get icon for the given work file.

    Args:
        work (CTTWork): work file
        mode (str): type of icon to build (full/basic)
        size (int): icon size
        overlay_size (int): overlay size
        force (bool): force redraw icon
        verbose (int): print process data

    Returns:
        (str|QPixmap): work file icon
    """

    # Get base icon
    _uid = work.task
    lprint('UID', _uid, verbose=verbose)
    if _uid == 'test':
        _icon = icons.EMOJI.find('Alembic')
    else:
        _random = str_to_seed(_uid)
        _icon = _random.choice(icons.FRUIT.get_paths())
    lprint('ICON', _icon, verbose=verbose)

    if mode == 'basic':
        return _icon

    _random = str_to_seed(work.path)
    _rotate = _random.random() * 360

    _pix = qt.HPixmap(size, size)
    _pix.fill(qt.HColor(0, 0, 0, 0))

    # Add rotated icon as overlay
    _size_fr = 1 / (2 ** 0.5)
    _size = _pix.size() * _size_fr
    _over = qt.HPixmap(_icon).resize(_size)
    _tfm = QtGui.QTransform()
    _tfm.rotate(_rotate)
    _over = _over.transformed(_tfm)
    _offs = (_pix.size() - _over.size()) / 2
    _pix.add_overlay(_over, _offs)

    # Add overlays
    _overlays = []
    if work.find_seqs():
        _over = qt.HPixmap(
            icons.EMOJI.find('Play button')).resize(overlay_size)
        _overlays.append(_over)
    if work.find_publishes():
        _over = qt.HPixmap(
            icons.EMOJI.find('Funeral Urn')).resize(overlay_size)
        _overlays.append(_over)
    if work.find_caches():
        _over = qt.HPixmap(
            icons.EMOJI.find('Money bag')).resize(overlay_size)
        _overlays.append(_over)
    for _idx, _over in enumerate(_overlays):
        _offs = (13 * _idx, _pix.height() - 0 * _idx)
        lprint(' - ADD OVERLAY', _idx, _offs, verbose=verbose)
        _pix.add_overlay(_over, _offs, anchor='BL')

    return _pix
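def _example_str_to_seed(string):
    """Hedged sketch of the deterministic icon seeding (not the real helper).

    str_to_seed is assumed to return a random.Random seeded from the
    string, so the same task name or work path always maps to the same
    fruit icon and rotation. This stand-in only illustrates that idea;
    the crc32 seeding is an assumption, not the psyhive implementation.
    """
    import random
    import zlib
    return random.Random(zlib.crc32(string.encode('utf-8')))


# eg. _example_str_to_seed('anim').choice(['apple.png', 'pear.png'])
#     always returns the same path for the same input string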
def _finalise_standin(node, name, range_, verbose=0):
    """Finalise new aiStandIn node.

    Executes updates to be run after the abc has loaded (the abc loads
    using deferred evaluation). This includes renaming the
    transform/shape - if they are renamed before abc load, the
    auto-generated abc frame expression errors. Also the frame expression
    is regenerated to make the abc loop - if this is generated before abc
    load then the auto-generated expression also errors.

    Args:
        node (HFnDependencyNode): aiStandIn node (shape)
        name (str): intended node name (of transform)
        range_ (tuple|None): range to loop (if any)
        verbose (int): print process data
    """
    dprint('FINALISE STANDIN', node, verbose=verbose)
    lprint(' - RANGE', range_, verbose=verbose)

    # Fix names
    _parent = node.get_parent()
    lprint(' - RENAMING', name, _parent, verbose=verbose)
    _parent = cmds.rename(_parent, name)
    lprint(' - PARENT', _parent, verbose=verbose)
    _node = node.rename(name + "Shape")
    _plug = _node.plug('frameNumber')

    # Apply range expression
    if range_:

        # Clean frame expression
        lprint(' - PLUG', _plug, _plug.find_driver(), verbose=verbose)
        lprint(' - BREAKING CONNECTIONS', verbose=verbose)
        _plug.break_connections()

        # Build expression
        lprint(' - BUILDING EXPRESSION', verbose=verbose)
        _str = (
            '{plug} = ((frame - {start}) % ({end} - {start} + 1)) + '
            '{start};').format(start=range_[0], end=range_[1], plug=_plug)
        lprint(_str, verbose=verbose)
        _expr = cmds.expression(string=_str, timeDependent=True)
        lprint(' - CREATED EXPRESSION', _expr, verbose=verbose)

    return hom.HFnTransform(_parent)
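def _example_loop_frame(frame, start, end):
    """Standalone check of the looping arithmetic used in the expression.

    Mirrors '((frame - start) % (end - start + 1)) + start' in plain
    python; the sample frame values below are illustrative assumptions.
    """
    return ((frame - start) % (end - start + 1)) + start


assert _example_loop_frame(1001, 1001, 1010) == 1001
assert _example_loop_frame(1010, 1001, 1010) == 1010
assert _example_loop_frame(1011, 1001, 1010) == 1001  # wraps back to start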
def get_selected(type_=None, class_=None, multi=False, verbose=1):
    """Get selected node.

    Unless the multi flag is used, this will error if there isn't exactly
    one selected node matched.

    Args:
        type_ (str): filter nodes by type
        class_ (class): only return nodes that cast to this class
        multi (bool): return multiple nodes
        verbose (int): print process data

    Returns:
        (HFnDependencyNode): matching node
        (HFnDependencyNode list): matching nodes (if multi flag used)
        (HFnPlug|HPlug list): if class_ is HPlug
    """
    from maya_psyhive import open_maya as hom

    # Build list of selected nodes
    _results = []
    for _node in hom.CMDS.ls(selection=True):

        _result = _node
        _type = _node.object_type()
        lprint('TESTING', _node, verbose=verbose > 1)

        # Map transforms to HFnTransform
        if _type == 'transform':
            _result = hom.HFnTransform(str(_node))

        # Apply type filter
        if type_:
            if type_ != 'transform' and _type == 'transform' and _result.shp:
                _type = _result.shp.object_type()
                lprint(' - SHAPE TYPE', _type, verbose=verbose > 1)
            if not _type == type_:
                lprint(' - REJECTED', type_, _type, verbose=verbose > 1)
                continue

        if class_ is hom.HPlug:
            for _attr in cmds.channelBox(
                    'mainChannelBox', query=True,
                    selectedMainAttributes=True) or []:
                _plug = hom.HPlug('{}.{}'.format(_node, _attr))
                _results.append(_plug)
            continue
        elif class_:
            try:
                _result = class_(str(_node))
            except ValueError:
                lprint(' - CLASS FAIL', class_, verbose=verbose > 1)
                continue

        lprint(' - ADDED', verbose=verbose > 1)
        _results.append(_result)

    # Get result
    if multi:
        return _results
    return get_single(_results, name='selected object', verbose=verbose)
def build_shader_outputs(output, force=True, verbose=1):
    """Build shader outputs for the given shade asset.

    This consists of:

     - mb file containing just shaders for this asset
     - yml file containing list of shaders
     - standin file containing shaders attached to aiStandIn node

    Args:
        output (str): path to aiStandIn output
        force (bool): overwrite existing files without confirmation
        verbose (int): print process data

    Returns:
        (str): path to output file
    """
    lprint('BUILD aiStandIn MA', output, verbose=verbose)

    # Get paths for standin + rest cache + shade
    _out = tk2.TTOutput(output)
    _shaders = _out.map_to(
        tk2.TTOutputFile, format='shaders', extension='mb')
    _yml = _out.map_to(
        tk2.TTOutputFile, format='shaders', extension='yml')
    _standin = _out.map_to(
        tk2.TTOutputFile, format='aistandin', extension='ma')
    _ver = tk2.TTOutputVersion(output)
    _rest_cache = get_single(
        _ver.find(extn='abc', filter_='restCache'), catch=True)
    _shade = _ver.find_file(extn='mb', format_='maya')
    lprint(' - VER       ', _ver.path, verbose=verbose)
    lprint(' - SHADE     ', _shade.path, verbose=verbose)
    lprint(' - REST CACHE', _rest_cache, verbose=verbose)
    lprint(' - STANDIN   ', _standin.path, verbose=verbose)
    lprint(' - SHADERS   ', _shaders.path, verbose=verbose)
    assert not _shade == _out.path

    # Build aiStandIn node
    lprint(' - OPENING SHADE SCENE', verbose=verbose)
    host.open_scene(_shade.path, force=True)
    build_aistandin_from_shade(
        archive=_rest_cache, shade=_ShadeScene(), animated=False,
        name='AIS', deferred=False)

    # Remove geo + save aistandin
    cmds.delete('GEO')
    host.save_as(file_=_standin.path, force=force)

    # Remove standin + save shaders
    if cmds.objExists('AIS'):
        cmds.delete('AIS')
    _ses = [
        str(_se) for _se in cmds.ls(type='shadingEngine')
        if _se not in DEFAULT_NODES]
    lprint(" - SHADING ENGINES", _ses, verbose=verbose)
    host.save_as(_shaders.path, force=force)
    write_yaml(file_=_yml.path, data=_ses, force=True)

    return _standin.path
def _exec_cache(namespaces, confirm=True, new_scene=False, farm=True,
                verbose=1):
    """Execute a recache on the current workfile.

    Args:
        namespaces (str list): list of namespaces to recache
        confirm (bool): confirm before execute
        new_scene (bool): new scene after recache
        farm (bool): submit recache to farm
        verbose (int): print process data
    """

    class _FakeResolver(object):

        def __init__(self, all_items, conflicts, version):
            self.user_data = all_items, version
            self.conflicts = conflicts

    class _FakeConflict(object):

        def __init__(self, id_, cache):
            _user_data = collections.namedtuple('UserData', ['id'])
            self.id_ = id_
            self.user_data = _user_data(id=self.id_)
            self.resolution = None if cache else _skip

        def __repr__(self):
            return '<Conflict:{}>'.format(self.id_)

    _engine = tank.platform.current_engine()
    _cache_app = _engine.apps['psy-multi-cache']
    check_heart()

    # Use resolver to limit items to cache
    _cache_app.init_app()
    _mod = sys.modules[_cache_app.cache_controller.__module__]
    _skip = _mod.PublishConflictResolution.SKIP
    _model = _cache_app.cache_controller.model
    _all_items = [
        _item.item_data for _item in _model.cache_list.selected_items]
    lprint(
        ' - ALL ITEMS', len(_all_items), pprint.pformat(_all_items),
        verbose=verbose > 1)
    _conflicts = []
    for _item in _all_items:
        _cache = _item.id.replace(":renderCamShape", "") in namespaces
        _conflict = _FakeConflict(id_=_item.id, cache=_cache)
        _conflicts.append(_conflict)
    lprint(
        ' - CONFLICTS', len(_conflicts), pprint.pformat(_conflicts),
        verbose=verbose > 1)
    _resolver = _FakeResolver(
        all_items=_all_items, conflicts=_conflicts, version=_model.version)

    # Check cache
    _to_cache = [
        _conflict for _conflict in _conflicts if not _conflict.resolution]
    if not _to_cache:
        raise RuntimeError("Nothing found to cache")
    lprint(' - FOUND {:d} ITEMS TO CACHE'.format(len(_to_cache)))
    if confirm:
        qt.ok_cancel('Submit {:d} cache{} to farm?'.format(
            len(_to_cache), get_plural(_to_cache)))

    # Execute cache
    if farm:
        _cache_app.cache_controller.model.cache_on_farm(resolver=_resolver)
    else:
        _cache_app.cache_controller.model.cache(resolver=_resolver)
    dprint('{} {:d}/{:d} REFS'.format(
        'SUBMITTED' if farm else 'CACHED', len(namespaces),
        len(_all_items)))

    if new_scene:
        cmds.file(new=True, force=True)
def _build_aip_node(shd, merge, meshes, ai_attrs=None, name=None, verbose=0):
    """Build aiSetParameter node.

    Args:
        shd (HFnDependencyNode): shader to apply
        merge (HFnDependencyNode): merge node to connect output to
        meshes (HFnDependencyNode list): meshes to apply set param to
        ai_attrs (dict): override ai attrs to check
        name (str): override name
        verbose (int): print process data

    Returns:
        (HFnDependencyNode): aiSetParameter node
    """
    dprint('BUILD AIP', shd, meshes, verbose=verbose)
    _ai_attrs = ai_attrs if ai_attrs is not None else _AI_ATTRS
    lprint(' - AI ATTRS', _ai_attrs, verbose=verbose)

    # Create aiSetParameter node
    _aip = hom.CMDS.createNode(
        'aiSetParameter', name='{}_AIP'.format(name or shd.name()))
    _aip.plug('out').connect(_get_next_idx(merge.plug('inputs')))
    if shd:
        _aip.plug('assignment[0]').set_val("shader = '{}'".format(shd))
    lprint(' - AIP', _aip, verbose=verbose)

    # Determine AIP settings to apply
    _sels = []
    _ai_attr_vals = collections.defaultdict(set)
    for _mesh in meshes:

        for _ai_attr in _ai_attrs:
            _plug = _mesh.plug(_ai_attr)
            _type = 'string' if _plug.get_type() == 'enum' else None
            _val = _plug.get_val(type_=_type)
            lprint(' - READ', _plug, _val, verbose=verbose > 1)
            if not _type:
                _default = _plug.get_default()
                if _default == _val:
                    lprint(' - REJECTED DEFAULT VAL', verbose=verbose > 1)
                    continue
            _ai_attr_vals[_ai_attr].add(_val)

        lprint(' - MESH', _mesh, _mesh.namespace, verbose=verbose)
        _prefix = '*:' if _mesh.namespace else '*/'
        _tfm = hom.HFnTransform(get_parent(_mesh))
        _sels.append('{}{}/*'.format(_prefix, _tfm.clean_name))

    # Apply AIP settings
    _aip.plug('selection').set_val(' or '.join(_sels))
    for _ai_attr, _attr in _ai_attrs.items():
        _vals = sorted(_ai_attr_vals[_ai_attr])
        lprint(' - AI ATTR', _attr, _ai_attr, _vals, verbose=verbose > 1)
        _val = get_single(_vals, catch=True)
        if len(_vals) == 1 and _val not in [None, '']:
            lprint(' - APPLY', _attr, _val, verbose=verbose > 1)
            if isinstance(_val, six.string_types):
                _val = "{} = '{}'".format(_attr, _val)
            else:
                _val = "{} = {}".format(_attr, _val)
            _get_next_idx(_aip.plug('assignment')).set_val(_val)

    # Read displacement
    if shd:
        _add_displacement_override(shd=shd, aip=_aip)

    return _aip
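def _example_aip_selection(mesh_names):
    """Hedged standalone sketch of the aiSetParameter selection string.

    Mirrors the wildcard logic above using plain strings instead of
    HFnDependencyNode meshes: namespaced meshes are matched via '*:',
    others via '*/'. The sample names in the comment are illustrative
    assumptions.
    """
    _sels = []
    for _name in mesh_names:
        _has_ns = ':' in _name
        _prefix = '*:' if _has_ns else '*/'
        _clean_name = _name.rsplit('|', 1)[-1].split(':')[-1]
        _sels.append('{}{}/*'.format(_prefix, _clean_name))
    return ' or '.join(_sels)


# eg. _example_aip_selection(['char01:body_GEO', '|prop_GEO'])
#     -> "*:body_GEO/* or */prop_GEO/*"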
def _blast_and_find_rigs_outside_frustrum(
        cam, rigs, kwargs, sample_freq, verbose=1):
    """Execute blast, checking to find rigs outside frustrum.

    Args:
        cam (HFnCamera): camera being blasted through
        rigs (FileRef list): list of rigs to check
        kwargs (dict): playblast kwargs
        sample_freq (int): frame gap between frustrum tests - ie. a value
            of 5 means the frustrum is sampled every 5 frames
        verbose (int): print process data

    Returns:
        (FileRef list): list of rigs outside frustrum
    """
    _frames = kwargs.pop('frame')
    _check_frames = range(_frames[0], _frames[-1] + 1, sample_freq)

    # Blast scene and test rigs in camera
    _off_cam_rigs = copy.copy(rigs)
    _progress = qt.ProgressBar(
        _check_frames, 'Blasting {:d} frames'.format(len(_frames)),
        col='orchid')
    while _check_frames:

        _frame = _check_frames.pop(0)

        # Update progress bar
        if not _progress.isVisible():
            raise StopIteration("Blast cancelled")
        _progress.next()
        lprint(' - CHECKING FRAME', _frame, verbose=verbose)
        cmds.currentTime(_frame)

        # Remove rigs in camera from list
        lprint(
            ' - TESTING {:d} RIGS'.format(len(_off_cam_rigs)),
            _off_cam_rigs, verbose=verbose)
        for _rig in copy.copy(_off_cam_rigs):
            if _rig_in_cam(cam, _rig):
                lprint(' - RIG IN CAMERA:', _rig, verbose=verbose)
                _off_cam_rigs.remove(_rig)

        # Blast frames
        if not _off_cam_rigs:
            lprint(' - NO RIGS LEFT TO CHECK', verbose=verbose)
            _check_frames = []
            _blast_frames = range(_frame, _frames[-1] + 1)
            _progress.close()
        else:
            _blast_frames = range(_frame, _frame + sample_freq)
        lprint(
            ' - BLASTING FRAMES', ints_to_str(_blast_frames),
            verbose=verbose)
        cmds.playblast(frame=_blast_frames, **kwargs)

    _progress.close()

    return _off_cam_rigs
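def _example_blast_chunks(start, end, sample_freq):
    """Hedged standalone sketch of how the blast is chunked between tests.

    A simplified version of the frame bookkeeping above, without maya
    calls and ignoring the early-out once no rigs remain to check: the
    camera is tested every sample_freq frames and each test is followed
    by a blast of the frames up to the next test. The frame values in the
    comment below are illustrative assumptions.
    """
    _chunks = []
    for _frame in range(start, end + 1, sample_freq):
        _chunks.append(range(_frame, min(_frame + sample_freq, end + 1)))
    return _chunks


# eg. _example_blast_chunks(1001, 1010, 5)
#     -> [[1001...1005], [1006...1010]]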
def _connect_widget(
        self, widget, track_usage_=True, catch_error_=True,
        disable_btns_on_exec=True, verbose=0):
    """Connect a widget to callbacks on the parent object.

    Args:
        widget (QWidget): widget to connect
        track_usage_ (bool): apply track usage decorator
        catch_error_ (bool): apply error catcher decorator
        disable_btns_on_exec (bool): disable push buttons while executing
            (this can interfere with custom on the fly enabling/disabling)
        verbose (int): print process data
    """
    _name = widget.objectName()

    # See if this element needs connecting
    if not _name:
        return
    _callback = getattr(self, '_callback__' + _name, None)
    _context = getattr(self, '_context__' + _name, None)
    _redraw = getattr(self, '_redraw__' + _name, None)
    if not (_callback or _context or _redraw):
        return
    lprint('CONNECTING', _name, verbose=verbose)

    # Connect callback
    if _callback:

        # Wrap callback
        if isinstance(widget, QtWidgets.QPushButton):
            if track_usage_:
                from psyhive.tools import track_usage
                _callback = track_usage(_callback)
            if catch_error_:
                from psyhive.tools import get_error_catcher
                _catcher = get_error_catcher(exit_on_error=False)
                _callback = _catcher(_callback)
            if disable_btns_on_exec:
                _callback = _disable_while_executing(
                    func=_callback, btn=widget)
        _callback = wrap_fn(_callback)  # To lose args from hook

        lprint(' - CONNECTING', widget, verbose=verbose)

        # Find signals to connect to
        for _hook_name in [
                'clicked',
                'currentTextChanged',
                'textChanged',
        ]:
            _hook = getattr(widget, _hook_name, None)
            if _hook:
                _hook.connect(_callback)

    # Connect context
    if _context:
        widget.customContextMenuRequested.connect(
            _build_context_fn(_context, widget=widget))
        widget.setContextMenuPolicy(Qt.CustomContextMenu)

    # Connect redraw callback
    if _redraw:
        lprint(' - CONNECTING REDRAW', widget, verbose=verbose)
        _mthd = _build_redraw_method(_redraw)
        widget.redraw = types.MethodType(_mthd, widget)
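class _ExampleDialog(object):
    """Hedged sketch of the naming convention used by _connect_widget.

    Not part of the original module: a widget whose objectName is
    'Publish' gets wired to the parent's _callback__Publish method (and to
    _context__Publish/_redraw__Publish if they exist). The class and
    method below are illustrative assumptions; the callback takes no
    arguments because _connect_widget wraps it with wrap_fn to drop the
    signal's arguments.
    """

    def _callback__Publish(self):
        """Triggered when the widget named 'Publish' is activated."""
        print 'PUBLISH CLICKED'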
def _do_create_cache(
        start, end, file_mode=_FileMode.ONE_FILE_PER_FRAME,
        update_viewport=True, cache_dir='', cache_per_geo=False,
        cache_name='', cache_name_as_prefix=False, action=_Action.ADD,
        force_save=True, sim_rate=1, sample_mult=1, inherit_settings=False,
        use_float=True, verbose=0):
    """Create an nCloth cache.

    Args:
        start (int): start frame
        end (int): end frame
        file_mode (FileMode): file mode
        update_viewport (bool): update viewport on cache
        cache_dir (str): force cache dir (empty to use default)
        cache_per_geo (bool): generate cache xml per geo
        cache_name (str): name of cache (normally nCloth shape name)
        cache_name_as_prefix (bool): use cache name as prefix
        action (Action): cache action
        force_save (bool): force save even if it overwrites existing files
        sim_rate (int): the rate at which the cloth simulation is forced
            to run
        sample_mult (int): the rate at which samples are written, as a
            multiple of simulation rate
        inherit_settings (bool): whether modifications should be inherited
            from the cache about to be replaced
        use_float (bool): whether to store doubles as floats
        verbose (int): print process data
    """
    _source_custom_n_cache()

    _args = [None] * 16
    _args[0] = 0  # time_range_mode - use args 1/2
    _args[1] = start
    _args[2] = end
    _args[3] = file_mode
    _args[4] = int(update_viewport)
    _args[5] = cache_dir
    _args[6] = int(cache_per_geo)
    _args[7] = cache_name
    _args[8] = int(cache_name_as_prefix)
    _args[9] = action
    _args[10] = int(force_save)
    _args[11] = sim_rate
    _args[12] = sample_mult
    _args[13] = int(inherit_settings)
    _args[14] = int(use_float)
    _args[15] = "mcx"

    _cmd = 'PSY_doCreateNclothCache 5 {{ {} }};'.format(', '.join(
        ['"{}"'.format(_arg) for _arg in _args]))
    lprint(_cmd, verbose=verbose)
    mel.eval(_cmd)
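def _example_ncloth_cache_cmd(start=1001, end=1100):
    """Hedged sketch of the argument packing in _do_create_cache.

    Standalone: every value is quoted and joined into a single mel call.
    The '<file_mode>'/'<action>' strings are placeholders rather than the
    real _FileMode/_Action values, and the frame range is an illustrative
    assumption - this only shows how the command string is assembled, not
    what PSY_doCreateNclothCache does with it.
    """
    _args = [0, start, end, '<file_mode>', 1, '', 0, '', 0, '<action>',
             1, 1, 1, 0, 1, 'mcx']
    return 'PSY_doCreateNclothCache 5 {{ {} }};'.format(', '.join(
        ['"{}"'.format(_arg) for _arg in _args]))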
"""Tools for ingesting file from outsource vendors.""" from psyhive.utils import lprint, dev_mode from .ing_utils import parse_basename, vendor_from_path, INGESTED_TOKEN from .ing_vendor_file import is_vendor_file, VendorFile from .ing_psy_asset import is_psy_asset, PsyAsset try: # This will fail for outsource vendors from .ing_utils_psy import ( ICON, map_tag_to_shot, map_file_to_psy_asset, map_tag_to_asset) except ImportError: lprint('FAILED TO IMPORT ing_utils_psy', verbose=dev_mode()) try: # This will fail for outsource vendors from .ing_tools import ingest_seqs except ImportError: lprint('FAILED TO IMPORT ing_tools', verbose=dev_mode()) try: # This will fail for outsource vendors from .ing_ingestible import Ingestible except ImportError: lprint('FAILED TO IMPORT ing_ingestible', verbose=dev_mode())