def __init__(self, path, hint=None, tmpl=None, data=None, verbose=0):
    """Constructor.

    Args:
        path (str): path to object
        hint (str): template name
        tmpl (TemplatePath): override template object
        data (dict): override data dict
        verbose (int): print process data

    Raises:
        ValueError: if the path does not belong to the current project,
            or if tank fails to parse the path against the template
    """
    _path = abs_path(path)
    lprint('PATH', _path, verbose=verbose)
    super(TTBase, self).__init__(_path)
    # Fall back to class-level hint if none passed
    self.hint = hint or self.hint
    # Look up the template registered under this hint in the current
    # tank engine (unless an override template was passed)
    self.tmpl = tmpl or get_current_engine().tank.templates[self.hint]
    self.project = pipe.Project(path)
    if self.project != pipe.cur_project():
        raise ValueError('Not current project ' + self.path)
    # Parse template fields from the path (unless override data passed)
    try:
        self.data = data or self.tmpl.get_fields(self.path)
    except tank.TankError as _exc:
        # _exc.message is py2-only
        lprint('TANK ERROR', _exc.message, verbose=verbose)
        raise ValueError("Tank rejected {} {}".format(
            self.hint, self.path))
    lprint('DATA', pprint.pformat(self.data), verbose=verbose)
    # Expose template fields as lowercase attributes (eg. Shot -> shot)
    # without clobbering attributes that are already set
    for _key, _val in self.data.items():
        _key = _key.lower()
        if getattr(self, _key, None) is not None:
            continue
        setattr(self, _key, _val)
def find_shots(class_=None, filter_=None, sequence=None, mode='disk'):
    """Find shots in the current job.

    Args:
        class_ (class): override shot root class
        filter_ (str): filter by shot name
        sequence (str): filter by sequence name
        mode (str): where to search for shot (disk/sg)

    Returns:
        (TTRoot): list of shots
    """
    if mode == 'disk':
        # Walk sequences on disk, applying the sequence name filter
        _shots = []
        for _seq in find_sequences():
            if sequence and _seq.name != sequence:
                continue
            _shots += _seq.find_shots(class_=class_, filter_=filter_)
        return _shots
    elif mode == 'sg':
        # Build shot paths from shotgun sequence records
        from psyhive import tk2
        _path_fmt = '{}/sequences/{}/{}'
        _sg_seqs = tk2.get_sg_data(
            type_='Sequence', fields=['shots', 'code'], limit=0)
        _shots = []
        for _sg_seq in _sg_seqs:
            for _sg_shot in _sg_seq['shots']:
                _shot_path = _path_fmt.format(
                    pipe.cur_project().path, _sg_seq['code'],
                    _sg_shot['name'])
                _shots.append(tk2.TTShot(_shot_path))
        return _shots
    raise ValueError(mode)
def map_tag_to_shot(tag):
    """Map the given tag to a shot in the current project.

    Args:
        tag (str): tag to match

    Returns:
        (TTShot): shot root
    """
    # First check for an existing shot on disk
    _existing = tk2.find_shot(tag, catch=True, mode='sg')
    if _existing:
        return _existing

    # Otherwise build a shot path from shotgun sequence data
    _sg_shot = get_single(
        tk2.get_sg_data('Shot', code=tag, fields=['sg_sequence']),
        catch=True)
    if not _sg_shot:
        return None
    _shot_path = '{}/sequences/{}/{}'.format(
        pipe.cur_project().path, _sg_shot['sg_sequence']['name'], tag)
    return tk2.TTShot(_shot_path)
def _script_editor_save_to_project(*xargs):
    """Execute save to project."""
    del xargs  # Maya callbacks require args

    # Find the visible (non-obscured) script editor tab - raises
    # IndexError if no editor is found, matching existing behaviour
    _editors = [
        _ui for _ui in cmds.lsUI(dumpWidgets=True, long=False)
        if cmds.cmdScrollFieldExecuter(_ui, query=True, exists=True)
        and not cmds.cmdScrollFieldExecuter(_ui, query=True, isObscured=True)]
    _editor = _editors[0]

    # Read source type + contents of the current tab
    _src_type = cmds.cmdScrollFieldExecuter(
        _editor, query=True, sourceType=True)
    _extn = {'mel': 'mel', 'python': 'py'}[_src_type]
    _text = cmds.cmdScrollFieldExecuter(_editor, query=True, text=True)

    # Prompt for a save path in the project scripts dir
    _file = get_single(
        cmds.fileDialog2(
            fileMode=0,  # Single file doesn't need to exist
            caption="Save Script",
            okCaption='Save',
            startingDirectory=pipe.cur_project().maya_scripts_path,
            fileFilter='{} Files (*.{})'.format(_extn.upper(), _extn)),
        catch=True)

    # Write file to disk
    if _file:
        write_file(file_=_file, text=_text)
def _script_editor_open_from_project(*xargs):
    """Execute open from project.

    Points the script editor's load dialog at the current project's
    maya scripts dir, then triggers the script editor load action.
    """
    del xargs  # Maya callbacks require args
    # NOTE: the original list was missing a comma, so implicit string
    # concatenation collapsed it to a single element and the join was a
    # no-op - fixed so the two mel statements land on separate lines
    _cmd = '\n'.join([
        '$gLastUsedDir = "{}";',
        'handleScriptEditorAction "load";',
    ]).format(pipe.cur_project().maya_scripts_path)
    mel.eval(_cmd)
def _get_default_browser_dir():
    """Get default directory for work file browser.

    Returns:
        (str): path to default browser dir
    """
    # Prefer the current work file's dir, falling back to project root
    _work = tk2.cur_work()
    return _work.dir if _work else pipe.cur_project().path
def find_sequences():
    """Find sequences in the current project.

    Returns:
        (TTSequenceRoot): list of sequences
    """
    _root = pipe.cur_project().path + '/sequences'
    # Each top-level dir under sequences is a sequence root
    return [TTSequenceRoot(_path) for _path in find(_root, depth=1)]
def find_asset_roots():
    """Read asset roots."""
    _assets_dir = pipe.cur_project().path + '/assets'
    _assets = []
    for _path in find(_assets_dir, depth=3, type_='d'):
        # Dirs which don't parse as asset roots are skipped
        try:
            _asset = TTAssetRoot(_path)
        except ValueError:
            continue
        _assets.append(_asset)
    return _assets
def _get_abc_range_from_sg(abc, mode='shot', verbose=0):
    """Read abc frame range from shotgun.

    Args:
        abc (str): path to abc file
        mode (str): where to get range from
            abc - read bake range of abc
            shot - read cut in/out range of shot
        verbose (int): print process data

    Returns:
        (tuple|None): frame range (if any)

    Raises:
        ValueError: on unhandled mode
    """
    _out = tk2.get_output(abc)
    if not _out:
        lprint('NO OUTPUT', abc, verbose=verbose)
        return None

    _shotgun = tank.platform.current_engine().shotgun
    _project = pipe.cur_project()
    if mode == 'abc':

        # Find the published file record matching this abc version
        _sg_data = get_single(
            _shotgun.find(
                "PublishedFile",
                filters=[
                    ["project", "is", [tk2.get_project_sg_data(_project)]],
                    ["entity", "is", [tk2.get_shot_sg_data(_out.shot)]],
                    ["sg_format", "is", 'alembic'],
                    ["sg_component_name", "is", _out.output_name],
                    ["version_number", "is", _out.version],
                ],
                fields=[
                    "code", "name", "sg_status_list", "sg_metadata",
                    "path"]))

        # NOTE: eval of sg_metadata assumes the shotgun field is
        # trusted - this matches existing pipeline convention but is
        # unsafe on untrusted data
        _data = eval(_sg_data['sg_metadata'])

        # BUGFIX: pprint was previously applied after both branches,
        # raising NameError on _data in 'shot' mode - only print it
        # here, where it exists
        if verbose:
            pprint.pprint(_data)
        _result = _data['start_frame'], _data['end_frame']

    elif mode == 'shot':
        _shot = tk2.get_shot(_out.path)
        if not _shot:
            return None
        _result = _shot.get_frame_range()
        # Normalise partial/empty ranges to None
        if not _result or None in _result:
            _result = None

    else:
        raise ValueError(mode)

    return _result
def _get_app_version():
    """Get current maya version.

    Returns:
        (str): maya version
    """
    # Prefer psyq's version helper where it's available
    try:
        from psyq.jobs.maya.maya_util import get_app_version
    except ImportError:
        # Fall back to the project's psylaunch config default
        _cfg = pipe.cur_project().read_psylaunch_cfg()
        return _cfg['apps']['maya']['default']
    return get_app_version()
def get_project_sg_data(project=None):
    """Get tank request data for the given project.

    Args:
        project (Project): project

    Returns:
        (dict): search data
    """
    _project = project or pipe.cur_project()
    _sg = tank.platform.current_engine().shotgun
    # Look up the project's shotgun id by its code
    _matches = _sg.find(
        "Project", filters=[["sg_code", "is", _project.name]])
    _entry = get_single(_matches)
    return {'type': 'Project', 'id': _entry['id'], 'name': _project.name}
def to_psy_work(self, verbose=0):
    """Get psyop work file for this scene.

    Args:
        verbose (int): print process data

    Returns:
        (TTWork): work file

    Raises:
        RuntimeError: if the tag cannot be mapped to a shot
        ValueError: on unhandled step, or if a conflicting version
            already exists on disk
    """
    if self.step == 'rig':

        # Find asset
        _asset = tk2.TTAsset('{}/assets/3D/prop/{}'.format(
            pipe.cur_project().path, self.tag))
        if not _asset.exists():
            _asset = ingest.map_tag_to_asset(self.tag)
        assert _asset.exists()
        _work = _asset.map_to(
            tk2.TTWork, dcc='maya', Step=self.step, Task=self.step,
            extension='mb', version=self.ver_n)

    elif self.step in ['animation', 'previz']:
        lprint(' - TAG', self.tag, verbose=verbose)
        _shot = ingest.map_tag_to_shot(self.tag)
        if not _shot:
            raise RuntimeError('Failed to map {} to shot'.format(
                self.tag))
        lprint(' - SHOT', _shot, verbose=verbose)
        _work = _shot.map_to(
            tk2.TTWork, dcc='maya', Step=self.step, Task=self.step,
            extension='ma', version=self.ver_n)

    else:
        raise ValueError('Unhandled step '+self.step)
    assert _work.version == self.ver_n

    # Guard against re-ingesting over an existing version - overwrite
    # is only allowed when the existing version was ingested from this
    # same vendor source file
    if _work.exists():
        _src = _work.cache_read('vendor_source_file')
        if not _src:
            print ' - ALREADY EXISTS ON DISK', _work.path
            raise ValueError('Version already exist on disk')
        elif not _src == self.path:
            print ' - ALREADY INGESTED FROM DIFFERENT SOURCE', _src
            raise ValueError('Already ingested from a different source')

    return _work
def summary(self):
    """Get error summary for email/ticket.

    Returns:
        (str): error summary
    """
    # Environment details followed by the fenced traceback
    _lines = [
        'HOST: {}'.format(host.NAME),
        'PROJECT: {}'.format(pipe.cur_project().name),
        'SCENE: {}'.format(host.cur_scene()),
        'PWD: {}'.format(abs_path(os.getcwd())),
        'PLATFORM: {}'.format(sys.platform),
        '',
        'TRACEBACK:',
        '```',
        self.traceback.clean_text,
        '```',
    ]
    return '\n'.join(_lines)
def find_assets(filter_=None):
    """Read asset roots.

    Args:
        filter_ (str): filter by file path

    Returns:
        (TTAsset list): list of assets in this show
    """
    _assets_dir = pipe.cur_project().path + '/assets'
    _assets = []
    for _path in find(_assets_dir, depth=3, type_='d', filter_=filter_):
        # Dirs which don't parse as assets are skipped
        try:
            _asset = TTAsset(_path)
        except ValueError:
            continue
        _assets.append(_asset)
    return _assets
def build_cache_fmt(path, namespace='psyhive', root=None, level='tmp',
                    extn='cache'):
    """Build cache format string for the given namespace.

    This maps the path to a location in tmp dir.

    Args:
        path (str): path of cacheable
        namespace (str): namespace for cache
        root (str): force dir for cache (overrides level)
        level (str): cache level
            tmp - use temp drive
            project - cache to project
        extn (str): cache extension

    Returns:
        (str): cache format path
    """
    from .path import Path, abs_path

    _path = Path(path)

    # Resolve the cache root dir
    if root:
        _root = root
    elif level == 'tmp':
        _root = tempfile.gettempdir()
    elif level == 'project':
        from psyhive import pipe
        _root = '{}/production'.format(pipe.cur_project().path)
    else:
        raise ValueError(level)

    # Mirror the source path under the cache root, stripping drive
    # colons so the path can be nested
    _fmt = '{root}/{namespace}/cache/{dir}/{base}_{{}}.{extn}'.format(
        root=_root, namespace=namespace, dir=_path.dir.replace(':', ''),
        base=_path.basename, extn=extn)
    return abs_path(_fmt)
def get_recent_work(verbose=0):
    """Read list of recent work file from tank.

    Args:
        verbose (int): print process data

    Returns:
        (TTWork list): list of work files
    """
    # Recent files are stored per-project in fileops settings
    _settings = QtCore.QSettings('Sgtk', 'psy-multi-fileops')
    _key = '{}/recent_files'.format(pipe.cur_project().name)

    _works = []
    for _recent in _settings.value(_key, []):
        _path = abs_path(_recent['file_path'])
        lprint('TESTING', _path, verbose=verbose)
        _work = tk2.obtain_work(_path)
        # Keep only valid work files for the current dcc
        if not _work or not _work.dcc == hb_utils.cur_dcc():
            continue
        _works.append(_work)
    return _works
def _get_cfg_yaml(level, namespace, verbose=0):
    """Get path to config yaml file.

    Args:
        level (str): config level (eg. code/project)
        namespace (str): namespace name to read from
        verbose (int): print process data

    Returns:
        (File): config yaml
    """
    # Get path to yaml file
    if level == 'code':
        # Config lives relative to this module in the code checkout
        _rel_path = '../../../cfg/{}.yml'.format(namespace)
        _yml = abs_path(_rel_path, root=os.path.dirname(__file__))
    elif level == 'project':
        from psyhive import pipe
        _yml = '{}/production/psyhive/cfg/{}.yml'.format(
            pipe.cur_project().path, namespace)
    else:
        raise ValueError(level)

    lprint('YAML', _yml, verbose=verbose)
    return File(_yml)
def _get_tmp_root():
    """Get tmp dir for qube submissions."""
    # Mixed separators in the template are normalised by abs_path
    _path = r'P:\global\distribution\lam\projects\{}/data/psyhive'.format(
        pipe.cur_project().name)
    return abs_path(_path)
def read_cache_data(self, force=False):
    """Read cache data for this shot and store the result.

    Args:
        force (bool): force reread data
            NOTE(review): force appears unused in this body - possibly
            consumed by a result-caching decorator; confirm

    Returns:
        (list): latest cache data entries, sorted
    """
    dprint('Finding latest caches', self)
    _shotgun = tank.platform.current_engine().shotgun
    _project = pipe.cur_project()

    # Get shot data
    try:
        _shot_data = tk2.get_shot_sg_data(self)
    except RuntimeError:
        print 'MISSING FROM SHOTGUN:', self
        return {}

    # Request data from shotgun
    _sg_data = _shotgun.find(
        "PublishedFile",
        filters=[
            ["project", "is", [tk2.get_project_sg_data(_project)]],
            ["sg_format", "is", 'alembic'],
            ["entity", "is", [_shot_data]],
        ],
        fields=["code", "name", "sg_status_list", "sg_metadata", "path"])

    # Remove omitted and non-latest versions
    _cache_data = {}
    for _data in _sg_data:
        _cache = tk2.TTOutputVersion(_data['path']['local_path'])
        _data['cache'] = _cache

        # Store only latest
        _vers_dir = _cache.parent().path
        if _vers_dir not in _cache_data:
            _cache_data[_vers_dir] = _data
        elif _cache > _cache_data[_vers_dir]['cache']:
            _cache_data[_vers_dir] = _data

    # Read asset for latest versions
    # NOTE(review): deleting keys inside this loop is safe only because
    # py2 dict.items() returns a list - would break under py3 views
    for _name, _data in _cache_data.items():

        # Read asset
        # NOTE(review): sg_metadata is a json-ish blob eval'd after
        # keyword fixups - eval assumes the shotgun field is trusted
        _metadata = eval(_data['sg_metadata'].replace(
            'true', 'True').replace('null', 'None').replace(
                'false', 'False')) or {}
        _data['metadata'] = _metadata
        _rig_path = _metadata.get('rig_path')
        if not _rig_path:
            del _cache_data[_name]
            continue
        try:
            _data['asset_ver'] = BCOutputVersion(_rig_path)
        except ValueError:
            del _cache_data[_name]
            continue

        # Ignore animcache of camera
        if (_data['asset_ver'].sg_asset_type == 'camera' and
                _data['cache'].output_type == 'animcache'):
            del _cache_data[_name]
            continue

        # assumes origin_scene is always present in metadata here -
        # abs_path(None) would raise; TODO confirm
        _work_file = abs_path(_metadata.get('origin_scene'))
        _data['origin_scene'] = _work_file
        _data['work_file'] = tk2.TTWork(_work_file)
        _data['shot'] = self

    return sorted(_cache_data.values())
def submit(self, local=None, submit=True, modules=None, verbose=1):
    """Submit this job to qube.

    Args:
        local (bool): prepare job for local execute
        submit (bool): submit to qube
        modules (mod list): modules to add to sys.path in local mode
        verbose (int): print process data
    """
    # Env var allows forcing local submission globally
    _local = local or os.environ.get('PSYHIVE_FARM_LOCAL_SUBMIT')
    _uid = self.uid or _get_uid()
    _tmp_dir = _get_tmp_dir(uid=_uid)
    _tmp_fmt = '{}/task.{{}}.py'.format(_tmp_dir)
    # NOTE(review): _work is unused below - possibly kept for the side
    # effect of reading the current work file; confirm before removing
    _work = tk.cur_work()

    # Create job
    _label = '{}: {}'.format(pipe.cur_project().name, self.label)
    _job = Job(label=_label)
    _job.worker = "psyhive_mayapy"
    _job.fixture.environ = _get_job_environ(local=_local)
    _job.payload = {
        'app_version': _get_app_version(),
        'py_dir': _tmp_dir}
    _job.extra['qube.cluster'] = "/3D/{}".format(pipe.cur_project().name)

    # Setup job for local execute
    if _local:
        # Pin the job to this machine and clear farm routing
        _job.extra['qube.reservations'] = (
            "global_host.qube=1,host.processors={procs:d}".format(
                procs=self.procs))
        _job.extra['qube.hosts'] = os.getenv('COMPUTERNAME')
        _job.extra['qube.groups'] = ""
        _job.extra['qube.restrictions'] = ""
        _job.extra['qube.cluster'] = ""
        # Make local checkouts of these modules importable on the worker
        _mods = (modules or []) + [psyhive]
        for _mod in _mods:
            _dir = os.path.dirname(os.path.dirname(_mod.__file__))
            _path = abs_path(_dir).replace('/', u'\\')
            _job.fixture.environ['PYTHONPATH'] += ';{}'.format(_path)

    # Add tasks
    lprint('TMP FMT', _tmp_fmt, verbose=verbose)
    for _idx, _task in enumerate(self.tasks):

        # Write file to disk
        _n_str = '{:04d}'.format(_idx+1)
        _tmp_py = _tmp_fmt.format(_n_str)
        write_file(file_=_tmp_py, text=_task.get_py(tmp_py=_tmp_py))
        lprint(' -', _tmp_py, verbose=verbose)
        _payload = {'pyfile': _tmp_py}

        # Create work item
        _work_item = WorkItem(label=_task.label, payload=_payload)
        _job.work_items.append(_work_item)

    # Submit
    _job_graph = JobGraph()
    _job_graph.add_job(_job)
    _submitter = QubeSubmitter()
    if submit:
        _result = _submitter.submit(_job_graph)
        # Only print result at verbose level 2+
        lprint('RESULT', _result, verbose=verbose > 1)
"""Top level tools for maya ingestion.""" from psyhive import qt, host, py_gui, pipe, icons from psyhive.tools import ingest from psyhive.utils import Dir, abs_path, File, get_plural from .ming_vendor_scene import VendorScene ICON = icons.EMOJI.find('Fork and Knife With Plate') PYGUI_TITLE = 'Ingestion tools' PYGUI_COL = 'Green' _VENDOR_IN = r'{}\production\vendor_in'.format(pipe.cur_project().path) py_gui.set_section('Ingest anim', collapse=False) def _get_ingestable_scenes(dir_, filter_): """Find scenes ready for ingestion. Args: dir_ (str): directory to search for scenes filter_ (str): filter_ file list Returns: (VendorScene list, dict): list of ingestible scenes, scene statuses """ # Find scenes _dir = Dir(abs_path(dir_)) print 'READING', _dir.path