def fix_groups(): """Fix groups to follow psyop scene organisation.""" _to_fix = [] for _ref in ref.find_refs(unloaded=False): _top_node = _ref.find_top_node(catch=True) if not _top_node: continue _parent = _top_node.get_parent() if _parent in _GROUPS: continue if '/layout/' in _ref.path: _grp = 'JUNK' elif '/camera/' in _ref.path: _grp = 'CAMERA' elif '/prop/' in _ref.path: _grp = 'PROPS' elif '/character/' in _ref.path: _grp = 'CHAR' else: print 'FAILED', _ref.path continue print _ref, _parent, _grp _to_fix.append((_top_node, _grp)) if not _to_fix: print 'NOTHING TO FIX' return qt.ok_cancel('Group {:d} ref{}?'.format(len(_to_fix), get_plural(_to_fix))) for _top_node, _grp in qt.progress_bar(_to_fix): _top_node.add_to_grp(_grp)
def write_yaml(file_, data, force=False): """Write yaml data to file. Args: file_ (str): path to yaml file data (dict): data to write to yaml force (bool): replace existing without confirmation """ try: import yaml except ImportError: print '[WARNING] write failed - failed to import yaml module' return _file = File(get_path(file_)) if _file.exists(): if not force: from psyhive import qt qt.ok_cancel('Overwrite file?\n\n' + _file.path) _file.delete(force=True) _file.test_dir() with open(_file.path, 'w') as _hook: yaml.dump(data, _hook, default_flow_style=False)
def del_namespace(namespace, force=True):
    """Delete the given namespace.

    Args:
        namespace (str): namespace to delete
        force (bool): delete nodes without confirmation
    """
    from maya_psyhive import ref

    if not cmds.namespace(exists=namespace):
        return

    # Removing a loaded reference counts as confirmation - only prompt
    # when no ref was found and force is disabled
    _confirmed = force
    _ref = ref.find_ref(namespace=namespace.lstrip(':'), catch=True)
    if _ref:
        _ref.remove(force=_confirmed)
        _confirmed = True
    if not _confirmed:
        qt.ok_cancel(
            'Are you sure you want to delete the namespace {}?'.format(
                namespace))

    set_namespace(namespace, clean=True)
    set_namespace(":")
    cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True)
def build_shot_from_template(shot, template, force=False): """Build a scene from the given template. Args: shot (str): name of shot to update to (eg. rnd0080) template (str): path to template work file force (bool): force save new scene with no confirmation """ _shot = tk2.find_shot(shot) _tmpl_work = tk2.get_work(template) if host.cur_scene() != _tmpl_work.path: _tmpl_work.load(force=force) # Make sure we're on default render layer cmds.editRenderLayerGlobals(currentRenderLayer='defaultRenderLayer') _update_assets() _update_abcs(shot=shot) # Update frame range _rng = _shot.get_frame_range() print 'RANGE', _rng if _rng and _rng != (None, None): host.set_range(*_rng) else: print 'FAILED TO UPDATE TIMELINE' # Save scene _shot_work = _tmpl_work.map_to(Shot=_shot.name, Sequence=_shot.sequence).find_next() print 'SHOT WORK', _shot_work if not force: qt.ok_cancel("Save new work file?\n\n" + _shot_work.path) _shot_work.save(comment='Scene built by shot_builder')
def _ingest_check_sg_range(self, force=True): """Check shotgun range matching this scene file. Args: force (bool): update sg range without confirmation """ _scn_rng = self.scene_get_frame_range() _work = self.to_psy_work() # if not _work.shot: # return _shot = _work.get_shot() print 'CHECKING SG RANGE', _shot # _work.load(lazy=True) # _scn_rng = host.t_range(int) _shot_rng = _shot.get_frame_range(use_cut=False) print ' - RANGE scene={} shot={}'.format(_scn_rng, _shot_rng) if _scn_rng == _shot_rng: return if not force: qt.ok_cancel( 'Update shotgun {} frame range to {:d}-{:d}?'.format( _shot.name, int(_scn_rng[0]), int(_scn_rng[1])), title='Update shotgun range') _shot.set_frame_range(_scn_rng, use_cut=False) _shot.set_frame_range(_scn_rng, use_cut=True) # For isaac print _shot.get_frame_range(use_cut=False) assert _shot.get_frame_range(use_cut=False) == _scn_rng
def _ingest_check_work(self, comment, force=False): """Check this file has a corresponding psyop work file. Args: comment (str): save comment force (bool): lose unsaved changes without confirmation """ _work = self.to_psy_work() _src = _work.cache_read('vendor_source_file') if _work.exists() and _src: print 'THIS', self.path print 'SRC', _src if _src != self: raise RuntimeError('Source does not match') return print ' - INGEST WORK', _work.path print ' - COMMENT', comment if not force: qt.ok_cancel('Copy {} to pipeline?\n\n{}\n\n{}'.format( _work.step, self.path, _work.path)) host.open_scene(self, force=force, lazy=True) # Update refs for _ref in qt.progress_bar( ref.find_refs(), 'Updating {:d} ref{}', stack_key='UpdateRefs'): self._ingest_check_ref(_ref) # Save to disk print ' - SAVING WORK', _work.path _work.save(comment=comment, safe=False, force=force) _work.cache_write(tag='vendor_source_file', data=self.path)
def _callback__WorkSaveAs(self): _work_path = self.ui.WorkPath.text() _cur_work = tk2.cur_work() _next_work = tk2.obtain_work(_work_path).find_next() # Apply change task warning if _cur_work: _cur_task = _cur_work.get_work_area(), _cur_work.task _next_task = _next_work.get_work_area(), _next_work.task print 'CUR WORK ', _cur_work print 'NEXT WORK', _next_work if _cur_task != _next_task: _icon = hb_work.get_work_icon(_next_work) qt.ok_cancel( 'Are you sure you want to switch to a different task?' '\n\nCurrent:\n{}\n\nNew:\n{}'.format( _cur_work.path, _next_work.path), title='Switch task', icon=_icon, parent=self) # Save self.ui.WorkSaveAs.setEnabled(False) _comment = qt.read_input( 'Enter comment:', title='Save new version', parent=self) _next_work.save(comment=_comment) # Update ui self._callback__TaskRefresh() self._callback__WorkJumpTo() self.ui.WorkSaveAs.setEnabled(True)
def fix_namespaces(): """Fix namespaces to follow psyop naming.""" _used = [] _to_rename = [] for _ref in ref.find_refs(unloaded=False): if not _find_ref_namespace_issues(_ref): continue _base = _ref.namespace.split('_')[0] _name = _base _idx = 1 while True: check_heart() if not cmds.namespace(exists=_name) and _name not in _used: break _name = '{}_{:d}'.format(_base, _idx) _idx += 1 print _ref, _name _used.append(_name) _to_rename.append((_ref, _name)) if not _to_rename: print 'NOTHING TO FIX' return qt.ok_cancel('Rename {:d} ref{}?'.format(len(_to_rename), get_plural(_to_rename))) for _ref, _name in qt.progress_bar(_to_rename): _ref.rename(_name)
def delete(self, wording='remove', force=False, frames=None, icon=None):
    """Delete this sequence's frames.

    The user is asked to confirm before deletion.

    Args:
        wording (str): wording for confirmation dialog
        force (bool): force delete with no confirmation
        frames (int list): list of frames to delete (if not all)
        icon (str): override interface icon
    """
    from psyhive import qt

    # Limit to requested frames (if any) that actually exist on disk
    _to_delete = self.get_frames(force=True)
    if frames:
        _to_delete = sorted(set(_to_delete).intersection(frames))
    if not _to_delete:
        return

    if not force:
        qt.ok_cancel(
            '{} existing frame{} {} of image sequence?\n\n{}'.format(
                wording.capitalize(), get_plural(_to_delete),
                ints_to_str(_to_delete), self.path),
            title='Confirm ' + wording, icon=icon)

    for _frame in _to_delete:
        os.remove(self[_frame])
    self.get_frames(force=True)  # refresh cached frame list
def update_nk(template, shot, diff=True, force=True): """Update nk template to new shot. Args: template (TTWorkFileBase): template work file shot (TTShotRoot): shot to update to diff (bool): show diffs force (bool): save with no confirmation """ _new_work = template.map_to(Shot=shot.shot).find_next() _start, _end = shot.get_frame_range() _nk = _NkFile(template.path) _update_nk_reads(nk_file=_nk, shot=shot) # Update write nodes for _node in _nk.find_nodes(type_='Write'): for _attr in ['file', 'proxy']: _file = _node.read_attr(_attr) _orig_out = tk.get_output(_file) if not _orig_out: continue print 'ORIG OUT', _orig_out _new_out = _orig_out.map_to(Shot=shot.shot, version=_new_work.version) print 'NEW OUT', _orig_out _node.set_attr(_attr, _new_out.path) print # Update root _root = _nk.find_node(type_='Root') _root.set_attr('name', _new_work.path) _root.set_attr('first_frame', _start) _root.set_attr('last_frame', _end) # Update header _header = _nk.data[0] assert isinstance(_header, six.string_types) _tokens = [_token for _token in re.split('[ "]', _header) if _token] for _token in _tokens: _orig_out = tk.get_output(_token) if not _orig_out: continue _new_out = _orig_out.map_to(Shot=shot.shot, version=_new_work.version) assert _header.count(_token) == 1 _header = _header.replace(_token, _new_out.path) _nk.data[0] = _header if diff: _tmp_nk = File(abs_path('{}/test.nk'.format(tempfile.gettempdir()))) _nk.write(_tmp_nk.path, force=True) _tmp_nk.diff(template.path) # Write new work if not force: qt.ok_cancel('Write new work file?\n\n{}'.format(_new_work.path)) _nk.write(_new_work.path, force=True) _new_work.set_comment(comment='Scene built by shot_builder') print 'WROTE NK:', _new_work.path
def _submit_render(file_=None, layers=None, range_=None, size='Full', force=False): """Submit render. This doesn't handle opening the scene and updating the assets. Args: file_ (str): path to scene to submit layers (list): layers to submit range_ (int tuple): start/end frames size (str): size name (eg. Full, 1/2) force (bool): submit with no confirmation """ _file = file_ or host.cur_scene() _layers = layers or cmds.ls(type='renderLayer') _rng = range_ or host.t_range() print 'SUBMIT RENDER', _file # Build settings _start, _end = _rng _settings = render_settings.RenderSubmitSettings() _settings.render_layers = _layers _settings.render_layer_mode = render_job.RenderLayerMode.CUSTOM _settings.range_start = _start _settings.range_end = _end _settings.frame_source = render_job.FrameSource.FRAME_RANGE _settings.proxy = _map_size_to_pxy(size) print ' - PROXY', _settings.proxy # Build submittable _render_job = render_job.MayaRenderJob(settings=_settings, scene_path=_file) print ' - RENDER JOB', _render_job print ' - LAYERS', _render_job.render_layers print ' - SCENE PATH', _render_job.scene_path print ' - FRAMES', _render_job.frames _submittable = hooks.default_get_render_submittable_hook(_render_job) print ' - SUBMITTABLE', _submittable # Add publishes to make sure appears in output manager _maya_impl = tk2.find_tank_mod('hosts.maya_impl', app='psy_multi_psyqwrapper') _helper = _maya_impl.MayaPipelineRenderSubmitHelper(_submittable) _helper.ensure_can_register_publishes() _submittable.publishes = _helper.register_publishes() print ' - PUBLISHES', _submittable.publishes # Submit if not force: qt.ok_cancel('Submit?') _submitted = hooks.QubeSubmitter().submit(_submittable) if _submitted: print 'Successfully submitted {:d} job{} to the farm.'.format( len(_submitted), get_plural(_submitted))
def create_ref(file_, namespace, class_=None, force=False):
    """Create a reference.

    Args:
        file_ (str): path to reference
        namespace (str): reference namespace
        class_ (type): override FileRef class
        force (bool): force replace any existing ref

    Returns:
        (FileRef): reference
    """
    from psyhive import qt
    from psyhive import host

    _file = File(abs_path(file_))
    if not _file.exists():
        raise OSError("File does not exist: " + _file.path)
    _class = class_ or FileRef
    _orig_range = host.t_range()

    # Make sure the plugin needed for this file type is loaded
    if _file.extn == 'abc':
        cmds.loadPlugin('AbcImport', quiet=True)
    elif _file.extn.lower() == 'fbx':
        cmds.loadPlugin('fbxmaya', quiet=True)

    # Test for existing
    cmds.namespace(set=":")
    if cmds.namespace(exists=namespace):
        _existing = find_ref(namespace, catch=True)
        if _existing:
            if not force:
                qt.ok_cancel(
                    'Replace existing {} reference?'.format(namespace))
            _existing.remove(force=True)
        else:
            del_namespace(namespace, force=force)

    # Create the reference
    _pre_refs = set(cmds.ls(type='reference'))
    cmds.file(
        _file.abs_path(), reference=True, namespace=namespace,
        options="v=0;p=17", ignoreVersion=True)

    # Find new reference node by diffing against the pre-create set
    _new_ref = get_single(
        set(cmds.ls(type='reference')).difference(_pre_refs))

    # Fbx ref seems to update timeline (?)
    if host.t_range() != _orig_range:
        host.set_range(*_orig_range)

    return _class(_new_ref)
def update_all(parent): """Update all yeti nodes to use latest cache. Args: parent (QDialog): parent dialog """ print 'UPDATE ALL YETIS' # Check yetis to update _to_update = [] for _yeti in hom.find_nodes(type_='pgYetiMaya'): print _yeti _file = _yeti.plug('cacheFileName').get_val() if not _file: print ' - NO FILE TO UPDATE' continue print ' - CUR', _file try: _out = tk2.TTOutputFileSeq(_file) except ValueError: print ' - OFF PIPELINE' continue _latest = _out.find_latest() if not _latest: if not _out.exists(): print ' - CUR CACHE MISSING', _out.path else: print ' - NO CACHES FOUND' continue print ' - LATEST', _latest.path if _file != _latest: print ' - NEEDS UPDATE' _to_update.append((_yeti, _latest)) else: print ' - NO UPDATE NEEDED' # Confirm print '{:d} CACHE{} NEED UPDATE'.format(len(_to_update), get_plural(_to_update).upper()) if not _to_update: qt.notify('All caches are up to date', title='Update caches', parent=parent) return qt.ok_cancel('Update {:d} cache{}?'.format(len(_to_update), get_plural(_to_update)), title='Update caches', parent=parent) # Update for _yeti, _latest in qt.progress_bar(_to_update, 'Updating {:d} cache{}'): print _yeti, _latest apply_cache(yeti=_yeti, cache=_latest)
def read_dependencies(self, force=False, confirm=True, new_scene=True,
                      verbose=0):
    """Read dependencies of this workfile.

    Args:
        force (bool): force reread
        confirm (bool): confirm before replace current scene
        new_scene (bool): new scene after read
        verbose (int): print process data

    Returns:
        (tuple): namespace/path dependencies dict, whether the current
            scene was replaced to read them
    """
    # Make sure scene is loaded
    _replaced_scene = False
    if not cmds.file(query=True, location=True) == self.path:
        if confirm:
            qt.ok_cancel(
                'Open scene to read contents?\n\n{}\n\n'
                'Current scene will be lost.'.format(self.path),
                title='Replace current scene')
        cmds.file(self.path, open=True, prompt=False, force=True,
                  loadReferenceDepth='none')
        _replaced_scene = True

    _deps = {'refs': {}, 'abcs': {}}

    # Read refs
    for _ref in ref.find_refs():
        if not _ref.path:
            continue
        _deps['refs'][_ref.namespace] = _ref.path

    # Read abcs (namespaced exocortex nodes only)
    for _abc in cmds.ls(type='ExocortexAlembicFile'):
        if ':' not in _abc:
            continue
        _ns = str(_abc.split(':')[0])
        _path = str(cmds.getAttr(_abc + '.fileName', asString=True))
        _deps['abcs'][_ns] = _path

    if verbose:
        pprint.pprint(_deps)
    if new_scene:
        cmds.file(new=True, force=True)

    return _deps, _replaced_scene
def switch_selected_rig(rig): """Switch selected rig reference. Args: rig (str): rig name to switch to """ _sel = ref.get_selected(catch=True) if not _sel: qt.notify_warning('No rig selected') return print 'SELECTED', _sel _trg = get_single([_rig for _rig in _find_rigs() if _rig.name == rig]) print 'TARGET', _trg.path qt.ok_cancel('Update "{}" rig to "{}"?'.format(_sel.namespace, _trg.name)) _sel.swap_to(_trg.path)
def _clean_leftover_modules(force=False, verbose=0):
    """Clean unused tk modules from sys.modules dict.

    Walks each registered tank app, then scans loaded modules for ones
    whose file path matches the app but whose module uid no longer
    matches the app's current uid - those are leftovers from a previous
    app load and are removed from sys.modules.

    Args:
        force (bool): remove leftover libs with no confirmation
        verbose (int): print process data
    """
    _engine = tank.platform.current_engine()

    # Find leftover modules
    _to_delete = []
    for _app_name in _engine.apps:
        _other_name = _get_app_other_name(_app_name)
        _app = _engine.apps[_app_name]
        # Name-mangled private attr of TankBundle - uid of the current load
        _id = _app._TankBundle__module_uid
        if not _id:
            lprint('MISSING ID', _app_name, verbose=verbose > 1)
            continue
        lprint(_app_name, verbose=verbose)
        lprint(' -', _other_name, verbose=verbose > 1)
        lprint(' -', _id, verbose=verbose > 1)
        for _mod in refresh.find_mods():
            # Module must belong to this app (under either name)
            if (
                    _app_name not in _mod.__file__ and
                    _other_name not in _mod.__file__):
                continue
            # Only tank-imported modules are candidates
            if not _mod.__name__.startswith('tkimp'):
                continue
            # Uid mismatch means a stale load - mark for deletion
            if not _mod.__name__.startswith(_id):
                lprint(' - DELETE', _mod, verbose=verbose > 1)
                _to_delete.append(_mod.__name__)
                continue
            _name = '.'.join(_mod.__name__.split('.')[1:])
            lprint(
                ' - {:90} {}'.format(_name, abs_path(_mod.__file__)),
                verbose=verbose)
        lprint(verbose=verbose)

    # Remove modules
    if _to_delete:
        if not force:
            qt.ok_cancel(
                'Delete {:d} leftover modules?'.format(len(_to_delete)))
        for _mod_name in _to_delete:
            del sys.modules[_mod_name]
    else:
        print 'Nothing to clean'
def delete(self, force=False, wording='delete', icon=None):
    """Delete this directory.

    Args:
        force (bool): force delete with no confirmation
        wording (str): override wording for dialog
        icon (str): override interface icon
    """
    if not self.exists():
        return
    if not force:
        from psyhive import qt
        _msg = "{} this directory?\n\n{}".format(
            wording.capitalize(), self.path)
        qt.ok_cancel(_msg, title='Confirm ' + wording, icon=icon)
    shutil.rmtree(self.path)
def _remove_existing_data(overwrites): """Remove existing ingestion data on items to be replaced. Args: overwrites (tuple list): list of ma/work files """ print 'OVERWRITES:' for _ma, _work in overwrites: print _work.path print ' - CUR', _work.get_vendor_file() print ' - NEW', _ma.path print qt.ok_cancel("Overwrite {:d} work files?".format(len(overwrites))) for _, _work in qt.progress_bar(overwrites, "Cleaning {:d} work{}"): _work.delete_all_data(force=True) print
def write_file(file_, text, force=False):
    """Write the given text to the given file path.

    Args:
        file_ (str): path to write to
        text (str): text to write
        force (bool): overwrite any existing file with no warning
    """
    if os.path.exists(file_):
        if not force:
            from psyhive import qt
            qt.ok_cancel('Overwrite file?\n\n' + file_)
        os.remove(file_)
    test_path(os.path.dirname(file_))

    # Context manager guarantees the handle is closed even if write fails
    with open(file_, 'w') as _file:
        _file.write(text)
def replace_file(source, replace, force=False): """Replace a file with the given source file. By default this will shows a diff and then raise a confirmation dialog. Args: source (str): path to source file replace (str): path to file to replace force (bool): supress diff and confirmation """ from psyhive import qt print source, replace if not force: diff(source, replace) qt.ok_cancel('Replace file with source?\n\nSource:\n\n{}' '\n\nReplace:\n\n{}'.format(source, replace)) shutil.copy(source, replace)
def delete_all_data(self, force=False):
    """Delete all generated data from this action file.

    This includes all blasts and exports and the work file itself.

    Args:
        force (bool): delete data without confirmation
    """
    if not force:
        qt.ok_cancel(
            'Delete all data?\n\nWork:\n{}\n\nVendor file:\n{}'.format(
                self.path, self.get_vendor_file()))

    # Remove image sequences then loose files (including this work file)
    _seqs = [self.blast, self.face_blast, self.blast_comp]
    for _seq in _seqs:
        _seq.delete(force=True)
    _files = [self.get_export_fbx(), File(self.processed_mov), self]
    for _file in _files:
        _file.delete(force=True)
def find_tank_app(name, catch=True, verbose=0): """Find tank app for the given name. Args: name (str): app name catch (bool): offer to restart tank if app is missing verbose (int): print process data Returns: (SgtkApp): tank app """ _engine = tank.platform.current_engine() if verbose: print 'TANK APPS:' pprint.pprint(_engine.apps.keys()) # Try exact match if name in _engine.apps: if verbose: print '_engine = tank.platform.current_engine()' print '_app = _engine.apps["{}"]'.format(name) return _engine.apps[name] # Try suffix match _suffix_match = get_single([ _key for _key in _engine.apps.keys() if _key.split('-')[-1] == name], catch=True) if _suffix_match: if verbose: print '_engine = tank.platform.current_engine()' print '_app = _engine.apps["{}"]'.format(_suffix_match) return _engine.apps[_suffix_match] if catch: qt.ok_cancel('Could not find tank app "{}".\n\nWould you like to ' 'restart tank?'.format(name), icon=icons.EMOJI.find('Kaaba')) restart_tank() return find_tank_app(name) raise RuntimeError('Could not find tank app '+name)
def _reload_mod(mod, mod_name, execute, delete, catch, sort, verbose): """Reload the given module. Args: mod (module): module to reload mod_name (str): module name execute (bool): execute the reload delete (bool): delete and reimport module on reload catch (bool): no error on fail to reload sort (func): module reload sort function verbose (int): print process data """ # Try to reload _dur = 0.0 if execute: _start = time.time() try: reload(mod) except ImportError as _exc: Traceback().pprint() if not catch: qt.ok_cancel('Failed to reload "{}".\n\nRemove from ' 'sys.path?'.format(mod_name), verbose=0) del sys.modules[mod_name] return _dur = time.time() - _start # Apply delete once reload works if delete: del sys.modules[mod_name] __import__(mod_name, fromlist=mod_name.split('.')) # Print status if len(mod_name) > 53: mod_name = mod_name[:50] + ' ...' lprint('{:<7.02f} {:<55} {:5.02f}s {}'.format(sort(mod_name), mod_name, _dur, abs_path(mod.__file__)), verbose=verbose > 1)
def delete(self, force=False, wording='delete', catch=False): """Delete this file. Args: force (bool): delete with no confirmation wording (str): wording for confirmation dialog catch (bool): no error on fail to delete """ if not self.exists(): return if not force: from psyhive import qt qt.ok_cancel("{} file?\n\n{}".format(wording.capitalize(), self.path)) try: os.remove(self.path) except OSError as _exc: print 'FAILED TO DELETE', self.path if not catch: return
def cache_work_files(data, farm=True, parent=None): """Recache the given list of work files. Args: data (list): work files and namespaces to recache farm (bool): submit recaches to farm parent (QDialog): parent interface (for dialog positioning) """ _pos = parent.get_c() if parent else None qt.ok_cancel('Cache {:d} work file{}?'.format(len(data), get_plural(data)), pos=_pos, parent=parent, title='Confirm cache') for _work_file, _namespaces in qt.ProgressBar(data, "Caching {:d} work file{}", col="DeepSkyBlue", pos=_pos, parent=parent): print 'CACHE', _work_file.path print _namespaces print cache_work_file(work_file=_work_file, namespaces=sorted(_namespaces), farm=farm, parent=parent) # Completed notification if farm: _msg = 'Submitted {:d} work file{} to farm' else: _msg = 'Cached {:d} work file{} locally' qt.notify(_msg.format(len(data), get_plural(data)), pos=_pos, title='Complete', parent=parent)
def ingest_seqs(dir_, vendor, filter_=None, force=False, resubmit_transgens=False): """Ingest images sequences from the given directory. Args: dir_ (str): directory to search vendor (str): name of vendor filter_ (str): apply path filter force (bool): ingest without confirmation resubmit_transgens (bool): resubmit any submitted transgens """ _dir = Dir(abs_path(dir_)) print 'READING', _dir.path assert _dir.exists() _seqs = _dir.find_seqs(filter_=filter_) print ' - FOUND {:d} SEQS'.format(len(_seqs)) # Set vendor _vendor = vendor or vendor_from_path(_dir.path) assert _vendor print ' - VENDOR', _vendor print # Check images _statuses = {} _to_ingest = [] for _idx, _seq in qt.progress_bar( enumerate(_seqs), 'Checking {:d} seq{}'): print '[{:d}/{:d}] PATH {}'.format(_idx+1, len(_seqs), _seq.path) # Check ingestion status _status = _ingestable = None try: _seq = VendorSeq(_seq) except ValueError: _status, _ingestable = 'Fails naming convention', _seq.basename else: assert isinstance(_seq, VendorSeq) _status, _ingestable = _seq.get_ingest_status( resubmit_transgens=resubmit_transgens) print ' - STATUS', _status assert _status assert _ingestable is not None if _ingestable: _to_ingest.append(_seq) _statuses[_seq] = _status # Print summary print '\nSUMMARY:' print '\n'.join([ ' {} - {:d}'.format(_status, _statuses.values().count(_status)) for _status in sorted(set(_statuses.values()))]) print 'FOUND {:d} SEQ{} TO INGEST'.format( len(_to_ingest), get_plural(_to_ingest).upper()) # Show different source warning _diff_src = [ _ for _, _status in _statuses.items() if _status == 'Already ingested from a different source'] if _diff_src: qt.notify_warning( '{:d} of the sequences could not be ingested because they have ' 'already been ingested from a different delivery. 
This happens ' 'when a vendor provides an update without versioning up.\n\n' 'See the terminal for details.'.format(len(_diff_src))) # Execute ingestion if not _to_ingest: return if not force: qt.ok_cancel( 'Ingest {:d} seq{}?'.format( len(_to_ingest), get_plural(_to_ingest)), verbose=0) for _idx, _seq in qt.progress_bar( enumerate(_to_ingest), 'Ingesting {:d} seq{}', stack_key='IngestSeqs'): print '({:d}/{:d}) [INGESTING] {}'.format( _idx+1, len(_to_ingest), _seq.path) _seq.ingest(vendor=vendor)
def create_workspaces(root, force=False, verbose=0): """Create workspaces within the given root asset/shot. This creates paths on disk for all of the steps which are attached to the root in shotgun. Args: root (TTRoot): asset/shot to create workspaces for force (bool): create workspaces without confirmation verbose (int): print process data """ _proj = pipe.Project(root.path) _tk = tank.Sgtk(_proj.path) _ctx = _tk.context_from_path(_proj.path) # Set filter _filters = [ ['project', 'is', _ctx.project], ['step', 'is_not', None], ['entity', 'is', root.get_sg_data()], ] # Find tasks _sg = tank.platform.current_engine().shotgun _all_tasks = _sg.find('Task', _filters, fields=['project', 'entity', 'step']) _key = lambda t: (t['project']['id'], t['entity']['id'], t['step']['id']) _all_tasks.sort(key=_key) _grouped_by_entity = collections.defaultdict(list) for _task in _all_tasks: _grouped_by_entity[(_task['entity']['type'], _task['entity']['id'], _task['entity']['name'])].append(_task) # Find tasks which need creating _to_create = [] for (_entity_type, _entity_id, _entity_name), _tasks in sorted(_grouped_by_entity.items()): if _entity_type not in ('Asset', 'Shot', 'Sequence'): continue _entity_id_list = [_task['id'] for _task in _tasks] lprint(' - CREATE WORKSPACES', _entity_type, _entity_id, _entity_name, _entity_id_list, verbose=verbose) _to_create.append( (_entity_type, _entity_id, _entity_name, _entity_id_list)) # Execute creation if not force: qt.ok_cancel('Create {:d} workspace{}?'.format(len(_to_create), get_plural(_to_create))) _done = list() for _entity_type, _entity_id, _entity_name, _entity_id_list in _to_create: _key = (_entity_type, _entity_id) if _key in _done: continue _start = time.time() print ' - CREATE WORKSPACES {}'.format('/'.join( [_ctx.project['name'], _entity_type, _entity_name])) _tk.create_filesystem_structure('Task', _entity_id_list) print ' - CREATED WORKSPACES FOR {} ({:.01f}s)'.format( '/'.join([_ctx.project['name'], _entity_type, 
_entity_name]), time.time() - _start) _done.append(_key)
def batch_submit_shots(step='previz', submitter='/out/submitter1'): """Batch submit shots selected from a list. Args: step (str): step to search for abcs submitter (str): path to submitter rop """ _shots = [_shot.name for _shot in tk2.find_shots()] _shots = qt.multi_select(_shots, title='Select shots', msg='Select shots to submit') # Check shots _missing_cam = [] _missing_rng = [] for _shot in qt.progress_bar(copy.copy(_shots), 'Checking {:d} shot{}'): _shot = tk2.find_shot(_shot) print 'CHECKING', _shot # Check cam _step = _shot.find_step_root(step, catch=True) if not _step: _missing_cam.append(_shot.name) _shots.remove(_shot.name) continue _cam_abc = _step.find_output_file(output_type='camcache', extn='abc', verbose=1, version='latest', catch=True) if not _cam_abc: _missing_cam.append(_shot.name) _shots.remove(_shot.name) continue print ' - CAM', _cam_abc.path # Check frame range _rng = _shot.get_frame_range() print ' - RANGE', _rng if not _rng or None in _rng: _missing_rng.append(_shot.name) _shots.remove(_shot.name) continue # Show warning _msg = '' if _missing_cam: _msg += 'Shots with no {} camera:\n\n {}\n\n'.format( step, '\n '.join(_missing_cam)) if _missing_rng: _msg += 'Shots with no range in shotgun:\n\n {}\n\n'.format( '\n '.join(_missing_rng)) if _msg: _msg += 'These shots will be ignored.' qt.ok_cancel(_msg, title='Warning') # Submit shots for _shot in qt.progress_bar(_shots, 'Submitting {:d} shot{}'): print 'BUILD SCENE', _shot build_scene(shot=_shot, step=step, submitter=submitter) print 'SUBMITTED {:d} SHOT{}'.format(len(_shots), get_plural(_shots).upper())
def _exec_cache(namespaces, confirm=True, new_scene=False,
                farm=True, verbose=1):
    """Execute a recache on the current workfile.

    Drives the psy-multi-cache app by faking its conflict-resolver
    objects, so that only the requested namespaces get cached.

    Args:
        namespaces (str list): list of namespaces to recache
        confirm (bool): confirm before execute
        new_scene (bool): new scene after recache
        farm (bool): submit recache to farm
        verbose (int): print process data
    """

    class _FakeResolver(object):
        # Mimics the cache app's resolver interface

        def __init__(self, all_items, conflicts, version):
            self.user_data = all_items, version
            self.conflicts = conflicts

    class _FakeConflict(object):
        # Mimics a resolver conflict - resolution None means "cache it"
        # NOTE: _skip is a free variable bound later in the enclosing
        # function (late binding) - it must be assigned before the first
        # _FakeConflict is instantiated

        def __init__(self, id_, cache):
            _user_data = collections.namedtuple('UserData', ['id'])
            self.id_ = id_
            self.user_data = _user_data(id=self.id_)
            self.resolution = None if cache else _skip

        def __repr__(self):
            return '<Conflict:{}>'.format(self.id_)

    _engine = tank.platform.current_engine()
    _cache_app = _engine.apps['psy-multi-cache']
    check_heart()

    # Use resolver to limit items to cache
    _cache_app.init_app()
    _mod = sys.modules[_cache_app.cache_controller.__module__]
    _skip = _mod.PublishConflictResolution.SKIP
    _model = _cache_app.cache_controller.model
    _all_items = [
        _item.item_data
        for _item in _model.cache_list.selected_items]
    lprint(
        ' - ALL ITEMS', len(_all_items), pprint.pformat(_all_items),
        verbose=verbose > 1)
    _conflicts = []
    for _item in _all_items:
        # Strip the cam shape suffix before matching against namespaces
        _cache = _item.id.replace(":renderCamShape", "") in namespaces
        _conflict = _FakeConflict(id_=_item.id, cache=_cache)
        _conflicts.append(_conflict)
    lprint(
        ' - CONFLICTS', len(_conflicts), pprint.pformat(_conflicts),
        verbose=verbose > 1)
    _resolver = _FakeResolver(
        all_items=_all_items, conflicts=_conflicts, version=_model.version)

    # Check cache
    _to_cache = [
        _conflict for _conflict in _conflicts if not _conflict.resolution]
    if not _to_cache:
        raise RuntimeError("Nothing found to cache")
    lprint(' - FOUND {:d} ITEMS TO CACHE'.format(len(_to_cache)))
    if confirm:
        # NOTE(review): prompt says "to farm" even when farm=False
        qt.ok_cancel('Submit {:d} cache{} to farm?'.format(
            len(_to_cache), get_plural(_to_cache)))

    # Execute cache
    if farm:
        _cache_app.cache_controller.model.cache_on_farm(resolver=_resolver)
    else:
        _cache_app.cache_controller.model.cache(resolver=_resolver)
    dprint('{} {:d}/{:d} REFS'.format(
        'SUBMITTED' if farm else 'CACHED',
        len(namespaces), len(_all_items)))

    if new_scene:
        cmds.file(new=True, force=True)
def ingest_vendor_anim(dir_, vendor=None, force=False, filter_=None, cache_on_farm=True, ignore_extn=False, ignore_dlayers=False, ignore_rlayers=False, ignore_multi_top_nodes=False): """Ingest vendor animation files. Args: dir_ (str): vendor in folder vendor (str): vendor name force (bool): lose current scene changes without confirmation filter_ (str): filter file list cache_on_farm (bool): submit caches to qube ignore_extn (bool): ignore file extension issues ignore_dlayers (bool): ignore display layer issues ignore_rlayers (bool): ignore render layer issues ignore_multi_top_nodes (bool): ignore multiple top node issues """ # Set vendor _vendor = vendor or ingest.vendor_from_path(dir_) assert _vendor print ' - VENDOR', _vendor print # Read ingestible scenes _to_ingest, _statuses = _get_ingestable_scenes(dir_=dir_, filter_=filter_) if not _to_ingest: return if not force: qt.ok_cancel('Ingest {:d} scene{}?'.format(len(_to_ingest), get_plural(_to_ingest)), verbose=0, icon=ingest.ICON, title='Confirm ingestion') print 'HANDLE UNSAVED CHANGES' host.handle_unsaved_changes() print 'HANDLED UNSAVED CHANGES' # Ingest scenes _issues = [] _ingest_kwargs = dict(ignore_extn=ignore_extn, ignore_dlayers=ignore_dlayers, ignore_multi_top_nodes=ignore_multi_top_nodes, ignore_rlayers=ignore_rlayers) for _idx, _scene in qt.progress_bar(enumerate(_to_ingest), 'Ingesting {:d} scene{}', col=PYGUI_COL): print '[{:d}/{:d}] PATH {}'.format(_idx + 1, len(_to_ingest), _scene.path) _scene.check_workspace(force=True) # Check ingestion status assert isinstance(_scene, VendorScene) _scene_isses = _scene.get_ingest_issues(**_ingest_kwargs) if _scene_isses: _issues.append((_scene, _scene_isses)) print ' - CAM', _scene.scene_get_cam() _scene.ingest(vendor=vendor, force=True, cache_on_farm=cache_on_farm) _status, _ = _scene.get_ingest_status() _statuses[_scene] = _status if _issues: print '\n\n[INGESTION ISSUES]\n' for _scene, _scene_issues in _issues: print 'SCENE', _scene.path for _issue in 
_scene_issues: print ' -', _issue # Print summary print '\n\n[SUMMARY]' print '\n'.join([ ' {} - {:d}'.format(_status, _statuses.values().count(_status)) for _status in sorted(set(_statuses.values())) ]) print '\nFOUND {:d} SCENE{} TO INGEST'.format( len(_to_ingest), get_plural(_to_ingest).upper())