def _redraw__info(self, widget):
    _passes = self.ui.renders.selected_text()
    _tasks = self.ui.tasks.selected_text()
    self._renders = [_render for _render in self._all_renders
                     if _render.output_name in _passes
                     and _render.task in _tasks]
    self._passes = sorted(set(
        [_render.output_name for _render in self._renders]))

    # Find latest work files
    self._work_files = {}
    for _render in self._renders:
        _latest_render = _render.find_latest()
        _work = _latest_render.map_to(
            tk2.TTWork, extension='ma', dcc='maya')
        _latest_work = _work.find_latest()
        if _latest_work:
            if _latest_work not in self._work_files:
                self._work_files[_latest_work] = []
            self._work_files[_latest_work].append(_latest_render)

    widget.setText('Selected: {:d} render{} ({:d} work file{})'.format(
        len(self._renders), get_plural(self._renders),
        len(self._work_files), get_plural(self._work_files)))


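# `get_plural` is used throughout these snippets but not defined in them.
# A minimal sketch of the assumed behaviour - an empty suffix for exactly
# one item, otherwise a plural suffix ('s' unless overridden, matching the
# `plural='es'` style override accepted by ProgressBar below):


def get_plural(items, plural=None):
    """Get plural suffix for the given list of items.

    Args:
        items (list): items being counted
        plural (str): override plural suffix (eg. 'es' for 'passes')

    Returns:
        (str): empty string for exactly one item, otherwise the suffix
    """
    _plural = plural or 's'
    return '' if len(items) == 1 else _plural

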
def _redraw__info(self, widget):
    self._exports = []
    widget.setVisible(not self._uncached_shots)
    if self._uncached_shots:
        return

    _handler = self._get_handler()
    _shots = self.ui.shots.selected_data()
    _steps = self.ui.steps.selected_text()
    _tasks = self.ui.tasks.selected_text()
    if self._uncached_work_files:
        _work_files = _handler.find_work_files(
            shots=_shots, steps=_steps, tasks=_tasks)
        _uncached_work_files = _handler.find_work_files(
            shots=_shots, steps=_steps, tasks=_tasks, cached=False)
        _text = 'Matched:\n {:d} work file{}\n {:d} uncached'.format(
            len(_work_files), get_plural(_work_files),
            len(_uncached_work_files))
    else:
        _assets = self.ui.assets.selected_data()
        self._exports = _handler.find_exports(
            shots=_shots, steps=_steps, tasks=_tasks, assets=_assets)
        _n_exports = sum([len(_nss) for _, _nss in self._exports.items()])
        _text = 'Matched:\n {:d} work file{}\n {:d} export{}'.format(
            len(self._exports), get_plural(self._exports),
            _n_exports, get_plural(range(_n_exports)))

    widget.setText(_text)


def update_all(parent):
    """Update all yeti nodes to use latest cache.

    Args:
        parent (QDialog): parent dialog
    """
    print 'UPDATE ALL YETIS'

    # Check yetis to update
    _to_update = []
    for _yeti in hom.find_nodes(type_='pgYetiMaya'):
        print _yeti
        _file = _yeti.plug('cacheFileName').get_val()
        if not _file:
            print ' - NO FILE TO UPDATE'
            continue
        print ' - CUR', _file
        try:
            _out = tk2.TTOutputFileSeq(_file)
        except ValueError:
            print ' - OFF PIPELINE'
            continue
        _latest = _out.find_latest()
        if not _latest:
            if not _out.exists():
                print ' - CUR CACHE MISSING', _out.path
            else:
                print ' - NO CACHES FOUND'
            continue
        print ' - LATEST', _latest.path
        if _file != _latest:
            print ' - NEEDS UPDATE'
            _to_update.append((_yeti, _latest))
        else:
            print ' - NO UPDATE NEEDED'

    # Confirm
    print '{:d} CACHE{} NEED UPDATE'.format(
        len(_to_update), get_plural(_to_update).upper())
    if not _to_update:
        qt.notify('All caches are up to date', title='Update caches',
                  parent=parent)
        return
    qt.ok_cancel(
        'Update {:d} cache{}?'.format(
            len(_to_update), get_plural(_to_update)),
        title='Update caches', parent=parent)

    # Update
    for _yeti, _latest in qt.progress_bar(
            _to_update, 'Updating {:d} cache{}'):
        print _yeti, _latest
        apply_cache(yeti=_yeti, cache=_latest)


def fix_groups():
    """Fix groups to follow psyop scene organisation."""
    _to_fix = []
    for _ref in ref.find_refs(unloaded=False):
        _top_node = _ref.find_top_node(catch=True)
        if not _top_node:
            continue
        _parent = _top_node.get_parent()
        if _parent in _GROUPS:
            continue
        if '/layout/' in _ref.path:
            _grp = 'JUNK'
        elif '/camera/' in _ref.path:
            _grp = 'CAMERA'
        elif '/prop/' in _ref.path:
            _grp = 'PROPS'
        elif '/character/' in _ref.path:
            _grp = 'CHAR'
        else:
            print 'FAILED', _ref.path
            continue
        print _ref, _parent, _grp
        _to_fix.append((_top_node, _grp))

    if not _to_fix:
        print 'NOTHING TO FIX'
        return
    qt.ok_cancel('Group {:d} ref{}?'.format(
        len(_to_fix), get_plural(_to_fix)))
    for _top_node, _grp in qt.progress_bar(_to_fix):
        _top_node.add_to_grp(_grp)


def select_text(self, items, catch=True, verbose=0):
    """Select the items with text matching the given list.

    Args:
        items (str|str list): text of item(s) to select
        catch (bool): no error on fail to select items
        verbose (int): print process data

    Raises:
        (ValueError): if items not found
    """
    lprint('SELECTING TEXT', items, verbose=verbose)
    _sel_text = [items] if isinstance(items, six.string_types) else items
    _to_select = []
    _all_items = self.all_items()
    for _idx, _item in enumerate(_all_items):
        _text = _item.text()
        lprint(' -', _text, _text in _sel_text, verbose=verbose)
        if _text in _sel_text:
            _sel_text.remove(_text)
            _to_select.append(_idx)
    if _to_select:
        for _idx, _item in enumerate(_all_items):
            _item.setSelected(_idx in _to_select)
    elif not catch:
        raise ValueError('Failed to select item{} - {}'.format(
            get_plural(_sel_text), items))


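# Usage sketch for select_text - `_list` stands in for any list-widget
# wrapper instance exposing this method (illustrative names, not a
# confirmed API):
#
#     _list.select_text('anim')  # a single str is wrapped into a list
#     _list.select_text(['blocking', 'anim'])  # select several items
#     _list.select_text(['missing'], catch=False)  # raises ValueError

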
def fix_namespaces():
    """Fix namespaces to follow psyop naming."""
    _used = []
    _to_rename = []
    for _ref in ref.find_refs(unloaded=False):
        if not _find_ref_namespace_issues(_ref):
            continue
        _base = _ref.namespace.split('_')[0]
        _name = _base
        _idx = 1
        while True:
            check_heart()
            if not cmds.namespace(exists=_name) and _name not in _used:
                break
            _name = '{}_{:d}'.format(_base, _idx)
            _idx += 1
        print _ref, _name
        _used.append(_name)
        _to_rename.append((_ref, _name))

    if not _to_rename:
        print 'NOTHING TO FIX'
        return
    qt.ok_cancel('Rename {:d} ref{}?'.format(
        len(_to_rename), get_plural(_to_rename)))
    for _ref, _name in qt.progress_bar(_to_rename):
        _ref.rename(_name)


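# The while loop above is an incrementing-suffix search for a free
# namespace. The same idea as a standalone, runnable sketch - `exists`
# stands in for the combined cmds.namespace/_used check (an assumption
# for illustration, not the pipeline API):


def _find_unused_name(base, exists):
    """Find the first untaken name in the series base, base_1, base_2, ...

    Args:
        base (str): preferred name
        exists (fn): callable returning whether a name is taken

    Returns:
        (str): first available name
    """
    _name = base
    _idx = 1
    while exists(_name):
        _name = '{}_{:d}'.format(base, _idx)
        _idx += 1
    return _name


assert _find_unused_name('cat', {'cat', 'cat_1'}.__contains__) == 'cat_2'

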
def _rerender_work_files(work_files, ranges, passes, size='Full'):
    """Rerender the given work files on qube.

    Args:
        work_files (TTWorkFileBase list): work file list
        ranges (tuple list): list of start/end frames
        passes (str list): list of passes to rerender
        size (str): size name (eg. Full, 1/2)
    """
    _job = farm.MayaPyJob('Submit {:d} render{}'.format(
        len(work_files), get_plural(work_files)))
    for _work_file, _range in safe_zip(work_files, ranges):
        _py = '\n'.join([
            'import os',
            'os.environ["USERNAME"] = "******"  # For fileops/submit',
            'from psyhive import tk2',
            'from maya_psyhive.tools import m_batch_rerender',
            '_path = "{work.path}"',
            '_range = {range}',
            '_passes = {passes}',
            '_size = "{size}"',
            '_work = tk2.TTWork(_path)',
            'm_batch_rerender.rerender_work_file(',
            '    range_=_range, work_file=_work, passes=_passes,',
            '    size=_size)',
        ]).format(work=_work_file, passes=passes, range=_range,
                  user=os.environ['USERNAME'], size=size)
        _task = farm.MayaPyTask(
            _py, label='Rerender {}'.format(_work_file.basename))
        _job.tasks.append(_task)
    _job.submit()


def _submit_render(file_=None, layers=None, range_=None, size='Full',
                   force=False):
    """Submit render.

    This doesn't handle opening the scene and updating the assets.

    Args:
        file_ (str): path to scene to submit
        layers (list): layers to submit
        range_ (int tuple): start/end frames
        size (str): size name (eg. Full, 1/2)
        force (bool): submit with no confirmation
    """
    _file = file_ or host.cur_scene()
    _layers = layers or cmds.ls(type='renderLayer')
    _rng = range_ or host.t_range()
    print 'SUBMIT RENDER', _file

    # Build settings
    _start, _end = _rng
    _settings = render_settings.RenderSubmitSettings()
    _settings.render_layers = _layers
    _settings.render_layer_mode = render_job.RenderLayerMode.CUSTOM
    _settings.range_start = _start
    _settings.range_end = _end
    _settings.frame_source = render_job.FrameSource.FRAME_RANGE
    _settings.proxy = _map_size_to_pxy(size)
    print ' - PROXY', _settings.proxy

    # Build submittable
    _render_job = render_job.MayaRenderJob(
        settings=_settings, scene_path=_file)
    print ' - RENDER JOB', _render_job
    print ' - LAYERS', _render_job.render_layers
    print ' - SCENE PATH', _render_job.scene_path
    print ' - FRAMES', _render_job.frames
    _submittable = hooks.default_get_render_submittable_hook(_render_job)
    print ' - SUBMITTABLE', _submittable

    # Add publishes to make sure appears in output manager
    _maya_impl = tk2.find_tank_mod(
        'hosts.maya_impl', app='psy_multi_psyqwrapper')
    _helper = _maya_impl.MayaPipelineRenderSubmitHelper(_submittable)
    _helper.ensure_can_register_publishes()
    _submittable.publishes = _helper.register_publishes()
    print ' - PUBLISHES', _submittable.publishes

    # Submit
    if not force:
        qt.ok_cancel('Submit?')
    _submitted = hooks.QubeSubmitter().submit(_submittable)
    if _submitted:
        print 'Successfully submitted {:d} job{} to the farm.'.format(
            len(_submitted), get_plural(_submitted))


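# Example submission with explicit args (all of these values are
# illustrative, not taken from a real job):
#
#     _submit_render(
#         file_='P:/projects/test/work/shot_010.ma',
#         layers=['defaultRenderLayer'],
#         range_=(1001, 1100),
#         size='1/2',
#         force=True)

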
def cache_work_files(data, farm=True, parent=None):
    """Recache the given list of work files.

    Args:
        data (list): work files and namespaces to recache
        farm (bool): submit recaches to farm
        parent (QDialog): parent interface (for dialog positioning)
    """
    _pos = parent.get_c() if parent else None
    qt.ok_cancel(
        'Cache {:d} work file{}?'.format(len(data), get_plural(data)),
        pos=_pos, parent=parent, title='Confirm cache')
    for _work_file, _namespaces in qt.ProgressBar(
            data, 'Caching {:d} work file{}', col='DeepSkyBlue',
            pos=_pos, parent=parent):
        print 'CACHE', _work_file.path
        print _namespaces
        print
        cache_work_file(
            work_file=_work_file, namespaces=sorted(_namespaces),
            farm=farm, parent=parent)

    # Completed notification
    if farm:
        _msg = 'Submitted {:d} work file{} to farm'
    else:
        _msg = 'Cached {:d} work file{} locally'
    qt.notify(_msg.format(len(data), get_plural(data)),
              pos=_pos, title='Complete', parent=parent)


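# The `data` arg pairs each work file with the namespaces to recache in
# it. An illustrative shape (the path is hypothetical; tk2.TTWork usage
# mirrors the embedded payload in _rerender_work_files above):
#
#     data = [
#         (tk2.TTWork('P:/projects/test/work/shot_010.ma'),
#          ['archer_01', 'cart_01']),
#     ]
#     cache_work_files(data, farm=True)

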
def create_workspaces(root, force=False, verbose=0):
    """Create workspaces within the given root asset/shot.

    This creates paths on disk for all of the steps which are attached
    to the root in shotgun.

    Args:
        root (TTRoot): asset/shot to create workspaces for
        force (bool): create workspaces without confirmation
        verbose (int): print process data
    """
    _proj = pipe.Project(root.path)
    _tk = tank.Sgtk(_proj.path)
    _ctx = _tk.context_from_path(_proj.path)

    # Set filter
    _filters = [
        ['project', 'is', _ctx.project],
        ['step', 'is_not', None],
        ['entity', 'is', root.get_sg_data()],
    ]

    # Find tasks
    _sg = tank.platform.current_engine().shotgun
    _all_tasks = _sg.find(
        'Task', _filters, fields=['project', 'entity', 'step'])
    _all_tasks.sort(key=lambda task: (
        task['project']['id'], task['entity']['id'], task['step']['id']))
    _grouped_by_entity = collections.defaultdict(list)
    for _task in _all_tasks:
        _grouped_by_entity[(
            _task['entity']['type'], _task['entity']['id'],
            _task['entity']['name'])].append(_task)

    # Find tasks which need creating
    _to_create = []
    for (_entity_type, _entity_id, _entity_name), _tasks in sorted(
            _grouped_by_entity.items()):
        if _entity_type not in ('Asset', 'Shot', 'Sequence'):
            continue
        _task_ids = [_task['id'] for _task in _tasks]
        lprint(' - CREATE WORKSPACES', _entity_type, _entity_id,
               _entity_name, _task_ids, verbose=verbose)
        _to_create.append(
            (_entity_type, _entity_id, _entity_name, _task_ids))

    # Execute creation
    if not force:
        qt.ok_cancel('Create {:d} workspace{}?'.format(
            len(_to_create), get_plural(_to_create)))
    _done = []
    for _entity_type, _entity_id, _entity_name, _task_ids in _to_create:
        _key = (_entity_type, _entity_id)
        if _key in _done:
            continue
        _start = time.time()
        print ' - CREATE WORKSPACES {}'.format('/'.join(
            [_ctx.project['name'], _entity_type, _entity_name]))
        _tk.create_filesystem_structure('Task', _task_ids)
        print ' - CREATED WORKSPACES FOR {} ({:.01f}s)'.format(
            '/'.join([_ctx.project['name'], _entity_type, _entity_name]),
            time.time() - _start)
        _done.append(_key)


def _exec_cache(namespaces, confirm=True, new_scene=False, farm=True,
                verbose=1):
    """Execute a recache on the current workfile.

    Args:
        namespaces (str list): list of namespaces to recache
        confirm (bool): confirm before execute
        new_scene (bool): new scene after recache
        farm (bool): submit recache to farm
        verbose (int): print process data
    """

    class _FakeResolver(object):

        def __init__(self, all_items, conflicts, version):
            self.user_data = all_items, version
            self.conflicts = conflicts

    class _FakeConflict(object):

        def __init__(self, id_, cache):
            _user_data = collections.namedtuple('UserData', ['id'])
            self.id_ = id_
            self.user_data = _user_data(id=self.id_)
            self.resolution = None if cache else _skip

        def __repr__(self):
            return '<Conflict:{}>'.format(self.id_)

    _engine = tank.platform.current_engine()
    _cache_app = _engine.apps['psy-multi-cache']
    check_heart()

    # Use resolver to limit items to cache
    _cache_app.init_app()
    _mod = sys.modules[_cache_app.cache_controller.__module__]
    _skip = _mod.PublishConflictResolution.SKIP
    _model = _cache_app.cache_controller.model
    _all_items = [
        _item.item_data for _item in _model.cache_list.selected_items]
    lprint(' - ALL ITEMS', len(_all_items), pprint.pformat(_all_items),
           verbose=verbose > 1)
    _conflicts = []
    for _item in _all_items:
        _cache = _item.id.replace(':renderCamShape', '') in namespaces
        _conflict = _FakeConflict(id_=_item.id, cache=_cache)
        _conflicts.append(_conflict)
    lprint(' - CONFLICTS', len(_conflicts), pprint.pformat(_conflicts),
           verbose=verbose > 1)
    _resolver = _FakeResolver(
        all_items=_all_items, conflicts=_conflicts, version=_model.version)

    # Check cache
    _to_cache = [
        _conflict for _conflict in _conflicts if not _conflict.resolution]
    if not _to_cache:
        raise RuntimeError('Nothing found to cache')
    lprint(' - FOUND {:d} ITEMS TO CACHE'.format(len(_to_cache)))
    if confirm:
        qt.ok_cancel('Submit {:d} cache{} to farm?'.format(
            len(_to_cache), get_plural(_to_cache)))

    # Execute cache
    if farm:
        _cache_app.cache_controller.model.cache_on_farm(resolver=_resolver)
    else:
        _cache_app.cache_controller.model.cache(resolver=_resolver)
    dprint('{} {:d}/{:d} REFS'.format(
        'SUBMITTED' if farm else 'CACHED', len(namespaces),
        len(_all_items)))

    if new_scene:
        cmds.file(new=True, force=True)


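# The fakes above duck-type the cache app's resolver objects: any
# _FakeConflict whose namespace was not requested carries the SKIP
# resolution, so the controller only caches the requested items.
# Illustration (ids are made up, with `_skip` standing in for the app's
# SKIP value):
#
#     _conflicts = [_FakeConflict(id_='cat', cache=True),
#                   _FakeConflict(id_='dog', cache=False)]
#     [_c.id_ for _c in _conflicts if not _c.resolution]  # -> ['cat']

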
def batch_submit_shots(step='previz', submitter='/out/submitter1'):
    """Batch submit shots selected from a list.

    Args:
        step (str): step to search for abcs
        submitter (str): path to submitter rop
    """
    _shots = [_shot.name for _shot in tk2.find_shots()]
    _shots = qt.multi_select(
        _shots, title='Select shots', msg='Select shots to submit')

    # Check shots
    _missing_cam = []
    _missing_rng = []
    for _shot in qt.progress_bar(
            copy.copy(_shots), 'Checking {:d} shot{}'):
        _shot = tk2.find_shot(_shot)
        print 'CHECKING', _shot

        # Check cam
        _step = _shot.find_step_root(step, catch=True)
        if not _step:
            _missing_cam.append(_shot.name)
            _shots.remove(_shot.name)
            continue
        _cam_abc = _step.find_output_file(
            output_type='camcache', extn='abc', verbose=1,
            version='latest', catch=True)
        if not _cam_abc:
            _missing_cam.append(_shot.name)
            _shots.remove(_shot.name)
            continue
        print ' - CAM', _cam_abc.path

        # Check frame range
        _rng = _shot.get_frame_range()
        print ' - RANGE', _rng
        if not _rng or None in _rng:
            _missing_rng.append(_shot.name)
            _shots.remove(_shot.name)
            continue

    # Show warning
    _msg = ''
    if _missing_cam:
        _msg += 'Shots with no {} camera:\n\n {}\n\n'.format(
            step, '\n '.join(_missing_cam))
    if _missing_rng:
        _msg += 'Shots with no range in shotgun:\n\n {}\n\n'.format(
            '\n '.join(_missing_rng))
    if _msg:
        _msg += 'These shots will be ignored.'
        qt.ok_cancel(_msg, title='Warning')

    # Submit shots
    for _shot in qt.progress_bar(_shots, 'Submitting {:d} shot{}'):
        print 'BUILD SCENE', _shot
        build_scene(shot=_shot, step=step, submitter=submitter)

    print 'SUBMITTED {:d} SHOT{}'.format(
        len(_shots), get_plural(_shots).upper())


def check_current_scene(show_dialog=True, verbose=1):
    """Check current scene for ingestion issues.

    Args:
        show_dialog (bool): show status dialog on completion
        verbose (int): print process data

    Returns:
        (str list): list of issues with current file
    """
    _file = File(host.cur_scene())
    _issues = []
    lprint('FILE', _file, verbose=verbose)
    lprint(' - BASENAME', _file.basename, verbose=verbose)

    # Check current scene filename
    _issues += _find_scene_name_issues(_file)

    # Check maya version
    _ver = int(cmds.about(version=True))
    if _ver != 2018:
        _issues.append('Bad maya version {:d}'.format(_ver))

    # Check for unwanted node types
    for _type in ['displayLayer', 'renderLayer']:
        _lyrs = [
            _lyr for _lyr in cmds.ls(type=_type)
            if _lyr not in DEFAULT_NODES
            if not cmds.referenceQuery(_lyr, isNodeReferenced=True)]
        if _lyrs:
            _issues.append('Scene has {} layers: {}'.format(
                _type.replace('Layer', ''), ', '.join(_lyrs)))
    for _type in ['unknown']:
        _nodes = [
            _node for _node in cmds.ls(type=_type)
            if _node not in DEFAULT_NODES
            if not cmds.referenceQuery(_node, isNodeReferenced=True)]
        if _nodes:
            _issues.append('Scene has {} nodes: {}'.format(
                _type, ', '.join(_nodes)))

    # Check references
    _refs = ref.find_refs(unloaded=False)
    lprint('CHECKING {:d} REFS'.format(len(_refs)), verbose=verbose)
    for _ref in _refs:
        lprint(' - CHECKING', _ref, verbose=verbose)
        _issues += _find_ref_issues(_ref)

    # Print summary
    if verbose:
        print '\nSUMMARY: FOUND {:d} ISSUE{}'.format(
            len(_issues), get_plural(_issues).upper())
        for _idx, _issue in enumerate(_issues):
            print ' {:5} {}'.format('[{:d}]'.format(_idx + 1), _issue)
        print
    if not show_dialog:
        pass
    elif not _issues:
        qt.notify('No issues found.\n\nFile is ready to send to psyop.',
                  verbose=0)
    else:
        qt.notify_warning(
            'This file has {:d} issue{}.\n\nCheck the script editor for '
            'details.'.format(len(_issues), get_plural(_issues)),
            verbose=0)

    return _issues


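# Headless usage sketch - gather the issue list without the popup dialog
# (the RuntimeError handling is illustrative):
#
#     _issues = check_current_scene(show_dialog=False, verbose=0)
#     if _issues:
#         raise RuntimeError('Scene failed {:d} check{}'.format(
#             len(_issues), get_plural(_issues)))

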
def _get_ingestable_scenes(dir_, filter_):
    """Find scenes ready for ingestion.

    Args:
        dir_ (str): directory to search for scenes
        filter_ (str): filter file list

    Returns:
        (VendorScene list, dict): list of ingestible scenes,
            scene statuses
    """

    # Find scenes
    _dir = Dir(abs_path(dir_))
    print 'READING', _dir.path
    assert _dir.exists()
    assert _dir.is_dir()
    _scenes = [
        _file for _file in _dir.find(
            type_='f', class_=File, filter_=filter_)
        if _file.extn in ('ma', 'mb')]
    print ' - FOUND {:d} SCENES'.format(len(_scenes))

    # Check scenes
    _statuses = {}
    _to_ingest = []
    for _idx, _scene in qt.progress_bar(
            enumerate(_scenes), 'Checking {:d} scene{}', col=PYGUI_COL):
        print '[{:d}/{:d}] PATH {}'.format(
            _idx + 1, len(_scenes), _scene.path)

        # Check ingestion status
        _status = _ingestable = None
        try:
            _scene = VendorScene(_scene)
        except ValueError:
            print ' - FAILS NAMING CONVENTION'
            _status, _ingestable = 'Fails naming convention', False
        else:
            _status, _ingestable = _scene.get_ingest_status()
        print ' - STATUS', _status

        assert _status
        assert _ingestable is not None
        if _ingestable:
            assert isinstance(_scene, VendorScene)
            _to_ingest.append(_scene)
        _statuses[_scene] = _status

    # Print list of shots already ingested
    _already_ingested = [
        _scene for _scene, _status in _statuses.items()
        if _status == 'Already ingested']
    if _already_ingested:
        print '\n[ALREADY INGESTED] {}\n'.format(', '.join(sorted(set([
            _scene.to_psy_work().get_shot().name
            for _scene in _already_ingested]))))

    # Print summary
    print '\n[SUMMARY]'
    print '\n'.join([
        ' {} - {:d}'.format(_status, _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))])
    print '\nFOUND {:d} SCENE{} TO INGEST'.format(
        len(_to_ingest), get_plural(_to_ingest).upper())

    return _to_ingest, _statuses


def ingest_vendor_anim(dir_, vendor=None, force=False, filter_=None,
                       cache_on_farm=True, ignore_extn=False,
                       ignore_dlayers=False, ignore_rlayers=False,
                       ignore_multi_top_nodes=False):
    """Ingest vendor animation files.

    Args:
        dir_ (str): vendor in folder
        vendor (str): vendor name
        force (bool): lose current scene changes without confirmation
        filter_ (str): filter file list
        cache_on_farm (bool): submit caches to qube
        ignore_extn (bool): ignore file extension issues
        ignore_dlayers (bool): ignore display layer issues
        ignore_rlayers (bool): ignore render layer issues
        ignore_multi_top_nodes (bool): ignore multiple top node issues
    """

    # Set vendor
    _vendor = vendor or ingest.vendor_from_path(dir_)
    assert _vendor
    print ' - VENDOR', _vendor
    print

    # Read ingestible scenes
    _to_ingest, _statuses = _get_ingestable_scenes(
        dir_=dir_, filter_=filter_)
    if not _to_ingest:
        return
    if not force:
        qt.ok_cancel(
            'Ingest {:d} scene{}?'.format(
                len(_to_ingest), get_plural(_to_ingest)),
            verbose=0, icon=ingest.ICON, title='Confirm ingestion')

    print 'HANDLE UNSAVED CHANGES'
    host.handle_unsaved_changes()
    print 'HANDLED UNSAVED CHANGES'

    # Ingest scenes
    _issues = []
    _ingest_kwargs = dict(
        ignore_extn=ignore_extn, ignore_dlayers=ignore_dlayers,
        ignore_multi_top_nodes=ignore_multi_top_nodes,
        ignore_rlayers=ignore_rlayers)
    for _idx, _scene in qt.progress_bar(
            enumerate(_to_ingest), 'Ingesting {:d} scene{}',
            col=PYGUI_COL):
        print '[{:d}/{:d}] PATH {}'.format(
            _idx + 1, len(_to_ingest), _scene.path)
        _scene.check_workspace(force=True)

        # Check ingestion status
        assert isinstance(_scene, VendorScene)
        _scene_issues = _scene.get_ingest_issues(**_ingest_kwargs)
        if _scene_issues:
            _issues.append((_scene, _scene_issues))

        print ' - CAM', _scene.scene_get_cam()
        _scene.ingest(vendor=_vendor, force=True,
                      cache_on_farm=cache_on_farm)
        _status, _ = _scene.get_ingest_status()
        _statuses[_scene] = _status

    if _issues:
        print '\n\n[INGESTION ISSUES]\n'
        for _scene, _scene_issues in _issues:
            print 'SCENE', _scene.path
            for _issue in _scene_issues:
                print ' -', _issue

    # Print summary
    print '\n\n[SUMMARY]'
    print '\n'.join([
        ' {} - {:d}'.format(_status, _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))])
    print '\nFOUND {:d} SCENE{} TO INGEST'.format(
        len(_to_ingest), get_plural(_to_ingest).upper())


def _redraw__remove(self, widget):
    _to_remove = self.ui.list.selected_data()
    widget.setText('Remove {:d} rig{}'.format(
        len(_to_remove), get_plural(_to_remove)))
    widget.setEnabled(bool(_to_remove))


def ingest_seqs(dir_, vendor, filter_=None, force=False,
                resubmit_transgens=False):
    """Ingest image sequences from the given directory.

    Args:
        dir_ (str): directory to search
        vendor (str): name of vendor
        filter_ (str): apply path filter
        force (bool): ingest without confirmation
        resubmit_transgens (bool): resubmit any submitted transgens
    """
    _dir = Dir(abs_path(dir_))
    print 'READING', _dir.path
    assert _dir.exists()
    _seqs = _dir.find_seqs(filter_=filter_)
    print ' - FOUND {:d} SEQS'.format(len(_seqs))

    # Set vendor
    _vendor = vendor or vendor_from_path(_dir.path)
    assert _vendor
    print ' - VENDOR', _vendor
    print

    # Check images
    _statuses = {}
    _to_ingest = []
    for _idx, _seq in qt.progress_bar(
            enumerate(_seqs), 'Checking {:d} seq{}'):
        print '[{:d}/{:d}] PATH {}'.format(
            _idx + 1, len(_seqs), _seq.path)

        # Check ingestion status
        _status = _ingestable = None
        try:
            _seq = VendorSeq(_seq)
        except ValueError:
            _status, _ingestable = 'Fails naming convention', False
        else:
            assert isinstance(_seq, VendorSeq)
            _status, _ingestable = _seq.get_ingest_status(
                resubmit_transgens=resubmit_transgens)
        print ' - STATUS', _status

        assert _status
        assert _ingestable is not None
        if _ingestable:
            _to_ingest.append(_seq)
        _statuses[_seq] = _status

    # Print summary
    print '\nSUMMARY:'
    print '\n'.join([
        ' {} - {:d}'.format(_status, _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))])
    print 'FOUND {:d} SEQ{} TO INGEST'.format(
        len(_to_ingest), get_plural(_to_ingest).upper())

    # Show different source warning
    _diff_src = [
        _ for _, _status in _statuses.items()
        if _status == 'Already ingested from a different source']
    if _diff_src:
        qt.notify_warning(
            '{:d} of the sequences could not be ingested because they have '
            'already been ingested from a different delivery. This happens '
            'when a vendor provides an update without versioning up.\n\n'
            'See the terminal for details.'.format(len(_diff_src)))

    # Execute ingestion
    if not _to_ingest:
        return
    if not force:
        qt.ok_cancel(
            'Ingest {:d} seq{}?'.format(
                len(_to_ingest), get_plural(_to_ingest)),
            verbose=0)
    for _idx, _seq in qt.progress_bar(
            enumerate(_to_ingest), 'Ingesting {:d} seq{}',
            stack_key='IngestSeqs'):
        print '({:d}/{:d}) [INGESTING] {}'.format(
            _idx + 1, len(_to_ingest), _seq.path)
        _seq.ingest(vendor=_vendor)


def __init__(self, items, title='Processing {:d} item{}', col=None,
             show=True, pos=None, parent=None, stack_key='progress',
             plural=None):
    """Constructor.

    Args:
        items (list): list of items to iterate
        title (str): title for interface
        col (str): progress bar colour
        show (bool): show the dialog
        pos (QPoint): override progress bar position (applied to centre)
        parent (QDialog): parent dialog
        stack_key (str): override identifier for this dialog - if an
            existing progress bar has the same stack key then this
            will replace it
        plural (str): override plural str (eg. 'es' for 'passes')
    """
    global _PROGRESS_BARS
    from psyhive import host, qt

    # Avoid batch mode seg fault
    if host.batch_mode():
        raise RuntimeError('Cannot create progress bar in batch mode')

    _items = items
    if isinstance(_items, (enumerate, collections.Iterable)):
        _items = list(_items)

    self.stack_key = stack_key
    self.items = _items
    self.counter = 0
    self.last_update = time.time()
    self.durs = []
    self.info = ''

    _parent = parent or host.get_main_window_ptr()
    _args = [_parent] if _parent else []
    super(ProgressBar, self).__init__(*_args)

    _title = title.format(
        len(self.items), get_plural(self.items, plural=plural))
    self.setWindowTitle(_title)
    self.resize(408, 54)

    if pos:
        _pos = pos - get_p(self.size()) / 2
    else:
        _pos = _get_next_pos(stack_key=stack_key)
    if _pos:
        self.move(_pos)

    _col = col
    if not _col:
        _random = str_to_seed(title)
        _col = _random.choice(qt.NICE_COLS)

    # Build ui
    self.grid_lyt = QtWidgets.QGridLayout(self)
    self.progress_bar = HProgressBar(self)
    _size_policy = QtWidgets.QSizePolicy(
        QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    _size_policy.setHorizontalStretch(0)
    _size_policy.setVerticalStretch(0)
    _size_policy.setHeightForWidth(
        self.progress_bar.sizePolicy().hasHeightForWidth())
    self.progress_bar.setSizePolicy(_size_policy)
    self.progress_bar.setProperty('value', 0)
    self.grid_lyt.addWidget(self.progress_bar, 0, 0, 1, 1)
    self.progress_bar.set_col(_col)

    self._hidden = not show
    if show:
        self.show()

    _PROGRESS_BARS.append(self)


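# Usage sketch - ProgressBar wraps an item list and is iterated directly,
# as in the qt.ProgressBar loop in cache_work_files above (`_process` is
# a hypothetical per-item function):
#
#     for _item in ProgressBar(range(5), 'Processing {:d} item{}',
#                              col='DeepSkyBlue'):
#         _process(_item)

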
def _redraw__Remove(self):
    _to_remove = self.ui.List.selected_data()
    self.ui.Remove.setText('Remove {:d} rig{}'.format(
        len(_to_remove), get_plural(_to_remove)))
    self.ui.Remove.setEnabled(bool(_to_remove))


def _cache_yetis(yetis, apply_on_complete=False, samples=3, verbose=0):
    """Cache a list of yeti nodes.

    Args:
        yetis (HFnDependencyNode list): nodes to cache
        apply_on_complete (bool): apply cache on completion
        samples (int): samples per frame
        verbose (int): print process data
    """
    from . import yeti_ui

    print 'CACHE YETIS', yetis
    _work = tk2.cur_work()
    _yetis, _outs, _namespaces = _prepare_yetis_and_outputs(
        yetis=yetis, work=_work)

    # Get cache path - with multiple namespaces we need to cache to tmp
    _tmp_fmt = abs_path('{}/yetiTmp/<NAME>.%04d.cache'.format(
        tempfile.gettempdir()))
    if len(_yetis) > 1:
        _cache_path = _tmp_fmt
        _tmp_dir = Dir(os.path.dirname(_tmp_fmt))
        _tmp_dir.delete(force=True)
        _tmp_dir.test_path()
    else:
        assert len(_outs) == 1
        _cache_path = _outs[0].path
    print 'CACHE PATH', _cache_path

    # Generate caches
    dprint('GENERATING CACHES', _cache_path)
    print ' - SAMPLES', samples
    for _yeti in _yetis:
        _yeti.plug('cacheFileName').set_val('')
        _yeti.plug('fileMode').set_val(0)
        _yeti.plug('overrideCacheWithInputs').set_val(False)
    cmds.select(_yetis)
    cmds.pgYetiCommand(
        writeCache=_cache_path, range=host.t_range(), samples=samples)
    dprint('GENERATED CACHES', _cache_path)

    # Move tmp caches to outputs
    if len(_yetis) > 1:
        dprint('MOVING CACHES FROM TMP')
        for _yeti, _out in safe_zip(_yetis, _outs):
            print ' - MOVING', _out.path
            _name = str(_yeti).replace(':', '_')
            _tmp_seq = Seq(_tmp_fmt.replace('<NAME>', _name))
            for _frame, _tmp_path in safe_zip(
                    _tmp_seq.get_frames(), _tmp_seq.get_paths()):
                lprint(' -', _frame, _tmp_path, verbose=verbose)
                shutil.move(_tmp_path, _out[_frame])

    # Apply cache to yeti nodes
    if apply_on_complete:
        dprint('APPLYING CACHES TO YETIS')
        for _yeti, _cache in safe_zip(_yetis, _outs):
            apply_cache(cache=_cache, yeti=_yeti)

    qt.notify(
        'Cached {:d} yeti node{}.\n\nSee script editor for details.'.format(
            len(_yetis), get_plural(_yetis)),
        title='Cache complete', icon=yeti_ui.ICON, parent=yeti_ui.DIALOG)

    return _outs


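# When more than one yeti node is cached, the <NAME> token in the tmp
# format is swapped per node before the caches are moved to their final
# outputs. An illustrative substitution (the node name is made up):
#
#     _tmp_fmt = '{}/yetiTmp/<NAME>.%04d.cache'.format(
#         tempfile.gettempdir())
#     _tmp_fmt.replace('<NAME>', 'cat_1_yetiShape')
#     # -> '.../yetiTmp/cat_1_yetiShape.%04d.cache'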