def save_as(self, path, force=False, verbose=0):
    """Save this pixmap at the given path.

    Args:
        path (str): path to save at
        force (bool): force overwrite with no confirmation
        verbose (int): print process data
    """
    from psyhive import qt

    # A zero-size pixmap would silently write an empty image
    assert self.width() and self.height()

    _file = File(path)
    # NOTE(review): the original used `{}.get(extn, extn.upper())` - the
    # empty dict made the lookup dead code, so the upper-cased extension
    # is used directly as the image format
    _fmt = _file.extn.upper()
    lprint("SAVING", path, _fmt, verbose=verbose)

    # Handle overwrite of existing file
    if _file.exists():
        if not force:
            _result = qt.yes_no_cancel(
                'Overwrite existing image?\n\n' + path)
            if _result == 'No':
                return
        os.remove(_file.path)

    test_path(_file.dir)
    self.save(abs_path(path, win=os.name == 'nt'),
              format=_fmt, quality=100)
    assert _file.exists()
def update_nk(template, shot, diff=True, force=True): """Update nk template to new shot. Args: template (TTWorkFileBase): template work file shot (TTShotRoot): shot to update to diff (bool): show diffs force (bool): save with no confirmation """ _new_work = template.map_to(Shot=shot.shot).find_next() _start, _end = shot.get_frame_range() _nk = _NkFile(template.path) _update_nk_reads(nk_file=_nk, shot=shot) # Update write nodes for _node in _nk.find_nodes(type_='Write'): for _attr in ['file', 'proxy']: _file = _node.read_attr(_attr) _orig_out = tk.get_output(_file) if not _orig_out: continue print 'ORIG OUT', _orig_out _new_out = _orig_out.map_to(Shot=shot.shot, version=_new_work.version) print 'NEW OUT', _orig_out _node.set_attr(_attr, _new_out.path) print # Update root _root = _nk.find_node(type_='Root') _root.set_attr('name', _new_work.path) _root.set_attr('first_frame', _start) _root.set_attr('last_frame', _end) # Update header _header = _nk.data[0] assert isinstance(_header, six.string_types) _tokens = [_token for _token in re.split('[ "]', _header) if _token] for _token in _tokens: _orig_out = tk.get_output(_token) if not _orig_out: continue _new_out = _orig_out.map_to(Shot=shot.shot, version=_new_work.version) assert _header.count(_token) == 1 _header = _header.replace(_token, _new_out.path) _nk.data[0] = _header if diff: _tmp_nk = File(abs_path('{}/test.nk'.format(tempfile.gettempdir()))) _nk.write(_tmp_nk.path, force=True) _tmp_nk.diff(template.path) # Write new work if not force: qt.ok_cancel('Write new work file?\n\n{}'.format(_new_work.path)) _nk.write(_new_work.path, force=True) _new_work.set_comment(comment='Scene built by shot_builder') print 'WROTE NK:', _new_work.path
def create_ref(file_, namespace, class_=None, force=False):
    """Create a reference.

    Args:
        file_ (str): path to reference
        namespace (str): reference namespace
        class_ (type): override FileRef class
        force (bool): force replace any existing ref

    Returns:
        (FileRef): reference
    """
    from psyhive import qt
    from psyhive import host
    _file = File(abs_path(file_))
    if not _file.exists():
        raise OSError("File does not exist: " + _file.path)
    _class = class_ or FileRef
    # Store the timeline range so it can be restored after load (see below)
    _rng = host.t_range()

    # Load format-specific plugins before referencing
    if _file.extn == 'abc':
        cmds.loadPlugin('AbcImport', quiet=True)
    elif _file.extn.lower() == 'fbx':
        cmds.loadPlugin('fbxmaya', quiet=True)

    # Test for existing - replace an existing ref or clear the namespace
    cmds.namespace(set=":")
    if cmds.namespace(exists=namespace):
        _ref = find_ref(namespace, catch=True)
        if _ref:
            if not force:
                # NOTE(review): presumably ok_cancel raises on cancel so
                # falling through implies confirmation - confirm
                qt.ok_cancel(
                    'Replace existing {} reference?'.format(namespace))
            _ref.remove(force=True)
        else:
            del_namespace(namespace, force=force)

    # Create the reference
    _cur_refs = set(cmds.ls(type='reference'))
    _kwargs = {
        'reference': True,
        'namespace': namespace,
        'options': "v=0;p=17",
        'ignoreVersion': True
    }
    cmds.file(_file.abs_path(), **_kwargs)

    # Find new reference node by diffing against the pre-create ref list
    _ref = get_single(set(cmds.ls(type='reference')).difference(_cur_refs))

    # Fbx ref seems to update timeline (?) - restore the stored range
    if host.t_range() != _rng:
        host.set_range(*_rng)

    return _class(_ref)
def __init__(self, file_, verbose=0):
    """Constructor.

    Args:
        file_ (str): path to output file
        verbose (int): print process data
    """
    File.__init__(self, file_)
    _hint = self.hint_fmt.format(area=get_area(abs_path(file_)))
    super(TTOutputFile, self).__init__(
        file_, hint=_hint, verbose=verbose)
def map_file_to_psy_asset(file_, step='rig'):
    """Map off pipeline reference path to a psyop file.

    Args:
        file_ (str): file to map
        step (str): asset step

    Returns:
        (TTOutputFile): psyop asset file
    """
    from .. import ingest

    _src = File(abs_path(file_))

    # Work out the asset name from the file
    if _src.extn == 'mb' and _src.basename.startswith('camera_rig_main'):
        _name = 'camera'
    elif ingest.is_vendor_file(file_):
        _name = ingest.VendorFile(file_).tag
    elif ingest.is_psy_asset(file_):
        _name = ingest.PsyAsset(file_).asset
    else:
        return None

    # Map name to the latest maya output on the requested step
    _step_root = tk2.find_asset(asset=_name).find_step_root(step)
    try:
        return _step_root.find_output_file(
            version='latest', format_='maya', task=step)
    except ValueError:
        raise ValueError('failed to find output file - ' + _step_root.path)
def save_test(self, file_=None, timestamp=True, extn='jpg', verbose=1):
    """Save test image and copy it to pictures dir.

    Args:
        file_ (str): override save file path - this can be used to
            switch this method with a regular save
        timestamp (bool): write timestamped file
        extn (str): test file extension
        verbose (int): print process data

    Returns:
        (str): path to saved file
    """
    # Explicit path overrides all test behaviour
    if file_:
        self.save_as(file_, force=True)
        return file_

    # Write the standard test image
    _path = File(TEST_IMG).apply_extn(extn).path
    self.save_as(_path, verbose=verbose, force=True)
    if not timestamp:
        return _path

    # Also write a timestamped copy under the pictures dir
    _stamped = abs_path(time.strftime(
        '~/Documents/My Pictures/tests/%y%m%d_%H%M.{}'.format(extn)))
    self.save_as(_stamped, verbose=verbose, force=True)
    return _stamped
def cache_read(self, tag, verbose=0):
    """Read cached data from the given tag.

    Args:
        tag (str): data tag to read
        verbose (int): print process data

    Returns:
        (any): cached data
    """
    from psyhive.utils import read_yaml, File
    _cache_file = File(self.cache_fmt.format(tag))
    lprint('READ CACHE FILE', _cache_file.path, verbose=verbose)
    # Missing/unreadable cache reads as None rather than erroring
    try:
        if _cache_file.extn == 'yml':
            return read_yaml(_cache_file.path)
        return obj_read(file_=_cache_file.path)
    except OSError:
        return None
def open_scene(file_, force=False, prompt=False, lazy=False, load_refs=True):
    """Open the given scene.

    Args:
        file_ (str): file to open
        force (bool): lose unsaved changes without confirmation
        prompt (bool): show missing reference dialogs
        lazy (bool): abandon load if scene is already open
        load_refs (bool): load references
    """
    from psyhive import host

    _path = get_path(file_)

    # Nothing to do if the scene is already open
    if lazy and host.cur_scene() == _path:
        return
    if not force:
        host.handle_unsaved_changes()

    # Fbx needs its plugin loaded before opening
    if File(_path).extn == 'fbx':
        load_plugin('fbxmaya')

    _kwargs = dict(
        open=True, force=True, prompt=prompt, ignoreVersion=True)
    if not load_refs:
        _kwargs['loadReferenceDepth'] = 'none'
    cmds.file(_path, **_kwargs)
def _get_body_ref_movs_data(force=False):
    """Read reference movs spreadsheet data.

    Args:
        force (bool): force reread from disk

    Returns:
        (dict): vendor in mov, reference mov and start frame data
    """
    _data_file = '{}/_fr_ref_movs_body.data'.format(_DIR)
    _data = {}
    for _line in File(_data_file).read_lines():
        _line = _line.strip()
        if not _line:  # skip blank lines (was a redundant second strip)
            continue

        # Extract blast comp - first token ending in .mov
        _blast_comp = _line.split('.mov')[0] + '.mov'
        assert _line.count(_blast_comp) == 1
        _line = _line.replace(_blast_comp, '').strip()
        _blast_comp = abs_path(_blast_comp)

        # Extract ref mov - next token ending in .mov
        _body_ref = _line.split('.mov')[0] + '.mov'
        assert _line.count(_body_ref) == 1
        _line = _line.replace(_body_ref, '').strip()
        _body_ref = abs_path(_body_ref)

        # Extract start frame - the remaining token
        _start = float(_line.strip())

        _data[_blast_comp] = _body_ref, _start

    return _data
def _get_latest_abc(abc):
    """Get latest path for an abc.

    This is the latest version of any output file with a special case
    added to handle to off-pipeline rest cache abcs.

    Args:
        abc (str): path to abc

    Returns:
        (str): path to latest version
    """

    # Regular output file
    try:
        _out_file = tk2.TTOutputFile(abc)
    except ValueError:
        pass
    else:
        return _out_file.find_latest().path

    # Rest cache special case - match latest output by extension
    try:
        _out = tk2.TTOutput(abc)
    except ValueError:
        pass
    else:
        return get_single(_out.find_latest().find(extn=File(abc).extn))
def _get_scene_name():
    """Get current scene name hud text.

    Returns:
        (str): scene name hud text
    """
    _scene = host.cur_scene()
    # No current scene reads as an empty filename
    _name = File(_scene).filename if _scene else ''
    return 'file: ' + _name
def _find_ref_issues(ref_):
    """Find any issues with the given reference.

    Args:
        ref_ (FileRef): reference to check

    Returns:
        (str list): issues with reference
    """
    _file = File(host.cur_scene())
    _issues = []

    # Check namespace - no namespace means nothing more can be checked
    _issues += _find_ref_namespace_issues(ref_)
    if not ref_.namespace:
        return _issues

    # Check top node - junk refs need no further checks
    _top_node_issues, _junk = _find_ref_top_node_issues(ref_)
    _issues += _top_node_issues
    if _junk:
        return _issues

    # Check ref file path - vendor/psyop files must be rigs, otherwise a
    # matching file must be delivered next to the current scene
    _ref_file = File(ref_.path)
    print ' - FILE', _ref_file.path
    _local_file = File('{}/{}'.format(_file.dir, _ref_file.filename))
    if ingest.is_vendor_file(_ref_file):
        _vendor_file = ingest.VendorFile(_ref_file)
        print ' - VENDOR FILE'
        if _vendor_file.step != 'rig':
            _issues.append("Reference {} is not a rig".format(ref_.namespace))
    elif ingest.is_psy_asset(_ref_file):
        _psy_file = ingest.PsyAsset(_ref_file)
        print ' - PSYOP FILE'
        if _psy_file.step != 'rig':
            _issues.append("Psyop reference {} is not a rig".format(
                ref_.namespace))
    elif not _local_file.exists():
        print ' - OFF-PIPELINE FILE'
        # NOTE(review): "off-pipline" typo is in the runtime string and is
        # deliberately left untouched by this comments-only edit
        _issues.append("Reference {} has an off-pipline file {} which isn't "
                       "provided in the current directory {}".format(
                           ref_.namespace, _ref_file.filename, _file.dir))

    return _issues
def __init__(self, ref_node):
    """Constructor.

    Args:
        ref_node (str): reference node

    Raises:
        (ValueError): if the reference has no associated path
    """
    self.ref_node = ref_node
    if not self.path:
        # Include the node in the error - the original raised a bare
        # ValueError which was hard to diagnose
        raise ValueError('Reference has no path: ' + str(ref_node))
    self.extn = File(self.path).extn
def get_cache_xml(self):
    """Get cache xml file for the current workspace.

    Returns:
        (File): cache xml file
    """
    _work = tk2.cur_work()
    _path = '{}/cache/nCache/{}/{}.xml'.format(
        _work.get_work_area().path, _work.basename, self)
    return File(_path)
def __init__(self, file_):
    """Constructor.

    Args:
        file_ (str): path to work file
    """
    File.__init__(self, file_)

    # Build hint from the dcc/area of this path
    _path = abs_path(file_)
    _hint = self.hint_fmt.format(
        dcc=get_dcc(_path), area=get_area(_path))
    super(TTWork, self).__init__(file_, hint=_hint)

    # Format string for mapping this work file to other versions
    _ver_token = '_v{:03d}'.format(self.version)
    self.ver_fmt = '{}/{}'.format(
        self.dir, self.filename.replace(_ver_token, '_v{:03d}'))
def get_export_fbx(self, dated=False, anim=False):
    """Get path to export fbx for this work file.

    Args:
        dated (bool): get path to dated fbx (in date folder)
        anim (bool): get path to anim fbx (in dated anim folder)

    Returns:
        (str): path to export fbx
    """
    # Map single-letter char names to full words (eg M -> Male)
    _char_name = self.get_char_name()
    _char = {'M': 'Male', 'F': 'Female'}.get(_char_name, _char_name)
    _fbx_root = Dir(EXPORT_FBX_ROOT)

    # Choose path template by work type
    if self.type_ == 'Disposition':
        _fmt = ('{root}/Dispositions/{char}/{work.disp}/'
                '{char}_{work.disp}_{work.label}_{work.iter:02d}.fbx')
    elif self.type_ == 'Vignette':
        _fmt = ('{root}/{char}/{work.vignette}/'
                '{char}_{work.vignette}_{work.desc}_{work.iter:02d}.fbx')
    else:
        raise ValueError(self)
    _fbx_path = _fmt.format(root=EXPORT_FBX_ROOT, work=self, char=_char)

    if dated:
        # Place the fbx under a delivery-date dir mirroring the vendor dir
        _vendor_root = Dir(fr_vendor_ma.MOBURN_ROOT)
        _vendor_file = self.get_vendor_file()
        _delivery_dir = _vendor_root.rel_path(_vendor_file).split('/')[0]
        # Sanity check - delivery dir name should contain the mtime date
        assert self.get_mtime_fmt('%Y-%m-%d') in _delivery_dir
        _date_fbx = File('{}/{}/{}'.format(_fbx_root.path, _delivery_dir,
                                           _fbx_root.rel_path(_fbx_path)))
        return _date_fbx

    if anim:
        # Place the fbx in an Anim dir stamped with today's date
        _anim_fbx = File('{}/Anim-{}/{}'.format(
            _fbx_root.path, time.strftime('%Y-%m-%d'),
            _fbx_root.rel_path(_fbx_path)))
        return _anim_fbx

    return File(_fbx_path)
def test(self):
    """Exercise path object cmp/hash and extension override."""

    # Equal paths should compare equal and hash to the same value
    _lhs = Dir('/a/b/c')
    _rhs = Dir('/a/b/c')
    assert _lhs == _rhs
    assert len({_lhs, _rhs}) == 1

    # Multi-part extension can be forced via the extn arg
    _gz_file = File('/tmp/blah.ass.gz', extn='ass.gz')
    assert _gz_file.extn == 'ass.gz'
    assert _gz_file.basename == 'blah'
def _save_fbx(file_, force=False):
    """Save fbx file.

    Args:
        file_ (str): fbx path
        force (bool): replace without confirmation
    """
    _file = File(get_path(file_))
    _file.delete(wording='Replace', force=force)
    # Configure fbx export options then run the export - order matters,
    # the FBXExport command itself must come last
    for _mel in [
            'FBXExportUpAxis z',
            'FBXExportFileVersion -v FBX201800',
            'FBXExportSmoothingGroups -v true',
            'FBXExportSmoothMesh -v true',
            'FBXExportTangents -v true',
            'FBXExportSkins -v true',
            'FBXExportShapes -v true',
            'FBXExportEmbeddedTextures -v false',
            'FBXExportApplyConstantKeyReducer -v true',
            'FBXExportSplitAnimationIntoTakes -c',
            'FBXExport -f "{}"'.format(_file.path),
    ]:
        mel.eval(_mel)
    dprint('Wrote file', _file.nice_size(), _file.path)
    assert _file.exists()
def cache_write(self, tag, data, bkp=False, verbose=0):
    """Write data to the given cache.

    Args:
        tag (str): tag to store data to
        data (any): data to store
        bkp (bool): save timestamped backup file on save (if data changed)
        verbose (int): print process data

    Returns:
        (str): path to cache file
    """
    from psyhive.utils import File, write_yaml
    _file = File(self.cache_fmt.format(tag))
    lprint('WRITE CACHE FILE', _file.path, verbose=verbose)

    # Back up the existing cache if the stored data differs
    if bkp and _file.exists():
        _data = self.cache_read(tag)
        if _data == data:
            lprint(' - DATA UNCHANGED, NO BKP REQUIRED')
        else:
            lprint(' - STORE BKP', _data)
            _bkp = '{}/_bkp_{}_{}.{}'.format(
                _file.dir, _file.basename,
                time.strftime('%y%m%d_%H%M%S'), _file.extn)
            lprint(' - BKP', _bkp)
            shutil.copy(_file.path, _bkp)

    # Write by extension - yml goes through yaml, anything else through
    # obj_write
    # NOTE(review): write_yaml receives the File object while obj_write
    # receives the path str - presumably both are accepted; confirm
    if _file.extn == 'yml':
        write_yaml(file_=_file, data=data)
    else:
        obj_write(file_=_file.path, obj=data)

    return _file.path
def _ingest_check_ref(self, ref_):
    """Check reference.

    Args:
        ref_ (FileRef): reference to check
    """
    print ' - CHECKING REF', ref_
    print ' - PATH', ref_.path
    if File(ref_.path).exists():
        # Ref file on disk - if it's a pipeline output, move to latest
        try:
            _file = tk2.TTOutputFile(ref_.path)
        except ValueError:
            print ' - OFF PIPELINE'
            return
        if _file.is_latest():
            print ' - IS LATEST'
            return
        _file = _file.find_latest()
    else:
        # Missing ref file - try mapping to a psyop asset, falling back
        # to a same-named file delivered alongside this one
        _psy_file = ingest.map_file_to_psy_asset(ref_.path)
        _dlv_file = File('{}/{}'.format(
            self.dir, File(ref_.path).filename))
        _file = _psy_file or _dlv_file

    if not _file or not File(_file).exists():
        print ' - MISSING', _file
        raise RuntimeError('Missing file {}'.format(_file))

    print ' - UPDATING TO', _file
    assert File(_file).exists()
    ref_.swap_to(_file)
def read_psylaunch_cfg(self, edit=False, verbose=0):
    """Read psylaunch config data for this project.

    Args:
        edit (bool): open file in editor
        verbose (int): print process data

    Returns:
        (dict): psylaunch config data
    """
    _cfg_path = abs_path(_PSYLAUNCH_CFG_FMT.format(self.path))
    lprint('PSYLAUNCH YAML', _cfg_path, verbose=verbose)
    if edit:
        File(_cfg_path).edit()
    return read_yaml(_cfg_path)
def _init_settings(self, save_settings):
    """Initiate settings object and read any existing values.

    Args:
        save_settings (bool): load/save settings
    """
    if not save_settings:
        return

    _ini_path = abs_path('{}/{}.ini'.format(
        SETTINGS_DIR, File(self.ui_file).basename))
    touch(_ini_path)  # check settings file is writable
    self.settings = QtCore.QSettings(
        _ini_path, QtCore.QSettings.IniFormat)
    self.read_settings()
def get_work(file_, class_=None, catch=True, verbose=0):
    """Get work file object associated with the given file.

    If an increment is passed, the associated work file is returned.

    Args:
        file_ (str): path to file
        class_ (type): force workfile type
        catch (bool): no error if no valid work could be found
        verbose (int): print process data

    Returns:
        (TTWorkFileBase): work file
    """
    _file = File(file_)
    # Increments have a non-version final token (not starting with 'v')
    _inc = file_ and not _file.basename.split("_")[-1].startswith('v')

    # Determine the work file class
    _class = class_
    if not _class:
        try:
            _class = _get_work_type(file_=_file, inc=_inc, catch=catch)
        except ValueError as _exc:
            if catch:
                return None
            raise _exc
    lprint("CLASS", _class, verbose=verbose)
    if not _class:
        return None

    # An increment maps to its parent work file
    if _inc:
        try:
            return _class(file_).get_work()
        except ValueError as _exc:
            if catch:
                return None
            raise _exc

    # Otherwise treat the path as a work file directly
    try:
        return _class(file_)
    except ValueError as _exc:
        if catch:
            return None
        raise _exc
def delete_all_data(self, force=False):
    """Delete all generated data from this action file.

    This includes all blasts and exports and the work file itself.

    Args:
        force (bool): delete data without confirmation
    """
    if not force:
        qt.ok_cancel(
            'Delete all data?\n\nWork:\n{}\n\nVendor file:\n{}'.format(
                self.path, self.get_vendor_file()))

    # Remove image sequences
    for _seq in (self.blast, self.face_blast, self.blast_comp):
        _seq.delete(force=True)

    # Remove exports and this work file itself
    for _file in (self.get_export_fbx(), File(self.processed_mov), self):
        _file.delete(force=True)
def _handle_exception(exc, verbose=0): """Handle the given exception. Args: exc (Exception): exception that was raised verbose (int): print process data """ # Handle special exceptions if ( # In case of FileError, jump to file not os.environ.get('EXC_DISABLE_FILE_ERROR') and isinstance(exc, FileError)): print '[FileError]' print ' - MESSAGE:', exc.message print ' - FILE:', exc.file_ File(exc.file_).edit(line_n=exc.line_n) return elif isinstance(exc, qt.DialogCancelled): print '[DialogCancelled]' return elif isinstance(exc, SystemExit) or exc is SystemExit: print '[SystemExit]' return elif isinstance(exc, HandledError): qt.notify_warning(msg=exc.message, icon=exc.icon, title=exc.title) return if not dev_mode(): _pass_exception_to_sentry(exc) # Raise error dialog lprint('HANDLING EXCEPTION', exc, verbose=verbose) lprint('MSG', exc.message, verbose=verbose) lprint('TYPE', type(exc), verbose=verbose) _traceback = Traceback() _traceback.pprint() _app = qt.get_application() _dialog = _ErrDialog( traceback_=_traceback, message=exc.message, type_=type(exc).__name__) if not host.NAME: _app.exec_()
def write(self, path, force=False):
    """Write updated contents to disk.

    Args:
        path (str): path to write to
        force (bool): write without confirmation
    """
    # Serialise each element - plain strings pass through, nodes are
    # rendered as "Type { attr val ... }"
    _chunks = []
    for _element in self.data:
        if isinstance(_element, six.string_types):
            _chunks.append(_element + '\n')
        elif isinstance(_element, _NkNode):
            _chunks.append(_element.type_ + ' {\n')
            for _attr, _val in _element.attrs:
                _chunks.append(' {} {}\n'.format(_attr, _val))
            _chunks.append('}\n')
        else:
            raise ValueError(_element)
    _text = ''.join(_chunks)
    File(path).write_text(_text.strip() + '\n', force=force)
def process_movs_for_review(src_dir):
    """Search dir for groups of 3 input movs to comp into review mov.

    Args:
        src_dir (str): dir to search for input movs
    """
    print 'SRC DIR', src_dir
    # Build a temp python script for nuke to execute
    _tmp_py = abs_path('{}/process_movs_for_review.py'.format(
        tempfile.gettempdir()))
    print ' - TMP PY', _tmp_py
    _py = '\n'.join([
        'import nuke',
        'import psyhive',
        'from nuke_psyhive.shows import frasier',
        'frasier.process_review_movs(dir_="{dir}")'
    ]).format(dir=src_dir)
    print
    print _py
    print
    File(_tmp_py).write_text(_py, force=True)
    # Run the script through nuke in terminal mode, blocking until done
    psylaunch.launch_app('nuke', args=['-t', _tmp_py], wait=True)
def _localise_ui_imgs(ui_file):
    """Localise imgs in ui file.

    Args:
        ui_file (str): ui file to localise

    Returns:
        (str): path to localised ui file
    """
    _body = read_file(ui_file)
    # Pull any EMOJI paths out of the ui xml tag contents
    _paths = [
        _item.strip() for _item in re.split('[<>]', _body)
        if _item.strip() and "EMOJI" in _item
    ]

    # Check for paths that need replacing
    _replacements = []
    for _path in _paths:
        if not icons.EMOJI.contains(_path):
            # Map the emoji index embedded in the filename to a path in
            # the local emoji set
            _idx = int(_path.split('.')[-2])
            _name = icons.EMOJI.find_emoji(_idx).name
            _new_path = icons.EMOJI.find(_name)
            _replacements.append((_path, _new_path))
    if not _replacements:
        return ui_file

    # Write localised ui file to the tmp dir
    _tmp_ui = abs_path('{}/psyhive/qt/{}'.format(tempfile.gettempdir(),
                                                 ui_file.replace(':', '')))
    print 'TMP UI', _tmp_ui
    for _old, _new in _replacements:
        print _old
        print _new
        _body = _body.replace(_old, _new)
    File(_tmp_ui).write_text(_body, force=True)

    return _tmp_ui
def __init__(self, ui_file, catch_errors_=True, save_settings=True,
             load_settings=True, parent=None, dialog_stack_key=None,
             settings_name=None):
    """Constructor.

    Args:
        ui_file (str): path to ui file
        catch_errors_ (bool): apply error catcher to callbacks
        save_settings (bool): load/save settings on open/close
        load_settings (bool): load settings on init
        parent (QWidget): override parent widget
        dialog_stack_key (str): override dialog stack key
        settings_name (str): override QSettings filename
    """
    from psyhive import host
    self.ui_file = ui_file

    # Register in the dialog stack (presumably so duplicate instances
    # are closed - confirm) keyed by the ui file unless overridden
    self._dialog_stack_key = dialog_stack_key or self.ui_file
    self._register_in_dialog_stack()

    super(HUiDialog3, self).__init__(
        parent=parent or host.get_main_window_ptr())
    self._load_ui()
    self._connect_elements(catch_errors_=catch_errors_)
    self.init_ui()

    # Settings name defaults to the ui file basename
    self._settings_name = settings_name or File(self.ui_file).basename
    self.disable_save_settings = not save_settings
    if load_settings:
        self.load_settings()

    self.show()
def _get_face_ref_movs_data(force=False):
    """Read reference movs spreadsheet data.

    Args:
        force (bool): force reread from disk

    Returns:
        (dict): vendor in mov, reference mov and start frame data
    """
    _data_file = '{}/_fr_ref_movs_face.data'.format(_DIR)
    _data = {}
    for _line in File(_data_file).read_lines()[1:]:  # skip header row
        _line = _line.strip()
        if not _line:  # skip blank lines (was a redundant second strip)
            continue

        # Extract ma - first token ending in .ma
        _ma = _line.split('.ma')[0] + '.ma'
        assert _ma in _line
        _line = _line.replace(_ma, '').strip()
        _ma = abs_path(_ma)

        # Extract mov - next token ending in .mov
        _mov = _line.split('.mov')[0] + '.mov'
        assert _mov in _line
        _line = _line.replace(_mov, '').strip()
        _mov = abs_path(_mov)

        # Extract start frame - first of the two remaining tokens
        _tokens = _line.split()
        _start, _ = _tokens
        _start = float(_start)

        _data[_ma] = _mov, _start

    return _data