def test_find(self):
    """Test basic find usage and the full_path flag."""

    # Normalise the dir as well as the file - tempfile.gettempdir can
    # return backslash paths, and _test_file is built from _test_dir,
    # so both must go through abs_path for the asserts to compare
    _test_dir = abs_path('{}/psyhive/testing/blah'.format(
        tempfile.gettempdir()))
    _test_file = abs_path('{}/test.txt'.format(_test_dir))

    # Start from a clean dir
    if os.path.exists(_test_dir):
        shutil.rmtree(_test_dir)
    touch(_test_file)

    assert get_single(find(_test_dir)) == _test_file
    assert get_single(find(_test_dir, full_path=False)) == 'test.txt'
def test_find(self):
    """Test the find depth flag."""

    # Test depth flag
    _test_dir = '{}/testing'.format(tempfile.gettempdir())

    # Remove leftovers from earlier runs - stale files in the test dir
    # would throw off the depth counts below
    if os.path.exists(_test_dir):
        shutil.rmtree(_test_dir)

    touch(_test_dir+'/test.txt')
    touch(_test_dir+'/BLAH/test.txt')
    touch(_test_dir+'/BLAH/BLEE/test.txt')
    assert len(find(_test_dir, type_='f', depth=1)) == 1
    assert len(find(_test_dir, type_='f', depth=2)) == 2
    assert len(find(_test_dir, type_='f', depth=3)) == 3
def find_work(self, force=False, verbose=1):
    """Find work files within this work area.

    Args:
        force (bool): force reread work files from disk
        verbose (int): print process data

    Returns:
        (_CTTWorkFileBase): list of cachable work files
    """
    dprint('FINDING WORK', self.path, verbose=verbose)

    # Build a dummy work object just to locate the work dir
    _tmpl = get_template(self.maya_work_type.hint)
    _fields = copy.copy(self.data)
    _fields['Task'] = 'blah'
    _fields['extension'] = 'mb'
    _fields['version'] = 1
    _dummy_path = _tmpl.apply_fields(_fields)
    _dummy_work = self.maya_work_type(_dummy_path)

    # Map each file in the work dir to a cachable work object,
    # skipping any that don't parse as work files
    _works = []
    for _path in find(_dummy_work.dir, depth=1, type_='f'):
        try:
            _candidate = self.maya_work_type(_path)
        except ValueError:
            continue
        lprint(' - ADDING', _path, verbose=verbose > 1)
        _works.append(obtain_work(_candidate.path))

    return _works
def find_increments(self, verbose=0):
    """Find increments belonging to this work area.

    Args:
        verbose (int): print process data

    Returns:
        (TTWorkIncrementBase list): increment files
    """
    # Build a dummy increment just to locate the increments dir
    _tmp_inc = self.map_to(
        self.maya_inc_type, Task='blah', increment=0, extension='mb',
        version=0)
    lprint('FINDING INCREMENTS', self.maya_inc_type, _tmp_inc,
           verbose=verbose)

    _incs = []
    for _path in find(_tmp_inc.dir, depth=1, type_='f'):
        # Gate on verbose - previously this lprint had no verbose kwarg
        # and printed unconditionally
        lprint(' - TESTING PATH', _path, verbose=verbose)
        try:
            _inc = self.maya_inc_type(_path)
        except ValueError:
            continue
        _incs.append(_inc)

    return _incs
def _find_ma_files_to_check(src_dir, ma_filter, work_filter, limit):
    """Get list of ma files to check.

    Args:
        src_dir (str): vendor in directory to search for ma files
        ma_filter (str): apply filter to ma file path
        work_filter (str): apply filter to work file path
        limit (int): limit the number of files to be processed

    Returns:
        (FrasierVendorMa list): list of files to check

    Raises:
        (OSError): if the source dir is missing
    """
    # Validate explicitly - assert is stripped under python -O
    if not os.path.exists(src_dir):
        raise OSError('Missing source dir '+src_dir)

    _mas = find(src_dir, extn='ma', type_='f', filter_=ma_filter,
                class_=fr_vendor_ma.FrasierVendorMa)

    # Apply work file filter
    if work_filter:
        _mas = [
            _ma for _ma in _mas
            if passes_filter(_ma.get_work().path, work_filter)]

    # Apply processing limit
    if limit:
        _mas = _mas[:limit]

    return _mas
def find_work_files(self):
    """Find work files within this shot.

    Returns:
        (WorkFile list): work files
    """
    from psyhive import pipe

    _results = []
    for _step_dir in find(self.path, depth=1):
        _scenes_dir = '{}/work/maya/scenes'.format(_step_dir)
        for _scene in find(_scenes_dir, depth=1, type_='f'):
            # Skip anything which doesn't parse as a work file
            try:
                _results.append(pipe.WorkFile(_scene))
            except ValueError:
                pass

    return _results
def find_latest(self):
    """Find latest version of this asset file.

    Returns:
        (AssetFile): latest version

    Raises:
        (OSError): if no versions exist on disk
    """
    _vers = find(self.vers_path, depth=1, type_='d', full_path=False)
    if not _vers:
        raise OSError("Missing asset "+self.vers_path)

    # Compare version numbers numerically - taking the last element of
    # a lexically sorted list breaks once versions reach two digits
    # (eg. 'v10' sorts before 'v9')
    _latest_n = max(int(_ver[1:]) for _ver in _vers)
    return AssetFile(self.ver_fmt.format(ver_n=_latest_n))
def _find_rigs():
    """Find HSL rigs to switch between.

    Returns:
        (File list): list of rig files
    """
    _rigs_dir = 'P:/projects/frasier_38732V/production/character_rigs'
    _rigs = find(_rigs_dir, depth=1, type_='f', extn='ma', class_=File)

    # Derive a display name from the basename
    for _rig in _rigs:
        _rig.name = _rig.basename.replace('SK_', '').replace('_', ' ')

    return _rigs
def find_increments(self):
    """Find increments belonging to this work area.

    Returns:
        (TTIncrement list): increment files
    """
    # Map to a dummy increment just to locate the increments dir
    _hint = '{}_{}_increment'.format(self.dcc, self.area)
    _dummy_inc = self.map_to(
        hint=_hint, class_=TTIncrement, Task='blah', increment=0,
        extension=get_extn(self.dcc), version=0)

    return find(_dummy_inc.dir, depth=1, type_='f', class_=TTIncrement)
def find_sequences():
    """Find sequences in the current project.

    Returns:
        (TTSequenceRoot): list of sequences
    """
    _root = pipe.cur_project().path + '/sequences'
    return [TTSequenceRoot(_path) for _path in find(_root, depth=1)]
def find_asset_roots():
    """Read asset roots."""
    _assets_dir = pipe.cur_project().path + '/assets'

    _assets = []
    for _path in find(_assets_dir, depth=3, type_='d'):
        # Skip dirs which don't parse as asset roots
        try:
            _assets.append(TTAssetRoot(_path))
        except ValueError:
            pass

    return _assets
def process_movs_for_review(dir_=None, filter_=None, verbose=0): """Process review movs. Each set of three input movs in Front/Left/Right/Producer suffix is comped into an output mov using the template nk file. Args: dir_ (str): dir to search from input mov groups filter_ (str): apply file path filter verbose (int): print process data """ # Get dirs to search _today_dir = abs_path(time.strftime( r'P:\projects\frasier_38732V\production\vendor_in\Motion Burner' r'\%Y-%m-%d')) _dirs = set([_today_dir]) if dir_: _dirs.add(abs_path(dir_)) _dirs = sorted(_dirs) print 'SEARCH DIRS:' pprint.pprint(_dirs) _movs = [] for _dir in _dirs: _movs += find(_dir, type_='f', extn='mov', class_=_InputReviewMov, filter_=filter_) # Find review movs _review_movs = {} for _input_mov in sorted(_movs): lprint('ADDING MOV', _input_mov, verbose=verbose) _key = _input_mov.review_mov.path.lower() if _key not in _review_movs: _review_movs[_key] = _input_mov.review_mov _review_movs[_key].input_movs.append(_input_mov) print 'FOUND {:d} REVIEW MOVS'.format(len(_review_movs)) for _path, _review_mov in _review_movs.items(): if _review_mov.exists(): del _review_movs[_path] print '{:d} TO GENERATE'.format(len(_review_movs)) for _idx, _review_mov in enumerate(sorted(_review_movs.values())): print '{:d}/{:d} {}'.format(_idx+1, len(_review_movs), _review_mov) for _input_mov in _review_mov.input_movs: print ' -', _input_mov assert len(_review_mov.input_movs) == 3 _review_mov.generate()
def _read_output_types(self, class_=None):
    """Read output types in this step root from disk.

    Args:
        class_ (class): override output type class

    Returns:
        (TTOutputType list): output type list
    """
    from psyhive.tk2.tk_templates.tt_output import TTOutputType

    _type_class = class_ or TTOutputType
    _root_tmpl = get_template('{}_output_root'.format(self.area))
    _root_path = _root_tmpl.apply_fields(self.data)

    return find(_root_path, depth=1, class_=_type_class)
def find_vers(self):
    """Find other versions of this workfile.

    Returns:
        (TTWorkFileBase list): versions
    """
    _vers = []
    for _path in find(self.dir, extn=self.extn, type_='f', depth=1):
        # Skip files which don't parse as work files
        try:
            _ver = self.__class__(_path)
        except ValueError:
            continue
        # Only keep versions matching this workfile's task
        if _ver.task == self.task:
            _vers.append(_ver)
    return _vers
def find_vers(self, catch=False):
    """Find versions in this output.

    Args:
        catch (bool): no error if no versions found

    Returns:
        (TTOutputVersionBase list): versions

    Raises:
        (OSError): if no versions found and catch not applied
    """
    _vers = find(
        self.path, depth=1, type_='d', class_=self.output_version_type)
    if _vers:
        return _vers
    if catch:
        return None
    raise OSError("No versions found")
def find_assets(filter_=None):
    """Read asset roots.

    Args:
        filter_ (str): filter by file path

    Returns:
        (TTAsset list): list of assets in this show
    """
    _assets_dir = pipe.cur_project().path + '/assets'

    _assets = []
    for _path in find(_assets_dir, depth=3, type_='d', filter_=filter_):
        # Skip dirs which don't parse as assets
        try:
            _assets.append(TTAsset(_path))
        except ValueError:
            pass

    return _assets
def find_projects(filter_=None):
    """Find projects on disk.

    Args:
        filter_ (str): filter projects by name

    Returns:
        (Project list): projects
    """
    _projects = []
    for _dir in find(PROJECTS_ROOT, depth=1, type_='d'):
        # Skip dirs which don't parse as projects
        try:
            _project = Project(_dir)
        except ValueError:
            continue
        if passes_filter(_project.name, filter_):
            _projects.append(_project)
    return _projects
def find_work(self, class_=None, task=None):
    """Find work files in this shot area.

    Args:
        class_ (class): override work file class
        task (str): filter works by task

    Returns:
        (TTWork list): list of work files
    """
    _class = class_ or TTWork
    _hint = '{}_{}_work'.format(self.dcc, self.area)

    # Removed dead local: get_template(_hint) was called here but its
    # result was never used

    # Map to a dummy work object just to locate the work dir
    _test_work = self.map_to(
        hint=_hint, Task=self.step, extension=get_extn(self.dcc),
        version=1, class_=TTWork)
    _works = find(_test_work.dir, depth=1, type_='f', class_=_class)

    # Apply task filter
    if task:
        _works = [_work for _work in _works if _work.task == task]

    return _works
def batch_scale_anim(filter_='', replace=False): """Batch scale face anim fbxs. Fbxs are read from scale folders in: P:/projects/frasier_38732V/production/scaled_fbx Args: filter_ (str): filter fbx list replace (bool): replace existing output files """ # Get latest version of each filename _to_process = {} for _fbx in find(_SCALED_FBX_ROOT, extn='fbx', class_=_FASInputFbx, filter_=filter_, type_='f'): _to_process[_fbx.filename] = _fbx _inputs = sorted(_to_process.values()) print 'FOUND {:d} INPUT FBXS'.format(len(_inputs)) if not replace: _inputs = [ _input for _input in _inputs if not _input.output.exists() or _input.output.cache_read('source') != _input ] print ' - {:d} NEED REPLACING'.format(len(_inputs)) # Generate output fbxs for _input in qt.progress_bar(_inputs, 'Processing {:d} fbx{}'): print _input print _input.anim_scale print _input.output print _input.output.cache_fmt host.open_scene(_input, force=True, lazy=False) scale_face_anim(namespace='', scale=_input.anim_scale) _save_fbx(_input.output, force=True) _input.output.cache_write('source', _input) print
def find_shots(self, filter_=None, verbose=0):
    """Find shots within this project.

    Args:
        filter_ (str): filter shot names
        verbose (int): print process data

    Returns:
        (Shot list): shots
    """
    from psyhive import pipe

    lprint('SEARCHING', self.seqs_path, verbose=verbose)
    _shots = []
    for _dir in find(self.seqs_path, depth=2, type_='d'):
        # Skip dirs which don't parse as shots
        try:
            _shot = pipe.Shot(_dir)
        except ValueError:
            lprint(' - REJECTED', _dir, verbose=verbose)
            continue
        # Apply name filter
        if not filter_ or passes_filter(_shot.name, filter_):
            _shots.append(_shot)

    return _shots
def reset_interface_settings(): """Reset interface settings.""" dprint('RESET SETTINGS', SETTINGS_DIR) for _ini in find(SETTINGS_DIR, depth=1, type_='f', extn='ini'): print ' - REMOVING', _ini os.remove(_ini)