Ejemplo n.º 1
0
def _find_shaders(force=False, parent=None, verbose=0):
    """Search current prject for shader outputs.

    Args:
        force (bool): reread cache from disk
        parent (QWidget): parent widget (for progress bar)
        verbose (int): print process data

    Returns:
        (dict): shader output, shader list
    """
    if force:
        tk2.clear_caches()

    _works = []
    for _asset in qt.progress_bar(tk2.obtain_assets(),
                                  'Reading {:d} asset{}',
                                  parent=parent):
        _shade = _asset.find_step_root('shade', catch=True)
        if not _shade or not _shade.exists():
            continue
        for _work in _shade.find_work():
            _works.append(_work)

    _shd_mbs = {}
    for _work in qt.progress_bar(_works,
                                 'Checking {:d} work file{}',
                                 parent=parent):

        _shd_mb = _work.map_to(tk2.TTOutputFile,
                               extension='mb',
                               output_name='main',
                               output_type='shadegeo',
                               format='shaders')
        _yml = _work.map_to(tk2.TTOutputFile,
                            extension='yml',
                            output_name='main',
                            output_type='shadegeo',
                            format='shaders')
        if not _shd_mb.exists() or not _yml.exists():
            continue

        _shds = read_yaml(_yml.path)
        _shd_mbs[_shd_mb] = _shds

        if verbose:
            print _work
            print _shds
            print

    return _shd_mbs
Ejemplo n.º 2
0
def fix_namespaces():
    """Fix namespaces to follow psyop naming."""
    _used = []
    _to_rename = []
    for _ref in ref.find_refs(unloaded=False):
        if not _find_ref_namespace_issues(_ref):
            continue
        _base = _ref.namespace.split('_')[0]
        _name = _base
        _idx = 1
        while True:
            check_heart()
            if not cmds.namespace(exists=_name) and _name not in _used:
                break
            _name = '{}_{:d}'.format(_base, _idx)
            _idx += 1
        print _ref, _name
        _used.append(_name)
        _to_rename.append((_ref, _name))

    if not _to_rename:
        print 'NOTHING TO FIX'
        return
    qt.ok_cancel('Rename {:d} ref{}?'.format(len(_to_rename),
                                             get_plural(_to_rename)))
    for _ref, _name in qt.progress_bar(_to_rename):
        _ref.rename(_name)
Ejemplo n.º 3
0
 def _callback__Remove(self):
     """Remove the rigs selected in the list, then close the dialog."""
     for _rig in qt.progress_bar(
             self.ui.List.selected_data(),
             "Removing {:d} rig{}", col='IndianRed'):
         _rig.remove(force=True)
     cmds.refresh()
     self.close()
Ejemplo n.º 4
0
def _update_abcs(shot='rnd0080', verbose=0):
    """Update abcs to point to the given shot.

    Args:
        shot (str): name of shot to update to (eg. rnd0080)
        verbose (int): print process data
    """
    dprint('CHECKING ABCS')

    # Update each exocortex alembic node, collecting refs flagged for removal
    _refs_to_remove = set()
    for _exo in qt.progress_bar(hom.CMDS.ls(type='ExocortexAlembicFile'),
                                'Updating {:d} abc{}'):
        lprint('CHECKING EXO', _exo, verbose=verbose)
        # Read the path before the update so the summary shows the old file
        _path = _exo.plug('fileName').get_val()
        _status, _to_remove = _update_abc(exo=_exo, shot=shot)
        if _to_remove:
            _refs_to_remove.add(_to_remove)
        print ' - {:60} {:30} {}'.format(_exo, _status, _path)
    print

    # Remove refs which had no caches in the target shot
    if _refs_to_remove:
        dprint('REMOVING {:d} REFS WITH NO {} CACHES'.format(
            len(_refs_to_remove), shot))
        for _ref in _refs_to_remove:
            _ref.remove(force=True)
Ejemplo n.º 5
0
def _update_assets(verbose=0):
    """Update scene assets to latest version.

    Args:
        verbose (int): print process data
    """
    dprint('UPDATING ASSETS')
    for _ref in qt.progress_bar(ref.find_refs(), 'Updating {:d} asset{}'):

        # Find asset
        # NOTE(review): the constructor result is truth-tested rather than
        # wrapped in try/except - presumably TTOutputFile is falsy for
        # off-pipeline paths; confirm it does not raise ValueError instead
        _asset = tk2.TTOutputFile(_ref.path)
        if not _asset:
            continue

        # Make sure asset is latest - swap the ref if a newer version exists
        if not _asset.is_latest():
            lprint(' - CURRENT FILE: {}'.format(_asset.path), verbose=verbose)
            _latest = _asset.find_latest()
            lprint(' - UPDATING TO LATEST: {}'.format(_latest.path),
                   verbose=verbose)
            _ref.swap_to(_latest.path)
            _status = 'updated'
        else:
            _status = 'no update needed'
        print ' - {:25} {:20} {}'.format(_ref.namespace, _status, _ref.path)
    print
Ejemplo n.º 6
0
    def _read_frame_ranges(self, work_files, verbose=0):
        """Read frame range for each work file.

        This reads the frame range from all the selected passes for that
        work files and then takes the overall range from that.

        Args:
            work_files (TTWorkFileBase list): list of work files
            verbose (int): print process data

        Returns:
            (tuple list): list of start/end frames
        """
        _ranges = []
        for _work_file in qt.progress_bar(
                work_files, 'Reading {:d} frame range{}'):
            dprint('READING', _work_file)
            # Accumulate overall min start/max end across all file seqs of
            # all renders selected for this work file
            _start, _end = None, None
            for _render in self._work_files[_work_file]:
                lprint(' - TESTING RENDER', _render, verbose=verbose)
                for _seq in _render.find_files(class_=tk2.TTOutputFileSeq):
                    _sstart, _send = _seq.find_range()
                    # None means no range has been read yet
                    _start = (_sstart if _start is None
                              else min(_start, _sstart))
                    _end = (_send if _end is None
                            else max(_end, _send))
                    lprint(
                        '   - {:d}-{:d} {}'.format(
                            _sstart, _send, _seq.path),
                        verbose=verbose)
            # NOTE(review): if no file seqs were found, _start/_end are None
            # and this {:d} format raises - presumably never happens; confirm
            print ' - RANGE {:d}-{:d} {}'.format(
                _start, _end, _work_file.path)
            _ranges.append((_start, _end))
            lprint(verbose=verbose)
        return _ranges
Ejemplo n.º 7
0
    def _ingest_check_work(self, comment, force=False):
        """Check this file has a corresponding psyop work file.

        Args:
            comment (str): save comment
            force (bool): lose unsaved changes without confirmation
        """
        _work = self.to_psy_work()
        _src = _work.cache_read('vendor_source_file')
        # Already ingested - just verify the recorded source matches this file
        if _work.exists() and _src:
            print 'THIS', self.path
            print 'SRC', _src
            if _src != self:
                raise RuntimeError('Source does not match')
            return

        print ' - INGEST WORK', _work.path

        print '   - COMMENT', comment
        if not force:
            qt.ok_cancel('Copy {} to pipeline?\n\n{}\n\n{}'.format(
                _work.step, self.path, _work.path))

        host.open_scene(self, force=force, lazy=True)

        # Update refs
        for _ref in qt.progress_bar(
                ref.find_refs(), 'Updating {:d} ref{}',
                stack_key='UpdateRefs'):
            self._ingest_check_ref(_ref)

        # Save to disk and record which vendor file this came from
        print ' - SAVING WORK', _work.path
        _work.save(comment=comment, safe=False, force=force)
        _work.cache_write(tag='vendor_source_file', data=self.path)
Ejemplo n.º 8
0
def fix_groups():
    """Fix groups to follow psyop scene organisation."""
    _to_fix = []
    for _ref in ref.find_refs(unloaded=False):
        _top_node = _ref.find_top_node(catch=True)
        if not _top_node:
            continue
        _parent = _top_node.get_parent()
        if _parent in _GROUPS:
            continue
        if '/layout/' in _ref.path:
            _grp = 'JUNK'
        elif '/camera/' in _ref.path:
            _grp = 'CAMERA'
        elif '/prop/' in _ref.path:
            _grp = 'PROPS'
        elif '/character/' in _ref.path:
            _grp = 'CHAR'
        else:
            print 'FAILED', _ref.path
            continue
        print _ref, _parent, _grp
        _to_fix.append((_top_node, _grp))

    if not _to_fix:
        print 'NOTHING TO FIX'
        return
    qt.ok_cancel('Group {:d} ref{}?'.format(len(_to_fix), get_plural(_to_fix)))
    for _top_node, _grp in qt.progress_bar(_to_fix):
        _top_node.add_to_grp(_grp)
Ejemplo n.º 9
0
def cache_work_file(work_file,
                    namespaces,
                    confirm=False,
                    new_scene=False,
                    farm=True,
                    parent=None):
    """Recache the given work file.

    The work file is opened, versioned up and the recached.


    Args:
        work_file (TTWorkFileBase): work file to recache
        namespaces (str list): list of assets to recache
        confirm (bool): confirm before execute
        new_scene (bool): new scene after recache
        farm (bool): submit recache to farm
        parent (QDialog): parent interface (for dialog positioning)
    """
    dprint('RECACHING', work_file.path)

    _engine = tank.platform.current_engine()
    _fileops = _engine.apps['psy-multi-fileops']

    # Load the scene - idle events are flushed so fileops sees the new scene
    work_file.load()
    maya.utils.processIdleEvents()
    _fileops.init_app()

    # Update assets to latest, loading any unloaded refs first
    # NOTE(review): _updated is collected but not used in this body
    _updated = []
    for _ns in qt.progress_bar(namespaces,
                               'Updating {:d} asset{}',
                               col='LightSteelBlue',
                               parent=parent):
        _ref = ref.find_ref(_ns, class_=m_pipe.OutputRef)
        if not _ref.is_loaded():
            _ref.load()
        if _ref.update_to_latest():
            _updated.append(_ref.namespace)

    # Version up
    # NOTE(review): engine/fileops are re-read and idle events flushed
    # either side of the version up - the ordering looks deliberate, do
    # not reorder without testing
    _fileops.init_app()
    maya.utils.processIdleEvents()
    _engine = tank.platform.current_engine()
    _fileops = _engine.apps['psy-multi-fileops']
    _fileops.version_up_workfile()
    maya.utils.processIdleEvents()
    _cur_work = tk2.cur_work(class_=BCWork)
    _cur_work.set_comment('Versioned up by batch cache tool')
    _cur_work.read_dependencies(new_scene=False)

    # Execute the cache then leave a clean scene
    _exec_cache(namespaces=namespaces,
                new_scene=new_scene,
                confirm=confirm,
                farm=farm)
    cmds.file(new=True, force=True)
Ejemplo n.º 10
0
def update_all(parent):
    """Update all yeti nodes to use latest cache.

    Args:
        parent (QDialog): parent dialog
    """
    print 'UPDATE ALL YETIS'

    # Check yetis to update
    _to_update = []
    for _yeti in hom.find_nodes(type_='pgYetiMaya'):

        print _yeti
        _file = _yeti.plug('cacheFileName').get_val()
        if not _file:
            print ' - NO FILE TO UPDATE'
            continue
        print ' - CUR', _file

        # Off-pipeline paths fail to map to an output file seq
        try:
            _out = tk2.TTOutputFileSeq(_file)
        except ValueError:
            print ' - OFF PIPELINE'
            continue
        _latest = _out.find_latest()
        if not _latest:
            if not _out.exists():
                print ' - CUR CACHE MISSING', _out.path
            else:
                print ' - NO CACHES FOUND'
            continue

        print ' - LATEST', _latest.path
        # NOTE(review): compares a str path with a TTOutputFileSeq -
        # presumably that class compares equal to its path string; confirm,
        # otherwise this branch always reports a needed update
        if _file != _latest:
            print ' - NEEDS UPDATE'
            _to_update.append((_yeti, _latest))
        else:
            print ' - NO UPDATE NEEDED'

    # Confirm
    print '{:d} CACHE{} NEED UPDATE'.format(len(_to_update),
                                            get_plural(_to_update).upper())
    if not _to_update:
        qt.notify('All caches are up to date',
                  title='Update caches',
                  parent=parent)
        return
    qt.ok_cancel('Update {:d} cache{}?'.format(len(_to_update),
                                               get_plural(_to_update)),
                 title='Update caches',
                 parent=parent)

    # Update
    for _yeti, _latest in qt.progress_bar(_to_update, 'Updating {:d} cache{}'):
        print _yeti, _latest
        apply_cache(yeti=_yeti, cache=_latest)
Ejemplo n.º 11
0
def blast_and_cache(force_overwrite=False,
                    attach_cache=True,
                    view_blast=True,
                    resolution='Use render globals'):
    """Execute playblast and cache nCloth nodes.

    This allows blasting and caching to happen with a single pass of
    the timeline. Any nCloth nodes that are not enabled are ignored.

    Args:
        force_overwrite (bool): overwrite any existing blasts/caches
            with no confirmation dialog
        attach_cache (bool): attach the caches on completion
        view_blast (bool): view playblast on completion
        resolution (str): blast resolution
    """

    # Get blast resolution
    _res = (None if 'x' not in resolution else
            [int(_val) for _val in resolution.split('x')])

    # Get nCloth nodes
    _n_cloths = [
        _NCloth(str(_n_cloth)) for _n_cloth in hom.CMDS.ls(type='nCloth')
        if _n_cloth.plug('isDynamic').get_val()
    ]
    print 'NCLOTH NODES:', _n_cloths

    # Delete existing
    for _n_cloth in _n_cloths:
        _n_cloth.delete_cache(force=force_overwrite)
    _seq = _get_blast_seq()
    _seq.delete(force=force_overwrite, wording='replace')

    # Execute cache/blast
    _frames = host.t_frames()
    for _idx, _frame in qt.progress_bar(enumerate(_frames),
                                        'Blasting/caching {:d} frame{}',
                                        col='PowderBlue'):
        _action = _Action.REPLACE
        for _n_cloth in _n_cloths:
            cmds.select(_n_cloth)
            _do_create_cache(start=_frame, end=_frame, action=_action)
            assert _n_cloth.get_cache_xml().exists()
        _blast(start=_frame, end=_frame, res=_res)

    if view_blast:
        _seq.view()
    if attach_cache:
        _update_xml_start_frames(n_cloths=_n_cloths, force=True)
        _attach_caches(n_cloths=_n_cloths)
Ejemplo n.º 12
0
    def copy_to(self, seq, parent=None):
        """Copy this sequence to a new location.

        Args:
            seq (Seq): target location
            parent (QDialog): parent dialog for progress bar
        """
        from psyhive import qt

        # Clear out the target then copy frame by frame
        seq.delete(wording='Replace')
        seq.test_dir()
        _frames = self.get_frames()
        for _frame in qt.progress_bar(_frames,
                                      'Copying {:d} frame{}',
                                      parent=parent):
            _src = self[_frame]
            _dest = seq[_frame]
            shutil.copy(_src, _dest)
Ejemplo n.º 13
0
def _remove_existing_data(overwrites):
    """Remove existing ingestion data on items to be replaced.

    Args:
        overwrites (tuple list): list of ma/work files
    """
    print 'OVERWRITES:'
    for _ma, _work in overwrites:
        print _work.path
        print ' - CUR', _work.get_vendor_file()
        print ' - NEW', _ma.path
        print

    qt.ok_cancel("Overwrite {:d} work files?".format(len(overwrites)))
    for _, _work in qt.progress_bar(overwrites, "Cleaning {:d} work{}"):
        _work.delete_all_data(force=True)
    print
Ejemplo n.º 14
0
def _read_action_works(force=False, progress=True):
    """Read action work files from disk.

    Args:
        force (bool): force reread from disk
        progress (bool): show progress bar on read

    Returns:
        (FrasierWork list): list of all frasier work files
    """
    # NOTE(review): force is unused in this body - presumably consumed by a
    # caching decorator applied outside this view; confirm before removing
    _works = []
    for _asset in qt.progress_bar(ASSETS.values(),
                                  'Checking {:d} asset{}',
                                  show=progress):
        _anim = _asset.find_step_root('animation', catch=True)
        if not _anim:
            continue
        _works.extend(_anim.find_work(class_=FrasierWork, dcc='maya'))
    return _works
Ejemplo n.º 15
0
def batch_scale_anim(filter_='', replace=False):
    """Batch scale face anim fbxs.

    Fbxs are read from scale folders in:

        P:/projects/frasier_38732V/production/scaled_fbx

    Args:
        filter_ (str): filter fbx list
        replace (bool): replace existing output files
    """

    # Get latest version of each filename
    _to_process = {}
    for _fbx in find(_SCALED_FBX_ROOT,
                     extn='fbx',
                     class_=_FASInputFbx,
                     filter_=filter_,
                     type_='f'):
        _to_process[_fbx.filename] = _fbx
    _inputs = sorted(_to_process.values())
    print 'FOUND {:d} INPUT FBXS'.format(len(_inputs))
    if not replace:
        _inputs = [
            _input for _input in _inputs if not _input.output.exists()
            or _input.output.cache_read('source') != _input
        ]
        print ' - {:d} NEED REPLACING'.format(len(_inputs))

    # Generate output fbxs
    for _input in qt.progress_bar(_inputs, 'Processing {:d} fbx{}'):
        print _input
        print _input.anim_scale
        print _input.output
        print _input.output.cache_fmt
        host.open_scene(_input, force=True, lazy=False)
        scale_face_anim(namespace='', scale=_input.anim_scale)
        _save_fbx(_input.output, force=True)
        _input.output.cache_write('source', _input)
        print
Ejemplo n.º 16
0
def _build_shader_overrides(shade, merge, verbose=0):
    """Build shader overrides.

    Each shader has an aiSetParameter node which applies overrides
    for the geometry in the abc which that shader is applied to.

    Args:
        shade (FileRef): shade reference
        merge (HFnDependencyNode): merge node to connect output to
        verbose (int): print process data
    """
    # Map of shader -> list of mesh shapes it is assigned to
    _shds = collections.defaultdict(list)

    # Read shader assignments
    for _mesh in shade.find_meshes():
        # This mesh is deliberately excluded from assignments
        if _mesh.clean_name == 'color_switch_Geo':
            continue
        _shd = tex.read_shd(_mesh, allow_base=True)
        if not _shd:
            continue
        _shds[_shd].append(_mesh.shp)

    # Set up AIP node for each shader
    for _shd in qt.progress_bar(sorted(_shds), 'Applying {:d} shader{}'):

        _meshes = _shds[_shd]
        lprint(' - SHD', _shd, _meshes, verbose=verbose)

        # Read SE + arnold shader - prefer the aiSurfaceShader connected to
        # the shading engine, falling back to the shader itself
        lprint('   - SE', _shd.get_se(), verbose=verbose)
        _ai_shd = get_single(
            _shd.get_se().plug('aiSurfaceShader').list_connections(),
            catch=True)
        if _ai_shd:
            _ai_shd = hom.HFnDependencyNode(_ai_shd)
        lprint('   - AI SHD', _ai_shd, verbose=verbose)
        _shd_node = _ai_shd or _shd.shd

        _build_aip_node(shd=_shd_node, meshes=_meshes, merge=merge)
Ejemplo n.º 17
0
def cache_work_files(data, farm=True, parent=None):
    """Recache the given list of work files.

    Args:
        data (list): work files and namespaces to recache
        farm (bool): submit recaches to farm
        parent (QDialog): parent interface (for dialog positioning)
    """
    _pos = parent.get_c() if parent else None
    qt.ok_cancel('Cache {:d} work file{}?'.format(len(data), get_plural(data)),
                 pos=_pos,
                 parent=parent,
                 title='Confirm cache')

    for _work_file, _namespaces in qt.progress_bar(data,
                                                   "Caching {:d} work file{}",
                                                   col="DeepSkyBlue",
                                                   pos=_pos,
                                                   parent=parent):
        print 'CACHE', _work_file.path
        print _namespaces
        print
        cache_work_file(work_file=_work_file,
                        namespaces=sorted(_namespaces),
                        farm=farm,
                        parent=parent)

    # Completed notification
    if farm:
        _msg = 'Submitted {:d} work file{} to farm'
    else:
        _msg = 'Cached {:d} work file{} locally'
    qt.notify(_msg.format(len(data), get_plural(data)),
              pos=_pos,
              title='Complete',
              parent=parent)
Ejemplo n.º 18
0
def ingest_seqs(dir_, vendor, filter_=None, force=False,
                resubmit_transgens=False):
    """Ingest images sequences from the given directory.

    Args:
        dir_ (str): directory to search
        vendor (str): name of vendor
        filter_ (str): apply path filter
        force (bool): ingest without confirmation
        resubmit_transgens (bool): resubmit any submitted transgens
    """
    _dir = Dir(abs_path(dir_))
    print 'READING', _dir.path
    assert _dir.exists()
    _seqs = _dir.find_seqs(filter_=filter_)
    print ' - FOUND {:d} SEQS'.format(len(_seqs))

    # Set vendor - fall back to deriving it from the delivery path
    _vendor = vendor or vendor_from_path(_dir.path)
    assert _vendor
    print ' - VENDOR', _vendor
    print

    # Check images - classify each seq and gather the ingestible ones
    _statuses = {}
    _to_ingest = []
    for _idx, _seq in qt.progress_bar(
            enumerate(_seqs), 'Checking {:d} seq{}'):

        print '[{:d}/{:d}] PATH {}'.format(_idx+1, len(_seqs), _seq.path)

        # Check ingestion status - seqs failing naming convention cannot be
        # mapped to a VendorSeq
        _status = _ingestable = None
        try:
            _seq = VendorSeq(_seq)
        except ValueError:
            # NOTE(review): _ingestable is set to the basename (truthy)
            # here, which marks naming-convention failures as ingestible -
            # looks suspicious; confirm intended behaviour
            _status, _ingestable = 'Fails naming convention', _seq.basename
        else:
            assert isinstance(_seq, VendorSeq)
            _status, _ingestable = _seq.get_ingest_status(
                resubmit_transgens=resubmit_transgens)
        print ' - STATUS', _status

        assert _status
        assert _ingestable is not None
        if _ingestable:
            _to_ingest.append(_seq)
        _statuses[_seq] = _status

    # Print summary of status counts
    print '\nSUMMARY:'
    print '\n'.join([
        '    {} - {:d}'.format(_status, _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))])
    print 'FOUND {:d} SEQ{} TO INGEST'.format(
        len(_to_ingest), get_plural(_to_ingest).upper())

    # Show different source warning
    _diff_src = [
        _ for _, _status in _statuses.items()
        if _status == 'Already ingested from a different source']
    if _diff_src:
        qt.notify_warning(
            '{:d} of the sequences could not be ingested because they have '
            'already been ingested from a different delivery. This happens '
            'when a vendor provides an update without versioning up.\n\n'
            'See the terminal for details.'.format(len(_diff_src)))

    # Execute ingestion
    if not _to_ingest:
        return
    if not force:
        qt.ok_cancel(
            'Ingest {:d} seq{}?'.format(
                len(_to_ingest), get_plural(_to_ingest)),
            verbose=0)
    for _idx, _seq in qt.progress_bar(
            enumerate(_to_ingest), 'Ingesting {:d} seq{}',
            stack_key='IngestSeqs'):
        print '({:d}/{:d}) [INGESTING] {}'.format(
            _idx+1, len(_to_ingest), _seq.path)
        # NOTE(review): passes the raw vendor arg (may be None) rather than
        # the derived _vendor - confirm ingest re-derives vendor when None
        _seq.ingest(vendor=vendor)
Ejemplo n.º 19
0
def batch_submit_shots(step='previz', submitter='/out/submitter1'):
    """Batch submit shots selected from a list.

    Args:
        step (str): step to search for abcs
        submitter (str): path to submitter rop
    """
    _shots = [_shot.name for _shot in tk2.find_shots()]
    _shots = qt.multi_select(_shots,
                             title='Select shots',
                             msg='Select shots to submit')

    # Check shots - iterate a copy since failing shots are removed in place
    _missing_cam = []
    _missing_rng = []
    for _shot in qt.progress_bar(copy.copy(_shots), 'Checking {:d} shot{}'):
        _shot = tk2.find_shot(_shot)
        print 'CHECKING', _shot

        # Check cam - shot needs a latest camcache abc in the given step
        _step = _shot.find_step_root(step, catch=True)
        if not _step:
            _missing_cam.append(_shot.name)
            _shots.remove(_shot.name)
            continue
        _cam_abc = _step.find_output_file(output_type='camcache',
                                          extn='abc',
                                          verbose=1,
                                          version='latest',
                                          catch=True)
        if not _cam_abc:
            _missing_cam.append(_shot.name)
            _shots.remove(_shot.name)
            continue
        print ' - CAM', _cam_abc.path

        # Check frame range is fully defined in shotgun
        _rng = _shot.get_frame_range()
        print ' - RANGE', _rng
        if not _rng or None in _rng:
            _missing_rng.append(_shot.name)
            _shots.remove(_shot.name)
            continue

    # Show warning listing the shots which will be skipped
    _msg = ''
    if _missing_cam:
        _msg += 'Shots with no {} camera:\n\n    {}\n\n'.format(
            step, '\n    '.join(_missing_cam))
    if _missing_rng:
        _msg += 'Shots with no range in shotgun:\n\n    {}\n\n'.format(
            '\n    '.join(_missing_rng))
    if _msg:
        _msg += 'These shots will be ignored.'
        qt.ok_cancel(_msg, title='Warning')

    # Submit shots
    for _shot in qt.progress_bar(_shots, 'Submitting {:d} shot{}'):
        print 'BUILD SCENE', _shot
        build_scene(shot=_shot, step=step, submitter=submitter)

    print 'SUBMITTED {:d} SHOT{}'.format(len(_shots),
                                         get_plural(_shots).upper())
Ejemplo n.º 20
0
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached
    to the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (HFnMesh list): list of driven shade geo
    """
    print 'DRIVE SHADE GEO FROM RIG'

    # Get anim rig from the cache set's reference file
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print ' - CACHE SET', _cache_set
    if not _cache_set.referenceFile():
        print ' - NO CORRESPONDING RIG'
        raise RuntimeError("No rig found for {}".format(_cache_set))
    _rig = ref.find_ref(_cache_set.referenceFile().namespace)
    print ' - RIG', _rig
    print ' - RIG PATH', _rig.path

    # Find/import tmp shade asset matching this rig
    _shade_file = get_shade_mb_for_rig(_rig)
    _shade = ref.create_ref(_shade_file.path,
                            namespace='psyhive_tmp',
                            force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _tmp_ns = ':tmp_{}'.format(_rig.namespace)
    set_namespace(_tmp_ns, clean=True)
    for _shade_mesh in qt.progress_bar(_shade.find_nodes(type_='mesh'),
                                       'Binding {:d} geo{}',
                                       col='Tomato',
                                       show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue

        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm, verbose=verbose)

        # Duplicate mesh, move it to world and track it for baking
        _dup = _shade_tfm.duplicate()
        lprint('   - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)

        # Bind to rig via world-origin blendshape at full weight
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)
    cmds.namespace(set=":")

    if not _bake_geo:
        del_namespace(_tmp_ns)
        raise RuntimeError('No geo was attached - this means none of the '
                           'shade geo matched the rig bakeSet geo.')

    # NOTE(review): the same list is returned twice - callers appear to
    # unpack a pair; confirm whether two distinct lists were intended
    return _bake_geo, _bake_geo
Ejemplo n.º 21
0
def ingest_vendor_anim(dir_,
                       vendor=None,
                       force=False,
                       filter_=None,
                       cache_on_farm=True,
                       ignore_extn=False,
                       ignore_dlayers=False,
                       ignore_rlayers=False,
                       ignore_multi_top_nodes=False):
    """Ingest vendor animation files.

    Args:
        dir_ (str): vendor in folder
        vendor (str): vendor name
        force (bool): lose current scene changes without confirmation
        filter_ (str): filter file list
        cache_on_farm (bool): submit caches to qube
        ignore_extn (bool): ignore file extension issues
        ignore_dlayers (bool): ignore display layer issues
        ignore_rlayers (bool): ignore render layer issues
        ignore_multi_top_nodes (bool): ignore multiple top node issues
    """

    # Set vendor
    _vendor = vendor or ingest.vendor_from_path(dir_)
    assert _vendor
    print ' - VENDOR', _vendor
    print

    # Read ingestible scenes
    _to_ingest, _statuses = _get_ingestable_scenes(dir_=dir_, filter_=filter_)
    if not _to_ingest:
        return
    if not force:
        qt.ok_cancel('Ingest {:d} scene{}?'.format(len(_to_ingest),
                                                   get_plural(_to_ingest)),
                     verbose=0,
                     icon=ingest.ICON,
                     title='Confirm ingestion')
        print 'HANDLE UNSAVED CHANGES'
        host.handle_unsaved_changes()
        print 'HANDLED UNSAVED CHANGES'

    # Ingest scenes
    _issues = []
    _ingest_kwargs = dict(ignore_extn=ignore_extn,
                          ignore_dlayers=ignore_dlayers,
                          ignore_multi_top_nodes=ignore_multi_top_nodes,
                          ignore_rlayers=ignore_rlayers)
    for _idx, _scene in qt.progress_bar(enumerate(_to_ingest),
                                        'Ingesting {:d} scene{}',
                                        col=PYGUI_COL):

        print '[{:d}/{:d}] PATH {}'.format(_idx + 1, len(_to_ingest),
                                           _scene.path)

        _scene.check_workspace(force=True)

        # Check ingestion status
        assert isinstance(_scene, VendorScene)
        _scene_isses = _scene.get_ingest_issues(**_ingest_kwargs)
        if _scene_isses:
            _issues.append((_scene, _scene_isses))
        print ' - CAM', _scene.scene_get_cam()

        _scene.ingest(vendor=vendor, force=True, cache_on_farm=cache_on_farm)
        _status, _ = _scene.get_ingest_status()
        _statuses[_scene] = _status

    if _issues:
        print '\n\n[INGESTION ISSUES]\n'
        for _scene, _scene_issues in _issues:
            print 'SCENE', _scene.path
            for _issue in _scene_issues:
                print ' -', _issue

    # Print summary
    print '\n\n[SUMMARY]'
    print '\n'.join([
        '    {} - {:d}'.format(_status,
                               _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))
    ])
    print '\nFOUND {:d} SCENE{} TO INGEST'.format(
        len(_to_ingest),
        get_plural(_to_ingest).upper())
Ejemplo n.º 22
0
def search_files_for_text(files,
                          text=None,
                          filter_=None,
                          win=False,
                          edit=False,
                          verbose=0):
    """Search the contents of the given files for text.

    Args:
        files (str list): list of files to check
        text (str): text to match in each line
        filter_ (str): apply filter to each line
        win (bool): display paths in windows format
        edit (bool): open the first found instance in an editor and exit
        verbose (int): print process data

    Returns:
        (bool): whether search completed successfully - ie. if an instance
            was found and the code was edited then false is returned
    """
    from psyhive import qt

    _match_found = False
    for _file in qt.progress_bar(files,
                                 'Searching {:d} file{}',
                                 col='Aquamarine',
                                 show=not edit):

        dprint('CHECKING FILE', _file, verbose=verbose)

        _header_shown = False
        _lines = read_file(_file).split('\n')
        for _line_n, _line in enumerate(_lines):

            # Test the line against text/filter criteria, skipping any
            # lines which cannot be decoded
            try:
                _hit_text = text and text in _line
            except UnicodeDecodeError:
                continue
            try:
                _hit_filter = filter_ and passes_filter(
                    _line, filter_, case_sensitive=True)
            except UnicodeDecodeError:
                continue

            # Decide whether this line should be displayed
            _show_line = False
            if _hit_text:
                lprint(' - MATCHED TEXT IN LINE', text, verbose=verbose)
                _show_line = True
            elif _hit_filter:
                lprint(' - MATCHED FILTER IN LINE', filter_, verbose=verbose)
                _show_line = True
            if not _show_line:
                continue

            # Print the file path (once per file) then the matching line
            if not _header_shown:
                lprint(abs_path(_file, win=win))
            lprint('{:>6} {}'.format('[{:d}]'.format(_line_n + 1),
                                     _line.rstrip()))
            _header_shown = True
            _match_found = True

            # In edit mode, jump to the first match and bail out
            if edit:
                File(_file).edit(line_n=_line_n + 1)
                return False

        if _header_shown:
            lprint()

    if not _match_found:
        dprint('No instances found')

    return True
Ejemplo n.º 23
0
def ingest_ma_files_to_pipeline(
        src_dir=('P:/projects/frasier_38732V/production/vendor_in/'
                 'Motion Burner/Delivery_2020-02-12'),
        ma_filter='',
        work_filter='',
        replace=False,
        blast_=False,
        legs_to_ik=False,
        reverse=False,
        limit=0,
        verbose=0):
    """Copy ma file from vendors_in to psyop pipeline.

    This creates a work file for each ma file and  also generates face/body
    blasts.

    Args:
        src_dir (str): vendor in directory to search for ma files
        ma_filter (str): apply filter to ma file path
        work_filter (str): apply filter to work file path
        replace (bool): overwrite existing files
        blast_ (bool): execute blasts
        legs_to_ik (bool): execute legs ik switch (slow)
        reverse (bool): reverse the list (for parallel processing)
        limit (int): limit the number of files to be processed
        verbose (int): print process data
    """
    _src_dir = abs_path(src_dir)
    print 'SRC DIR', _src_dir
    assert os.path.exists(_src_dir)
    _mas = _find_ma_files_to_check(_src_dir, ma_filter, work_filter, limit)

    # Check which mas need processing
    _to_process = []
    _overwrites = []
    _replacing = []
    for _idx, _ma in qt.progress_bar(enumerate(_mas),
                                     'Checking {:d} ma files'):
        lprint('PROCESSING MA {:d}/{:d} {}\n - MA {}'.format(
            _idx + 1, len(_mas), _ma.filename, _ma.path),
               verbose=verbose)

        # Check for overwrite
        _work = _ma.get_work()
        lprint(' - WORK', _work.path, verbose=verbose)
        if _work.exists():
            _vendor_file = _work.get_vendor_file()
            if replace or _ma.path != _work.get_vendor_file():
                _overwrites.append((_ma, _work))
                if len(_work.find_vers()) > 1:
                    _replacing.append((_ma, _work))
            elif (_work.blast_comp.exists()
                  and _work.get_export_fbx().exists()
                  and _work.get_export_fbx(dated=True).exists()):
                print ' - COMP BLAST', _work.blast_comp.path
                print ' - FBX', _work.get_export_fbx().path
                print ' - NO PROCESSING NEEDED'
                print
                continue

        _to_process.append([_ma, _work])

    print
    print 'FOUND {:d} FILES TO PROCESS'.format(len(_to_process))
    print

    # Remove any data to be replaced
    if _replacing:
        _text = 'Replacing {:d} files:\n\n'.format(len(_replacing))
        for _ma, _work in _replacing:
            _text += '\n - MA {}\n - WORK {}\n\n'.format(_ma.path, _work.path)
        qt.ok_cancel(_text)
    if _overwrites:
        _remove_existing_data(_overwrites)

    # Execute the ingestion
    if not _to_process:
        return
    qt.ok_cancel('Ingest {:d} files?'.format(len(_to_process)), icon=ICON)
    if reverse:
        _to_process = reversed(_to_process)
    for _ma, _work in qt.progress_bar(_to_process,
                                      'Ingesting {:d} ma{}',
                                      col='LightSkyBlue'):
        _ingest_vendor_ma(ma_=_ma,
                          work=_work,
                          blast_=blast_,
                          legs_to_ik=legs_to_ik)
Ejemplo n.º 24
0
def _get_ingestable_scenes(dir_, filter_):
    """Find scenes ready for ingestion.

    Args:
        dir_ (str): directory to search for scenes
        filter_ (str): filter_ file list

    Returns:
        (VendorScene list, dict): list of ingestible scenes, scene statuses
    """

    # Find scenes
    _dir = Dir(abs_path(dir_))
    print 'READING', _dir.path
    assert _dir.exists()
    assert _dir.is_dir()
    _scenes = [
        _file for _file in _dir.find(type_='f', class_=File, filter_=filter_)
        if _file.extn in ('ma', 'mb')
    ]
    print ' - FOUND {:d} SCENES'.format(len(_scenes))

    # Check scenes
    _statuses = {}
    _to_ingest = []
    for _idx, _scene in qt.progress_bar(enumerate(_scenes),
                                        'Checking {:d} scene{}',
                                        col=PYGUI_COL):

        print '[{:d}/{:d}] PATH {}'.format(_idx + 1, len(_scenes), _scene.path)

        # Check ingestion status
        _status = _ingestable = None
        try:
            _scene = VendorScene(_scene)
        except ValueError:
            print ' - FAILS NAMING CONVENTION'
            _status, _ingestable = 'Fails naming convention', False
        else:
            _status, _ingestable = _scene.get_ingest_status()
        print ' - STATUS', _status

        assert _status
        assert _ingestable is not None

        if _ingestable:
            assert isinstance(_scene, VendorScene)
            _to_ingest.append(_scene)
        _statuses[_scene] = _status

    # Print list of shots already ingested
    _already_ingested = [
        _scene for _scene, _status in _statuses.items()
        if _status == 'Already ingested'
    ]
    if _already_ingested:
        print '\n[ALREADY INGESTED] {}\n'.format(', '.join(
            sorted(
                set([
                    _scene.to_psy_work().get_shot().name
                    for _scene in _already_ingested
                ]))))

    # Print summary
    print '\n[SUMMARY]'
    print '\n'.join([
        '    {} - {:d}'.format(_status,
                               _statuses.values().count(_status))
        for _status in sorted(set(_statuses.values()))
    ])
    print '\nFOUND {:d} SCENE{} TO INGEST'.format(
        len(_to_ingest),
        get_plural(_to_ingest).upper())

    return _to_ingest, _statuses
Ejemplo n.º 25
0
def _generate_blast_comp_mov(work,
                             ref_imgs=True,
                             comp_imgs=True,
                             margin=20,
                             thumb_aspect=0.75):
    """Generate blast comp mov file for the given work.

    Overlays face-ref and face-blast thumbnails onto each body blast
    frame, writes the comped frames to a tmp jpg sequence, then encodes
    that sequence to the work's blast_comp mov via ffmpeg.

    Args:
        work (FrasierWork): work file to comp images for
        ref_imgs (bool): generate ref jpgs (disable for debugging)
        comp_imgs (bool): generate comp jpgs (disable for debugging)
        margin (int): face ref/blast overlay margin in pixels
        thumb_aspect (float): aspect ratio of face ref/blast overlay
    """
    print 'WORK', work.path

    # Both source blasts must already exist, and the comp must not
    assert work.blast.exists()
    assert work.face_blast.exists()
    assert not work.blast_comp.exists()

    # Duration assumes 30fps playback (matches the ffmpeg -r 30 below)
    _start, _end = work.blast.find_range()
    _dur_secs = 1.0 * (_end - _start + 1) / 30

    print ' - BLAST COMP', work.blast_comp.path
    print ' - RANGE {:d}-{:d} ({:.02f}s)'.format(_start, _end, _dur_secs)

    # Generate tmp ref jpgs - if there is no ref mov (or ref imgs are
    # disabled), point at an empty tmp seq so the overlay step is skipped
    if ref_imgs and work.get_ref_mov():
        _mov, _start = work.get_ref_data()
        _ref_tmp_jpgs = _get_ref_jpgs(mov=_mov, start=_start, secs=_dur_secs)
    else:
        _ref_tmp_jpgs = Seq(
            abs_path('{}/ref_tmp/images.%04d.jpg'.format(
                tempfile.gettempdir())))
        _ref_tmp_jpgs.delete(force=True)
    print ' - REF JPG', _ref_tmp_jpgs

    # Build comp jpgs
    _comp_tmp_jpgs = Seq(
        abs_path('{}/comp_tmp/images.%04d.jpg'.format(tempfile.gettempdir())))
    if comp_imgs:

        _comp_tmp_jpgs.test_dir()
        _comp_tmp_jpgs.delete(force=True)

        for _idx, _src_frame in qt.progress_bar(enumerate(
                work.blast.get_frames()),
                                                'Comping {:d} images',
                                                stack_key='FrasierBlastComp',
                                                col='GreenYellow'):

            # Thumbnails are sized to fit two side-by-side in the right
            # third of the frame, with margin pixels between/around them
            _out = qt.HPixmap(work.blast[_src_frame])
            _face = qt.HPixmap(work.face_blast[_src_frame])
            _thumb_w = (1.0 * _out.width() / 3 - margin * 3) / 2
            _thumb_size = qt.get_size(_thumb_w, _thumb_w / thumb_aspect)

            # Add ref overlay (left thumbnail slot)
            # NOTE(review): jpg seqs here appear to be 1-indexed, hence
            # _idx + 1 - confirm against Seq indexing
            if _ref_tmp_jpgs:
                _ref = qt.HPixmap(_ref_tmp_jpgs[_idx + 1])
                _ref = _ref.resize(_thumb_size)
                _out.add_overlay(_ref,
                                 pos=(_out.width() * 2 / 3 + margin, margin))

            # Add face blast overlay (right slot, anchored top-right) -
            # crop a centred strip of the face blast to the thumb aspect
            _face_size = qt.get_size(_face.height() * thumb_aspect,
                                     _face.height())
            _face = _face.copy(_face.width() / 2 - _face_size.width() / 2, 0,
                               _face_size.width(), _face_size.height())
            _face = _face.resize(_thumb_size)
            _out.add_overlay(_face,
                             pos=(_out.width() - margin, margin),
                             anchor="TR")

            _out.save(_comp_tmp_jpgs[_idx + 1])
        print ' - WROTE TMP IMAGES', _comp_tmp_jpgs.path
    print ' - COMP IMAGES {} {}'.format(_comp_tmp_jpgs.find_range(force=True),
                                        _comp_tmp_jpgs.path)

    # Compile out mov (h264, 30fps, yuv420p for player compatibility)
    work.blast_comp.test_dir()
    _args = [
        '-r', '30', '-f', 'image2', '-i', _comp_tmp_jpgs.path, '-vcodec',
        'libx264', '-crf', '25', '-pix_fmt', 'yuv420p', work.blast_comp.path
    ]
    print 'launch ffmpeg --', ' '.join(_args)
    psylaunch.launch_app('ffmpeg', args=_args, wait=True)
    assert work.blast_comp.exists()
    print ' - WROTE MOV', work.blast_comp.path
Ejemplo n.º 26
0
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached
    to the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): duplicated shade transforms to bake, and nodes to delete
            once baking is complete (currently the same duplicate list)
    """
    from psyhive import tk2

    # Get anim rig - falls back to a hard-coded archer bake set if no
    # cache set is passed (presumably a dev/test default - TODO confirm)
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print 'CACHE SET', _cache_set
    _rig = ref.find_ref(_cache_set.namespace().strip(':'))
    print 'RIG', _rig
    print 'RIG PATH', _rig.path

    # Find/import tmp shade asset - map the rig output to the latest
    # shade output and reference its maya file under a tmp namespace
    _rig_out = tk2.TTOutputName(_rig.path)
    print 'RIG OUT', _rig_out.path
    _shade_out = _rig_out.map_to(Step='shade',
                                 output_type='shadegeo',
                                 Task='shade').find_latest()
    print 'SHADE OUT', _shade_out.path
    if not _shade_out.exists():
        raise RuntimeError("Missing shade file " + _shade_out.path)
    _shade_file = _shade_out.find_file(extn='mb', format_='maya')
    print 'SHADE FILE', _shade_file
    _shade = ref.create_ref(_shade_file.path,
                            namespace='psyhive_tmp',
                            force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _cleanup = []
    set_namespace(':tmp_{}'.format(_rig.namespace), clean=True)
    for _shade_mesh in qt.progress_bar(_shade.find_nodes(type_='mesh'),
                                       'Binding {:d} geo{}',
                                       col='Tomato',
                                       show=progress):

        # Check there is equivalent mesh in rig - skip intermediate
        # shapes and shade meshes with no matching rig transform
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue

        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm, verbose=verbose)

        # Duplicate mesh and prep it for baking (unparent, strip unused
        # uv sets, drive its visibility from the rig transform)
        _dup = _shade_tfm.duplicate()
        lprint('   - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)
        _cleanup.append(_dup)

        # Bind to rig - world-space blendshape at full weight so the
        # duplicate follows the rig geo exactly
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    # Remove the tmp shade reference now the duplicates exist
    _shade.remove(force=True)

    return _bake_geo, _cleanup