Example #1
def apply_cache(cache, yeti=None, ref_=None):
    """Apply a yeti cache.

    Args:
        cache (Seq): cache to apply
        yeti (HFnDependencyNode): yeti node to apply the cache to
        ref_ (FileRef): reference to apply cache to
    """
    print 'APPLYING CACHE', cache

    # Get yeti node, creating if needed
    if yeti:
        _yeti = yeti
    else:
        assert cache.output_name.count('Yeti_') == 1
        _cache_ns, _tfm_name = cache.output_name.split('Yeti_')
        _yeti_name = _tfm_name + 'Shape'
        _ref = ref_ or ref.find_ref(_cache_ns)
        print ' - REF', _ref
        _yeti = _ref.get_node(_yeti_name, catch=True)
        if not _yeti:
            _top_node = _ref.find_top_node()
            set_namespace(':' + _ref.namespace)
            _yeti = hom.CMDS.createNode('pgYetiMaya', name=_yeti_name)
            if not _yeti.get_parent() == _ref.get_node(_tfm_name):
                _yeti.get_parent().rename(_tfm_name)
            set_namespace(':')
            cmds.parent(_yeti.get_parent(), _top_node)
    print ' - YETI', _yeti

    # Apply cache
    _yeti.plug('cacheFileName').set_val(cache.path)
    _yeti.plug('fileMode').set_val(1)
    _yeti.plug('overrideCacheWithInputs').set_val(False)
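
For reference, the plug assignments at the end map onto plain maya.cmds calls roughly as below; this is a sketch only, with a hypothetical node name and cache path rather than the pipeline wrappers.

# Sketch: raw-cmds equivalent of the cache plug setup (name/path are hypothetical)
_yeti_shape = 'cache_ns:Yeti_hairShape'
cmds.setAttr(_yeti_shape + '.cacheFileName', '/tmp/caches/hair.%04d.fur', type='string')
cmds.setAttr(_yeti_shape + '.fileMode', 1)  # same mode switch as in the function above
cmds.setAttr(_yeti_shape + '.overrideCacheWithInputs', False)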
Example #2
def del_namespace(namespace, force=True):
    """Delete the given namespace.

    Args:
        namespace (str): namespace to delete
        force (bool): delete nodes without confirmation
    """
    from maya_psyhive import ref

    if not cmds.namespace(exists=namespace):
        return

    _force = force
    _ref = ref.find_ref(namespace=namespace.lstrip(':'), catch=True)
    if _ref:
        _ref.remove(force=_force)
        _force = True

    if not _force:
        qt.ok_cancel(
            'Are you sure you want to delete the namespace {}?'.format(
                namespace))
    set_namespace(namespace, clean=True)
    set_namespace(":")
    cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True)
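
The core teardown reduces to a couple of maya.cmds calls; a minimal sketch with a hypothetical namespace, without the reference handling or confirmation step above.

# Sketch: delete a namespace and its contents with plain maya.cmds
_ns = ':crowd_tmp'  # hypothetical namespace
if cmds.namespace(exists=_ns):
    cmds.namespace(setNamespace=':')  # step out before removing
    cmds.namespace(removeNamespace=_ns, deleteNamespaceContent=True)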
Example #3
def _update_abc(exo, shot, verbose=0):
    """Update abc to point to the given shot.

    Args:
        exo (HFnDependencyNode): exocortex abc node
        shot (str): name of shot to update to (eg. rnd0080)
        verbose (int): print process data

    Returns:
        (tuple): update status, ref to remove (if any)
    """
    _ref = (ref.find_ref(namespace=exo.namespace, catch=True)
            if exo.namespace else None)
    lprint(' - REF', _ref, verbose=verbose)
    _tmpl_abc = exo.plug('fileName').get_val()
    _tmpl_output = tk2.TTOutputFile(_tmpl_abc)
    lprint(' - TMPL ABC', _tmpl_abc, verbose=verbose)
    if not _tmpl_output or not _tmpl_output.shot:
        lprint(' - NO OUTPUT FOUND', exo, _tmpl_abc, verbose=verbose)
        return 'off pipeline', None
    if tk2.TTRoot(_tmpl_output.path).name == shot:
        lprint(' - NO UPDATE NEEDED', _tmpl_abc, verbose=verbose)
        return 'no update needed', None

    # Map to this shot
    _shot_output = _tmpl_output.map_to(Shot=shot).find_latest()
    if not _shot_output:
        lprint(' - NO VERSIONS FOUND', _shot_output, verbose=verbose)
        return 'no {} versions found'.format(shot), _ref

    # Update exocortex node
    lprint(' - SHOT ABC', _shot_output.path, verbose=verbose)
    exo.plug('fileName').set_val(_shot_output.path)
    return 'updated', None
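
A hypothetical caller loop might look like this; the exocortex node type name is an assumption and not confirmed by this module.

# Sketch: retarget every exocortex abc node in the scene to a new shot
_to_remove = []
for _node in cmds.ls(type='ExocortexAlembicFile') or []:  # type name assumed
    _status, _ref = _update_abc(hom.HFnDependencyNode(_node), shot='rnd0080')
    if _ref:
        _to_remove.append(_ref)
for _ref in _to_remove:
    _ref.remove(force=True)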
Example #4
def cache_work_file(work_file,
                    namespaces,
                    confirm=False,
                    new_scene=False,
                    farm=True,
                    parent=None):
    """Recache the given work file.

    The work file is opened, versioned up and then recached.

    Args:
        work_file (TTWorkFileBase): work file to recache
        namespaces (str list): list of assets to recache
        confirm (bool): confirm before execute
        new_scene (bool): new scene after recache
        farm (bool): submit recache to farm
        parent (QDialog): parent interface (for dialog positioning)
    """
    dprint('RECACHING', work_file.path)

    _engine = tank.platform.current_engine()
    _fileops = _engine.apps['psy-multi-fileops']

    # Load the scene
    work_file.load()
    maya.utils.processIdleEvents()
    _fileops.init_app()

    # Update assets
    _updated = []
    for _ns in qt.progress_bar(namespaces,
                               'Updating {:d} asset{}',
                               col='LightSteelBlue',
                               parent=parent):
        _ref = ref.find_ref(_ns, class_=m_pipe.OutputRef)
        if not _ref.is_loaded():
            _ref.load()
        if _ref.update_to_latest():
            _updated.append(_ref.namespace)

    # Version up
    _fileops.init_app()
    maya.utils.processIdleEvents()
    _engine = tank.platform.current_engine()
    _fileops = _engine.apps['psy-multi-fileops']
    _fileops.version_up_workfile()
    maya.utils.processIdleEvents()
    _cur_work = tk2.cur_work(class_=BCWork)
    _cur_work.set_comment('Versioned up by batch cache tool')
    _cur_work.read_dependencies(new_scene=False)

    _exec_cache(namespaces=namespaces,
                new_scene=new_scene,
                confirm=confirm,
                farm=farm)
    cmds.file(new=True, force=True)
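
Hypothetical usage on the current work file, assuming tk2.cur_work can be called without a class override; the namespace and flags are placeholders.

# Sketch: recache one asset locally rather than on the farm
_work = tk2.cur_work()
cache_work_file(_work, namespaces=['archer_rig2'], confirm=False,
                new_scene=False, farm=False)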
Example #5
def load_vendor_ma(path, fix_hik_issues=False, force=False, lazy=False):
    """Load vendor ma file.

    The file is loaded and then the bad rig reference is updated.

    Args:
        path (str): vendor ma file
        fix_hik_issues (bool): check if hik is still driving the motion
            burner skeleton and disable it if it is
        force (bool): lose unsaved changes with no warning
        lazy (bool): don't open scene if it's already open
    """

    # Load scene
    if not lazy or host.cur_scene() != path:

        if not force:
            host.handle_unsaved_changes()

        # Load the scene
        try:
            pause_viewports_on_exec(cmds.file)(path,
                                               open=True,
                                               prompt=False,
                                               force=True)
        except RuntimeError as _exc:
            if "has no '.ai_translator' attribute" in _exc.message:
                pass
            else:
                print '######################'
                print _exc.message
                print '######################'
                raise RuntimeError('Error on loading scene ' + path)

        assert host.get_fps() == 30

    _fix_cr_namespaces()

    # Update rig
    _ref = ref.find_ref(filter_='-camera -cemera')
    if not _ref.path == MOBURN_RIG:
        _ref.swap_to(MOBURN_RIG)
    _ref = _fix_nested_namespace(_ref)
    if not _ref.namespace == 'SK_Tier1_Male_CR':
        _ref.rename('SK_Tier1_Male_CR')

    # Test for hik issues
    if fix_hik_issues:
        _test_for_hik_issues(_ref)
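
Hypothetical call for a single delivery file; the path is a placeholder.

# Sketch: ingest one vendor scene, discarding unsaved changes and checking hik
load_vendor_ma('P:/vendor_in/batch_01/sc010_0010.ma',
               fix_hik_issues=True, force=True, lazy=True)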
Example #6
def _fix_nested_namespace(ref_):
    """Fix nested namespace issues.

    If the rig is in a nested namespace, move it into the root namespace.

    Args:
        ref_ (FileRef): reference to check

    Returns:
        (FileRef): fixed reference
    """
    _ref_node = hom.HFnDependencyNode(ref_.ref_node)
    if not _ref_node.namespace:
        print 'NO NAMESPACE ISSUE TO FIX'
        return ref_

    print 'FIXING NESTED NAMESPACE', _ref_node.namespace
    cmds.namespace(moveNamespace=(_ref_node.namespace, ":"), force=True)
    return ref.find_ref()
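
The fix itself boils down to a single namespace move; a raw-cmds sketch with a hypothetical nested namespace.

# Sketch: merge a nested namespace back into the root namespace
_nested_ns = 'vendor_pkg'  # hypothetical namespace holding the reference node
if _nested_ns in (cmds.namespaceInfo(listOnlyNamespaces=True) or []):
    cmds.namespace(moveNamespace=(_nested_ns, ':'), force=True)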
Example #7
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached
    to the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): list of driven shade geo, list of nodes to clean up
    """
    print 'DRIVE SHADE GEO FROM RIG'

    # Get anim rig
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print ' - CACHE SET', _cache_set
    if not _cache_set.referenceFile():
        print ' - NO CORRESPONDING RIG'
        raise RuntimeError("No rig found for {}".format(_cache_set))
    _rig = ref.find_ref(_cache_set.referenceFile().namespace)
    print ' - RIG', _rig
    print ' - RIG PATH', _rig.path

    # Find/import tmp shade asset
    _shade_file = get_shade_mb_for_rig(_rig)
    _shade = ref.create_ref(_shade_file.path,
                            namespace='psyhive_tmp',
                            force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _tmp_ns = ':tmp_{}'.format(_rig.namespace)
    set_namespace(_tmp_ns, clean=True)
    for _shade_mesh in qt.progress_bar(_shade.find_nodes(type_='mesh'),
                                       'Binding {:d} geo{}',
                                       col='Tomato',
                                       show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue

        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm, verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint('   - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)
    cmds.namespace(set=":")

    if not _bake_geo:
        del_namespace(_tmp_ns)
        raise RuntimeError('No geo was attached - this means none of the '
                           'shade geo matched the rig bakeSet geo.')

    return _bake_geo, _bake_geo
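
The per-mesh bind step corresponds roughly to the raw-cmds sequence below; the node names are hypothetical.

# Sketch: duplicate one shade mesh and drive it from the rig via a blendshape
_dup = cmds.duplicate('psyhive_tmp:body_GEO', name='body_GEO_bake')[0]
cmds.parent(_dup, world=True)  # assumes the duplicate starts under a group
_blend = cmds.blendShape('archer_rig2:body_GEO', _dup, origin='world')[0]
cmds.setAttr(_blend + '.weight[0]', 1.0)  # rig shape fully drives the duplicate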
Example #8
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached
    to the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): list of driven shade geo, list of nodes to clean up
    """
    from psyhive import tk2

    # Get anim rig
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print 'CACHE SET', _cache_set
    _rig = ref.find_ref(_cache_set.namespace().strip(':'))
    print 'RIG', _rig
    print 'RIG PATH', _rig.path

    # Find/import tmp shade asset
    _rig_out = tk2.TTOutputName(_rig.path)
    print 'RIG OUT', _rig_out.path
    _shade_out = _rig_out.map_to(Step='shade',
                                 output_type='shadegeo',
                                 Task='shade').find_latest()
    print 'SHADE OUT', _shade_out.path
    if not _shade_out.exists():
        raise RuntimeError("Missing shade file " + _shade_out.path)
    _shade_file = _shade_out.find_file(extn='mb', format_='maya')
    print 'SHADE FILE', _shade_file
    _shade = ref.create_ref(_shade_file.path,
                            namespace='psyhive_tmp',
                            force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _cleanup = []
    set_namespace(':tmp_{}'.format(_rig.namespace), clean=True)
    for _shade_mesh in qt.progress_bar(_shade.find_nodes(type_='mesh'),
                                       'Binding {:d} geo{}',
                                       col='Tomato',
                                       show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue

        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm, verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint('   - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)
        _cleanup.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)

    return _bake_geo, _cleanup
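
Hypothetical end-to-end usage, assuming the returned transform wrappers stringify to node names; the export step is omitted.

# Sketch: bind tmp geo to the archer rig, export elsewhere, then tidy up
_bake_geo, _cleanup = drive_shade_geo_from_rig(
    cache_set=nt.ObjectSet(u'archer_rig2:bakeSet'), progress=True)
# ... export _bake_geo (eg. to alembic) here ...
cmds.delete([str(_node) for _node in _cleanup])
del_namespace(':tmp_archer_rig2')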