예제 #1
0
    def on_project_changed(self, index):
        """Switch the session to the project selected at *index*."""
        project_name = model.data(index, "name")
        api.Session["AVALON_PROJECT"] = project_name

        # Establish a connection to the project database
        self.log("Connecting to %s" % project_name, level=INFO)

        project = io.find_one({"type": "project"})

        assert project is not None, "This is a bug"

        # Combine the generic project actions with the application actions
        self._registered_actions[:] = (
            api.discover(api.Action) + lib.get_apps(project)
        )

        # One model item per silo, sorted by name
        silo_items = []
        for silo in sorted(io.distinct("silo")):
            silo_items.append({
                "name": silo,
                "icon": DEFAULTS["icon"]["silo"],
            })
        self._model.push(silo_items)

        # The project document itself becomes the navigation frame
        frame = project
        frame["project"] = project["_id"]
        environment = {"project": project_name}
        for key, value in project["data"].items():
            environment["project_%s" % key] = str(value)
        frame["environment"] = environment

        self._frames.append(frame)
        self.pushed.emit(project_name)
예제 #2
0
    def init(self):
        """Populate the model with visible projects and discover actions."""
        terminal.log("initialising..")
        header = "Root"

        # Only projects not explicitly flagged as hidden are listed
        visible_projects = [
            project
            for project in sorted(io.projects(), key=lambda doc: doc["name"])
            if project["data"].get("visible", True)
        ]
        self._model.push([
            dict(
                {
                    "_id": project["_id"],
                    "icon": DEFAULTS["icon"]["project"],
                    "name": project["name"],
                },
                **project["data"]
            )
            for project in visible_projects
        ])

        # Discover all registered actions
        discovered_actions = api.discover(api.Action)
        self._registered_actions[:] = discovered_actions

        # Validate actions based on compatibility
        self._actions.push(
            self.collect_compatible_actions(discovered_actions))

        self.pushed.emit(header)
        self.navigated.emit()
        terminal.log("ready")
예제 #3
0
def assign_look_by_version(nodes, version_id):
    """Assign nodes a specific published look version by id.

    This assumes the nodes correspond with the asset.

    Args:
        nodes(list): nodes to assign look to
        version_id (bson.ObjectId): database id of the version

    Returns:
        None
    """

    # Get representations of shader file and relationships
    look_representation = io.find_one({
        "type": "representation",
        "parent": version_id,
        "name": "ma"
    })
    json_representation = io.find_one({
        "type": "representation",
        "parent": version_id,
        "name": "json"
    })

    host = api.registered_host()
    representation_id = str(look_representation['_id'])

    # See if representation is already loaded, if so reuse it.
    container_node = None
    for container in host.ls():
        same_loader = container['loader'] == "LookLoader"
        same_repr = container['representation'] == representation_id
        if same_loader and same_repr:
            log.info("Reusing loaded look ..")
            container_node = container['objectName']
            break

    if container_node is None:
        log.info("Using look for the first time ..")

        # Load file
        loaders = api.loaders_from_representation(api.discover(api.Loader),
                                                  representation_id)
        Loader = next((i for i in loaders if i.__name__ == "LookLoader"), None)
        if Loader is None:
            raise RuntimeError("Could not find LookLoader, this is a bug")

        # Reference the look file
        with maya.maintained_selection():
            container_node = pipeline.load(Loader, look_representation)

    # Get container members
    shader_nodes = cmds.sets(container_node, query=True)

    # Load relationships
    shader_relation = api.get_representation_path(json_representation)
    with open(shader_relation, "r") as f:
        relationships = json.load(f)

    # Assign relationships
    apply_shaders(relationships, shader_nodes, nodes)
예제 #4
0
    def repair(cls, instance):
        """Recreate a legacy write node with the current Creator plugin.

        The old node's reusable settings (position, input, publish/render
        flags, frame range) are harvested, the node is deleted, and a new
        write node of the same family is created and restored from that
        data.

        Args:
            instance: pyblish instance whose first member is the node.

        Raises:
            RuntimeError: when the node family is not render/prerender, or
                the matching Creator plugin cannot be discovered.
        """
        node = instance[0]

        if "Write" in node.Class():
            data = toml.loads(node["avalon"].value())
        else:
            data = get_avalon_knob_data(node)

        # collect reusable data
        data["XYpos"] = (node.xpos(), node.ypos())
        data["input"] = node.input(0)
        data["publish"] = node["publish"].value()
        data["render"] = node["render"].value()
        data["render_farm"] = node["render_farm"].value()
        data["review"] = node["review"].value()
        data["use_limit"] = node["use_limit"].value()
        data["first"] = node["first"].value()
        data["last"] = node["last"].value()

        family = data["family"]
        cls.log.debug("_ orig node family: {}".format(family))

        # define what family of write node should be recreated
        if family == "render":
            create_name = "CreateWriteRender"
        elif family == "prerender":
            create_name = "CreateWritePrerender"
        else:
            # previously this fell through to a NameError; fail clearly
            raise RuntimeError(
                "Unsupported family for repair: {}".format(family))

        # get appropriate plugin class
        creator_plugin = None
        for creator in api.discover(api.Creator):
            if creator.__name__ == create_name:
                creator_plugin = creator
                break

        if creator_plugin is None:
            raise RuntimeError(
                "Could not find Creator plugin: {}".format(create_name))

        # delete the legacy write node
        nuke.delete(node)

        # create write node with creator
        new_node_name = data["subset"]
        creator_plugin(new_node_name, data["asset"]).process()

        node = nuke.toNode(new_node_name)
        node.setXYpos(*data["XYpos"])
        node.setInput(0, data["input"])
        node["publish"].setValue(data["publish"])
        node["review"].setValue(data["review"])
        node["use_limit"].setValue(data["use_limit"])
        node["first"].setValue(data["first"])
        node["last"].setValue(data["last"])

        # recreate render targets
        if data["render"]:
            node["render"].setValue("Local")
            if data["render_farm"]:
                node["render"].setValue("On farm")
예제 #5
0
    def __init__(self, remote_root, remote_user, site=None):
        """Initialize SFTP job state.

        Args:
            remote_root: root path on the remote host
            remote_user: user name to connect as
            site: SFTP site name; falls back to the deprecated
                `AVALON_SFTPC_SITE` session key when not given
        """
        self.jobs = []
        self.remote_root = remote_root
        self.remote_user = remote_user

        # `AVALON_SFTPC_SITE` deprecated, this is for backward compat
        resolved_site = site or api.Session.get("AVALON_SFTPC_SITE")
        assert resolved_site, "SFTP site name not provided."
        self.site = resolved_site

        self.available_loaders = api.discover(api.Loader)
예제 #6
0
    def discover(self):
        """Set up Actions cache. Run this for each new project."""
        # All generic actions registered with the API
        discovered = api.discover(api.Action)

        # Append the application-launcher actions for this project
        discovered.extend(self.get_application_actions())

        self._registered_actions = discovered
        self.items_by_id.clear()
예제 #7
0
    def process(self, context, plugin):
        """Repair errored write instances by recreating them via Creator.

        For each instance that failed the given plugin, its settings are
        collected, and a fresh write node with the same subset name is
        created and restored from those settings.

        Args:
            context: pyblish context carrying validation results
            plugin: the plugin whose failed instances should be repaired
        """
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        for instance in instances:
            node = instance[0]
            if "Write" in node.Class():
                data = toml.loads(node["avalon"].value())
            else:
                data = get_avalon_knob_data(node)

            self.log.info(data)

            # collect reusable settings from the old node
            data["xpos"] = node.xpos()
            data["ypos"] = node.ypos()
            data["input"] = node.input(0)
            data["publish"] = node["publish"].value()
            data["render"] = node["render"].value()
            data["render_farm"] = node["render_farm"].value()
            data["review"] = node["review"].value()

            # find the render write Creator plugin
            create_name = "CreateWriteRender"
            creator_plugin = None
            for creator in api.discover(api.Creator):
                if creator.__name__ == create_name:
                    creator_plugin = creator
                    break

            # recreate the write node and restore its settings
            creator_plugin(data["subset"], data["asset"]).process()

            node = nuke.toNode(data["subset"])
            node.setXYpos(data["xpos"], data["ypos"])
            node.setInput(0, data["input"])
            node["publish"].setValue(data["publish"])
            node["render"].setValue(data["render"])
            node["render_farm"].setValue(data["render_farm"])
            node["review"].setValue(data["review"])
예제 #8
0
    def discover(self):
        """Set up Actions cache. Run this for each new project."""
        if not self.dbcon.Session.get("AVALON_PROJECT"):
            # Without an active project there is nothing to discover
            self._registered_actions = []
            return

        # Generic actions registered with the API
        discovered = api.discover(api.Action)

        # Plus the application-launcher actions for this project
        project_doc = self.dbcon.find_one({"type": "project"})
        discovered.extend(lib.get_application_actions(project_doc))

        self._registered_actions = discovered
예제 #9
0
def test_avalon_plugin_presets(monkeypatch, printer):
    """Check that preset overrides are applied to discovered Creators."""
    pype.install()
    api.register_host(Test())
    api.register_plugin(api.Creator, MyTestCreator)

    plugins = api.discover(api.Creator)
    printer("Test if we got our test plugin")
    assert MyTestCreator in plugins

    for plugin in (p for p in plugins if p.__name__ == "MyTestCreator"):
        printer("Test if we have overriden existing property")
        assert plugin.my_test_property == "B"
        printer("Test if we have overriden superclass property")
        assert plugin.active is False
        printer("Test if we have added new property")
        assert plugin.new_property == "new"
예제 #10
0
def build_layers(sequences):
    """Build render-layer sequences via the RenderLayerLoader plugin."""
    if not sequences:
        return

    # Root and frame range are read from the first sequence entry
    first = next(iter(sequences.values()))

    loader = next(Plugin for Plugin in api.discover(api.Loader)
                  if Plugin.__name__ == "RenderLayerLoader")
    loader.build_sequences(sequences,
                           first["root"],
                           group_name="master",
                           stamp_name="renderLayers",
                           start=first["start"],
                           end=first["end"])
예제 #11
0
    def on_project_changed(self, index):
        """Enter the project selected at *index*.

        Updates the session/environment, discovers the project's actions,
        pushes the visible silo items to the model, and records a
        navigation frame.
        """
        # NOTE(review): `model` is resolved from an outer scope, not `self`
        # — presumably a module-level model; verify against the caller.
        name = model.data(index, "name")
        api.Session["AVALON_PROJECT"] = name

        # Establish a connection to the project database
        self.log("Connecting to %s" % name, level=INFO)

        frame = self.current_frame()
        project = io.find_one({"type": "project"})

        assert project is not None, "This is a bug"

        frame["config"] = project["config"]

        # Use project root if exists or default root will be used
        # (NOTE): The root path from `self._root` may have path sep appended
        #         because it's been processed by `os.path.realpath` in
        #         `app.main`
        root = project["data"].get("root", self._root)
        os.environ["AVALON_PROJECTS"] = root
        api.Session["AVALON_PROJECTS"] = root

        # Get available project actions and the application actions
        actions = api.discover(api.Action)
        apps = lib.get_apps(project)
        self._registered_actions[:] = actions + apps

        # One model item per silo, filtered by visibility
        silos = io.distinct("silo")
        self._model.push([
            dict({
                "name": silo,
                "icon": DEFAULTS["icon"]["silo"],
            })
            for silo in sorted(silos)
            if self._get_silo_visible(silo)
        ])

        # Expose project data as `project_<key>` environment entries
        frame["project"] = project["_id"]
        frame["environment"]["project"] = name
        frame["environment"].update({
            "project_%s" % key: str(value)
            for key, value in project["data"].items()
        })

        self._frames.append(frame)
        self.pushed.emit(name)
예제 #12
0
    def effect_loader(self, representation):
        """
        Gets Loader plugin for effects and loads the representation.

        Arguments:
            representation (dict): avalon db entity

        Returns:
            The container returned by `api.load`.
        """
        loader_name = "LoadLuts"

        # Pick the first discovered Loader with the expected name
        loader_plugin = None
        for Loader in api.discover(api.Loader):
            if Loader.__name__ == loader_name:
                loader_plugin = Loader
                break

        return api.load(Loader=loader_plugin,
                        representation=representation["_id"])
예제 #13
0
    def init(self):
        """Populate the model with active projects and discover actions."""
        terminal.log("initialising..")
        header = "Root"

        def project_visible(data):
            return data.get("visible", True)  # Discard hidden projects

        def project_member(data):
            user = getpass.getuser().lower()
            member = data.get("role", dict()).get("member", list())
            return user in member

        # Optionally restrict the listing to projects the user is member of
        if os.getenv("AVALON_LAUNCHER_USE_PROJECT_MEMBER"):
            project_active = project_member
        else:
            project_active = project_visible

        items = []
        for project in sorted(io.projects(), key=lambda doc: doc["name"]):
            if not project_active(project["data"]):
                continue
            items.append(dict({
                "_id": project["_id"],
                "icon": DEFAULTS["icon"]["project"],
                "name": project["name"],
            }, **project["data"]))
        self._model.push(items)

        # Navigation starts from a single empty frame
        frame = {"environment": {}}
        self._frames[:] = [frame]

        # Discover all registered actions
        discovered_actions = api.discover(api.Action)
        self._registered_actions[:] = discovered_actions

        # Validate actions based on compatibility
        actions = self.collect_compatible_actions(discovered_actions)
        self._actions.push(actions)

        self.pushed.emit(header)
        self.navigated.emit()
        terminal.log("ready")
예제 #14
0
def load_look(look, overload=False):
    """Load look subset if it's not been loaded
    """
    representation = io.find_one({
        "type": "representation",
        "parent": look["versionId"],
        "name": "LookDev"
    })
    representation_id = str(representation["_id"])

    # Containers already holding this exact look representation
    matching_containers = lib.lsAttrs({
        "id": AVALON_CONTAINER_ID,
        "loader": "LookLoader",
        "representation": representation_id
    })

    is_loaded = False
    for container in matching_containers:
        if not overload:
            # Already present and no overload requested: reuse it
            log.info("Reusing loaded look ..")
            return parse_container(container)

        is_loaded = True
        log.info("Overload look ..")
        break

    if not is_loaded:
        # Not loaded
        log.info("Using look for the first time ..")

    loaders = api.loaders_from_representation(api.discover(api.Loader),
                                              representation_id)
    Loader = next((i for i in loaders if i.__name__ == "LookLoader"), None)
    if Loader is None:
        raise RuntimeError("Could not find LookLoader, this is a bug")

    container = api.load(Loader,
                         representation,
                         options={"overload": overload})
    return container
예제 #15
0
    def write_create(self):
        """Create a render write node via the CreateWriteRender plugin.

        The subset name is derived from the current task in
        `self.data_tmp`, with non-alphanumeric characters stripped,
        e.g. task "comp_01" -> subset "renderComp01Main".

        Returns:
            The result of the Creator plugin's `process()` call.
        """
        task = self.data_tmp["task"]
        sanitized_task = re.sub('[^0-9a-zA-Z]+', '', task)
        subset_name = "render{}Main".format(sanitized_task.capitalize())

        create_name = "CreateWriteRender"

        # Pick the first discovered Creator with the expected name
        creator_plugin = None
        for creator in api.discover(api.Creator):
            if creator.__name__ == create_name:
                creator_plugin = creator
                break

        return creator_plugin(subset_name, self._asset).process()
예제 #16
0
    def read_loader(self, representation):
        """
        Gets Loader plugin for image sequence or mov and loads it.

        Arguments:
            representation (dict): avalon db entity

        Returns:
            The container returned by `api.load`.
        """
        context = representation["context"]

        # Movs get a dedicated loader; everything else loads as a sequence
        loader_name = "LoadSequence"
        if "mov" in context["representation"]:
            loader_name = "LoadMov"

        # Pick the first discovered Loader with the expected name
        loader_plugin = None
        for Loader in api.discover(api.Loader):
            if Loader.__name__ == loader_name:
                loader_plugin = Loader
                break

        return api.load(Loader=loader_plugin,
                        representation=representation["_id"])
예제 #17
0
    def on_project_changed(self, index):
        """Enter the project selected at *index*.

        Updates the session, discovers actions, and pushes either silo
        items or top-level asset items to the model, then records a
        navigation frame built on top of the project document.
        """
        # NOTE(review): `model` is resolved from an outer scope, not `self`
        # — presumably a module-level model; verify against the caller.
        name = model.data(index, "name")
        api.Session["AVALON_PROJECT"] = name

        # Establish a connection to the project database
        self.log("Connecting to %s" % name, level=INFO)

        project = io.find_one({"type": "project"})

        assert project is not None, "This is a bug"

        # Get available project actions and the application actions
        actions = api.discover(api.Action)
        apps = lib.get_apps(project)
        self._registered_actions[:] = actions + apps

        db_assets = io.find({"type": "asset"})
        # Backwards compatibility with silo: if any asset has no silo at
        # all, fall back to listing assets directly instead of silos.
        silos = db_assets.distinct("silo")
        if silos and None in silos:
            silos = None

        if not silos:
            # No silos: list top-level assets (those without a visualParent)
            assets = list()
            for asset in db_assets.sort("name", 1):
                # _not_set_ is for cases when visualParent is not used
                vis_p = asset.get("data", {}).get("visualParent", "_not_set_")
                if vis_p is None:
                    assets.append(asset)
                elif vis_p == "_not_set_":
                    assets.append(asset)

            self._model.push([
                dict({
                    "_id": asset["_id"],
                    "name": asset["name"],
                    "type": asset["type"],
                    "icon": DEFAULTS["icon"]["asset"]
                }) for asset in assets
            ])

        else:
            # One model item per silo
            self._model.push([
                dict({
                    "name": silo,
                    "icon": DEFAULTS["icon"]["silo"],
                    "type": "silo"
                }) for silo in sorted(silos)
            ])

        # The project document itself becomes the navigation frame;
        # note that this aliases (and therefore mutates) `project`.
        frame = project
        frame["project"] = project["_id"]
        frame["environment"] = {
            "project": {
                'name': name,
                'code': project.get('data', {}).get('code')
            }
        }
        # Expose project data as `project_<key>` environment entries
        frame["environment"].update({
            "project_%s" % key: str(value)
            for key, value in project["data"].items()
        })

        self._frames.append(frame)
        self.pushed.emit(name)
예제 #18
0
    def _process(self, libpath, layout_container, container_name,
                 representation, actions, parent):
        """Load every element of a layout JSON file into a new collection.

        Args:
            libpath: path to the layout .json file to read
            layout_container: container whose children receive each loaded
                element container
            container_name: name for the new layout collection
            representation: recorded as a dependency on created animation
                subsets
            actions: optional armature actions keyed by instance name, or
                a falsy value to skip action assignment
            parent: collection that receives created animation subsets

        Returns:
            The newly created layout collection.
        """
        with open(libpath, "r") as fp:
            data = json.load(fp)

        scene = bpy.context.scene
        layout_collection = bpy.data.collections.new(container_name)
        scene.collection.children.link(layout_collection)

        all_loaders = api.discover(api.Loader)

        avalon_container = bpy.data.collections.get(
            blender.pipeline.AVALON_CONTAINERS)

        # Load each element with a loader compatible with its representation
        for element in data:
            reference = element.get('reference')
            family = element.get('family')

            loaders = api.loaders_from_representation(all_loaders, reference)
            loader = self._get_loader(loaders, family)

            if not loader:
                continue

            instance_name = element.get('instance_name')

            element_container = api.load(loader,
                                         reference,
                                         namespace=instance_name)

            if not element_container:
                continue

            # Re-parent the loaded container under the layout container
            avalon_container.children.unlink(element_container)
            layout_container.children.link(element_container)

            element_metadata = element_container.get(
                blender.pipeline.AVALON_PROPERTY)

            # Unlink the object's collection from the scene collection and
            # link it in the layout collection
            element_collection = element_metadata.get('obj_container')
            scene.collection.children.unlink(element_collection)
            layout_collection.children.link(element_collection)

            objects = element_metadata.get('objects')
            element_metadata['instance_name'] = instance_name

            objects_to_transform = []

            creator_plugin = get_creator_by_name(self.animation_creator_name)
            if not creator_plugin:
                raise ValueError("Creator plugin \"{}\" was not found.".format(
                    self.animation_creator_name))

            # Rigs get an animation subset per armature; models only get
            # their transform applied
            if family == 'rig':
                for o in objects:
                    if o.type == 'ARMATURE':
                        objects_to_transform.append(o)
                        # Create an animation subset for each rig
                        o.select_set(True)
                        asset = api.Session["AVALON_ASSET"]
                        c = api.create(creator_plugin,
                                       name="animation_" +
                                       element_collection.name,
                                       asset=asset,
                                       options={"useSelection": True},
                                       data={"dependencies": representation})
                        scene.collection.children.unlink(c)
                        parent.children.link(c)
                        o.select_set(False)
                        break
            elif family == 'model':
                objects_to_transform = objects

            for o in objects_to_transform:
                self.set_transform(o, element.get('transform'))

                if actions:
                    if o.type == 'ARMATURE':
                        action = actions.get(instance_name, None)

                        if action:
                            if o.animation_data is None:
                                o.animation_data_create()
                            o.animation_data.action = action

        return layout_collection
예제 #19
0
def update_scene(set_container, containers, current_data, new_data, new_file):
    """Updates the hierarchy, assets and their matrix

    Updates the following within the scene:
        * Setdress hierarchy alembic
        * Matrix
        * Parenting
        * Representations

    It removes any assets which are not present in the new build data

    Args:
        set_container (dict): the setdress container of the scene
        containers (list): the list of containers under the setdress container
        current_data (dict): the current build data of the setdress
        new_data (dict): the new build data of the setdress
        new_file: path to the new package .json file; its ".abc" sibling
            is loaded as the new hierarchy

    Returns:
        processed_containers (list): all new and updated containers

    """

    from colorbleed.maya.lib import DEFAULT_MATRIX, get_container_transforms

    set_namespace = set_container['namespace']

    # Update the setdress hierarchy alembic
    set_root = get_container_transforms(set_container, root=True)
    set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0]
    set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root,
                                                  referenceNode=True)
    new_alembic = new_file.replace(".json", ".abc")
    assert os.path.exists(new_alembic), "%s does not exist." % new_alembic
    with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)):
        cmds.file(new_alembic,
                  loadReference=set_hierarchy_reference,
                  type="Alembic")

    identity = DEFAULT_MATRIX[:]

    processed_namespaces = set()
    processed_containers = list()

    new_lookup = _instances_by_namespace(new_data)
    old_lookup = _instances_by_namespace(current_data)
    for container in containers:
        container_ns = container['namespace']

        # Consider it processed here, even if it fails we want to store that
        # the namespace was already available.
        processed_namespaces.add(container_ns)
        processed_containers.append(container['objectName'])

        if container_ns in new_lookup:
            root = get_container_transforms(container, root=True)
            if not root:
                log.error("Can't find root for %s", container['objectName'])
                continue

            old_instance = old_lookup.get(container_ns, {})
            new_instance = new_lookup[container_ns]

            # Update the matrix
            # check matrix against old_data matrix to find local overrides
            current_matrix = cmds.xform(root,
                                        query=True,
                                        matrix=True,
                                        objectSpace=True)

            original_matrix = old_instance.get("matrix", identity)
            has_matrix_override = not matrix_equals(current_matrix,
                                                    original_matrix)

            if has_matrix_override:
                log.warning("Matrix override preserved on %s", container_ns)
            else:
                new_matrix = new_instance.get("matrix", identity)
                cmds.xform(root, matrix=new_matrix, objectSpace=True)

            # Update the parenting
            if old_instance.get("parent", None) != new_instance["parent"]:

                parent = to_namespace(new_instance['parent'], set_namespace)
                if not cmds.objExists(parent):
                    log.error("Can't find parent %s", parent)
                    continue

                # Set the new parent
                cmds.lockNode(root, lock=False)
                root = cmds.parent(root, parent, relative=True)
                cmds.lockNode(root, lock=True)

            # Update the representation
            representation_current = container['representation']
            representation_old = old_instance['representation']
            representation_new = new_instance['representation']
            has_representation_override = (representation_current !=
                                           representation_old)

            if representation_new != representation_current:

                if has_representation_override:
                    log.warning(
                        "Your scene had local representation "
                        "overrides within the set. New "
                        "representations not loaded for %s.", container_ns)
                    continue

                # We check it against the current 'loader' in the scene instead
                # of the original data of the package that was loaded because
                # an Artist might have made scene local overrides
                if new_instance['loader'] != container['loader']:
                    log.warning(
                        "Loader is switched - local edits will be "
                        "lost. Removing: %s", container_ns)

                    # Remove this from the "has been processed" list so it's
                    # considered as new element and added afterwards.
                    processed_containers.pop()
                    processed_namespaces.remove(container_ns)
                    api.remove(container)
                    continue

                # Check whether the conversion can be done by the Loader.
                # They *must* use the same asset, subset and Loader for
                # `api.update` to make sense.
                old = io.find_one({"_id": io.ObjectId(representation_current)})
                new = io.find_one({"_id": io.ObjectId(representation_new)})
                is_valid = compare_representations(old=old, new=new)
                if not is_valid:
                    log.error("Skipping: %s. See log for details.",
                              container_ns)
                    continue

                new_version = new["context"]["version"]
                api.update(container, version=new_version)

        else:
            # Remove this container because it's not in the new data
            log.warning("Removing content: %s", container_ns)
            api.remove(container)

    # Add new assets
    all_loaders = api.discover(api.Loader)
    for representation_id, instances in new_data.items():

        # Find the compatible loaders
        loaders = api.loaders_from_representation(all_loaders,
                                                  representation_id)
        for instance in instances:

            # Already processed in update functionality
            if instance['namespace'] in processed_namespaces:
                continue

            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=set_container['namespace'],
                             root=set_root)

            # Add to the setdress container
            cmds.sets(container, addElement=set_container['objectName'])

            processed_containers.append(container)

    return processed_containers
예제 #20
0
def swap_to_published_model(*args):
    """Hide the working model and load the published version of it

    This is for the case that artist was working on model and lookDev all
    together, while publishing turntable require the model to be published
    version.

    Using this tool could load the latest version via the instance that was
    used to publish this model.

    """
    MSG = "Please select '|ROOT' node, and '|ROOT' node only."

    # Exactly one '|ROOT' transform must be selected
    selection = cmds.ls(selection=True, long=True, type="transform")
    assert len(selection) == 1, MSG

    root = selection[0]
    assert root.endswith("|ROOT"), MSG

    # All model publish instances in the scene
    instances = lib.lsAttrs({
        "id": "pyblish.avalon.instance",
        "family": "reveries.model"
    })

    # Find the instance set that contains the selected root to resolve
    # which asset/subset to load
    project = api.Session.get("AVALON_PROJECT")
    asset = None
    subset = None
    for set_ in cmds.listSets(object=root) or []:
        if set_ in instances:
            asset = cmds.getAttr(set_ + ".asset")
            subset = cmds.getAttr(set_ + ".subset")
            break

    assert project is not None, "Project undefined, this is not right."
    assert asset and subset, "Model instance not found."
    assert len(instances) == 1, "Too many model instances in scene."

    # Latest (-1) mayaBinary representation of the subset
    representation = io.locate([project, asset, subset, -1, "mayaBinary"])

    Loaders = api.discover(api.Loader)
    Loader = next(
        (loader for loader in Loaders if loader.__name__ == "ModelLoader"),
        None)

    assert Loader is not None, "ModelLoader not found, this is a bug."
    assert representation is not None, "Representation not found."

    container = api.load(Loader, representation)

    group = pipeline.get_group_from_container(container["objectName"])

    # Keep the loaded model under the same parent as the working model
    parent = cmds.listRelatives(root, parent=True)
    if parent:
        cmds.parent(group, parent)

    # Re-assign shaders
    nodes = cmds.listRelatives(root, allDescendents=True, fullPath=True)
    shader_by_id = lib.serialise_shaders(nodes)
    lib.apply_shaders(shader_by_id)

    # Hide unpublished model
    cmds.setAttr(root + ".visibility", False)
예제 #21
0
def load_package(filepath, name, namespace=None):
    """Load a package that was gathered elsewhere.

    A package is a group of published instances, possibly with additional data
    in a hierarchy.

    """

    if namespace is None:
        # Derive a namespace for the package from its file name
        namespace = os.path.basename(filepath).split(".")[0]
        # NOTE(review): the return value of `unique_namespace` is discarded
        # here — confirm whether it should be assigned back to `namespace`.
        unique_namespace(namespace)
    assert isinstance(namespace, basestring)

    # Read the setdress package data from disk
    with open(filepath, "r") as fp:
        package = json.load(fp)

    # Reference the setdress alembic hierarchy.
    #   It is imported into the same namespace that the package's instances
    #   will be loaded into afterwards.
    alembic_path = filepath.replace(".json", ".abc")
    group_name = "{}:{}".format(namespace, name)
    hierarchy = cmds.file(alembic_path,
                          reference=True,
                          namespace=namespace,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=group_name,
                          typ="Alembic")

    # The reference group is the top root node of the loaded hierarchy
    root = group_name

    all_loaders = api.discover(api.Loader)

    containers = []
    for representation_id, instances in package.items():

        # Loaders compatible with this representation
        compatible = api.loaders_from_representation(all_loaders,
                                                     representation_id)

        containers.extend(
            _add(instance=instance,
                 representation_id=representation_id,
                 loaders=compatible,
                 namespace=namespace,
                 root=root)
            for instance in instances
        )

    # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
    # Cripple the original avalon containers so they don't show up in the
    # manager
    # for container in containers:
    #     cmds.setAttr("%s.id" % container,
    #                  "colorbleed.setdress.container",
    #                  type="string")

    # TODO: Lock all loaded nodes
    #   This is to ensure the hierarchy remains unaltered by the artists
    # for node in nodes:
    #      cmds.lockNode(node, lock=True)

    return containers + hierarchy
예제 #22
0
    def load(self, context, name, namespace, options):
        """Load a set of FBX animation instances described by a JSON file.

        Reads the JSON file at ``self.fname`` (a list of elements, each
        holding a representation ``_id``, an ``instance_name`` and loader
        options), loads every element with the ``AnimationFBXLoader``, and
        finally creates and imprints an avalon container asset grouping
        the result.

        Args:
            context (dict): Full representation context.
            name (str): Subset name (part of the loader API; unused here).
            namespace (str): Namespace (part of the loader API; unused here).
            options (dict): Loader options (part of the loader API; unused
                here — each element carries its own options).

        Returns:
            list: Paths of all assets under the created asset directory.
        """
        from avalon import api, pipeline
        from avalon.unreal import lib
        from avalon.unreal import pipeline as unreal_pipeline
        import unreal

        # Create directory for asset and avalon container
        root = "/Game/Avalon/Assets"
        asset = context.get('asset').get('name')
        suffix = "_CON"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            "{}/{}".format(root, asset), suffix="")

        container_name += suffix

        unreal.EditorAssetLibrary.make_directory(asset_dir)

        libpath = self.fname

        # Each element points at a representation and carries its own
        # per-instance loader options.
        with open(libpath, "r") as fp:
            elements = json.load(fp)

        all_loaders = api.discover(api.Loader)

        for element in elements:
            reference = element.get('_id')

            loaders = api.loaders_from_representation(all_loaders, reference)

            # Only the FBX animation loader is supported for these elements;
            # skip anything it cannot handle.
            loader = next(
                (candidate for candidate in loaders
                 if candidate.__name__ == "AnimationFBXLoader"),
                None)
            if not loader:
                continue

            instance_name = element.get('instance_name')

            api.load(loader,
                     reference,
                     namespace=instance_name,
                     options=element)

        # Create Asset Container
        lib.create_avalon_container(container=container_name, path=asset_dir)

        data = {
            "schema": "openpype:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "asset": asset,
            "namespace": asset_dir,
            "container_name": container_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["_id"],
            "parent": context["representation"]["parent"],
            "family": context["representation"]["context"]["family"]
        }
        unreal_pipeline.imprint("{}/{}".format(asset_dir, container_name),
                                data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True)

        return asset_content
예제 #23
0
    def process(self, name, namespace, context, data):
        """Load an atom animation file onto its dependency rig.

        Loads the rig this animation was published against (the first entry
        of the representation's ``dependencies``) via the ``RigLoader``,
        then imports the atom animation from ``self.fname`` onto the rig's
        ``controls_SET`` members.

        Args:
            name (str): Subset name, forwarded to the rig load.
            namespace (str): Namespace for both the rig and the imported
                animation nodes.
            context (dict): Full representation context.
            data (dict): Loader options; ``post_process=False`` skips the
                trailing post-process step.

        Raises:
            RuntimeError: If no ``RigLoader`` is registered.
            TypeError: If the loaded rig container has no ``controls_SET``.
        """
        from maya import cmds
        from avalon import maya, api

        cmds.loadPlugin("atomImportExport.mll", quiet=True)

        # Load the rig using the RigLoader
        # FIX: the original referenced `avalon.maya.Loader`, but only
        # `maya` (avalon.maya) is imported here — `avalon` was unbound.
        loader = {
            Loader.__name__: Loader
            for Loader in api.discover(maya.Loader)
        }.get("RigLoader", None)
        if loader is None:
            raise RuntimeError("Unable to find RigLoader")

        rig = context["representation"]["dependencies"][0]
        container = maya.load(
            loader,
            rig,
            name=name,
            namespace=namespace,

            # Skip creation of Animation instance
            data={"post_process": False})

        try:
            control_set = next(node
                               for node in cmds.sets(container, query=True)
                               if node.endswith("controls_SET"))
        except StopIteration:
            # FIX: the original left the "%s" placeholder unfilled
            raise TypeError("%s is missing controls_SET" % container)

        cmds.select(control_set)

        # atomImport option string; empty leading fields keep the
        # plug-in's expected positional layout.
        options = ";".join([
            "",
            "",
            "targetTime=3",
            "option=insert",
            "match=hierarchy",
            "selected=selectedOnly",
            "search=",
            "replace=",
            "prefix=",
            "suffix=",
            "mapFile=",
        ])

        cmds.select(
            control_set,
            replace=True,

            # Support controllers being embedded in
            # additional selection sets.
            noExpand=False)

        nodes = cmds.file(
            self.fname,
            i=True,
            type="atomImport",
            renameAll=True,
            namespace=namespace,
            options=options,
            returnNewNodes=True,
        )

        # Expose everything this load produced for later containerisation
        self[:] = nodes + cmds.sets(container, query=True) + [container]

        # Trigger post process only if it's not been set to disabled
        if data.get("post_process", True):
            self._post_process(name, namespace, context, data)