Пример #1
0
    def process(self, instance):
        """Upload each published output file to AVALON_LOCATION via HTTP PUT.

        Reads connection settings from the Avalon session and streams
        every path listed in ``instance.data["output"]`` to the remote
        location, preserving the path layout relative to the root.

        Args:
            instance: Pyblish instance whose ``data["output"]`` lists
                absolute paths under the registered root.

        Raises:
            AssertionError: If an output path is not under the root.
            Exception: If the server responds with a non-2xx status.
        """
        from avalon import api
        from avalon.vendor import requests

        # Dependencies
        AVALON_LOCATION = api.Session["AVALON_LOCATION"]
        AVALON_USERNAME = api.Session["AVALON_USERNAME"]
        AVALON_PASSWORD = api.Session["AVALON_PASSWORD"]

        # Credentials are loop-invariant; build the auth object once.
        auth = requests.auth.HTTPBasicAuth(
            AVALON_USERNAME, AVALON_PASSWORD
        )

        for src in instance.data["output"]:
            assert src.startswith(api.registered_root()), (
                "Output didn't reside on root, this is a bug"
            )

            # Map the local root onto the remote upload endpoint and
            # normalise to forward slashes for the URL.
            dst = src.replace(
                api.registered_root(),
                AVALON_LOCATION + "/upload"
            ).replace("\\", "/")

            self.log.info("Uploading %s -> %s" % (src, dst))

            # Open in binary mode: the payload is declared as
            # application/octet-stream, and text mode would corrupt
            # binary files (and newline-translate on Windows).
            with open(src, "rb") as f:
                response = requests.put(
                    dst,
                    data=f,
                    auth=auth,
                    headers={"Content-Type": "application/octet-stream"}
                )

                if not response.ok:
                    raise Exception(response.text)
Пример #2
0
    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """

        # Family of the instance itself comes first, then any extras.
        families = []
        current_families = instance.data.get("families", list())
        instance_family = instance.data.get("family", None)

        if instance_family is not None:
            families.append(instance_family)
        families += current_families

        self.log.debug("Registered root: {}".format(api.registered_root()))
        # create relative source path for DB
        try:
            source = instance.data['source']
        except KeyError:
            source = context.data["currentFile"]
            # os.getenv returns None when the variable is unset, and
            # str.replace(None, ...) raises TypeError -- only remap the
            # studio mount point onto the root when it is configured.
            mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
            if mount:
                source = source.replace(mount, api.registered_root())
            relative_path = os.path.relpath(source, api.registered_root())
            source = os.path.join("{root}", relative_path).replace("\\", "/")

        self.log.debug("Source: {}".format(source))
        version_data = {
            "families": families,
            "time": context.data["time"],
            "author": context.data["user"],
            "source": source,
            "comment": context.data.get("comment"),
            "machine": context.data.get("machine"),
            "fps": context.data.get("fps", instance.data.get("fps"))
        }

        # "intent" may be stored either as a plain value or as a dict
        # with a "value" key -- normalise to the plain value.
        intent_value = instance.context.data.get("intent")
        if intent_value and isinstance(intent_value, dict):
            intent_value = intent_value.get("value")

        if intent_value:
            version_data["intent"] = intent_value

        # Include optional data if present in the instance
        optionals = [
            "frameStart", "frameEnd", "step", "handles", "handleEnd",
            "handleStart", "sourceHashes"
        ]
        for key in optionals:
            if key in instance.data:
                version_data[key] = instance.data[key]

        return version_data
    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """
        # The instance's own family leads, followed by any extra families.
        families = []
        family = instance.data.get("family", None)
        if family is not None:
            families.append(family)
        families.extend(instance.data.get("families", list()))

        def _templated(path):
            # Swap the registered root for the "{root}" placeholder and
            # normalise separators for storage in the database.
            return path.replace(api.registered_root(),
                                "{root}").replace("\\", "/")

        # create relative source path for DB
        source = _templated(context.data["currentMaking"])
        work_dir = _templated(api.Session.get("AVALON_WORKDIR"))

        version_data = {
            "families": families,
            "time": context.data["time"],
            "author": context.data["user"],
            "task": api.Session.get("AVALON_TASK"),
            "source": source,
            "workDir": work_dir,
            "hash": context.data["sourceFingerprint"]["currentHash"],
            "comment": context.data.get("comment"),
            "dependencies": instance.data.get("dependencies", dict()),
            "dependents": dict(),
        }

        # Copy over whichever optional keys the instance provides.
        for key in ("startFrame", "endFrame", "step", "handles",
                    "hasUnversionedSurfaces"):
            if key in instance.data:
                version_data[key] = instance.data[key]

        return version_data
Пример #4
0
def setup():
    """Prepare a temporary Mindbender project and Maya host for tests.

    Installs pyblish-maya and the Maya host, activates the "test"
    database, registers a fresh temp directory as the project root,
    saves the validated config/inventory fixtures, then exposes the
    created project and asset through MINDBENDER_* environment
    variables.

    NOTE(review): relies on module-level names (`self`, `maya`,
    PROJECT_NAME, ASSET_NAME, io, api, schema, inventory, pyblish_maya,
    os, tempfile) defined outside this view -- confirm against the
    enclosing test module.
    """
    pyblish_maya.setup()
    api.install(maya)
    io.install("test")

    # Fresh, isolated root for every test run.
    self._tempdir = tempfile.mkdtemp()
    api.register_root(self._tempdir)

    schema.validate(self._config)
    schema.validate(self._inventory)

    inventory.save(name=PROJECT_NAME,
                   config=self._config,
                   inventory=self._inventory)

    # Re-fetch the documents just written so their ids can be exported.
    project = io.find_one({"type": "project", "name": PROJECT_NAME})
    asset = io.find_one({
        "type": "asset",
        "parent": project["_id"],
        "name": ASSET_NAME
    })

    # Setup environment
    os.environ["MINDBENDER__PROJECT"] = str(project["_id"])
    os.environ["MINDBENDER__ASSET"] = str(asset["_id"])
    os.environ["MINDBENDER_PROJECT"] = PROJECT_NAME
    os.environ["MINDBENDER_ASSET"] = asset["name"]
    os.environ["MINDBENDER_ASSETPATH"] = (
        "{root}/{project}/{silo}/{asset}".format(root=api.registered_root(),
                                                 project=PROJECT_NAME,
                                                 asset=ASSET_NAME,
                                                 silo="assets"))
    os.environ["MINDBENDER_SILO"] = "assets"
Пример #5
0
def setup():
    """Create and activate a temporary Avalon project for tests.

    Registers a temp directory as the root, exports the AVALON_*
    session environment, installs the Maya host, activates the project
    and saves the validated config/inventory fixtures.

    NOTE(review): relies on module-level names (`self`, `maya`,
    PROJECT_NAME, ASSET_NAME, TASK_NAME, io, api, schema, inventory,
    os, tempfile) defined outside this view -- confirm against the
    enclosing test module.
    """
    # Fresh, isolated root for every test run.
    self._tempdir = tempfile.mkdtemp()
    api.register_root(self._tempdir)

    # Setup environment
    os.environ["AVALON_CONFIG"] = "polly"
    os.environ["AVALON_PROJECT"] = PROJECT_NAME
    os.environ["AVALON_ASSET"] = ASSET_NAME
    os.environ["AVALON_TASK"] = TASK_NAME
    os.environ["AVALON_ASSETPATH"] = ("{root}/{project}/{silo}/{asset}".format(
        root=api.registered_root(),
        project=PROJECT_NAME,
        asset=ASSET_NAME,
        silo="assets"))
    os.environ["AVALON_SILO"] = "assets"

    api.install(maya)
    io.activate_project(PROJECT_NAME)

    schema.validate(self._config)
    schema.validate(self._inventory)

    inventory.save(name=PROJECT_NAME,
                   config=self._config,
                   inventory=self._inventory)
    def run(self):
        """Batch-publish all collected rig subsets, then integrate.

        Groups rig subsets by their source scene so each scene is
        processed once, publishes every group up to extraction, and
        only runs final integration after all extractions succeeded.
        """
        from avalon import api, io
        import maya.standalone as standalone
        import pyblish.util

        standalone.initialize(name="python")

        # Get project root path and rig source files.
        jobs = dict()
        root = api.registered_root()
        for rig_version, rig_subset in self.rig_versions.items():
            version_id = io.ObjectId(rig_version)
            latest_ver = io.find_one({"type": "version", "_id": version_id})
            # Resolve the "{root}" template stored in the database into
            # a concrete, forward-slashed path.
            rig_source = latest_ver["data"]["source"].format(root=root)
            rig_source = rig_source.replace("\\", "/")
            if rig_source not in jobs:
                jobs[rig_source] = list()
            # One source scene may contains multiple rig subsets.
            jobs[rig_source].append(rig_subset)

        # Run publish process, till extraction
        for source, rig_subsets in jobs.items():
            self._publish(source, rig_subsets)

        # Run final integration only if all extraction succeed
        for context in self.contexts:
            # NOTE(review): flag presumably consumed by an unlock plugin
            # elsewhere in the pipeline -- confirm before renaming.
            context.data["_autoPublishingSkipUnlock"] = True
            pyblish.util.integrate(context=context)

        standalone.uninitialize()
Пример #7
0
    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """
        def _templated(path):
            # Swap the registered root for the "{root}" placeholder and
            # normalise separators for storage in the database.
            return path.replace(api.registered_root(),
                                "{root}").replace("\\", "/")

        # create relative source path for DB
        source = _templated(context.data["currentMaking"])
        work_dir = _templated(api.Session.get("AVALON_WORKDIR"))

        version_data = {
            "time": context.data["time"],
            "author": context.data["user"],
            "task": api.Session.get("AVALON_TASK"),
            "source": source,
            "workDir": work_dir,
            "comment": context.data.get("comment"),
            "dependencies": instance.data.get("dependencies", dict()),
            "dependents": dict(),
        }

        # Copy over whichever optional keys the instance provides.
        for key in ("startFrame", "endFrame", "step", "handles",
                    "hasUnversionedSurfaces", "deadlineJobId"):
            if key in instance.data:
                version_data[key] = instance.data[key]

        return version_data
    def process(self, instance):
        """Validate that the staging dir is on the same drive as the root.

        Raises:
            Exception: if the render sequences live on another drive.
        """
        import os
        from avalon import api

        staging = os.path.realpath(instance.data["stagingDir"])
        root = os.path.realpath(api.registered_root())

        staging_drive = os.path.splitdrive(staging)[0]
        root_drive = os.path.splitdrive(root)[0]

        # Same drive: nothing to report.
        if staging_drive == root_drive:
            return

        self.log.error("Render sequences should be in '%s'." % root)
        raise Exception("Please move render sequences to '%s' drive."
                        % root)
Пример #9
0
    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """

        # Family of the instance itself comes first, then any extras.
        families = []
        current_families = instance.data.get("families", list())
        instance_family = instance.data.get("family", None)

        if instance_family is not None:
            families.append(instance_family)
        families += current_families

        # Prefer an explicitly collected source; otherwise derive a
        # root-relative "{root}" template path from the current file.
        try:
            source = instance.data['source']
        except KeyError:
            source = context.data["currentFile"]

            relative_path = os.path.relpath(source, api.registered_root())
            source = os.path.join("{root}", relative_path).replace("\\", "/")

        version_data = {
            "families": families,
            "time": context.data["time"],
            "author": context.data["user"],
            "source": source,
            "comment": context.data.get("comment")
        }

        # Include optional data if present in the instance.  Membership
        # was just checked, so index directly instead of the redundant
        # .get(key, None) (consistent with the sibling implementations).
        optionals = [
            "startFrame", "endFrame", "step", "handles", "colorspace", "fps",
            "outputDir"
        ]

        for key in optionals:
            if key in instance.data:
                version_data[key] = instance.data[key]

        return version_data
Пример #10
0
    def register(self, instance):
        """Register the instance and its representations in the database.

        Verifies publish atomicity, resolves the target asset/subset,
        computes the next version number (validating it against the
        assumed destination), inserts the version document, and queues
        file transfers while collecting one representation document per
        file suffix.

        Args:
            instance: the instance being published

        Raises:
            AssertionError: on failed upstream results, missing staging
                dir, or unknown project/asset.
            AttributeError: if the assumed version number no longer
                matches the database.
        """

        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context
        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        # extra check if stagingDir actually exists and is available

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        # Only the publish template is needed from the project document.
        project = io.find_one({"type": "project"},
                              projection={"config.template.publish": True})

        asset = io.find_one({
            "type": "asset",
            "name": ASSET,
            "parent": project["_id"]
        })

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True},
            sort=[("name", -1)])

        # Versions are 1-based; continue from the latest one if any.
        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.info("Verifying version from assumed destination")

        # Guard against a concurrent publish bumping the version between
        # template assumption and registration.
        assumed_data = instance.data["assumedTemplateData"]
        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        template_data = {
            "root": root,
            "project": PROJECT,
            "silo": asset['silo'],
            "asset": ASSET,
            "subset": subset["name"],
            "version": version["name"]
        }

        template_publish = project["config"]["template"]["publish"]

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []

        # Each entry is either a list (frame collection) or a single
        # file name, both relative to the staging directory.
        for files in instance.data["files"]:

            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |       ||
            # |       ||
            # |       ||
            # |_______|
            #
            if isinstance(files, list):
                collection = files
                # Assert that each member has identical suffix
                _, ext = os.path.splitext(collection[0])
                assert all(ext == os.path.splitext(name)[1]
                           for name in collection), (
                               "Files had varying suffixes, this is a bug")

                assert not any(os.path.isabs(name) for name in collection)

                template_data["representation"] = ext[1:]

                for fname in collection:

                    src = os.path.join(stagingdir, fname)
                    dst = os.path.join(
                        template_publish.format(**template_data), fname)

                    instance.data["transfers"].append([src, dst])

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                fname = files
                assert not os.path.isabs(fname), (
                    "Given file name is a full path")
                _, ext = os.path.splitext(fname)

                template_data["representation"] = ext[1:]

                src = os.path.join(stagingdir, fname)
                dst = template_publish.format(**template_data)

                instance.data["transfers"].append([src, dst])

            representation = {
                "schema": "avalon-core:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "project": PROJECT,
                    "asset": ASSET,
                    "silo": asset['silo'],
                    "subset": subset["name"],
                    "version": version["name"],
                    "representation": ext[1:]
                }
            }
            representations.append(representation)

        self.log.info("Registering {} items".format(len(representations)))

        io.insert_many(representations)
Пример #11
0
    def extract_workfile(self, instance, staging_scene):
        """Extract a valid workfile for this corresponding publish.

        Args:
            instance (:class:`pyblish.api.Instance`): Instance data.
            staging_scene (str): path of staging scene.

        Returns:
            str: Path to workdir.

        """
        # Since the staging scene was renamed to "scene.xstage" for publish
        # rename the staging scene in the temp stagingdir
        staging_scene = os.path.join(os.path.dirname(staging_scene),
                                     "scene.xstage")

        # Setup the data needed to form a valid work path filename
        anatomy = pype.api.Anatomy()
        project_entity = instance.context.data["projectEntity"]

        data = {
            "root": api.registered_root(),
            "project": {
                "name": project_entity["name"],
                "code": project_entity["data"].get("code", '')
            },
            "asset": instance.data["asset"],
            "hierarchy": pype.api.get_hierarchy(instance.data["asset"]),
            "family": instance.data["family"],
            "task": instance.data.get("task"),
            "subset": instance.data["subset"],
            "version": 1,
            "ext": "zip",
        }

        # Get a valid work filename first with version 1
        file_template = anatomy.templates["work"]["file"]
        anatomy_filled = anatomy.format(data)
        work_path = anatomy_filled["work"]["path"]

        # Get the final work filename with the proper version
        data["version"] = api.last_workfile_with_version(
            os.path.dirname(work_path), file_template, data, [".zip"]
        )[1]

        # NOTE(review): anatomy_filled was formatted before the version
        # bump above, so this re-read still reflects version 1 -- confirm
        # whether `anatomy.format(data)` should be called again here.
        work_path = anatomy_filled["work"]["path"]
        base_name = os.path.splitext(os.path.basename(work_path))[0]

        staging_work_path = os.path.join(os.path.dirname(staging_scene),
                                         base_name + ".xstage"
                                         )

        # Rename this latest file after the workfile path filename
        os.rename(staging_scene, staging_work_path)

        # Required to set the current directory where the zip will end up
        # (process-wide side effect: chdir is not restored afterwards)
        os.chdir(os.path.dirname(os.path.dirname(staging_scene)))

        # Create the zip file
        zip_filepath = shutil.make_archive(base_name,
                                           "zip",
                                           os.path.dirname(staging_scene)
                                           )
        self.log.info(staging_scene)
        self.log.info(work_path)
        self.log.info(staging_work_path)
        self.log.info(os.path.dirname(os.path.dirname(staging_scene)))
        self.log.info(base_name)
        self.log.info(zip_filepath)

        # Create the work path on disk if it does not exist
        os.makedirs(os.path.dirname(work_path), exist_ok=True)
        shutil.copy(zip_filepath, work_path)

        return work_path
Пример #12
0
    def backwards_compatiblity(self, instance, version):
        """Maintain backwards compatibility with newly published assets

        With the introduction of the database in 2.0, the artist would be
        unable to publish in 2.0 and use the files in 1.0. Therefore, we
        introduce this mechanism which continue to write for 1.0 even
        when writing from the 2.0 pipeline.

        This behaviour is deprecated and is to be removed in a future release.

        """

        import os
        import json
        import errno
        from mindbender import api

        context = instance.context

        # Metadata
        #  _________
        # |         |.key = value
        # |         |
        # |         |
        # |         |
        # |         |
        # |_________|
        #
        stagingdir = instance.data.get("stagingDir")
        fname = os.path.join(stagingdir, ".metadata.json")

        # 1.0 layout: versions live under <assetpath>/publish/<subset>/
        root = os.environ["MINDBENDER_ASSETPATH"]
        instancedir = os.path.join(root, "publish", instance.data["subset"])

        try:
            os.makedirs(instancedir)
        except OSError as e:
            if e.errno != errno.EEXIST:  # Already exists
                self.log.critical("An unexpected error occurred.")
                raise

        # NOTE(review): presumably find_latest_version returns 0 for an
        # empty directory so the first version becomes 1 -- confirm.
        latest_version = api.find_latest_version(os.listdir(instancedir)) + 1
        versiondir = os.path.join(instancedir,
                                  api.format_version(latest_version))

        # Reuse existing 1.0 metadata when present; otherwise derive it
        # from the 2.0 version document and the current context.
        try:
            with open(fname) as f:
                version_1_0 = json.load(f)

        except IOError:
            version_1_0 = dict(
                version,
                **{
                    "schema":
                    "mindbender-core:version-1.0",

                    # Hard-coded during transition
                    "path":
                    versiondir.replace("\\", "/"),
                    "representations":
                    list(),
                    "version":
                    version["name"],

                    # Used to identify family of assets already on disk
                    "families":
                    instance.data.get("families", list()) +
                    [instance.data.get("family")],
                    "time":
                    context.data["time"],
                    "author":
                    context.data["user"],

                    # Record within which silo this asset was made.
                    "silo":
                    os.environ["MINDBENDER_SILO"],

                    # Collected by pyblish-maya
                    "source":
                    os.path.join(
                        "{root}",
                        os.path.relpath(
                            context.data["currentFile"],
                            os.path.join(
                                api.registered_root(),
                                os.environ["MINDBENDER_PROJECT"]))).replace(
                                    "\\", "/"),

                    # Discard database keys
                    "parent":
                    None,
                })

        # One 1.0 representation per staged file, path templated on
        # "{dirname}" / "{format}" for later resolution.
        for filename in instance.data.get("files", list()):
            name, ext = os.path.splitext(filename)
            version_1_0["representations"].append({
                "schema":
                "mindbender-core:representation-1.0",
                "format":
                ext,
                "path":
                os.path.join(
                    "{dirname}",
                    "%s{format}" % name,
                ).replace("\\", "/")
            })

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        with open(fname, "w") as f:
            json.dump(version_1_0, f, indent=4)

        self.log.info("Successfully wrote %s." % fname)
Пример #13
0
    def process(self, instance):
        """Integrate a published instance into the project's file layout.

        Verifies publish atomicity, ensures the subset exists in the
        database, creates the next version document, then hard-links
        (falling back to copying) every staged file into the templated
        publish directory, recording one representation per suffix.
        """
        import os
        import errno
        import shutil
        from pprint import pformat

        from avalon import api, io
        from avalon.vendor import filelink

        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        SILO = api.Session["AVALON_SILO"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context

        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"})
        asset = io.find_one({"name": ASSET})

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = io.find_one({
            "type": "subset",
            "parent": asset["_id"],
            "name": instance.data["subset"]
        })

        # Create the subset on first publish of this name.
        if subset is None:
            subset_name = instance.data["subset"]
            self.log.info("Subset '%s' not found, creating.." % subset_name)

            _id = io.insert_one({
                "schema": "avalon-core:subset-2.0",
                "type": "subset",
                "name": subset_name,
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True},
            sort=[("name", -1)])

        # Versions are 1-based; continue from the latest one if any.
        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.debug("Next version: %i" % next_version)

        version = {
            "schema": "avalon-core:version-2.0",
            "type": "version",
            "parent": subset["_id"],
            "name": next_version,
            "locations": [LOCATION] if LOCATION else [],
            "data": {
                "families": (instance.data.get("families", list()) +
                             [instance.data["family"]]),

                # Enable overriding with current information from instance
                "time":
                instance.data.get("time", context.data["time"]),
                "author":
                instance.data.get("user", context.data["user"]),
                "source":
                instance.data.get("source",
                                  context.data["currentFile"]).replace(
                                      api.registered_root(),
                                      "{root}").replace("\\", "/"),
                "comment":
                context.data.get("comment")
            }
        }

        self.log.debug("Creating version: %s" % pformat(version))
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        template_data = {
            "root": api.registered_root(),
            "project": PROJECT,
            "silo": SILO,
            "asset": ASSET,
            "subset": subset["name"],
            "version": version["name"],
        }

        template_publish = project["config"]["template"]["publish"]

        if "output" not in instance.data:
            instance.data["output"] = list()

        # Hard-link into place when possible; fall back to copying.
        def copy(src, dst):
            dirname = os.path.dirname(dst)
            try:
                os.makedirs(dirname)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    pass
                else:
                    self.log.critical("An unexpected error occurred.")
                    raise

            try:
                filelink.create(src, dst)
                self.log.info("Linking %s -> %s" % (src, dst))
            except Exception:
                # Revert to a normal copy
                # TODO(marcus): Once filelink is proven stable,
                # improve upon or remove this fallback.
                shutil.copy(src, dst)
                self.log.info("Linking failed, copying %s -> %s" % (src, dst))

        # Each entry is either a list (frame collection) or a single
        # file name, both relative to the staging directory.
        for _ in instance.data["files"]:

            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |       ||
            # |       ||
            # |       ||
            # |_______|
            #
            if isinstance(_, list):
                collection = _

                # Assert that each member has identical suffix
                _, ext = os.path.splitext(collection[0])
                assert all(ext == os.path.splitext(name)[1]
                           for name in collection), (
                               "Files had varying suffixes, this is a bug")

                template_data["representation"] = ext[1:]

                for fname in collection:
                    src = os.path.join(stagingdir, fname)
                    dst = os.path.join(
                        template_publish.format(**template_data), fname)

                    copy(src, dst)

                    instance.data["output"].append(dst)

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                fname = _

                _, ext = os.path.splitext(fname)

                template_data["representation"] = ext[1:]

                src = os.path.join(stagingdir, fname)
                dst = template_publish.format(**template_data)

                copy(src, dst)

                instance.data["output"].append(dst)

            representation = {
                "schema": "avalon-core:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": template_data["representation"],
                "data": {},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context for performance reasons.
                "context": {
                    "project": PROJECT,
                    "asset": ASSET,
                    "silo": SILO,
                    "subset": subset["name"],
                    "version": version["name"],
                    "representation": template_data["representation"]
                }
            }

            io.insert_one(representation)

        context.data["published_version"] = str(version_id)

        self.log.info("Successfully integrated \"%s\" to \"%s\"" %
                      (instance, dst))
Пример #14
0
    def process(self, context):
        """Collect one publish instance per non-default Maya render layer.

        Reads global render settings, layer renderability and any
        user-defined attributes on each layer, plus optional Avalon
        render globals, and attaches them as instance data.
        """
        from maya import cmds
        from avalon import maya, api

        def from_render_globals(attr):
            return cmds.getAttr("defaultRenderGlobals." + attr)

        # Scene path relative to the pipeline root, forward slashes only.
        source = context.data["currentFile"].replace(
            api.registered_root(), "{root}").replace("\\", "/")

        for layer in cmds.ls(type="renderLayer"):
            # The default layer is never published.
            if layer.endswith("defaultRenderLayer"):
                continue

            data = {
                "family": "Render Layers",
                "families": ["mindbender.renderlayer"],
                "publish": cmds.getAttr(layer + ".renderable"),
                "startFrame": from_render_globals("startFrame"),
                "endFrame": from_render_globals("endFrame"),
                "byFrameStep": from_render_globals("byFrameStep"),
                "renderer": from_render_globals("currentRenderer"),
                "time": context.data["time"],
                "author": context.data["user"],
                "source": source,
            }

            # Expose every user-defined attribute on the layer as data.
            for attr in cmds.listAttr(layer, userDefined=True) or list():
                try:
                    value = cmds.getAttr(layer + "." + attr)
                except Exception:
                    # Some attributes cannot be read directly,
                    # such as mesh and color attributes. These
                    # are considered non-essential to this
                    # particular publishing pipeline.
                    value = None
                data[attr] = value

            # Include (optional) global settings
            # TODO(marcus): Take into account layer overrides
            try:
                globals_node = maya.lsattr("id", "avalon.renderglobals")[0]
            except IndexError:
                pass
            else:
                attrs = maya.read(globals_node)
                overrides = {
                    "Pool": attrs["pool"],
                    "Group": attrs["group"],
                    "Frames": attrs["frames"],
                    "Priority": attrs["priority"],
                }

                # These globals override defaults in the submission
                # integrator, but an empty value means no overriding is
                # made; drop falsy entries so e.g. Frames does not
                # override the default frames set under globals.
                data["renderGlobals"] = {
                    key: value
                    for key, value in overrides.items()
                    if value
                }

            instance = context.create_instance(layer)
            instance.data.update(data)
Пример #15
0
    def process(self, instance):
        """Integrate the instance's extracted files into the project.

        Looks up (or creates) the subset for this instance, writes a new
        version document to the database, then copies every file in the
        staging directory to its templated publish location and records
        one representation per file extension.

        Args:
            instance: the pyblish instance being published; must provide
                "stagingDir" and "subset" in its data.

        Raises:
            AssertionError: when required environment variables are
                missing, an upstream plug-in failed, or the staging
                directory is not set.
        """
        import os
        import errno
        import shutil
        from pprint import pformat

        from mindbender import api, io

        # These are exported by the launcher; without them we cannot
        # resolve the project/asset documents below.
        assert all(
            os.getenv(env)
            for env in ("MINDBENDER__ASSET", "MINDBENDER__PROJECT")), (
                "Missing environment variables\n"
                "This can sometimes happen when an application was launched \n"
                "manually, outside of the pipeline.")

        context = instance.context

        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        # NOTE: the double-underscore variables hold database ids, as
        # opposed to the single-underscore name variables used for the
        # path template further down.
        project = io.find_one(
            {"_id": io.ObjectId(os.environ["MINDBENDER__PROJECT"])})

        asset = io.find_one(
            {"_id": io.ObjectId(os.environ["MINDBENDER__ASSET"])})

        assert all([project, asset]), "This is a bug"

        subset = io.find_one({
            "type": "subset",
            "parent": asset["_id"],
            "name": instance.data["subset"]
        })

        if subset is None:
            self.log.info("Subset '%s' not found, creating.." %
                          instance.data["subset"])

            _id = io.insert_one({
                "schema": "mindbender-core:subset-2.0",
                "type": "subset",
                "name": instance.data["subset"],
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        # Seed with 0 so the first ever publish becomes version 1.
        all_versions = [0] + [
            version["name"]
            for version in io.find({
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True})
        ]

        next_version = max(all_versions) + 1

        # versiondir = template_versions.format(**template_data)
        self.log.debug("Next version: %i" % next_version)

        version = {
            "schema": "mindbender-core:version-2.0",
            "type": "version",
            "parent": subset["_id"],
            "name": next_version,
            "data": {
                # Used to identify family of assets already on disk
                "families":
                instance.data.get("families", list()) +
                [instance.data.get("family")],
                "time":
                context.data["time"],
                "author":
                context.data["user"],
                "source":
                os.path.join(
                    "{root}",
                    os.path.relpath(context.data["currentFile"],
                                    api.registered_root())).replace("\\", "/"),
            }
        }

        self.backwards_compatiblity(instance, version)

        self.log.debug("Creating version: %s" % pformat(version))
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        template_data = {
            "root": api.registered_root(),
            "project": os.environ["MINDBENDER_PROJECT"],
            "silo": os.environ["MINDBENDER_SILO"],
            "asset": os.environ["MINDBENDER_ASSET"],
            "subset": subset["name"],
            "version": version["name"],
        }

        template_publish = project["config"]["template"]["publish"]

        for fname in os.listdir(stagingdir):
            name, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)

            # Backwards compatibility: the metadata file keeps its own
            # name rather than the templated one.
            if fname == ".metadata.json":
                dirname = os.path.dirname(dst)
                dst = os.path.join(dirname, ".metadata.json")

            self.log.info("Copying %s -> %s" % (src, dst))

            # Ensure the destination directory exists; tolerating
            # EEXIST keeps this Python 2 compatible (no exist_ok).
            dirname = os.path.dirname(dst)
            try:
                os.makedirs(dirname)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    pass
                else:
                    self.log.critical("An unexpected error occurred.")
                    raise

            shutil.copy(src, dst)

            representation = {
                "schema": "mindbender-core:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {
                    "label": {
                        ".ma": "Maya Ascii",
                        ".source": "Original source file",
                        ".abc": "Alembic"
                    }.get(ext)
                }
            }

            io.insert_one(representation)

        self.log.info("Successfully integrated \"%s\" to \"%s\"" %
                      (instance, dst))
Пример #16
0
    def register(self, instance):
        """Register a new version of this instance's subset in the database.

        Verifies that all upstream plug-ins succeeded, resolves (or
        creates) the subset, checks the assumed version number against
        the database, writes a version document, queues file transfers
        for every file or frame collection in ``instance.data["files"]``
        and inserts one representation document per item.

        Args:
            instance: the pyblish instance being published.

        Raises:
            AssertionError: on failed upstream results, missing staging
                directory, unknown project/asset, or absolute file names.
            AttributeError: when the assumed version does not match the
                next version number in the database.
        """

        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context
        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        # extra check if stagingDir actually exists and is available

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"})

        asset = io.find_one({
            "type": "asset",
            "name": ASSET,
            "parent": project["_id"]
        })

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True},
            sort=[("name", -1)])

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.info("Verifying version from assumed destination")

        assumed_data = instance.data["assumedTemplateData"]
        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

        # An explicit version on the instance overrides the computed one.
        if instance.data.get('version'):
            next_version = int(instance.data.get('version'))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        hierarchy = ""
        parents = io.find_one({
            "type": 'asset',
            "name": ASSET
        })['data']['parents']
        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*parents)

        template_data = {
            "root": root,
            "project": {
                "name": PROJECT,
                "code": project['data']['code']
            },
            "silo": asset['silo'],
            "task": api.Session["AVALON_TASK"],
            "asset": ASSET,
            "family": instance.data['family'],
            "subset": subset["name"],
            "version": int(version["name"]),
            "hierarchy": hierarchy
        }

        # template_publish = project["config"]["template"]["publish"]
        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []

        for files in instance.data["files"]:
            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |       ||
            # |       ||
            # |       ||
            # |_______|
            #
            if isinstance(files, list):

                src_collections, remainder = clique.assemble(files)
                src_collection = src_collections[0]
                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = ext = src_collection.format("{tail}")

                # Render two sample frames through the anatomy template,
                # then re-assemble them with clique to recover the
                # destination head/tail around the frame number.
                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = src_tail[1:]
                    template_data["frame"] = src_collection.format(
                        "{padding}") % i
                    anatomy_filled = anatomy.format(template_data)
                    test_dest_files.append(anatomy_filled.render.path)

                dst_collections, remainder = clique.assemble(test_dest_files)
                dst_collection = dst_collections[0]
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                for i in src_collection.indexes:
                    src_padding = src_collection.format("{padding}") % i
                    src_file_name = "{0}{1}{2}".format(src_head, src_padding,
                                                       src_tail)
                    dst_padding = dst_collection.format("{padding}") % i
                    dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)

                    src = os.path.join(stagingdir, src_file_name)
                    instance.data["transfers"].append([src, dst])

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #

                template_data.pop("frame", None)
                # NOTE(review): popping "frame" off the anatomy object
                # itself assumes Anatomy supports dict-like pop — confirm.
                anatomy.pop("frame", None)

                fname = files

                self.log.info("fname: {}".format(fname))

                assert not os.path.isabs(fname), (
                    "Given file name is a full path")
                _, ext = os.path.splitext(fname)

                template_data["representation"] = ext[1:]

                src = os.path.join(stagingdir, fname)

                anatomy_filled = anatomy.format(template_data)
                dst = anatomy_filled.render.path

                instance.data["transfers"].append([src, dst])

            # Store a frame-agnostic path ("#####" placeholder) and the
            # raw template alongside the representation for lookups.
            template_data["frame"] = "#####"
            anatomy_filled = anatomy.format(template_data)
            path_to_save = anatomy_filled.render.path
            template = anatomy.render.fullpath
            self.log.debug('ext[1:]: {}'.format(ext[1:]))

            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": template_data["representation"] if False else ext[1:],
                "data": {
                    'path': path_to_save,
                    'template': template
                },
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
                    "project": {
                        "name": PROJECT,
                        "code": project['data']['code']
                    },
                    "task": api.Session["AVALON_TASK"],
                    "silo": asset['silo'],
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
                    "version": int(version["name"]),
                    "hierarchy": hierarchy,
                    "representation": ext[1:]
                }
            }

            destination_list.append(dst)
            instance.data['destination_list'] = destination_list
            representations.append(representation)

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)
Пример #17
0
def update(container, version=-1):
    """Update `container` to `version`

    This function relies on a container being referenced. At the time of this
    writing, all assets - models, rigs, animations, shaders - are referenced
    and should pose no problem. But should there be an asset that isn't
    referenced then this function will need to see an update.

    Arguments:
        container (mindbender-core:container-1.0): Container to update,
            from `host.ls()`.
        version (int, optional): Update the container to this version.
            If no version is passed, the latest is assumed.

    """

    node = container["objectName"]

    # The asset is expected to have come in through a reference node;
    # imported (non-referenced) containers are rejected below.
    reference_node = next(
        (member for member in cmds.sets(node, query=True)
         if cmds.nodeType(member) == "reference"),
        None)

    assert reference_node, ("Imported container not supported; "
                            "container must be referenced.")

    representation = io.find_one(
        {"_id": io.ObjectId(container["representation"])})
    assert representation is not None, "This is a bug"

    version_, subset, asset, project = io.parenthood(representation)

    if version == -1:
        # No explicit version requested: take the latest by name.
        query = {"type": "version", "parent": subset["_id"]}
        new_version = io.find_one(query, sort=[("name", -1)])
    else:
        new_version = io.find_one({
            "type": "version",
            "parent": subset["_id"],
            "name": version,
        })

    assert new_version is not None, "This is a bug"

    # Resolve the file on disk from the project's publish template.
    template_publish = project["config"]["template"]["publish"]
    fname = template_publish.format(
        root=api.registered_root(),
        project=project["name"],
        asset=asset["name"],
        silo=asset["silo"],
        subset=subset["name"],
        version=new_version["name"],
        representation=representation["name"],
    )

    file_type = {
        "ma": "mayaAscii",
        "mb": "mayaBinary",
        "abc": "Alembic",
    }.get(representation["name"])

    assert file_type, ("Unsupported representation: %s" % representation)
    assert os.path.exists(fname), "%s does not exist." % fname

    cmds.file(fname, loadReference=reference_node, type=file_type)

    # Keep the container metadata in sync with the newly loaded version.
    cmds.setAttr(container["objectName"] + ".version", new_version["name"])
    cmds.setAttr(container["objectName"] + ".source",
                 new_version["data"]["source"],
                 type="string")
Пример #18
0
    def register(self, instance):
        """Register a new version and its representations in the database.

        Resolves project/asset/subset, computes the next version number,
        upserts the version document, then for every representation in
        ``instance.data["representations"]`` resolves source/destination
        paths through the project anatomy, queues file transfers and
        inserts one representation document each.

        Args:
            instance: the pyblish instance being published; must carry a
                non-empty "representations" list in its data.

        Raises:
            AssertionError: when no representations are present or the
                project/asset cannot be found.
        """
        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context
        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        # for result in context.data["results"]:
        #     if not result["success"]:
        #         self.log.debug(result)
        #         exc_type, exc_value, exc_traceback = result["error_info"]
        #         extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
        #         self.log.debug(
        #             "Error at line {}: \"{}\"".format(
        #                 extracted_traceback[1], result["error"]
        #             )
        #         )
        # assert all(result["success"] for result in context.data["results"]),(
        #     "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        if not stagingdir:
            self.log.info('''{} is missing reference to staging
                            directory Will try to get it from
                            representation'''.format(instance))

        # extra check if stagingDir actually exists and is available

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        # Ensure at least one file is set up for transfer in staging dir.
        repres = instance.data.get("representations", None)
        assert repres, "Instance has no files to transfer"
        assert isinstance(repres, (list, tuple)), (
            "Instance 'files' must be a list, got: {0}".format(repres))

        # FIXME: io is not initialized at this point for shell host
        io.install()
        project = io.find_one({"type": "project"})

        asset = io.find_one({
            "type": "asset",
            "name": ASSET,
            "parent": project["_id"]
        })

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True},
            sort=[("name", -1)])

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        # An explicit version on the instance overrides the computed one.
        if instance.data.get('version'):
            next_version = int(instance.data.get('version'))

        # self.log.info("Verifying version from assumed destination")

        # assumed_data = instance.data["assumedTemplateData"]
        # assumed_version = assumed_data["version"]
        # if assumed_version != next_version:
        #     raise AttributeError("Assumed version 'v{0:03d}' does not match"
        #                          "next version in database "
        #                          "('v{1:03d}')".format(assumed_version,
        #                                                next_version))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)

        version_data_instance = instance.data.get('versionData')

        if version_data_instance:
            version_data.update(version_data_instance)

        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        # Upsert: re-publishing an existing version number updates the
        # document in place instead of inserting a duplicate.
        existing_version = io.find_one({
            'type': 'version',
            'parent': subset["_id"],
            'name': next_version
        })
        if existing_version is None:
            version_id = io.insert_one(version).inserted_id
        else:
            io.update_many(
                {
                    'type': 'version',
                    'parent': subset["_id"],
                    'name': next_version
                }, {'$set': version})
            version_id = existing_version['_id']
        instance.data['version'] = version['name']

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        hierarchy = ""
        parents = io.find_one({
            "type": 'asset',
            "name": ASSET
        })['data']['parents']
        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*parents)

        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []
        template_name = 'publish'
        if 'transfers' not in instance.data:
            instance.data['transfers'] = []

        for idx, repre in enumerate(instance.data["representations"]):

            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |       ||
            # |       ||
            # |       ||
            # |_______|
            #
            # create template data for Anatomy
            template_data = {
                "root": root,
                "project": {
                    "name": PROJECT,
                    "code": project['data']['code']
                },
                "silo": asset.get('silo'),
                "task": TASK,
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": int(version["name"]),
                "hierarchy": hierarchy
            }

            files = repre['files']
            # Per-representation overrides for staging dir and template.
            if repre.get('stagingDir'):
                stagingdir = repre['stagingDir']
            if repre.get('anatomy_template'):
                template_name = repre['anatomy_template']
            template = os.path.normpath(
                anatomy.templates[template_name]["path"])

            sequence_repre = isinstance(files, list)

            if sequence_repre:
                src_collections, remainder = clique.assemble(files)
                self.log.debug("src_tail_collections: {}".format(
                    str(src_collections)))
                src_collection = src_collections[0]

                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = src_collection.format("{tail}")

                # fix dst_padding
                # Derive the padding width from the first file that
                # actually matches the collection pattern.
                valid_files = [x for x in files if src_collection.match(x)]
                padd_len = len(valid_files[0].replace(src_head, "").replace(
                    src_tail, ""))
                src_padding_exp = "%0{}d".format(padd_len)

                # Render two sample frames through the anatomy template,
                # then re-assemble them with clique to recover the
                # destination head/tail around the frame number.
                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = repre['ext']
                    template_data["frame"] = src_padding_exp % i
                    anatomy_filled = anatomy.format(template_data)

                    test_dest_files.append(
                        os.path.normpath(
                            anatomy_filled[template_name]["path"]))

                self.log.debug("test_dest_files: {}".format(
                    str(test_dest_files)))

                dst_collections, remainder = clique.assemble(test_dest_files)
                dst_collection = dst_collections[0]
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                index_frame_start = None

                # Optional renumbering: shift the published sequence to
                # start at "frameStart", padded to fit "frameEnd".
                if repre.get("frameStart"):
                    frame_start_padding = len(str(repre.get("frameEnd")))
                    index_frame_start = int(repre.get("frameStart"))

                dst_padding_exp = src_padding_exp
                dst_start_frame = None
                for i in src_collection.indexes:
                    src_padding = src_padding_exp % i

                    # for adding first frame into db
                    if not dst_start_frame:
                        dst_start_frame = src_padding

                    src_file_name = "{0}{1}{2}".format(src_head, src_padding,
                                                       src_tail)

                    dst_padding = src_padding_exp % i

                    if index_frame_start:
                        dst_padding_exp = "%0{}d".format(frame_start_padding)
                        dst_padding = dst_padding_exp % index_frame_start
                        index_frame_start += 1

                    # NOTE(review): ".." -> "." presumably collapses the
                    # doubled dot left when the template's frame token is
                    # adjacent to the extension — confirm against anatomy.
                    dst = "{0}{1}{2}".format(dst_head, dst_padding,
                                             dst_tail).replace("..", ".")

                    self.log.debug("destination: `{}`".format(dst))
                    src = os.path.join(stagingdir, src_file_name)

                    self.log.debug("source: {}".format(src))
                    instance.data["transfers"].append([src, dst])

                # Record the first frame's path as the published path.
                dst = "{0}{1}{2}".format(dst_head, dst_start_frame,
                                         dst_tail).replace("..", ".")
                repre['published_path'] = dst

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                template_data.pop("frame", None)
                fname = files
                assert not os.path.isabs(fname), (
                    "Given file name is a full path")

                template_data["representation"] = repre['ext']

                if repre.get("outputName"):
                    template_data["output"] = repre['outputName']

                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                dst = os.path.normpath(
                    anatomy_filled[template_name]["path"]).replace("..", ".")

                instance.data["transfers"].append([src, dst])

                repre['published_path'] = dst
                self.log.debug("__ dst: {}".format(dst))

            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": repre['name'],
                "data": {
                    'path': dst,
                    'template': template
                },
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
                    "project": {
                        "name": PROJECT,
                        "code": project['data']['code']
                    },
                    'task': TASK,
                    "silo": asset.get('silo'),
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
                    "version": version["name"],
                    "hierarchy": hierarchy,
                    "representation": repre['ext']
                }
            }

            if sequence_repre and repre.get("frameStart"):
                representation['context']['frame'] = repre.get("frameStart")

            self.log.debug("__ representation: {}".format(representation))
            destination_list.append(dst)
            self.log.debug("__ destination_list: {}".format(destination_list))
            instance.data['destination_list'] = destination_list
            representations.append(representation)
            self.log.debug("__ representations: {}".format(representations))

        self.log.debug("__ representations: {}".format(representations))
        for rep in instance.data["representations"]:
            self.log.debug("__ represNAME: {}".format(rep['name']))
            self.log.debug("__ represPATH: {}".format(rep['published_path']))
        io.insert_many(representations)
        # self.log.debug("Representation: {}".format(representations))
        self.log.info("Registered {} items".format(len(representations)))