Example 1
def setup():
    pyblish_maya.setup()
    api.install(maya)
    io.install("test")

    self._tempdir = tempfile.mkdtemp()
    api.register_root(self._tempdir)

    schema.validate(self._config)
    schema.validate(self._inventory)

    inventory.save(name=PROJECT_NAME,
                   config=self._config,
                   inventory=self._inventory)

    project = io.find_one({"type": "project", "name": PROJECT_NAME})
    asset = io.find_one({
        "type": "asset",
        "parent": project["_id"],
        "name": ASSET_NAME
    })

    # Setup environment
    os.environ["MINDBENDER__PROJECT"] = str(project["_id"])
    os.environ["MINDBENDER__ASSET"] = str(asset["_id"])
    os.environ["MINDBENDER_PROJECT"] = PROJECT_NAME
    os.environ["MINDBENDER_ASSET"] = asset["name"]
    os.environ["MINDBENDER_ASSETPATH"] = (
        "{root}/{project}/{silo}/{asset}".format(root=api.registered_root(),
                                                 project=PROJECT_NAME,
                                                 asset=ASSET_NAME,
                                                 silo="assets"))
    os.environ["MINDBENDER_SILO"] = "assets"
Example 2
def test_alembic_export():
    """Exporting Alembic works"""

    cube, generator = cmds.polyCube(name="myCube_GEO")
    transform = cmds.ls(selection=True)

    visibility_keys = [(10, True), (20, False), (30, True)]

    for time, value in visibility_keys:
        cmds.setKeyframe(transform,
                         time=time,
                         attribute="visibility",
                         value=value)

    maya.create("animationDefault",
                family="mindbender.animation",
                options={"useSelection": True})

    cmds.file(save=True)

    publish()

    # Import and test result
    cmds.file(new=True, force=True)

    asset = io.find_one({"type": "asset", "name": ASSET_NAME})

    subset = io.find_one({
        "parent": asset["_id"],
        "type": "subset",
        "name": "animationDefault"
    })

    version = io.find_one({
        "parent": subset["_id"],
        "type": "version",
        "name": 1
    })

    assert version

    representation = io.find_one({
        "parent": version["_id"],
        "type": "representation",
        "name": "abc"
    })

    assert representation is not None

    nodes = maya.load(representation["_id"])
    print("Nodes: %s" % nodes)
    cube = cmds.ls(nodes, type="mesh")
    transform = cmds.listRelatives(cube, parent=True)[0]

    for time, value in visibility_keys:
        cmds.currentTime(time, edit=True)
        assert cmds.getAttr(transform + ".visibility") == value, (
            "Cached visibility did not match original visibility")
Example 3
def load(name):
    """Write config and inventory to `root` from database

    Arguments:
        name (str): Project name

    """

    print("Loading .inventory.toml and .config.toml..")

    project = io.find_one({"type": "project", "name": name})

    if project is None:
        print("No project found, loading defaults..")
        config = {}
        inventory = DEFAULTS["inventory"]

    else:
        config = project["config"]
        inventory = {"schema": "mindbender-core:inventory-1.0"}
        for asset in io.find({"type": "asset", "parent": project["_id"]}):
            silo = asset["silo"]
            data = asset["data"]

            if silo not in inventory:
                inventory[silo] = list()

            inventory[silo].append(dict(data, **{"name": asset["name"]}))

        # Project-level data only depends on `project`, so apply it once,
        # outside the per-asset loop.
        for key, value in project["data"].items():
            inventory[key] = value

    config = dict(DEFAULTS["config"], **config)

    return config, inventory
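
A hedged usage sketch of the round trip this helper appears designed for, reusing the inventory.save() signature shown in the other examples; the project name and asset entry below are placeholders:

config, inventory_ = load("someProject")

# Append a new asset under the "assets" silo, then write everything back.
inventory_.setdefault("assets", []).append({"name": "newAsset"})
inventory.save(name="someProject", config=config, inventory=inventory_)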
Example 4
def test_update():
    """Updating works"""

    transform, generator = cmds.polyCube(name="body_PLY")
    group = cmds.group(transform, name="ROOT")

    cmds.select(group, replace=True)
    maya.create("modelDefault",
                family="mindbender.model",
                options={"useSelection": True})

    # Comply with save validator
    cmds.file(save=True)

    publish()
    publish()
    publish()  # Version 3

    cmds.file(new=True, force=True)

    asset = io.find_one({"type": "asset", "name": ASSET_NAME})

    subset = io.find_one({
        "parent": asset["_id"],
        "type": "subset",
        "name": "modelDefault"
    })

    version = io.find_one({
        "parent": subset["_id"],
        "type": "version",
        "name": 2
    })

    assert version

    representation = io.find_one({
        "parent": version["_id"],
        "type": "representation",
        "name": "ma"
    })

    maya.load(representation["_id"])
    container = next(maya.ls())
    maya.update(container, 3)
Example 5
def _save_config_1_0(project_name, data):
    document = io.find_one({"type": "project", "name": project_name})
    config = document["config"]

    config["apps"] = data.get("apps", [])
    config["tasks"] = data.get("tasks", [])
    config["template"].update(data.get("template", {}))

    schema.validate(document)

    io.save(document)
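
The `_1_0` suffix suggests a per-schema saver, presumably selected by the public save entry point. A hypothetical dispatch sketch (the `save_config` wrapper and its handler table are assumptions, not taken from the source):

def save_config(project_name, config):
    # Map schema identifier to its versioned saver (assumed layout).
    handlers = {"mindbender-core:config-1.0": _save_config_1_0}
    schema_name = config.get("schema", "mindbender-core:config-1.0")

    try:
        handlers[schema_name](project_name, config)
    except KeyError:
        raise ValueError("Unsupported config schema: %s" % schema_name)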
Example 6
def test_save_project_data():
    """The inventory can take (plain) project data as well"""

    inventory_ = copy.deepcopy(self._inventory)
    inventory_["key"] = "value"

    inventory.save(name=self._project["name"],
                   config=self._config,
                   inventory=inventory_)

    project = io.find_one({"type": "project", "name": PROJECT_NAME})
    assert_equals(project["data"]["key"], "value")
Example 7
def test_modeling():
    """Modeling workflow is functional"""
    transform, generator = cmds.polyCube(name="body_PLY")
    group = cmds.group(transform, name="ROOT")

    cmds.select(group, replace=True)
    maya.create("modelDefault",
                family="mindbender.model",
                options={"useSelection": True})

    # Comply with save validator
    cmds.file(save=True)

    publish()

    asset = io.find_one({"type": "asset", "name": ASSET_NAME})

    assert asset

    subset = io.find_one({
        "parent": asset["_id"],
        "type": "subset",
        "name": "modelDefault"
    })

    assert subset

    version = io.find_one({
        "parent": subset["_id"],
        "type": "version",
    })

    assert version

    assert io.find_one({
        "parent": version["_id"],
        "type": "representation",
        "name": "ma"
    }) is not None
Example 8
def test_save_asset_data():
    """The inventory can take asset data as well"""

    inventory_ = copy.deepcopy(self._inventory)

    asset = inventory_["assets"][0]
    asset.update({"key": "value"})

    inventory.save(name=self._project["name"],
                   config=self._config,
                   inventory=inventory_)

    asset = io.find_one({"type": "asset", "name": asset["name"]})
    print(asset)
    assert_equals(asset["data"]["key"], "value")
Example 9
    def _upload_recursive(parent, children):
        for child in children:
            grandchildren = child.pop("children")
            child["parent"] = parent

            document = io.find_one(
                {key: child[key]
                 for key in ("parent", "type", "name")})

            if document is None:
                _id = io.insert_one(child).inserted_id
                print("+ {0[type]}: '{0[name]}'".format(child))
            elif overwrite:
                _id = document["_id"]
                document.update(child)
                io.save(document)
                print("~ {0[type]}: '{0[name]}'".format(child))
            else:
                _id = document["_id"]
                print("| {0[type]}: '{0[name]}'..".format(child))

            _upload_recursive(_id, grandchildren)
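
For orientation, a hypothetical driver showing how a nested helper like this might be seeded; the shape of `tree` (a project document carrying nested "children" lists) and the `overwrite` flag are inferred from how the helper reads them, and `import copy` is assumed:

def upload(tree, overwrite=False):
    def _upload_recursive(parent, children):
        ...  # body as in the example above; closes over `overwrite`

    project = copy.deepcopy(tree)
    children = project.pop("children", [])

    # Reuse the existing project document if one is already in the database.
    existing = io.find_one({"type": "project", "name": project["name"]})
    _id = existing["_id"] if existing else io.insert_one(project).inserted_id

    _upload_recursive(_id, children)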
Example 10
def _save_inventory_1_0(project_name, data):
    metadata = {}
    # Iterate over a copy; popping from `data` while iterating over it
    # directly raises a RuntimeError on Python 3.
    for key, value in list(data.items()):
        if not isinstance(value, list):
            metadata[key] = data.pop(key)

    document = io.find_one({"type": "project", "name": project_name})

    if document is None:
        print("'%s' not found, creating.." % project_name)
        _project = {
            "schema": "mindbender-core:project-2.0",
            "type": "project",
            "name": project_name,
            "data": dict(),
            "config": {
                "template": {},
                "tasks": [],
                "apps": [],
                "copy": {}
            },
            "parent": None,
        }

        schema.validate(_project)
        _id = io.insert_one(_project).inserted_id

        document = io.find_one({"_id": _id})

    print("Updating project data..")
    for key, value in metadata.items():
        document["data"][key] = value

    io.save(document)

    print("Updating assets..")
    added = list()
    updated = list()
    missing = list()
    for silo, assets in data.items():
        for asset in assets:
            asset_doc = io.find_one({
                "name": asset["name"],
                "parent": document["_id"]
            })

            if asset_doc is None:
                asset["silo"] = silo
                asset["data"] = dict(asset)
                missing.append(asset)
                continue

            for key, value in asset.items():
                if key not in asset_doc["data"]:
                    added.append("%s.%s: %s" % (asset["name"], key, value))

                elif asset_doc["data"][key] != value:
                    updated.append(
                        "%s.%s: %s -> %s" %
                        (asset["name"], key, asset_doc["data"][key], value))

                # Compare against the existing value before overwriting it,
                # otherwise neither `added` nor `updated` can ever trigger.
                asset_doc["data"][key] = value

            io.save(asset_doc)

    for data in missing:
        print("+ added %s" % data["name"])

        asset = {
            "schema": "mindbender-core:asset-2.0",
            "name": data.pop("name"),
            "silo": data.pop("silo"),
            "parent": document["_id"],
            "type": "asset",
            "data": data
        }

        schema.validate(asset)
        io.insert_one(asset)

    # A `for ... else` clause runs whenever the loop finishes without
    # `break`, so test the list explicitly instead.
    if not missing:
        print("| nothing missing")

    _report(added, missing)
Example 11
def test_save():
    """Saving works well under normal circumstances"""
    config_ = {
        "schema": "mindbender-core:config-1.0",
        "apps": [
            {
                "name": "app1"
            },
            {
                "name": "app2"
            },
        ],
        "tasks": [
            {
                "name": "task1"
            },
            {
                "name": "task2"
            },
        ],
        "template": {
            "work":
            "{root}/{project}/{silo}/{asset}/work/"
            "{task}/{user}/{app}",
            "publish":
            "{root}/{project}/{silo}/{asset}/publish/"
            "{subset}/v{version:0>3}/{subset}.{representation}"
        },
        "copy": {}
    }

    inventory_ = {
        "schema": "mindbender-core:inventory-1.0",
        "assets": [{
            "name": "asset1"
        }, {
            "name": "asset2"
        }],
        "film": [
            {
                "name": "shot1"
            },
            {
                "name": "shot2"
            },
        ]
    }

    schema.validate(config_)
    schema.validate(inventory_)

    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})

    assert_equals(project["config"], self._project["config"])

    inventory.save(name=self._project["name"],
                   config=config_,
                   inventory=inventory_)

    project = io.find_one({"_id": _id})
    config_.pop("schema")
    assert_equals(project["config"], config_)

    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"]
        })
Example 12
    def process(self, instance):
        import os
        import errno
        import shutil
        from pprint import pformat

        from mindbender import api, io

        assert all(
            os.getenv(env)
            for env in ("MINDBENDER__ASSET", "MINDBENDER__PROJECT")), (
                "Missing environment variables\n"
                "This can sometimes happen when an application was launched \n"
                "manually, outside of the pipeline.")

        context = instance.context

        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one(
            {"_id": io.ObjectId(os.environ["MINDBENDER__PROJECT"])})

        asset = io.find_one(
            {"_id": io.ObjectId(os.environ["MINDBENDER__ASSET"])})

        assert all([project, asset]), "This is a bug"

        subset = io.find_one({
            "type": "subset",
            "parent": asset["_id"],
            "name": instance.data["subset"]
        })

        if subset is None:
            self.log.info("Subset '%s' not found, creating.." %
                          instance.data["subset"])

            _id = io.insert_one({
                "schema": "mindbender-core:subset-2.0",
                "type": "subset",
                "name": instance.data["subset"],
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        all_versions = [0] + [
            version["name"]
            for version in io.find({
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True})
        ]

        next_version = sorted(all_versions)[-1] + 1

        # versiondir = template_versions.format(**template_data)
        self.log.debug("Next version: %i" % next_version)

        version = {
            "schema": "mindbender-core:version-2.0",
            "type": "version",
            "parent": subset["_id"],
            "name": next_version,
            "data": {
                # Used to identify family of assets already on disk
                "families":
                instance.data.get("families", list()) +
                [instance.data.get("family")],
                "time":
                context.data["time"],
                "author":
                context.data["user"],
                "source":
                os.path.join(
                    "{root}",
                    os.path.relpath(context.data["currentFile"],
                                    api.registered_root())).replace("\\", "/"),
            }
        }

        self.backwards_compatiblity(instance, version)

        self.log.debug("Creating version: %s" % pformat(version))
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        template_data = {
            "root": api.registered_root(),
            "project": os.environ["MINDBENDER_PROJECT"],
            "silo": os.environ["MINDBENDER_SILO"],
            "asset": os.environ["MINDBENDER_ASSET"],
            "subset": subset["name"],
            "version": version["name"],
        }

        template_publish = project["config"]["template"]["publish"]

        for fname in os.listdir(stagingdir):
            name, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)

            # Backwards compatibility
            if fname == ".metadata.json":
                dirname = os.path.dirname(dst)
                dst = os.path.join(dirname, ".metadata.json")

            self.log.info("Copying %s -> %s" % (src, dst))

            dirname = os.path.dirname(dst)
            try:
                os.makedirs(dirname)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    pass
                else:
                    self.log.critical("An unexpected error occurred.")
                    raise

            shutil.copy(src, dst)

            representation = {
                "schema": "mindbender-core:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {
                    "label": {
                        ".ma": "Maya Ascii",
                        ".source": "Original source file",
                        ".abc": "Alembic"
                    }.get(ext)
                }
            }

            io.insert_one(representation)

        self.log.info("Successfully integrated \"%s\" to \"%s\"" %
                      (instance, dst))