Example #1
0
def setup():
    """Prepare a temporary project for the test run.

    Installs the pipeline hosts, registers a temporary root directory,
    persists the fixture config/inventory, and exposes the resulting
    project/asset to child processes via environment variables.

    """
    pyblish_maya.setup()
    api.install(maya)
    io.install("test")

    self._tempdir = tempfile.mkdtemp()
    api.register_root(self._tempdir)

    # Both fixture documents must be valid before being persisted.
    for document in (self._config, self._inventory):
        schema.validate(document)

    inventory.save(
        name=PROJECT_NAME,
        config=self._config,
        inventory=self._inventory,
    )

    project = io.find_one({"type": "project", "name": PROJECT_NAME})
    asset = io.find_one({
        "type": "asset",
        "parent": project["_id"],
        "name": ASSET_NAME,
    })

    # Setup environment
    assetpath = "{root}/{project}/{silo}/{asset}".format(
        root=api.registered_root(),
        project=PROJECT_NAME,
        silo="assets",
        asset=ASSET_NAME,
    )
    os.environ.update({
        "MINDBENDER__PROJECT": str(project["_id"]),
        "MINDBENDER__ASSET": str(asset["_id"]),
        "MINDBENDER_PROJECT": PROJECT_NAME,
        "MINDBENDER_ASSET": asset["name"],
        "MINDBENDER_ASSETPATH": assetpath,
        "MINDBENDER_SILO": "assets",
    })
Example #2
0
def _write(root, name, data):
    """Validate `data` and serialise it to `<root>/.<name>.toml`.

    Arguments:
        root (str): Directory in which to write the file
        name (str): Basename; the file is written as hidden ".<name>.toml"
        data (dict): Document to validate and serialise

    Returns:
        dict: The `data` passed in, unchanged.

    Raises:
        IOError: On failure to open or write the file.
        Whatever `schema.validate` raises for an invalid document.

    """
    fname = os.path.join(root, ".%s.toml" % name)

    # Validate BEFORE writing; the original validated after toml.dump,
    # which left an already-written file on disk for invalid documents.
    schema.validate(data)

    # The original wrapped this in `try/except IOError: raise`, which is
    # a no-op; let IOError propagate naturally.
    with open(fname, "w") as f:
        toml.dump(data, f)

    return data
Example #3
0
def _save_config_1_0(project_name, data):
    """Merge config `data` into the project document and persist it.

    Arguments:
        project_name (str): Name of an existing project document
        data (dict): Document of schema "mindbender-core:config-1.0"

    """
    document = io.find_one({"type": "project", "name": project_name})

    # Replace apps/tasks wholesale; merge into the existing template.
    config = document["config"]
    config["apps"] = data.get("apps", [])
    config["tasks"] = data.get("tasks", [])
    config["template"].update(data.get("template", {}))

    # The merged document must still be valid before persisting.
    schema.validate(document)
    io.save(document)
Example #4
0
def test_cli_load():
    """Loading from command-line works well"""

    # Running the inventory module with --load must succeed.
    cmd = [sys.executable, "-u", "-m", "mindbender.inventory", "--load"]
    returncode = subprocess.call(cmd, cwd=self._tempdir)
    assert returncode == 0

    # It must have produced valid hidden TOML documents on disk.
    documents = {}
    for basename in (".inventory.toml", ".config.toml"):
        with open(os.path.join(self._tempdir, basename)) as f:
            documents[basename] = toml.load(f)

    schema.validate(documents[".inventory.toml"])
    schema.validate(documents[".config.toml"])
Example #5
0
def save(name, config, inventory):
    """Write config and inventory to database from `root`

    Arguments:
        name (str): Project name
        config (dict): Document of schema "mindbender-core:config-1.0"
        inventory (dict): Document of schema "mindbender-core:inventory-1.0"

    Raises:
        schema.SchemaError: When a document carries no known "schema",
            or fails validation.

    """
    # Work on copies; the handlers mutate the documents they are given.
    config = copy.deepcopy(config)
    inventory = copy.deepcopy(inventory)

    handlers = {
        "mindbender-core:inventory-1.0": _save_inventory_1_0,
        "mindbender-core:config-1.0": _save_config_1_0
    }

    for data in (inventory, config):
        # .get() cannot raise; only the handler lookup needs guarding.
        schema_ = data.get("schema")

        try:
            handler = handlers[schema_]
        except KeyError:
            # BUGFIX: the original formatted the `schema` MODULE into the
            # message (with a stray ")") instead of the offending value.
            raise schema.SchemaError("ERROR: Missing handler for %s" %
                                     schema_)

        schema.validate(data)
        print("Saving %s.." % schema_)
        handler(name, data)
Example #6
0
def test_load():
    """Loading produces compatible results for saving"""

    # Round-trip twice: save -> load -> save -> load. Each load must
    # yield documents valid enough to feed straight back into save().
    config_, inventory_ = self._config, self._inventory
    for _ in range(2):
        inventory.save(name=self._project["name"],
                       config=config_,
                       inventory=inventory_)

        config_, inventory_ = inventory.load(PROJECT_NAME)
        schema.validate(config_)
        schema.validate(inventory_)
Example #7
0
def _save_inventory_1_0(project_name, data):
    """Write inventory document `data` to the database for `project_name`.

    Top-level non-list values of `data` are treated as project metadata;
    list-valued keys are silos whose items are asset documents. Missing
    assets are created; existing ones have their data updated.

    Arguments:
        project_name (str): Name of the project; created when absent
        data (dict): Document of schema "mindbender-core:inventory-1.0"

    """
    # Split project metadata from silos. Iterate a snapshot of the keys;
    # the original popped from `data` while iterating data.items(),
    # which raises RuntimeError on Python 3.
    metadata = {}
    for key in list(data.keys()):
        if not isinstance(data[key], list):
            metadata[key] = data.pop(key)

    document = io.find_one({"type": "project", "name": project_name})

    if document is None:
        print("'%s' not found, creating.." % project_name)
        _project = {
            "schema": "mindbender-core:project-2.0",
            "type": "project",
            "name": project_name,
            "data": dict(),
            "config": {
                "template": {},
                "tasks": [],
                "apps": [],
                "copy": {}
            },
            "parent": None,
        }

        schema.validate(_project)
        _id = io.insert_one(_project).inserted_id

        # Re-fetch so `document` carries database-assigned fields.
        document = io.find_one({"_id": _id})

    print("Updating project data..")
    for key, value in metadata.items():
        document["data"][key] = value

    io.save(document)

    print("Updating assets..")
    added = list()
    updated = list()
    missing = list()
    for silo, assets in data.items():
        for asset in assets:
            asset_doc = io.find_one({
                "name": asset["name"],
                "parent": document["_id"]
            })

            if asset_doc is None:
                asset["silo"] = silo
                asset["data"] = dict(asset)
                missing.append(asset)
                continue

            for key, value in asset.items():
                # BUGFIX: record the change BEFORE overwriting. The
                # original assigned first, so the "not in" branch was
                # unreachable and additions were never reported.
                if key not in asset_doc["data"]:
                    added.append("%s.%s: %s" % (asset["name"], key, value))

                elif asset_doc["data"][key] != value:
                    updated.append(
                        "%s.%s: %s -> %s" %
                        (asset["name"], key, asset_doc["data"][key], value))

                asset_doc["data"][key] = value

            io.save(asset_doc)

    # BUGFIX: the original used for/else here; since the loop never
    # breaks, "nothing missing" was printed unconditionally.
    if not missing:
        print("| nothing missing")

    # Create documents for assets not yet in the database. Use a fresh
    # name; the original reused `data`, shadowing the parameter.
    for new_asset in missing:
        print("+ added %s" % new_asset["name"])

        asset = {
            "schema": "mindbender-core:asset-2.0",
            "name": new_asset.pop("name"),
            "silo": new_asset.pop("silo"),
            "parent": document["_id"],
            "type": "asset",
            "data": new_asset
        }

        schema.validate(asset)
        io.insert_one(asset)

    # NOTE(review): `updated` is collected but never passed to _report;
    # _report's signature is not visible here, so the call is unchanged.
    _report(added, missing)
Example #8
0
def extract(root, silo_parent=None):
    """Parse a given project and produce a JSON file of its contents

    Walks the directory tree under `root`, building a nested document of
    project -> assets -> subsets -> versions -> representations, and
    writes it to `<root>/project.json`.

    Arguments:
        root (str): Absolute path to a file-based project
        silo_parent (str, optional): Extra directory between project and
            silo; also spliced into the "{silo}" portion of templates

    """
    def _dirs(path):
        # Return the immediate subdirectories of `path` (the early return
        # inside the loop stops os.walk after the first level). Missing
        # or unreadable paths yield an empty list.
        try:
            for base, dirs, files in os.walk(path):
                return list(os.path.join(base, dirname) for dirname in dirs)
        except IOError:
            return list()

        return list()

    name = os.path.basename(root)

    print("Generating project.json..")

    # Project document seeded from the package-level DEFAULTS config.
    project_obj = {
        "schema": "mindbender-core:project-2.0",
        "name": name,
        "type": "project",
        "data": {},
        "config": {
            "schema": "mindbender-core:config-1.0",
            "apps": copy.deepcopy(DEFAULTS["config"]["apps"]),
            "tasks": copy.deepcopy(DEFAULTS["config"]["tasks"]),
            "template": copy.deepcopy(DEFAULTS["config"]["template"]),
            "copy": {},
        },
        "children": list(),
    }

    # Update template with silo_parent directory of silo
    if silo_parent:
        silo_parent = silo_parent.strip("\\/").rstrip("\\/")

        template = project_obj["config"]["template"]
        for key, value in template.items():
            template[key] = value.replace("{silo}", silo_parent + "/{silo}")

    # Parse .bat file for environment variables
    project_obj.update(_parse_bat(root))

    # The two well-known silos; each direct subdirectory is an asset.
    for silo in ("assets", "film"):
        for asset in _dirs(os.path.join(root, silo_parent or "", silo)):
            asset_obj = {
                "schema": "mindbender-core:asset-2.0",
                "type": "asset",
                "name": os.path.basename(asset),
                "silo": silo,
                "data": {},
                "children": list(),
            }

            schema.validate(asset_obj)

            asset_obj.update(_parse_bat(asset))

            project_obj["children"].append(asset_obj)

            # Published subsets live under <asset>/publish.
            for subset in _dirs(os.path.join(asset, "publish")):
                subset_obj = {
                    "schema": "mindbender-core:subset-2.0",
                    "name": os.path.basename(subset),
                    "type": "subset",
                    "data": {},
                    "children": list(),
                }

                schema.validate(subset_obj)

                asset_obj["children"].append(subset_obj)

                for version in _dirs(subset):
                    # Versions without pipeline metadata are skipped.
                    try:
                        with open(os.path.join(version,
                                               ".metadata.json")) as f:
                            metadata = json.load(f)
                    except IOError:
                        continue

                    # Version directories are named e.g. "v001"; parse
                    # the number after the leading character.
                    try:
                        number = int(os.path.basename(version)[1:])
                    except ValueError:
                        # Directory not compatible with pipeline
                        # E.g. 001_mySpecialVersion
                        continue

                    try:
                        version_obj = {
                            "schema": "mindbender-core:version-2.0",
                            "type": "version",
                            "name": number,
                            "data": {
                                "families": metadata["families"],
                                "author": metadata["author"],
                                "source": metadata["source"],
                                "time": metadata["time"],
                            },
                            "children": list(),
                        }
                    except KeyError:
                        # Metadata not compatible with pipeline
                        continue

                    schema.validate(version_obj)

                    subset_obj["children"].append(version_obj)

                    # One representation per file format, e.g. ".ma".
                    for representation in metadata["representations"]:
                        representation_obj = {
                            "schema": "mindbender-core:representation-2.0",
                            "type": "representation",
                            "name": representation["format"].strip("."),
                            "data": {
                                "label": representation["format"].strip("."),
                            },
                            "children": list(),
                        }

                        schema.validate(representation_obj)

                        version_obj["children"].append(representation_obj)

    with open(os.path.join(root, "project.json"), "w") as f:
        json.dump(project_obj, f, indent=4)

    print("Successfully generated %s" % os.path.join(root, "project.json"))
Example #9
0
def test_save():
    """Saving works well under normal circumstances"""

    # A minimal, valid config document.
    config_ = {
        "schema": "mindbender-core:config-1.0",
        "apps": [{"name": "app1"}, {"name": "app2"}],
        "tasks": [{"name": "task1"}, {"name": "task2"}],
        "template": {
            "work": ("{root}/{project}/{silo}/{asset}/work/"
                     "{task}/{user}/{app}"),
            "publish": ("{root}/{project}/{silo}/{asset}/publish/"
                        "{subset}/v{version:0>3}/{subset}.{representation}"),
        },
        "copy": {},
    }

    # A minimal, valid inventory document with both silos populated.
    inventory_ = {
        "schema": "mindbender-core:inventory-1.0",
        "assets": [{"name": "asset1"}, {"name": "asset2"}],
        "film": [{"name": "shot1"}, {"name": "shot2"}],
    }

    schema.validate(config_)
    schema.validate(inventory_)

    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})

    # Before saving, the database copy mirrors the fixture project.
    assert_equals(project["config"], self._project["config"])

    inventory.save(name=self._project["name"],
                   config=config_,
                   inventory=inventory_)

    # After saving, the stored config matches ours (sans "schema" key).
    project = io.find_one({"_id": _id})
    config_.pop("schema")
    assert_equals(project["config"], config_)

    # Every asset from the inventory must now exist in the database.
    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"],
        })