コード例 #1
0
ファイル: integrate_new.py プロジェクト: 3dzayn/pype
    def get_subset(self, asset, instance):
        """Return the subset document for ``instance``, creating it if needed.

        Args:
            asset (dict): Asset document the subset belongs to.
            instance: Publish instance; reads ``data["subset"]``,
                ``data["family"]``, ``data["families"]`` and
                ``data["subsetGroup"]``.

        Returns:
            dict: The subset document fetched from the database.
        """
        subset_name = instance.data["subset"]
        subset = io.find_one({
            "type": "subset",
            "parent": asset["_id"],
            "name": subset_name
        })

        # Merge "family" and "families" into one de-duplicated list so the
        # creation and the later update paths store the same value.
        # NOTE(fix): the update path previously rebuilt this as
        # `[instance.data["family"]] + instance.data.get("families", [])`,
        # which raised KeyError when "family" was absent and could store
        # duplicate entries.
        families = []
        family = instance.data.get("family")
        if family:
            families.append(family)
        for _family in (instance.data.get("families") or []):
            if _family not in families:
                families.append(_family)

        if subset is None:
            self.log.info("Subset '%s' not found, creating ..." % subset_name)
            self.log.debug("families.  %s" % instance.data.get('families'))
            self.log.debug("families.  %s" %
                           type(instance.data.get('families')))

            _id = io.insert_one({
                "schema": "openpype:subset-3.0",
                "type": "subset",
                "name": subset_name,
                "data": {
                    "families": families
                },
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        # add group if available
        if instance.data.get("subsetGroup"):
            io.update_many(
                {
                    'type': 'subset',
                    '_id': io.ObjectId(subset["_id"])
                }, {
                    '$set': {
                        'data.subsetGroup': instance.data.get('subsetGroup')
                    }
                })

        # Update families on subset (covers pre-existing subsets too).
        io.update_many({
            "type": "subset",
            "_id": io.ObjectId(subset["_id"])
        }, {"$set": {
            "data.families": families
        }})

        return subset
コード例 #2
0
    def update_dependent(self, instance, version_id):
        """Record this version as a dependent on every version it depends on.

        For each entry in ``instance.data["dependencies"]`` the dependency's
        version document gains ``data.dependents.<version_id>`` holding the
        dependency count.
        """
        dependent_field = "data.dependents." + str(version_id)

        for dep_version_id, dep_data in instance.data["dependencies"].items():
            io.update_many(
                {"_id": io.ObjectId(dep_version_id)},
                {"$set": {dependent_field: {"count": dep_data["count"]}}}
            )
コード例 #3
0
    def import_to_avalon(self, input_data, parent=None):
        """Recursively import a hierarchy of entities into the Avalon DB.

        Args:
            input_data (dict): Mapping of entity name to entity data
                (``entity_type``, optional ``custom_attributes``, ``tasks``,
                ``inputs`` and nested ``childs``).
            parent (dict, optional): Already-imported parent entity document.

        Raises:
            AssertionError: When a non-project item arrives before the
                project/parent context is established.
        """
        for name in input_data:
            self.log.info("input_data[name]: {}".format(input_data[name]))
            entity_data = input_data[name]
            entity_type = entity_data["entity_type"]

            data = {}

            data["inputs"] = entity_data.get("inputs", [])
            data["entityType"] = entity_type

            # Custom attributes.
            for k, val in entity_data.get("custom_attributes", {}).items():
                data[k] = val

            # Tasks.
            # NOTE(fix): the original condition was
            # `tasks is not None or len(tasks) > 0`, which raised TypeError
            # on `len(None)` whenever "tasks" was stored as None. Guard on
            # None only, preserving the old behavior of storing empty lists.
            tasks = entity_data.get("tasks", [])
            if tasks is not None:
                data["tasks"] = tasks
            parents = []
            visualParent = None
            # do not store project"s id as visualParent (silo asset)
            if self.project is not None:
                if self.project["_id"] != parent["_id"]:
                    visualParent = parent["_id"]
                    parents.extend(parent.get("data", {}).get("parents", []))
                    parents.append(parent["name"])
            data["visualParent"] = visualParent
            data["parents"] = parents

            # Process project
            if entity_type.lower() == "project":
                entity = io.find_one({"type": "project"})
                # TODO: should be in validator?
                assert (entity is not None), "Did not find project in DB"

                # get data from already existing project
                for key, value in entity.get("data", {}).items():
                    data[key] = value

                self.project = entity
            # Raise error if project or parent are not set
            elif self.project is None or parent is None:
                raise AssertionError("Collected items are not in right order!")
            # Else process assset
            else:
                entity = io.find_one({"type": "asset", "name": name})
                # Create entity if doesn"t exist
                if entity is None:
                    entity = self.create_avalon_asset(name, data)

            # Update entity data with input data
            io.update_many({"_id": entity["_id"]}, {"$set": {"data": data}})

            # Recurse into children with this entity as the new parent.
            if "childs" in entity_data:
                self.import_to_avalon(entity_data["childs"], entity)
コード例 #4
0
 def save_range(self):
     """Persist the UI's edit in/out and handles values onto the asset."""
     asset_name = self.assets.currentText()
     new_values = {
         "data.edit_in": self.start.value(),
         "data.edit_out": self.end.value(),
         "data.handles": self.handles.value(),
     }
     asset_filter = {"type": "asset", "name": asset_name}
     io.update_many(asset_filter, update={"$set": new_values})
コード例 #5
0
    def process(self, instance):
        """Integrate the instance's subset/version/representation documents.

        Aborts if any earlier plugin reported failure (atomicity guard).
        Inserts the subset and version when new; when the version already
        exists, refreshes its ``data.time`` and each representation's
        ``data`` instead.

        Args:
            instance: Publish instance; reads ``data["toDatabase"]`` as a
                ``(subset, version, representations)`` triple.
        """
        context = instance.context

        # Atomicity guard: only integrate when every prior result succeeded.
        if not all(result["success"] for result in context.data["results"]):
            self.log.warning("Atomicity not held, aborting.")
            return

        # Integrate representations' to database
        self.log.info("Integrating representations to database ...")

        asset = context.data["assetDoc"]
        subset, version, representations = instance.data["toDatabase"]

        # Write subset if not exists
        # NOTE(fix): local was named `filter`, shadowing the builtin;
        # renamed to match the file's `filter_` convention.
        subset_filter = {"parent": asset["_id"], "name": subset["name"]}
        if io.find_one(subset_filter) is None:
            io.insert_one(subset)

        # Write version if not exists
        version_filter = {"parent": subset["_id"], "name": version["name"]}
        existed_version = io.find_one(version_filter)
        if existed_version is None:
            # Write version and representations to database
            version_id = self.write_database(instance, version,
                                             representations)
            instance.data["insertedVersionId"] = version_id

            # Update dependent
            self.update_dependent(instance, version_id)

        else:
            self.log.info("Version existed, representation file has been "
                          "overwritten.")
            # Update version document "data.time"
            filter_ = {"_id": existed_version["_id"]}
            update = {"$set": {"data.time": context.data["time"]}}
            io.update_many(filter_, update)
            # Update representation documents "data"
            for representation in representations:
                filter_ = {
                    "name": representation["name"],
                    "parent": existed_version["_id"],
                }
                update = {"$set": {"data": representation["data"]}}
                io.update_many(filter_, update)
コード例 #6
0
ファイル: integrate_new.py プロジェクト: Yowza-Animation/pype
    def get_subset(self, asset, instance):
        """Fetch the subset document for this instance, creating it on demand.

        Args:
            asset (dict): Asset document the subset belongs to.
            instance: Publish instance; reads ``data["subset"]``,
                ``data["families"]`` and ``data["subsetGroup"]``.

        Returns:
            dict: The subset document from the database.
        """
        subset_name = instance.data["subset"]
        lookup = {
            "type": "subset",
            "parent": asset["_id"],
            "name": subset_name,
        }
        subset = io.find_one(lookup)

        if subset is None:
            self.log.info("Subset '%s' not found, creating.." % subset_name)
            self.log.debug("families.  %s" % instance.data.get('families'))
            self.log.debug("families.  %s" %
                           type(instance.data.get('families')))

            new_subset = {
                "schema": "pype:subset-3.0",
                "type": "subset",
                "name": subset_name,
                "data": {
                    "families": instance.data.get("families", []),
                },
                "parent": asset["_id"],
            }
            inserted_id = io.insert_one(new_subset).inserted_id
            subset = io.find_one({"_id": inserted_id})

        # Attach the subset group when the instance defines one.
        if instance.data.get("subsetGroup"):
            io.update_many(
                {"type": "subset", "_id": io.ObjectId(subset["_id"])},
                {"$set": {
                    "data.subsetGroup": instance.data.get("subsetGroup"),
                }},
            )

        return subset
コード例 #7
0
ファイル: lib.py プロジェクト: jonike/pype
def get_hierarchy(asset_name=None):
    """
    Obtain asset hierarchy path string from mongo db

    Args:
        asset_name (str, optional): Asset to query. Falls back to the
            "AVALON_ASSET" session value, then the environment variable.

    Returns:
        string: asset hierarchy path

    """
    if not asset_name:
        asset_name = io.Session.get("AVALON_ASSET", os.environ["AVALON_ASSET"])

    asset_entity = io.find_one({"type": 'asset', "name": asset_name})

    # Sentinel distinguishes "parents key missing" from "parents == []".
    not_set = "PARENTS_NOT_SET"
    entity_parents = asset_entity.get("data", {}).get("parents", not_set)

    # If entity already have parents then just return joined
    if entity_parents != not_set:
        return "/".join(entity_parents)

    # Else query parents through visualParents and store result to entity
    hierarchy_items = []
    entity = asset_entity
    while True:
        parent_id = entity.get("data", {}).get("visualParent")
        if not parent_id:
            break
        entity = io.find_one({"_id": parent_id})
        # NOTE(fix): a dangling "visualParent" reference previously made
        # `entity["name"]` raise TypeError on None; stop walking instead.
        if entity is None:
            break
        hierarchy_items.append(entity["name"])

    # Add parents to entity data for next query
    entity_data = asset_entity.get("data", {})
    entity_data["parents"] = hierarchy_items
    io.update_many({"_id": asset_entity["_id"]},
                   {"$set": {
                       "data": entity_data
                   }})

    return "/".join(hierarchy_items)
コード例 #8
0
    def write_database(self):
        """Write model-protection flags onto the latest representations.

        First verifies the loaded versions are still the newest in the
        database; if any subset has a newer version, returns without
        writing so the view can be refreshed.
        """
        # Abort if any subset already has a newer version in the database.
        for item in self._root_item.children():
            newest = io.find_one(
                {"type": "version", "parent": item["subsetId"]},
                sort=[("name", -1)],
                projection={"name": True},
            )
            if item["representation"]["parent"] != newest["_id"]:
                # Found new version, should do refresh
                return

        for item in self._root_item.children():
            if not item["isLatest"]:
                continue

            protected = set()
            for node in item.children():
                lock_override = node["setLocked"]
                if lock_override is None:
                    # No explicit override: keep the node's current state.
                    if node["isLocked"]:
                        protected.add(node["avalonId"])
                elif lock_override:
                    # Explicitly set to Lock.
                    protected.add(node["avalonId"])

            io.update_many(
                {"_id": item["representation"]["_id"]},
                {"$set": {"data.modelProtected": list(protected)}},
            )
コード例 #9
0
    def process(self, instance):
        """Register and integrate the instance unless delegated elsewhere.

        Skips work while a long-run publish is delegated to a contractor
        that has not accepted yet. If the version already exists only its
        ``data.time`` is refreshed; otherwise the version and its
        representations are written and dependents updated.
        """
        self.transfers = {"packages": [], "files": [], "hardlinks": []}

        # Check Delegation
        #
        # Contractor completed long-run publish process
        contractor_accepted = instance.context.data.get("contractorAccepted")
        # Is delegating long-run publish process
        if instance.data.get("useContractor") and not contractor_accepted:
            return

        # Assemble data and create version, representations
        subset, version, representations = self.register(instance)

        # Integrate representations' files to shareable space
        self.log.info("Integrating representations to shareable space ...")
        self.integrate()

        existing = io.find_one({
            "parent": subset["_id"],
            "name": version["name"],
        })
        if existing is not None:
            self.log.info("Version existed, representation file has been "
                          "overwritten.")
            io.update_many(
                {"_id": existing["_id"]},
                {"$set": {"data.time": instance.context.data["time"]}},
            )
            return

        # Write version and representations to database
        inserted_id = self.write_database(instance, version, representations)
        instance.data["insertedVersionId"] = inserted_id

        # Update dependent
        self.update_dependent(instance, inserted_id)
コード例 #10
0
ファイル: integrate_new.py プロジェクト: Yowza-Animation/pype
    def register(self, instance):
        """Register subset/version/representation documents for *instance*.

        Resolves the target asset, prepares a new version document (or
        replaces and archives an existing one), computes destination paths
        for every representation via Anatomy templates, queues file
        transfers on ``instance.data["transfers"]`` and finally inserts the
        representation documents into the database.

        Args:
            instance: Publish instance with ``anatomyData``, ``asset``,
                ``version`` and ``representations`` prepared by earlier
                collectors.
        """
        # Required environment variables
        anatomy_data = instance.data["anatomyData"]

        io.install()

        context = instance.context

        project_entity = instance.data["projectEntity"]

        context_asset_name = context.data["assetEntity"]["name"]

        asset_name = instance.data["asset"]
        asset_entity = instance.data.get("assetEntity")
        if not asset_entity or asset_entity["name"] != context_asset_name:
            asset_entity = io.find_one({
                "type": "asset",
                "name": asset_name,
                "parent": project_entity["_id"]
            })
            assert asset_entity, (
                "No asset found by the name \"{0}\" in project \"{1}\""
            ).format(asset_name, project_entity["name"])

            instance.data["assetEntity"] = asset_entity

            # update anatomy data with asset specific keys
            # - name should already been set
            hierarchy = ""
            parents = asset_entity["data"]["parents"]
            if parents:
                hierarchy = "/".join(parents)
            anatomy_data["hierarchy"] = hierarchy

        task_name = instance.data.get("task")
        if task_name:
            anatomy_data["task"] = task_name

        anatomy_data["family"] = instance.data.get("family")

        stagingdir = instance.data.get("stagingDir")
        if not stagingdir:
            self.log.info(
                ("{0} is missing reference to staging directory."
                 " Will try to get it from representation.").format(instance))

        else:
            self.log.debug(
                "Establishing staging directory @ {0}".format(stagingdir))

        # Ensure at least one file is set up for transfer in staging dir.
        repres = instance.data.get("representations")
        assert repres, "Instance has no files to transfer"
        assert isinstance(
            repres,
            (list,
             tuple)), ("Instance 'files' must be a list, got: {0} {1}".format(
                 str(type(repres)), str(repres)))

        subset = self.get_subset(asset_entity, instance)
        instance.data["subsetEntity"] = subset

        version_number = instance.data["version"]
        self.log.debug("Next version: v{}".format(version_number))

        version_data = self.create_version_data(context, instance)

        version_data_instance = instance.data.get('versionData')
        if version_data_instance:
            version_data.update(version_data_instance)

        # TODO rename method from `create_version` to
        # `prepare_version` or similar...
        version = self.create_version(subset=subset,
                                      version_number=version_number,
                                      data=version_data)

        self.log.debug("Creating version ...")

        new_repre_names_low = [_repre["name"].lower() for _repre in repres]

        existing_version = io.find_one({
            'type': 'version',
            'parent': subset["_id"],
            'name': version_number
        })

        if existing_version is None:
            version_id = io.insert_one(version).inserted_id
        else:
            # Check if instance have set `append` mode which cause that
            # only replicated representations are set to archive
            append_repres = instance.data.get("append", False)

            # Update version data
            # TODO query by _id and
            io.update_many(
                {
                    'type': 'version',
                    'parent': subset["_id"],
                    'name': version_number
                }, {'$set': version})
            version_id = existing_version['_id']

            # Find representations of existing version and archive them
            current_repres = list(
                io.find({
                    "type": "representation",
                    "parent": version_id
                }))
            bulk_writes = []
            for repre in current_repres:
                if append_repres:
                    # archive only duplicated representations
                    if repre["name"].lower() not in new_repre_names_low:
                        continue
                # Representation must change type,
                # `_id` must be stored to other key and replaced with new
                # - that is because new representations should have same ID
                repre_id = repre["_id"]
                bulk_writes.append(DeleteOne({"_id": repre_id}))

                repre["orig_id"] = repre_id
                repre["_id"] = io.ObjectId()
                repre["type"] = "archived_representation"
                bulk_writes.append(InsertOne(repre))

            # bulk updates
            if bulk_writes:
                io._database[io.Session["AVALON_PROJECT"]].bulk_write(
                    bulk_writes)

        version = io.find_one({"_id": version_id})
        instance.data["versionEntity"] = version

        existing_repres = list(
            io.find({
                "parent": version_id,
                "type": "archived_representation"
            }))

        instance.data['version'] = version['name']

        intent_value = instance.context.data.get("intent")
        if intent_value and isinstance(intent_value, dict):
            intent_value = intent_value.get("value")

        if intent_value:
            anatomy_data["intent"] = intent_value

        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []

        if 'transfers' not in instance.data:
            instance.data['transfers'] = []

        template_name = self.template_name_from_instance(instance)

        published_representations = {}
        for idx, repre in enumerate(instance.data["representations"]):
            published_files = []

            # create template data for Anatomy
            template_data = copy.deepcopy(anatomy_data)
            if intent_value is not None:
                template_data["intent"] = intent_value

            resolution_width = repre.get("resolutionWidth")
            resolution_height = repre.get("resolutionHeight")
            fps = instance.data.get("fps")

            if resolution_width:
                template_data["resolution_width"] = resolution_width
            # NOTE(fix): the next two conditions previously re-tested
            # `resolution_width` (copy-paste), so height/fps were written
            # (or skipped) based on the wrong value being present.
            if resolution_height:
                template_data["resolution_height"] = resolution_height
            if fps:
                template_data["fps"] = fps

            files = repre['files']
            if repre.get('stagingDir'):
                stagingdir = repre['stagingDir']

            if repre.get("outputName"):
                template_data["output"] = repre['outputName']

            template = os.path.normpath(
                anatomy.templates[template_name]["path"])

            sequence_repre = isinstance(files, list)
            repre_context = None
            if sequence_repre:
                self.log.debug("files: {}".format(files))
                src_collections, remainder = clique.assemble(files)
                self.log.debug("src_tail_collections: {}".format(
                    str(src_collections)))
                src_collection = src_collections[0]

                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = src_collection.format("{tail}")

                # fix dst_padding
                valid_files = [x for x in files if src_collection.match(x)]
                padd_len = len(valid_files[0].replace(src_head, "").replace(
                    src_tail, ""))
                src_padding_exp = "%0{}d".format(padd_len)

                # Fill the template twice with dummy frames to recover the
                # destination head/tail around the frame number.
                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = repre['ext']
                    template_data["frame"] = src_padding_exp % i
                    anatomy_filled = anatomy.format(template_data)
                    template_filled = anatomy_filled[template_name]["path"]
                    if repre_context is None:
                        repre_context = template_filled.used_values
                    test_dest_files.append(os.path.normpath(template_filled))
                template_data["frame"] = repre_context["frame"]

                self.log.debug("test_dest_files: {}".format(
                    str(test_dest_files)))

                dst_collections, remainder = clique.assemble(test_dest_files)
                dst_collection = dst_collections[0]
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                index_frame_start = None

                if repre.get("frameStart"):
                    frame_start_padding = int(anatomy.templates["render"].get(
                        "frame_padding",
                        anatomy.templates["render"].get("padding")))

                    index_frame_start = int(repre.get("frameStart"))

                # exception for slate workflow
                if index_frame_start and "slate" in instance.data["families"]:
                    index_frame_start -= 1

                dst_padding_exp = src_padding_exp
                dst_start_frame = None
                for i in src_collection.indexes:
                    # TODO 1.) do not count padding in each index iteration
                    # 2.) do not count dst_padding from src_padding before
                    #   index_frame_start check
                    src_padding = src_padding_exp % i

                    src_file_name = "{0}{1}{2}".format(src_head, src_padding,
                                                       src_tail)

                    dst_padding = src_padding_exp % i

                    if index_frame_start:
                        dst_padding_exp = "%0{}d".format(frame_start_padding)
                        dst_padding = dst_padding_exp % index_frame_start
                        index_frame_start += 1

                    dst = "{0}{1}{2}".format(dst_head, dst_padding,
                                             dst_tail).replace("..", ".")

                    self.log.debug("destination: `{}`".format(dst))
                    src = os.path.join(stagingdir, src_file_name)

                    self.log.debug("source: {}".format(src))
                    instance.data["transfers"].append([src, dst])

                    published_files.append(dst)

                    # for adding first frame into db
                    if not dst_start_frame:
                        dst_start_frame = dst_padding

                # Store used frame value to template data
                template_data["frame"] = dst_start_frame
                dst = "{0}{1}{2}".format(dst_head, dst_start_frame,
                                         dst_tail).replace("..", ".")
                repre['published_path'] = dst

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                template_data.pop("frame", None)
                fname = files
                assert not os.path.isabs(fname), (
                    "Given file name is a full path")

                template_data["representation"] = repre['ext']

                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                template_filled = anatomy_filled[template_name]["path"]
                repre_context = template_filled.used_values
                dst = os.path.normpath(template_filled).replace("..", ".")

                instance.data["transfers"].append([src, dst])

                published_files.append(dst)
                repre['published_path'] = dst
                self.log.debug("__ dst: {}".format(dst))

            repre["publishedFiles"] = published_files

            for key in self.db_representation_context_keys:
                value = template_data.get(key)
                if not value:
                    continue
                repre_context[key] = template_data[key]

            # Use previous representation's id if there are any
            repre_id = None
            repre_name_low = repre["name"].lower()
            for _repre in existing_repres:
                # NOTE should we check lowered names?
                if repre_name_low == _repre["name"]:
                    repre_id = _repre["orig_id"]
                    break

            # Create new id if existing representations does not match
            if repre_id is None:
                repre_id = io.ObjectId()

            representation = {
                "_id": repre_id,
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": repre['name'],
                "data": {
                    'path': dst,
                    'template': template
                },
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": repre_context
            }

            if repre.get("outputName"):
                representation["context"]["output"] = repre['outputName']

            if sequence_repre and repre.get("frameStart"):
                representation['context']['frame'] = (
                    dst_padding_exp % int(repre.get("frameStart")))

            self.log.debug("__ representation: {}".format(representation))
            destination_list.append(dst)
            self.log.debug("__ destination_list: {}".format(destination_list))
            instance.data['destination_list'] = destination_list
            representations.append(representation)
            published_representations[repre_id] = {
                "representation": representation,
                "anatomy_data": template_data,
                "published_files": published_files
            }
            self.log.debug("__ representations: {}".format(representations))

        # Remove old representations if there are any (before insertion of new)
        if existing_repres:
            repre_ids_to_remove = []
            for repre in existing_repres:
                repre_ids_to_remove.append(repre["_id"])
            io.delete_many({"_id": {"$in": repre_ids_to_remove}})

        self.log.debug("__ representations: {}".format(representations))
        for rep in instance.data["representations"]:
            self.log.debug("__ represNAME: {}".format(rep['name']))
            self.log.debug("__ represPATH: {}".format(rep['published_path']))
        io.insert_many(representations)
        instance.data["published_representations"] = (
            published_representations)
        # self.log.debug("Representation: {}".format(representations))
        self.log.info("Registered {} items".format(len(representations)))
コード例 #11
0
    def process(self, instance):
        """Integrate the published "thumbnail" representation.

        Copies the thumbnail file to the location described by the
        anatomy "publish/thumbnail" template, creates a thumbnail document
        in the database and links it (``data.thumbnail_id``) on both the
        version and the asset.

        Silently skips (with a log message) when AVALON_THUMBNAIL_ROOT is
        unset, no thumbnail representation was published, templates are
        missing, or the source file does not exist.
        """
        if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
            self.log.warning("AVALON_THUMBNAIL_ROOT is not set."
                             " Skipping thumbnail integration.")
            return

        published_repres = instance.data.get("published_representations")
        if not published_repres:
            self.log.debug(
                "There are no published representations on the instance.")
            return

        project_name = api.Session["AVALON_PROJECT"]

        anatomy = instance.context.data["anatomy"]
        if "publish" not in anatomy.templates:
            self.log.warning("Anatomy is missing the \"publish\" key!")
            return

        if "thumbnail" not in anatomy.templates["publish"]:
            self.log.warning((
                "There is no \"thumbnail\" template set for the project \"{}\""
            ).format(project_name))
            return

        # Locate the representation named "thumbnail".
        thumb_repre = None
        thumb_repre_anatomy_data = None
        for repre_info in published_repres.values():
            repre = repre_info["representation"]
            if repre["name"].lower() == "thumbnail":
                thumb_repre = repre
                thumb_repre_anatomy_data = repre_info["anatomy_data"]
                break

        if not thumb_repre:
            self.log.debug(
                "There is not representation with name \"thumbnail\"")
            return

        io.install()

        thumbnail_template = anatomy.templates["publish"]["thumbnail"]

        version = io.find_one({"_id": thumb_repre["parent"]})
        if not version:
            raise AssertionError(
                "There does not exist version with id {}".format(
                    str(thumb_repre["parent"])))

        # Get full path to thumbnail file from representation
        src_full_path = os.path.normpath(thumb_repre["data"]["path"])
        if not os.path.exists(src_full_path):
            self.log.warning(
                "Thumbnail file was not found. Path: {}".format(src_full_path))
            return

        # Only the extension is needed; the basename is not used.
        _filename, file_extension = os.path.splitext(src_full_path)
        # Create id for mongo entity now to fill anatomy template
        thumbnail_id = ObjectId()

        # Prepare anatomy template fill data
        template_data = copy.deepcopy(thumb_repre_anatomy_data)
        template_data.update({
            "_id":
            str(thumbnail_id),
            "thumbnail_root":
            os.environ.get("AVALON_THUMBNAIL_ROOT"),
            "ext":
            file_extension[1:],
            "thumbnail_type":
            "thumbnail"
        })

        anatomy_filled = anatomy.format(template_data)
        template_filled = anatomy_filled["publish"]["thumbnail"]

        dst_full_path = os.path.normpath(str(template_filled))
        self.log.debug("Copying file .. {} -> {}".format(
            src_full_path, dst_full_path))
        dirname = os.path.dirname(dst_full_path)
        try:
            os.makedirs(dirname)
        except OSError as e:
            # Re-raise everything except "directory already exists".
            if e.errno != errno.EEXIST:
                tp, value, tb = sys.exc_info()
                six.reraise(tp, value, tb)

        shutil.copy(src_full_path, dst_full_path)

        # Clean template data from keys that are dynamic
        template_data.pop("_id")
        template_data.pop("thumbnail_root")

        repre_context = template_filled.used_values
        for key in self.required_context_keys:
            value = template_data.get(key)
            if not value:
                continue
            repre_context[key] = template_data[key]

        thumbnail_entity = {
            "_id": thumbnail_id,
            "type": "thumbnail",
            "schema": "pype:thumbnail-1.0",
            "data": {
                "template": thumbnail_template,
                "template_data": repre_context
            }
        }
        # Create thumbnail entity
        io.insert_one(thumbnail_entity)
        self.log.debug("Creating entity in database {}".format(
            str(thumbnail_entity)))
        # Set thumbnail id for version
        io.update_many({"_id": version["_id"]},
                       {"$set": {
                           "data.thumbnail_id": thumbnail_id
                       }})
        self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
            version["name"], str(version["_id"])))

        asset_entity = instance.data["assetEntity"]
        io.update_many({"_id": asset_entity["_id"]},
                       {"$set": {
                           "data.thumbnail_id": thumbnail_id
                       }})
        # NOTE(fix): this log previously printed `version["_id"]` while
        # claiming to report the asset; use the asset's own id.
        self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format(
            asset_entity["name"], str(asset_entity["_id"])))
コード例 #12
0
    def import_to_avalon(self, input_data, parent=None):
        """Recursively create or update avalon entities from hierarchy data.

        Args:
            input_data (dict): Mapping of entity name to entity data. Each
                entity data dict may contain "entity_type",
                "custom_attributes", "inputs", "tasks" and "childs".
            parent (dict, optional): Already-processed parent entity
                document; ``None`` only for the top-level (project) call.

        Raises:
            AssertionError: If the project document is missing in the
                database or entities arrive out of hierarchical order.
        """
        for name in input_data:
            self.log.info("input_data[name]: {}".format(input_data[name]))
            entity_data = input_data[name]
            entity_type = entity_data["entity_type"]

            data = {}
            data["entityType"] = entity_type

            # Custom attributes are stored flat on the entity's data.
            for k, val in entity_data.get("custom_attributes", {}).items():
                data[k] = val

            if entity_type.lower() != "project":
                data["inputs"] = entity_data.get("inputs", [])

                # Tasks.
                # FIX: the original condition
                # `tasks is not None or len(tasks) > 0` raised TypeError
                # when "tasks" was explicitly None (the second operand was
                # only evaluated in exactly that case). A plain None guard
                # keeps all non-crashing behavior identical.
                tasks = entity_data.get("tasks", [])
                if tasks is not None:
                    data["tasks"] = tasks
                parents = []
                visualParent = None
                # Do not store project's id as visualParent (silo asset).
                if self.project is not None:
                    if self.project["_id"] != parent["_id"]:
                        visualParent = parent["_id"]
                        parents.extend(
                            parent.get("data", {}).get("parents", []))
                        parents.append(parent["name"])
                data["visualParent"] = visualParent
                data["parents"] = parents

            update_data = True
            # Process project
            if entity_type.lower() == "project":
                entity = io.find_one({"type": "project"})
                # TODO: should be in validator?
                assert (entity is not None), "Did not find project in DB"

                # Merge new data over the already existing project data.
                cur_entity_data = entity.get("data") or {}
                cur_entity_data.update(data)
                data = cur_entity_data

                self.project = entity
            # Raise error if project or parent are not set
            elif self.project is None or parent is None:
                raise AssertionError("Collected items are not in right order!")
            # Else process asset
            else:
                entity = io.find_one({"type": "asset", "name": name})
                if entity:
                    # Do not override data, only update
                    cur_entity_data = entity.get("data") or {}
                    cur_entity_data.update(data)
                    data = cur_entity_data
                else:
                    # Skip updating data
                    update_data = False

                    archived_entities = io.find({
                        "type": "archived_asset",
                        "name": name
                    })
                    unarchive_entity = None
                    for archived_entity in archived_entities:
                        archived_parents = (archived_entity.get(
                            "data", {}).get("parents"))
                        if data["parents"] == archived_parents:
                            unarchive_entity = archived_entity
                            break

                    if unarchive_entity is None:
                        # Create entity if it doesn't exist
                        entity = self.create_avalon_asset(name, data)
                    else:
                        # Unarchive if entity was archived
                        entity = self.unarchive_entity(unarchive_entity, data)

            if update_data:
                # Update entity data with input data
                io.update_many({"_id": entity["_id"]},
                               {"$set": {
                                   "data": data
                               }})

            # Recurse into children with this entity as the new parent.
            if "childs" in entity_data:
                self.import_to_avalon(entity_data["childs"], entity)
コード例 #13
0
    def process(self, instance):
        """Auto-update dependent rigs after a new model version is published.

        Reads the asset's ``taskOptions.rigging.autoModelUpdate.value``
        flag; when enabled and the just-integrated model version has
        predecessors, finds every rig subset whose latest version depends
        on a previous model version and spawns a mayapy subprocess
        (re-running this file) to update those rigs.

        Args:
            instance: Published pyblish instance; must provide
                ``data["assetDoc"]`` and ``data["toDatabase"]``.

        Raises:
            Exception: When the rig auto-update subprocess fails. The
                failure is also flagged on the model version document as
                ``data.rigAutoUpdateFailed`` for later debugging.
        """
        import os
        import re
        import sys
        import json
        import subprocess

        from avalon import io

        asset_doc = instance.data["assetDoc"]
        asset_name = asset_doc["name"]

        # Walk the nested dict path of the asset's rigging task option,
        # falling back to an empty (falsy) dict on any missing key.
        value_path = "taskOptions.rigging.autoModelUpdate.value"
        value = asset_doc["data"]
        for entry in value_path.split("."):
            value = value.get(entry, {})
        if not value:
            # Auto model update not enabled
            return

        # Get subset, version documents from instance which just been
        # integrated.
        model_subset, model_version, _ = instance.data["toDatabase"]

        if model_version["name"] == 1:
            # First version of model, must not have dependent rig.
            return

        # Find all previous versions of model, only document id is needed.
        previous = io.find({
            "type": "version",
            "parent": model_subset["_id"]
        },
                           sort=[("name", -1)],
                           projection={"_id": True},
                           skip=1)  # Skip the latest
        previous = set(str(p["_id"]) for p in previous)
        if not previous:
            self.log.warning("Model is now on version %d but has no previous, "
                             "skip updating rig." % model_version["name"])
            return

        # Any latest version of rig may not be using the latest model, so
        # we iterate through all rig subsets' latest version and compare
        # the dependency data with all previous model versions to find the
        # dependent.
        dependent_rigs = dict()

        # NOTE(review): the pattern "rig*" matches any name containing "ri"
        # followed by zero or more "g"s; "^rig" was probably intended.
        # Left unchanged to preserve current matching behavior -- confirm
        # with subset naming convention before tightening.
        for rig_subset in io.find(
            {
                "type": "subset",
                "parent": asset_doc["_id"],
                "name": re.compile("rig*")
            },
                projection={
                    "_id": True,
                    "name": True
                }):

            latest_rig = io.find_one(
                {
                    "type": "version",
                    "parent": rig_subset["_id"]
                },
                sort=[("name", -1)],
                projection={"data.dependencies": True})
            if latest_rig is None:
                # Not likely to happen, but just in case
                continue

            # Consider dependent if any dependency matched in model versions
            dependencies = set(latest_rig["data"]["dependencies"].keys())
            if dependencies.intersection(previous):
                dependent_rigs[str(latest_rig["_id"])] = rig_subset["name"]

        if not dependent_rigs:
            self.log.info("No rig to update, skip auto process.")
            return

        # Submit subprocess
        mayapy_exe = os.path.join(os.path.dirname(sys.executable),
                                  "mayapy.exe")
        cmd = [
            mayapy_exe,
            __file__,
            "asset_name={}".format(str(asset_name)),
            "model_subset={}".format(str(model_subset["name"])),
            "rig_versions={}".format(json.dumps(dependent_rigs)),
        ]

        print("auto rig cmd: {}".format(cmd))
        try:
            # FIX: the original passed `shell=True` together with an
            # argument list. On POSIX only the first list element reaches
            # the shell (the rest become shell positional args), and with
            # a full executable path the shell is needless on Windows too.
            # Running the list directly is both correct and safer.
            out_bytes = subprocess.check_output(cmd)
        except subprocess.CalledProcessError:
            # Mark failed for future debug.
            io.update_many({"_id": model_version["_id"]},
                           {"$set": {
                               "data.rigAutoUpdateFailed": True
                           }})
            raise Exception("Model publish success but Rig auto update "
                            "failed. Please inform rigger or TD.")
        else:
            print(out_bytes)
コード例 #14
0
    def process(self, instance):
        """Integrate subset, version and representation documents to DB.

        Aborts when any previous pyblish result failed (atomicity).
        Inserts the subset/version/representation documents collected in
        ``instance.data["toDatabase"]``; when the version already exists,
        either updates publish progress (progressive publishing) or
        refreshes the version time and representation data in place.

        Args:
            instance: Pyblish instance providing ``data["toDatabase"]``
                (a ``(subset, version, representations)`` tuple) and a
                context with ``assetDoc``, ``results`` and ``time``.
        """
        context = instance.context

        if not all(result["success"] for result in context.data["results"]):
            self.log.warning("Atomicity not held, aborting.")
            return

        # Integrate representations' to database
        self.log.info("Integrating representations to database ...")

        asset = context.data["assetDoc"]
        subset, version, representations = instance.data["toDatabase"]

        # Write subset if not exists.
        # (Renamed `filter` -> `filter_` to stop shadowing the builtin and
        # to stay consistent with the naming used further below.)
        filter_ = {"parent": asset["_id"], "name": subset["name"]}
        if io.find_one(filter_) is None:
            io.insert_one(subset)

        # Write version if not exists
        filter_ = {"parent": subset["_id"], "name": version["name"]}
        existed_version = io.find_one(filter_)
        if existed_version is None:
            # Write version and representations to database
            version_id = self.write_database(instance, version,
                                             representations)
            instance.data["insertedVersionId"] = version_id

            # Update dependent
            self.update_dependent(instance, version_id)

        else:
            if context.data.get("_progressivePublishing"):
                if instance.data.get("_progressiveOutput") is None:
                    pass  # Not given any output, no progress change

                else:
                    self.log.info("Update version publish progress.")
                    # Update version document "data.time"
                    filter_ = {"_id": existed_version["_id"]}
                    update = {"$set": {"data.time": context.data["time"]}}
                    if "progress" in version["data"]:
                        # Update version document "progress.current"
                        progress = version["data"]["progress"]["current"]
                        update["$inc"] = {"data.progress.current": progress}
                    else:
                        pass  # progress == -1, no progress update needed.
                    io.update_many(filter_, update)

            else:
                self.log.info("Version existed, representation file has been "
                              "overwritten.")
                # Update version document "data.time"
                filter_ = {"_id": existed_version["_id"]}
                update = {"$set": {"data.time": context.data["time"]}}
                io.update_many(filter_, update)

                # Update representation documents "data"
                for representation in representations:
                    filter_ = {
                        "name": representation["name"],
                        "parent": existed_version["_id"],
                    }
                    update = {"$set": {"data": representation["data"]}}
                    io.update_many(filter_, update)
コード例 #15
0
    def register(self, instance):
        """Register the instance's subset, version and representations.

        Resolves destination paths for every representation through the
        project anatomy templates, queues file copies on
        ``instance.data["transfers"]`` (performed elsewhere) and inserts
        the version and representation documents via ``io``.

        Args:
            instance: Pyblish instance carrying "representations" (list of
                repre dicts with at least "files", "ext" and "name"), and
                optionally "stagingDir", "version", "versionData",
                "subsetGroup", "dependencies".

        Raises:
            AssertionError: When no representations are present, when they
                are not a list/tuple, when project/asset lookup fails, or
                when a single-file representation has an absolute path.
        """
        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context
        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        # for result in context.data["results"]:
        #     if not result["success"]:
        #         self.log.debug(result)
        #         exc_type, exc_value, exc_traceback = result["error_info"]
        #         extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
        #         self.log.debug(
        #             "Error at line {}: \"{}\"".format(
        #                 extracted_traceback[1], result["error"]
        #             )
        #         )
        # assert all(result["success"] for result in context.data["results"]),(
        #     "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        if not stagingdir:
            self.log.info('''{} is missing reference to staging
                            directory Will try to get it from
                            representation'''.format(instance))

        # extra check if stagingDir actually exists and is available

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        # Ensure at least one file is set up for transfer in staging dir.
        repres = instance.data.get("representations", None)
        assert repres, "Instance has no files to transfer"
        assert isinstance(repres, (list, tuple)), (
            "Instance 'files' must be a list, got: {0}".format(repres))

        # FIXME: io is not initialized at this point for shell host
        io.install()
        project = io.find_one({"type": "project"})

        asset = io.find_one({
            "type": "asset",
            "name": ASSET,
            "parent": project["_id"]
        })

        # NOTE(review): if `project` were None the lookup above would have
        # already raised; this assert only effectively guards `asset`.
        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            }, {"name": True},
            sort=[("name", -1)])

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        # An explicit version on the instance overrides the computed one.
        if instance.data.get('version'):
            next_version = int(instance.data.get('version'))

        # self.log.info("Verifying version from assumed destination")

        # assumed_data = instance.data["assumedTemplateData"]
        # assumed_version = assumed_data["version"]
        # if assumed_version != next_version:
        #     raise AttributeError("Assumed version 'v{0:03d}' does not match"
        #                          "next version in database "
        #                          "('v{1:03d}')".format(assumed_version,
        #                                                next_version))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)

        version_data_instance = instance.data.get('versionData')

        if version_data_instance:
            version_data.update(version_data_instance)

        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        # Insert the version document, or overwrite an existing one with
        # the same parent/name (keeps its original _id).
        existing_version = io.find_one({
            'type': 'version',
            'parent': subset["_id"],
            'name': next_version
        })
        if existing_version is None:
            version_id = io.insert_one(version).inserted_id
        else:
            io.update_many(
                {
                    'type': 'version',
                    'parent': subset["_id"],
                    'name': next_version
                }, {'$set': version})
            version_id = existing_version['_id']
        instance.data['version'] = version['name']

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        # Build the hierarchy path from the asset's parents, if any.
        hierarchy = ""
        parents = io.find_one({
            "type": 'asset',
            "name": ASSET
        })['data']['parents']
        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*parents)

        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []
        template_name = 'publish'
        if 'transfers' not in instance.data:
            instance.data['transfers'] = []

        for idx, repre in enumerate(instance.data["representations"]):

            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |       ||
            # |       ||
            # |       ||
            # |_______|
            #
            # create template data for Anatomy
            template_data = {
                "root": root,
                "project": {
                    "name": PROJECT,
                    "code": project['data']['code']
                },
                "silo": asset.get('silo'),
                "task": TASK,
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": int(version["name"]),
                "hierarchy": hierarchy
            }

            files = repre['files']
            # Per-representation staging dir / template override.
            if repre.get('stagingDir'):
                stagingdir = repre['stagingDir']
            if repre.get('anatomy_template'):
                template_name = repre['anatomy_template']
            template = os.path.normpath(
                anatomy.templates[template_name]["path"])

            # A list of files means a frame sequence; a string is a single
            # file.
            sequence_repre = isinstance(files, list)

            if sequence_repre:
                src_collections, remainder = clique.assemble(files)
                self.log.debug("src_tail_collections: {}".format(
                    str(src_collections)))
                src_collection = src_collections[0]

                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = src_collection.format("{tail}")

                # fix dst_padding
                valid_files = [x for x in files if src_collection.match(x)]
                padd_len = len(valid_files[0].replace(src_head, "").replace(
                    src_tail, ""))
                src_padding_exp = "%0{}d".format(padd_len)

                # Fill the template with two sample frames so clique can
                # recover the destination head/tail around the frame number.
                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = repre['ext']
                    template_data["frame"] = src_padding_exp % i
                    anatomy_filled = anatomy.format(template_data)

                    test_dest_files.append(
                        os.path.normpath(
                            anatomy_filled[template_name]["path"]))

                self.log.debug("test_dest_files: {}".format(
                    str(test_dest_files)))

                dst_collections, remainder = clique.assemble(test_dest_files)
                dst_collection = dst_collections[0]
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                index_frame_start = None

                # Optional renumbering: when the repre declares frameStart,
                # destination frames are renumbered from that value.
                if repre.get("frameStart"):
                    frame_start_padding = len(str(repre.get("frameEnd")))
                    index_frame_start = int(repre.get("frameStart"))

                dst_padding_exp = src_padding_exp
                dst_start_frame = None
                for i in src_collection.indexes:
                    src_padding = src_padding_exp % i

                    # for adding first frame into db
                    if not dst_start_frame:
                        dst_start_frame = src_padding

                    src_file_name = "{0}{1}{2}".format(src_head, src_padding,
                                                       src_tail)

                    dst_padding = src_padding_exp % i

                    if index_frame_start:
                        dst_padding_exp = "%0{}d".format(frame_start_padding)
                        dst_padding = dst_padding_exp % index_frame_start
                        index_frame_start += 1

                    # ".." may appear when the template's extension joins an
                    # already-dotted tail; collapse it to a single dot.
                    dst = "{0}{1}{2}".format(dst_head, dst_padding,
                                             dst_tail).replace("..", ".")

                    self.log.debug("destination: `{}`".format(dst))
                    src = os.path.join(stagingdir, src_file_name)

                    self.log.debug("source: {}".format(src))
                    instance.data["transfers"].append([src, dst])

                # Store the first frame's path as the published path.
                dst = "{0}{1}{2}".format(dst_head, dst_start_frame,
                                         dst_tail).replace("..", ".")
                repre['published_path'] = dst

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                template_data.pop("frame", None)
                fname = files
                assert not os.path.isabs(fname), (
                    "Given file name is a full path")

                template_data["representation"] = repre['ext']

                if repre.get("outputName"):
                    template_data["output"] = repre['outputName']

                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                dst = os.path.normpath(
                    anatomy_filled[template_name]["path"]).replace("..", ".")

                instance.data["transfers"].append([src, dst])

                repre['published_path'] = dst
                self.log.debug("__ dst: {}".format(dst))

            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": repre['name'],
                "data": {
                    'path': dst,
                    'template': template
                },
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
                    "project": {
                        "name": PROJECT,
                        "code": project['data']['code']
                    },
                    'task': TASK,
                    "silo": asset.get('silo'),
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
                    "version": version["name"],
                    "hierarchy": hierarchy,
                    "representation": repre['ext']
                }
            }

            if sequence_repre and repre.get("frameStart"):
                representation['context']['frame'] = repre.get("frameStart")

            self.log.debug("__ representation: {}".format(representation))
            destination_list.append(dst)
            self.log.debug("__ destination_list: {}".format(destination_list))
            instance.data['destination_list'] = destination_list
            representations.append(representation)
            self.log.debug("__ representations: {}".format(representations))

        self.log.debug("__ representations: {}".format(representations))
        for rep in instance.data["representations"]:
            self.log.debug("__ represNAME: {}".format(rep['name']))
            self.log.debug("__ represPATH: {}".format(rep['published_path']))
        # Bulk-insert all representation documents at once.
        io.insert_many(representations)
        # self.log.debug("Representation: {}".format(representations))
        self.log.info("Registered {} items".format(len(representations)))