Example #1
    def __init__(self, avalon_module, server_manager):
        self.module = avalon_module
        self.server_manager = server_manager

        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

        self.prefix = "/avalon"

        self.endpoint_defs = (
            (
                "GET",
                "/projects",
                AvalonProjectsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}",
                AvalonProjectEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets",
                AvalonAssetsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets/{asset_name}",
                AvalonAssetEndpoint(self)
            )
        )

        self.register()
Example #2
class AvalonRestApiResource:
    def __init__(self, avalon_module, server_manager):
        self.module = avalon_module
        self.server_manager = server_manager

        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

        self.prefix = "/avalon"

        self.endpoint_defs = (
            (
                "GET",
                "/projects",
                AvalonProjectsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}",
                AvalonProjectEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets",
                AvalonAssetsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets/{asset_name}",
                AvalonAssetEndpoint(self)
            )
        )

        self.register()

    def register(self):
        for methods, url, endpoint in self.endpoint_defs:
            final_url = self.prefix + url
            self.server_manager.add_route(
                methods, final_url, endpoint.dispatch
            )

    @staticmethod
    def json_dump_handler(value):
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        if isinstance(value, ObjectId):
            return str(value)
        raise TypeError(value)

    @classmethod
    def encode(cls, data):
        return json.dumps(
            data,
            indent=4,
            default=cls.json_dump_handler
        ).encode("utf-8")
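Example #2 above relies on a custom `default` handler so that Mongo documents survive `json.dumps`: datetimes become ISO-8601 strings and ObjectIds become plain strings. Below is a minimal standalone sketch of that serialization convention; it assumes `bson` (shipped with pymongo) is importable and the sample document is hypothetical.

import datetime
import json

from bson import ObjectId  # bundled with the pymongo package


def json_dump_handler(value):
    # Same convention as AvalonRestApiResource.json_dump_handler above.
    if isinstance(value, datetime.datetime):
        return value.isoformat()
    if isinstance(value, ObjectId):
        return str(value)
    raise TypeError(value)


document = {
    "_id": ObjectId(),
    "type": "project",
    "createdAt": datetime.datetime(2021, 1, 1, 12, 0),
}
# Encoded the same way as `encode`: pretty-printed JSON as UTF-8 bytes.
print(json.dumps(document, indent=4, default=json_dump_handler).encode("utf-8"))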
Example #3
def get_app_environments_for_context(project_name,
                                     asset_name,
                                     task_name,
                                     app_name,
                                     env=None):
    """Prepare environment variables by context.
    Args:
        project_name (str): Name of project.
        asset_name (str): Name of asset.
        task_name (str): Name of task.
        app_name (str): Name of application that is launched and can be found
            by ApplicationManager.
        env (dict): Initial environment variables. `os.environ` is used when
            not passed.

    Returns:
        dict: Environments for passed context and application.
    """
    from avalon.api import AvalonMongoDB

    # Avalon database connection
    dbcon = AvalonMongoDB()
    dbcon.Session["AVALON_PROJECT"] = project_name
    dbcon.install()

    # Project document
    project_doc = dbcon.find_one({"type": "project"})
    asset_doc = dbcon.find_one({"type": "asset", "name": asset_name})

    # Prepare app object which can be obtained only from ApplicationManager
    app_manager = ApplicationManager()
    app = app_manager.applications[app_name]

    # Project's anatomy
    anatomy = Anatomy(project_name)

    data = EnvironmentPrepData({
        "project_name": project_name,
        "asset_name": asset_name,
        "task_name": task_name,
        "app_name": app_name,
        "app": app,
        "dbcon": dbcon,
        "project_doc": project_doc,
        "asset_doc": asset_doc,
        "anatomy": anatomy,
        "env": env
    })

    prepare_host_environments(data)
    prepare_context_environments(data)

    # Discard avalon connection
    dbcon.uninstall()

    return data["env"]
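A hypothetical call of the function above. All names (project, asset, task and application) are placeholders, and a reachable Avalon Mongo database containing them is assumed, so this is a usage sketch rather than a runnable test.

import os

# Placeholder context values - replace with names that exist in your database.
app_env = get_app_environments_for_context(
    project_name="demo_project",
    asset_name="sh010",
    task_name="animation",
    app_name="maya/2020",
    env=dict(os.environ),
)
# Print the names of the prepared environment variables.
print(sorted(app_env))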
Example #4
    def __init__(self):
        # Get mongo connection
        from openpype.lib import OpenPypeMongoConnection
        from avalon.api import AvalonMongoDB

        settings_collection = OpenPypeMongoConnection.get_mongo_client()

        self._anatomy_keys = None
        self._attribute_keys = None
        # TODO prepare version of pype
        # - pype version should define how settings are saved and loaded

        database_name = os.environ["OPENPYPE_DATABASE_NAME"]
        # TODO modify to not use hardcoded keys
        collection_name = "settings"

        self.settings_collection = settings_collection

        self.database_name = database_name
        self.collection_name = collection_name

        self.collection = settings_collection[database_name][collection_name]
        self.avalon_db = AvalonMongoDB()

        self.system_settings_cache = CacheValues()
        self.project_settings_cache = collections.defaultdict(CacheValues)
        self.project_anatomy_cache = collections.defaultdict(CacheValues)
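A minimal sketch of the collection lookup used above: the Mongo client is indexed first by database name and then by collection name. It assumes a plain pymongo `MongoClient`; in OpenPype the client normally comes from `OpenPypeMongoConnection` and the URI here is only a placeholder.

import os

from pymongo import MongoClient

# Placeholder URI; OpenPypeMongoConnection.get_mongo_client() provides this in OpenPype.
client = MongoClient(os.environ.get("OPENPYPE_MONGO", "mongodb://localhost:27017"))

database_name = os.environ.get("OPENPYPE_DATABASE_NAME", "openpype")
collection_name = "settings"

# Same double indexing as `settings_collection[database_name][collection_name]`.
collection = client[database_name][collection_name]
print(collection.full_name)  # e.g. "openpype.settings"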
Example #5
    def start_timer(self, project_name, asset_name, task_name, hierarchy):
        """
            Start timer for 'project_name', 'asset_name' and 'task_name'

            Called from REST api by hosts.

            Args:
                project_name (string)
                asset_name (string)
                task_name (string)
                hierarchy (string)
        """
        dbconn = AvalonMongoDB()
        dbconn.install()
        dbconn.Session["AVALON_PROJECT"] = project_name

        asset_doc = dbconn.find_one({"type": "asset", "name": asset_name})
        if not asset_doc:
            raise ValueError("Uknown asset {}".format(asset_name))

        task_type = ''
        try:
            task_type = asset_doc["data"]["tasks"][task_name]["type"]
        except KeyError:
            self.log.warning(
                "Couldn't find task_type for {}".format(task_name))

        hierarchy = hierarchy.split("\\")
        hierarchy.append(asset_name)

        data = {
            "project_name": project_name,
            "task_name": task_name,
            "task_type": task_type,
            "hierarchy": hierarchy
        }
        self.timer_started(None, data)
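A self-contained sketch of how the timer payload above is assembled from the backslash-separated hierarchy string; the values are hypothetical and the database lookup of `task_type` is replaced by a constant.

project_name = "demo_project"
asset_name = "sh010"
task_name = "animation"

# REST callers send the hierarchy as a backslash-separated string.
hierarchy = "shots\\sq01"

hierarchy_list = hierarchy.split("\\")
hierarchy_list.append(asset_name)

data = {
    "project_name": project_name,
    "task_name": task_name,
    "task_type": "Animation",  # normally read from the asset document
    "hierarchy": hierarchy_list,
}
print(data["hierarchy"])  # ['shots', 'sq01', 'sh010']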
Example #6
class DeleteOldVersions(api.Loader):

    representations = ["*"]
    families = ["*"]

    label = "Delete Old Versions"
    icon = "trash"
    color = "#d8d8d8"

    options = [
        qargparse.Integer("versions_to_keep",
                          default=2,
                          min=0,
                          help="Versions to keep:"),
        qargparse.Boolean("remove_publish_folder",
                          help="Remove publish folder:")
    ]

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    def delete_whole_dir_paths(self, dir_paths, delete=True):
        size = 0

        for dir_path in dir_paths:
            # Delete all files and folders in dir path
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    file_path = os.path.join(root, name)
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                for name in dirs:
                    if delete:
                        os.rmdir(os.path.join(root, name))

            if not delete:
                continue

            # Delete the folder itself and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(dir_path)

        return size

    def path_from_representation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return (None, None)

        sequence_path = None
        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = avalon.pipeline.format_template_with_optional_keys(
                context, template)
            if "frame" in context:
                context["frame"] = self.sequence_splitter
                sequence_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        context, template))

        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(path), sequence_path)

    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
        size = 0

        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove file if `frame` key was not in context or
                # filled path is in remainders (single file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.debug(
                            "File was not found: {}".format(file_path))
                        continue

                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                    remainders.remove(file_path_base)
                    continue

                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):

                            size += os.path.getsize(_file_path)

                            if delete:
                                os.remove(_file_path)
                                self.log.debug(
                                    "Removed file: {}".format(_file_path))

                    _seq_path = final_col.format("{head}{padding}{tail}")
                    self.log.debug("Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))
                else:
                    self.log.debug("File was not found: {}".format(file_path))

        # Delete as much as possible parent folders
        if not delete:
            return size

        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

        return size

    def message(self, text):
        msgBox = QtWidgets.QMessageBox()
        msgBox.setText(text)
        msgBox.setStyleSheet(style.load_stylesheet())
        msgBox.setWindowFlags(msgBox.windowFlags()
                              | QtCore.Qt.FramelessWindowHint)
        msgBox.exec_()

    def get_data(self, context, versions_count):
        subset = context["subset"]
        asset = context["asset"]
        anatomy = Anatomy(context["project"]["name"])

        self.dbcon = AvalonMongoDB()
        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
        self.dbcon.install()

        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": [subset["_id"]]
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for _parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset["name"], subset["name"], version["name"])
            self.log.debug(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.info(msg)
            self.message(msg)
            return

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(repre, anatomy)
            if file_path is None:
                self.log.debug(
                    ("Could not format path for represenation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.debug(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        data = {
            "dir_paths": dir_paths,
            "file_paths_by_dir": file_paths_by_dir,
            "versions": versions,
            "asset": asset,
            "subset": subset,
            "archive_subset": versions_count == 0
        }

        return data

    def main(self, data, remove_publish_folder):
        # Size of files.
        size = 0

        if remove_publish_folder:
            size = self.delete_whole_dir_paths(data["dir_paths"].values())
        else:
            size = self.delete_only_repre_files(data["dir_paths"],
                                                data["file_paths_by_dir"])

        mongo_changes_bulk = []
        for version in data["versions"]:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if data["archive_subset"]:
            mongo_changes_bulk.append(
                UpdateOne({
                    "_id": data["subset"]["_id"],
                    "type": "subset"
                }, {"$set": {
                    "type": "archived_subset"
                }}))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        session = ftrack_api.Session()
        query = ("AssetVersion where asset.parent.id is \"{}\""
                 " and asset.name is \"{}\""
                 " and version is \"{}\"")
        for v in data["versions"]:
            try:
                ftrack_version = session.query(
                    query.format(data["asset"]["data"]["ftrackId"],
                                 data["subset"]["name"], v["name"])).one()
            except ftrack_api.exception.NoResultFoundError:
                continue

            ftrack_version["is_published"] = False

        try:
            session.commit()

        except Exception:
            msg = ("Could not set `is_published` attribute to `False`"
                   " for selected AssetVersions.")
            self.log.error(msg)
            self.message(msg)

        msg = "Total size of files: " + self.sizeof_fmt(size)
        self.log.info(msg)
        self.message(msg)

    def load(self, context, name=None, namespace=None, options=None):
        try:
            versions_to_keep = 2
            remove_publish_folder = False
            if options:
                versions_to_keep = options.get("versions_to_keep",
                                               versions_to_keep)
                remove_publish_folder = options.get("remove_publish_folder",
                                                    remove_publish_folder)

            data = self.get_data(context, versions_to_keep)

            self.main(data, remove_publish_folder)

        except Exception:
            self.log.error("Failed to delete versions.", exc_info=True)
Example #7
class DeleteOldVersions(BaseAction):

    identifier = "delete.old.versions"
    label = "OpenPype Admin"
    variant = "- Delete old versions"
    description = ("Delete files from older publishes so project can be"
                   " archived with only lates versions.")
    icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")

    dbcon = AvalonMongoDB()

    interface_title = "Choose your preferences"
    splitter_item = {"type": "label", "value": "---"}
    sequence_splitter = "__sequence_splitter__"

    def discover(self, session, entities, event):
        """ Validation. """
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def interface(self, session, entities, event):
        # TODO Add roots existence validation
        items = []
        values = event["data"].get("values")
        if values:
            versions_count = int(values["last_versions_count"])
            if versions_count >= 1:
                return
            items.append({
                "type": "label",
                "value": ("# You have to keep at least 1 version!")
            })

        items.append({
            "type":
            "label",
            "value":
            ("<i><b>WARNING:</b> This will remove published files of older"
             " versions from disk so we don't recommend use"
             " this action on \"live\" project.</i>")
        })

        items.append(self.splitter_item)

        # How many versions to keep
        items.append({
            "type": "label",
            "value": "## Choose how many versions you want to keep:"
        })
        items.append({
            "type":
            "label",
            "value":
            ("<i><b>NOTE:</b> We do recommend to keep 2 versions.</i>")
        })
        items.append({
            "type": "number",
            "name": "last_versions_count",
            "label": "Versions",
            "value": 2
        })

        items.append(self.splitter_item)

        items.append({
            "type":
            "label",
            "value": ("## Remove publish folder even if there"
                      " are other than published files:")
        })
        items.append({
            "type":
            "label",
            "value":
            ("<i><b>WARNING:</b> This may remove more than you want.</i>")
        })
        items.append({
            "type": "boolean",
            "name": "force_delete_publish_folder",
            "label": "Are You sure?",
            "value": False
        })

        items.append(self.splitter_item)

        items.append({
            "type":
            "label",
            "value":
            ("<i>This will <b>NOT</b> delete any files and only return the "
             "total size of the files.</i>")
        })
        items.append({
            "type": "boolean",
            "name": "only_calculate",
            "label": "Only calculate size of files.",
            "value": False
        })

        return {"items": items, "title": self.inteface_title}

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    def launch(self, session, entities, event):
        values = event["data"].get("values")
        if not values:
            return

        versions_count = int(values["last_versions_count"])
        force_to_remove = values["force_delete_publish_folder"]
        only_calculate = values["only_calculate"]

        _val1 = "OFF"
        if force_to_remove:
            _val1 = "ON"

        _val3 = "s"
        if versions_count == 1:
            _val3 = ""

        self.log.debug(
            ("Process started. Force to delete publish folder is set to [{0}]"
             " and will keep {1} latest version{2}.").format(
                 _val1, versions_count, _val3))

        self.dbcon.install()

        project = None
        avalon_asset_names = []
        asset_versions_by_parent_id = collections.defaultdict(list)
        subset_names_by_asset_name = collections.defaultdict(list)

        ftrack_assets_by_name = {}
        for entity in entities:
            ftrack_asset = entity["asset"]

            parent_ent = ftrack_asset["parent"]
            parent_ftrack_id = parent_ent["id"]
            parent_name = parent_ent["name"]

            if parent_name not in avalon_asset_names:
                avalon_asset_names.append(parent_name)

            # Group asset versions by parent entity
            asset_versions_by_parent_id[parent_ftrack_id].append(entity)

            # Get project
            if project is None:
                project = parent_ent["project"]

            # Collect subset names per asset
            subset_name = ftrack_asset["name"]
            subset_names_by_asset_name[parent_name].append(subset_name)

            if subset_name not in ftrack_assets_by_name:
                ftrack_assets_by_name[subset_name] = ftrack_asset

        # Set Mongo collection
        project_name = project["full_name"]
        anatomy = Anatomy(project_name)
        self.dbcon.Session["AVALON_PROJECT"] = project_name
        self.log.debug("Project is set to {}".format(project_name))

        # Get Assets from avalon database
        assets = list(
            self.dbcon.find({
                "type": "asset",
                "name": {
                    "$in": avalon_asset_names
                }
            }))
        asset_id_to_name_map = {
            asset["_id"]: asset["name"]
            for asset in assets
        }
        asset_ids = list(asset_id_to_name_map.keys())

        self.log.debug("Collected assets ({})".format(len(asset_ids)))

        # Get Subsets
        subsets = list(
            self.dbcon.find({
                "type": "subset",
                "parent": {
                    "$in": asset_ids
                }
            }))
        subsets_by_id = {}
        subset_ids = []
        for subset in subsets:
            asset_id = subset["parent"]
            asset_name = asset_id_to_name_map[asset_id]
            available_subsets = subset_names_by_asset_name[asset_name]

            if subset["name"] not in available_subsets:
                continue

            subset_ids.append(subset["_id"])
            subsets_by_id[subset["_id"]] = subset

        self.log.debug("Collected subsets ({})".format(len(subset_ids)))

        # Get Versions
        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": subset_ids
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            subset = subsets_by_id[version["parent"]]
            asset_id = subset["parent"]
            asset_name = asset_id_to_name_map[asset_id]
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset_name, subset["name"], version["name"])
            self.log.warning(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.debug(msg)
            return {"success": True, "message": msg}

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(repre, anatomy)
            if file_path is None:
                self.log.warning(
                    ("Could not format path for represenation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.warning(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        # Size of files.
        size = 0

        if only_calculate:
            if force_to_remove:
                size = self.delete_whole_dir_paths(dir_paths.values(),
                                                   delete=False)
            else:
                size = self.delete_only_repre_files(dir_paths,
                                                    file_paths_by_dir,
                                                    delete=False)

            msg = "Total size of files: " + self.sizeof_fmt(size)

            self.log.warning(msg)

            return {"success": True, "message": msg}

        if force_to_remove:
            size = self.delete_whole_dir_paths(dir_paths.values())
        else:
            size = self.delete_only_repre_files(dir_paths, file_paths_by_dir)

        mongo_changes_bulk = []
        for version in versions:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        for subset_id, _versions in versions_by_parent.items():
            subset_name = None
            for subset in subsets:
                if subset["_id"] == subset_id:
                    subset_name = subset["name"]
                    break

            if subset_name is None:
                self.log.warning("Subset with ID `{}` was not found.".format(
                    str(subset_id)))
                continue

            ftrack_asset = ftrack_assets_by_name.get(subset_name)
            if not ftrack_asset:
                self.log.warning(("Could not find Ftrack asset with name `{}`"
                                  ).format(subset_name))
                continue

            version_numbers = [int(ver["name"]) for ver in _versions]
            for version in ftrack_asset["versions"]:
                if int(version["version"]) in version_numbers:
                    version["is_published"] = False

        try:
            session.commit()

        except Exception:
            msg = ("Could not set `is_published` attribute to `False`"
                   " for selected AssetVersions.")
            self.log.warning(msg, exc_info=True)

            return {"success": False, "message": msg}

        msg = "Total size of files deleted: " + self.sizeof_fmt(size)

        self.log.warning(msg)

        return {"success": True, "message": msg}

    def delete_whole_dir_paths(self, dir_paths, delete=True):
        size = 0

        for dir_path in dir_paths:
            # Delete all files and folders in dir path
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    file_path = os.path.join(root, name)
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                for name in dirs:
                    if delete:
                        os.rmdir(os.path.join(root, name))

            if not delete:
                continue

            # Delete the folder itself and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(dir_path)

        return size

    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
        size = 0

        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove file if `frame` key was not in context or
                # filled path is in remainders (single file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.warning(
                            "File was not found: {}".format(file_path))
                        continue

                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                    remainders.remove(file_path_base)
                    continue

                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):

                            size += os.path.getsize(_file_path)

                            if delete:
                                os.remove(_file_path)
                                self.log.debug(
                                    "Removed file: {}".format(_file_path))

                    _seq_path = final_col.format("{head}{padding}{tail}")
                    self.log.debug("Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))
                else:
                    self.log.warning(
                        "File was not found: {}".format(file_path))

        # Delete as much as possible parent folders
        if not delete:
            return size

        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

        return size

    def path_from_representation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return (None, None)

        sequence_path = None
        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = avalon.pipeline.format_template_with_optional_keys(
                context, template)
            if "frame" in context:
                context["frame"] = self.sequence_splitter
                sequence_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        context, template))

        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(path), sequence_path)
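Both Delete Old Versions implementations rely on the same `sequence_splitter` trick: the representation template is formatted a second time with a sentinel in place of the frame number, and the basename is later split around that sentinel to get the head and tail of the file sequence, which are matched against collections assembled from files on disk. A small sketch with a hypothetical template; no database or clique is required for the illustration.

import os

sequence_splitter = "__sequence_splitter__"

template = "{root}/{asset}/render.{frame}.exr"
context = {"root": "/projects/demo", "asset": "sh010", "frame": "0001"}

# Concrete path of a single representation file.
path = os.path.normpath(template.format(**context))

# The same template formatted with the sentinel instead of the frame number.
context["frame"] = sequence_splitter
sequence_path = os.path.normpath(template.format(**context))

# Splitting the basename around the sentinel yields the sequence head and tail.
head, tail = os.path.split(sequence_path)[1].split(sequence_splitter)
print(path)   # /projects/demo/sh010/render.0001.exr (separators vary by platform)
print(head)   # render.
print(tail)   # .exr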
Example #8
    def __init__(self, parent=None):
        super(LauncherWindow, self).__init__(parent)

        self.log = logging.getLogger(".".join(
            [__name__, self.__class__.__name__]))
        self.dbcon = AvalonMongoDB()

        self.setWindowTitle("Launcher")
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose, False)

        icon = QtGui.QIcon(resources.pype_icon_filepath())
        self.setWindowIcon(icon)
        self.setStyleSheet(style.load_stylesheet())

        # Allow minimize
        self.setWindowFlags(self.windowFlags()
                            | QtCore.Qt.WindowMinimizeButtonHint)

        project_panel = ProjectsPanel(self.dbcon)
        asset_panel = AssetsPanel(self.dbcon)

        page_slider = SlidePageWidget()
        page_slider.addWidget(project_panel)
        page_slider.addWidget(asset_panel)

        # actions
        actions_bar = ActionBar(self.dbcon, self)

        # statusbar
        statusbar = QtWidgets.QWidget()
        layout = QtWidgets.QHBoxLayout(statusbar)

        message_label = QtWidgets.QLabel()
        message_label.setFixedHeight(15)

        action_history = ActionHistory()
        action_history.setStatusTip("Show Action History")

        layout.addWidget(message_label)
        layout.addWidget(action_history)

        # Vertically split Pages and Actions
        body = QtWidgets.QSplitter()
        body.setContentsMargins(0, 0, 0, 0)
        body.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                           QtWidgets.QSizePolicy.Expanding)
        body.setOrientation(QtCore.Qt.Vertical)
        body.addWidget(page_slider)
        body.addWidget(actions_bar)

        # Set useful default sizes and set stretch
        # for the pages so that is the only one that
        # stretches on UI resize.
        body.setStretchFactor(0, 10)
        body.setSizes([580, 160])

        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(body)
        layout.addWidget(statusbar)
        layout.setSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)

        self.message_label = message_label
        self.project_panel = project_panel
        self.asset_panel = asset_panel
        self.actions_bar = actions_bar
        self.action_history = action_history
        self.page_slider = page_slider
        self._page = 0

        # signals
        actions_bar.action_clicked.connect(self.on_action_clicked)
        action_history.trigger_history.connect(self.on_history_action)
        project_panel.project_clicked.connect(self.on_project_clicked)
        asset_panel.back_clicked.connect(self.on_back_clicked)

        # Add some signals to propagate from the asset panel
        for signal in (asset_panel.project_bar.project_changed,
                       asset_panel.assets_widget.selection_changed,
                       asset_panel.tasks_widget.task_changed):
            signal.connect(self.on_session_changed)

        # todo: Simplify this callback connection
        asset_panel.project_bar.project_changed.connect(
            self.on_project_changed)

        self.resize(520, 740)
Example #9
    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()
        super(StoreThumbnailsToAvalon, self).__init__(*args, **kwargs)
Example #10
class DeleteAssetSubset(BaseAction):
    '''Delete asset/subset action.'''

    #: Action identifier.
    identifier = "delete.asset.subset"
    #: Action label.
    label = "Delete Asset/Subsets"
    #: Action description.
    description = "Removes from Avalon with all childs and asset from Ftrack"
    icon = statics_icon("ftrack", "action_icons", "DeleteAsset.svg")

    settings_key = "delete_asset_subset"
    #: Db connection
    dbcon = AvalonMongoDB()

    splitter = {"type": "label", "value": "---"}
    action_data_by_id = {}
    asset_prefix = "asset:"
    subset_prefix = "subset:"

    def discover(self, session, entities, event):
        """ Validation """
        task_ids = []
        for ent_info in event["data"]["selection"]:
            if ent_info.get("entityType") == "task":
                task_ids.append(ent_info["entityId"])

        is_valid = False
        for entity in entities:
            if (entity["id"] in task_ids
                    and entity.entity_type.lower() != "task"):
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def _launch(self, event):
        try:
            entities = self._translate_event(event)
            if "values" not in event["data"]:
                self.dbcon.install()
                return self._interface(self.session, entities, event)

            confirmation = self.confirm_delete(entities, event)
            if confirmation:
                return confirmation

            self.dbcon.install()
            response = self.launch(self.session, entities, event)
        finally:
            self.dbcon.uninstall()

        return self._handle_result(response)

    def interface(self, session, entities, event):
        self.show_message(event, "Preparing data...", True)
        items = []
        title = "Choose items to delete"

        # Filter selection and get ftrack ids
        selection = event["data"].get("selection") or []
        ftrack_ids = []
        project_in_selection = False
        for entity in selection:
            entity_type = (entity.get("entityType") or "").lower()
            if entity_type != "task":
                if entity_type == "show":
                    project_in_selection = True
                continue

            ftrack_id = entity.get("entityId")
            if not ftrack_id:
                continue

            ftrack_ids.append(ftrack_id)

        if project_in_selection:
            msg = "It is not possible to use this action on project entity."
            self.show_message(event, msg, True)

        # Filter event even more (skip task entities)
        # - task entities are not relevant for avalon
        entity_mapping = {}
        for entity in entities:
            ftrack_id = entity["id"]
            if ftrack_id not in ftrack_ids:
                continue

            if entity.entity_type.lower() == "task":
                ftrack_ids.remove(ftrack_id)

            entity_mapping[ftrack_id] = entity

        if not ftrack_ids:
            # It is a bug if this happens!
            return {
                "success": False,
                "message": "Invalid selection for this action (Bug)"
            }

        if entities[0].entity_type.lower() == "project":
            project = entities[0]
        else:
            project = entities[0]["project"]

        project_name = project["full_name"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        selected_av_entities = list(
            self.dbcon.find({
                "type": "asset",
                "data.ftrackId": {
                    "$in": ftrack_ids
                }
            }))
        found_without_ftrack_id = {}
        if len(selected_av_entities) != len(ftrack_ids):
            found_ftrack_ids = [
                ent["data"]["ftrackId"] for ent in selected_av_entities
            ]
            for ftrack_id, entity in entity_mapping.items():
                if ftrack_id in found_ftrack_ids:
                    continue

                av_ents_by_name = list(
                    self.dbcon.find({
                        "type": "asset",
                        "name": entity["name"]
                    }))
                if not av_ents_by_name:
                    continue

                ent_path_items = [ent["name"] for ent in entity["link"]]
                parents = ent_path_items[1:len(ent_path_items) - 1:]
                # TODO we should tell the user that
                # some of them are missing in Avalon
                for av_ent in av_ents_by_name:
                    if av_ent["data"]["parents"] != parents:
                        continue

                    # TODO we should tell the user that the found entity
                    # with the same name does not match the same ftrack id?
                    if "ftrackId" not in av_ent["data"]:
                        selected_av_entities.append(av_ent)
                        found_without_ftrack_id[str(av_ent["_id"])] = ftrack_id
                        break

        if not selected_av_entities:
            return {
                "success": False,
                "message": "Didn't found entities in avalon"
            }

        # Remove cached actions older than 2 minutes
        old_action_ids = []
        for id, data in self.action_data_by_id.items():
            created_at = data.get("created_at")
            if not created_at:
                old_action_ids.append(id)
                continue
            cur_time = datetime.now()
            existing_in_sec = (cur_time - created_at).total_seconds()
            if existing_in_sec > 60 * 2:
                old_action_ids.append(id)

        for id in old_action_ids:
            self.action_data_by_id.pop(id, None)

        # Store data for action id
        action_id = str(uuid.uuid1())
        self.action_data_by_id[action_id] = {
            "attempt": 1,
            "created_at": datetime.now(),
            "project_name": project_name,
            "subset_ids_by_name": {},
            "subset_ids_by_parent": {},
            "without_ftrack_id": found_without_ftrack_id
        }

        id_item = {"type": "hidden", "name": "action_id", "value": action_id}

        items.append(id_item)
        asset_ids = [ent["_id"] for ent in selected_av_entities]
        subsets_for_selection = self.dbcon.find({
            "type": "subset",
            "parent": {
                "$in": asset_ids
            }
        })

        asset_ending = ""
        if len(selected_av_entities) > 1:
            asset_ending = "s"

        asset_title = {
            "type": "label",
            "value": "# Delete asset{}:".format(asset_ending)
        }
        asset_note = {
            "type":
            "label",
            "value": ("<p><i>NOTE: Action will delete checked entities"
                      " in Ftrack and Avalon with all children entities and"
                      " published content.</i></p>")
        }

        items.append(asset_title)
        items.append(asset_note)

        asset_items = collections.defaultdict(list)
        for asset in selected_av_entities:
            ent_path_items = [project_name]
            ent_path_items.extend(asset.get("data", {}).get("parents") or [])
            ent_path_to_parent = "/".join(ent_path_items) + "/"
            asset_items[ent_path_to_parent].append(asset)

        for asset_parent_path, assets in sorted(asset_items.items()):
            items.append({
                "type": "label",
                "value": "## <b>- {}</b>".format(asset_parent_path)
            })
            for asset in assets:
                items.append({
                    "label":
                    asset["name"],
                    "name":
                    "{}{}".format(self.asset_prefix, str(asset["_id"])),
                    "type":
                    'boolean',
                    "value":
                    False
                })

        subset_ids_by_name = collections.defaultdict(list)
        subset_ids_by_parent = collections.defaultdict(list)
        for subset in subsets_for_selection:
            subset_id = subset["_id"]
            name = subset["name"]
            parent_id = subset["parent"]
            subset_ids_by_name[name].append(subset_id)
            subset_ids_by_parent[parent_id].append(subset_id)

        if not subset_ids_by_name:
            return {"items": items, "title": title}

        subset_ending = ""
        if len(subset_ids_by_name.keys()) > 1:
            subset_ending = "s"

        subset_title = {
            "type": "label",
            "value": "# Subset{} to delete:".format(subset_ending)
        }
        subset_note = {
            "type":
            "label",
            "value": ("<p><i>WARNING: Subset{} will be removed"
                      " for all <b>selected</b> entities.</i></p>"
                      ).format(subset_ending)
        }

        items.append(self.splitter)
        items.append(subset_title)
        items.append(subset_note)

        for name in subset_ids_by_name:
            items.append({
                "label": "<b>{}</b>".format(name),
                "name": "{}{}".format(self.subset_prefix, name),
                "type": "boolean",
                "value": False
            })

        self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
            subset_ids_by_parent)
        self.action_data_by_id[action_id]["subset_ids_by_name"] = (
            subset_ids_by_name)

        return {"items": items, "title": title}

    def confirm_delete(self, entities, event):
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        # Process Delete confirmation
        delete_key = values.get("delete_key")
        if delete_key:
            delete_key = delete_key.lower().strip()
            # Go to launch part if user entered `delete`
            if delete_key == "delete":
                return
            # Skip whole process if user didn't enter any text
            elif delete_key == "":
                self.action_data_by_id.pop(action_id, None)
                return {
                    "success": True,
                    "message": "Deleting cancelled (delete entry was empty)"
                }
            # Get data to show again
            to_delete = spec_data["to_delete"]

        else:
            to_delete = collections.defaultdict(list)
            for key, value in values.items():
                if not value:
                    continue
                if key.startswith(self.asset_prefix):
                    _key = key.replace(self.asset_prefix, "")
                    to_delete["assets"].append(_key)

                elif key.startswith(self.subset_prefix):
                    _key = key.replace(self.subset_prefix, "")
                    to_delete["subsets"].append(_key)

            self.action_data_by_id[action_id]["to_delete"] = to_delete

        asset_to_delete = len(to_delete.get("assets") or []) > 0
        subset_to_delete = len(to_delete.get("subsets") or []) > 0

        if not asset_to_delete and not subset_to_delete:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": True,
                "message": "Nothing was selected to delete"
            }

        attempt = spec_data["attempt"]
        if attempt > 3:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": False,
                "message": "You didn't enter \"DELETE\" properly 3 times!"
            }

        self.action_data_by_id[action_id]["attempt"] += 1

        title = "Confirmation of deleting"

        if asset_to_delete:
            asset_len = len(to_delete["assets"])
            asset_ending = ""
            if asset_len > 1:
                asset_ending = "s"
            title += " {} Asset{}".format(asset_len, asset_ending)
            if subset_to_delete:
                title += " and"

        if subset_to_delete:
            sub_len = len(to_delete["subsets"])
            type_ending = ""
            sub_ending = ""
            if sub_len == 1:
                subset_ids_by_name = spec_data["subset_ids_by_name"]
                if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
                    sub_ending = "s"

            elif sub_len > 1:
                type_ending = "s"
                sub_ending = "s"

            title += " {} type{} of subset{}".format(sub_len, type_ending,
                                                     sub_ending)

        items = []

        id_item = {"type": "hidden", "name": "action_id", "value": action_id}
        delete_label = {
            'type': 'label',
            'value': '# Please enter "DELETE" to confirm #'
        }
        delete_item = {
            "name": "delete_key",
            "type": "text",
            "value": "",
            "empty_text": "Type Delete here..."
        }

        items.append(id_item)
        items.append(delete_label)
        items.append(delete_item)

        return {"items": items, "title": title}

    def launch(self, session, entities, event):
        self.show_message(event, "Processing...", True)
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        report_messages = collections.defaultdict(list)

        project_name = spec_data["project_name"]
        to_delete = spec_data["to_delete"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        assets_to_delete = to_delete.get("assets") or []
        subsets_to_delete = to_delete.get("subsets") or []

        # Convert asset ids to ObjectId obj
        assets_to_delete = [ObjectId(id) for id in assets_to_delete if id]

        subset_ids_by_parent = spec_data["subset_ids_by_parent"]
        subset_ids_by_name = spec_data["subset_ids_by_name"]

        subset_ids_to_archive = []
        asset_ids_to_archive = []
        ftrack_ids_to_delete = []
        if len(assets_to_delete) > 0:
            map_av_ftrack_id = spec_data["without_ftrack_id"]
            # Prepare data when deleting whole avalon asset
            avalon_assets = self.dbcon.find({"type": "asset"})
            avalon_assets_by_parent = collections.defaultdict(list)
            for asset in avalon_assets:
                asset_id = asset["_id"]
                parent_id = asset["data"]["visualParent"]
                avalon_assets_by_parent[parent_id].append(asset)
                if asset_id in assets_to_delete:
                    ftrack_id = map_av_ftrack_id.get(str(asset_id))
                    if not ftrack_id:
                        ftrack_id = asset["data"].get("ftrackId")

                    if not ftrack_id:
                        continue
                    ftrack_ids_to_delete.append(ftrack_id)

            children_queue = Queue()
            for mongo_id in assets_to_delete:
                children_queue.put(mongo_id)

            while not children_queue.empty():
                mongo_id = children_queue.get()
                if mongo_id in asset_ids_to_archive:
                    continue

                asset_ids_to_archive.append(mongo_id)
                for subset_id in subset_ids_by_parent.get(mongo_id, []):
                    if subset_id not in subset_ids_to_archive:
                        subset_ids_to_archive.append(subset_id)

                children = avalon_assets_by_parent.get(mongo_id)
                if not children:
                    continue

                for child in children:
                    child_id = child["_id"]
                    if child_id not in asset_ids_to_archive:
                        children_queue.put(child_id)

        # Prepare names of assets in ftrack and ids of subsets in mongo
        asset_names_to_delete = []
        if len(subsets_to_delete) > 0:
            for name in subsets_to_delete:
                asset_names_to_delete.append(name)
                for subset_id in subset_ids_by_name[name]:
                    if subset_id in subset_ids_to_archive:
                        continue
                    subset_ids_to_archive.append(subset_id)

        # Get ftrack ids of entities where only the asset will be deleted
        not_deleted_entities_id = []
        ftrack_id_name_map = {}
        if asset_names_to_delete:
            for entity in entities:
                ftrack_id = entity["id"]
                ftrack_id_name_map[ftrack_id] = entity["name"]
                if ftrack_id not in ftrack_ids_to_delete:
                    not_deleted_entities_id.append(ftrack_id)

        mongo_proc_txt = "MongoProcessing: "
        ftrack_proc_txt = "Ftrack processing: "
        if asset_ids_to_archive:
            self.log.debug("{}Archivation of assets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in asset_ids_to_archive])))
            self.dbcon.update_many(
                {
                    "_id": {
                        "$in": asset_ids_to_archive
                    },
                    "type": "asset"
                }, {"$set": {
                    "type": "archived_asset"
                }})

        if subset_ids_to_archive:
            self.log.debug("{}Archivation of subsets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in subset_ids_to_archive])))
            self.dbcon.update_many(
                {
                    "_id": {
                        "$in": subset_ids_to_archive
                    },
                    "type": "subset"
                }, {"$set": {
                    "type": "archived_subset"
                }})

        if ftrack_ids_to_delete:
            self.log.debug("{}Deleting Ftrack Entities <{}>".format(
                ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)))

            ftrack_ents_to_delete = (self._filter_entities_to_delete(
                ftrack_ids_to_delete, session))
            for entity in ftrack_ents_to_delete:
                session.delete(entity)
                try:
                    session.commit()
                except Exception:
                    ent_path = "/".join(
                        [ent["name"] for ent in entity["link"]])
                    msg = "Failed to delete entity"
                    report_messages[msg].append(ent_path)
                    session.rollback()
                    self.log.warning("{} <{}>".format(msg, ent_path),
                                     exc_info=True)

        if not_deleted_entities_id:
            joined_not_deleted = ", ".join([
                "\"{}\"".format(ftrack_id)
                for ftrack_id in not_deleted_entities_id
            ])
            joined_asset_names = ", ".join(
                ["\"{}\"".format(name) for name in asset_names_to_delete])
            # Find assets of selected entities with names of checked subsets
            assets = session.query(
                ("select id from Asset where"
                 " context_id in ({}) and name in ({})").format(
                     joined_not_deleted, joined_asset_names)).all()

            self.log.debug("{}Deleting Ftrack Assets <{}>".format(
                ftrack_proc_txt, ", ".join([asset["id"] for asset in assets])))
            for asset in assets:
                session.delete(asset)
                try:
                    session.commit()
                except Exception:
                    session.rollback()
                    msg = "Failed to delete asset"
                    report_messages[msg].append(asset["id"])
                    self.log.warning("Asset: {} <{}>".format(
                        asset["name"], asset["id"]),
                                     exc_info=True)

        return self.report_handle(report_messages, project_name, event)

    def _filter_entities_to_delete(self, ftrack_ids_to_delete, session):
        """Filter children entities to avoid CircularDependencyError."""
        joined_ids_to_delete = ", ".join(
            ["\"{}\"".format(id) for id in ftrack_ids_to_delete])
        to_delete_entities = session.query(
            "select id, link from TypedContext where id in ({})".format(
                joined_ids_to_delete)).all()
        filtered = to_delete_entities[:]
        while True:
            changed = False
            _filtered = filtered[:]
            for entity in filtered:
                entity_id = entity["id"]

                for _entity in tuple(_filtered):
                    if entity_id == _entity["id"]:
                        continue

                    for _link in _entity["link"]:
                        if entity_id == _link["id"] and _entity in _filtered:
                            _filtered.remove(_entity)
                            changed = True
                            break

            filtered = _filtered

            if not changed:
                break

        return filtered

    def report_handle(self, report_messages, project_name, event):
        if not report_messages:
            return {"success": True, "message": "Deletion was successful!"}

        title = "Delete report ({}):".format(project_name)
        items = []
        items.append({
            "type": "label",
            "value": "# Deleting was not completely successful"
        })
        items.append({
            "type": "label",
            "value": "<p><i>Check logs for more information</i></p>"
        })
        for msg, _items in report_messages.items():
            if not _items or not msg:
                continue

            items.append({"type": "label", "value": "# {}".format(msg)})

            if isinstance(_items, str):
                _items = [_items]
            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(_items))
            })
            items.append(self.splitter)

        self.show_interface(items, title, event)

        return {
            "success": False,
            "message": "Deleting finished. Read report messages."
        }
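# --- Illustrative sketch (not part of the action above) ---
# The confirmation dialog built in interface() carries a hidden "action_id"
# item, so launch() can look the cached selection up again in
# self.action_data_by_id. Assuming hypothetical values, the submitted form
# payload read from event["data"]["values"] roughly looks like this:
example_submitted_values = {
    "action_id": "hypothetical-cached-key",  # key into action_data_by_id
    "delete_key": "DELETE",                  # text the user typed to confirm
}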
Example #11
class Delivery(BaseAction):

    identifier = "delivery.action"
    label = "Delivery"
    description = "Deliver data to client"
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = statics_icon("ftrack", "action_icons", "Delivery.svg")
    settings_key = "delivery_action"

    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()

        super(Delivery, self).__init__(*args, **kwargs)

    def discover(self, session, entities, event):
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return

        title = "Delivery data to Client"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        project_entity = self.get_project_from_entity(entities[0])
        project_name = project_entity["full_name"]
        self.db_con.install()
        self.db_con.Session["AVALON_PROJECT"] = project_name
        project_doc = self.db_con.find_one({"type": "project"})
        if not project_doc:
            return {
                "success":
                False,
                "message":
                ("Didn't found project \"{}\" in avalon.").format(project_name)
            }

        repre_names = self._get_repre_names(entities)
        self.db_con.uninstall()

        items.append({
            "type": "hidden",
            "name": "__project_name__",
            "value": project_name
        })

        # Prepare anatomy data
        anatomy = Anatomy(project_name)
        new_anatomies = []
        first = None
        for key, template in (anatomy.templates.get("delivery") or {}).items():
            # Use only keys with `{root}` or `{root[*]}` in value
            if isinstance(template, str) and "{root" in template:
                new_anatomies.append({"label": key, "value": key})
                if first is None:
                    first = key

        skipped = False
        # Add a message if there are no common components or delivery templates
        if not repre_names or not new_anatomies:
            skipped = True
            items.append({
                "type": "label",
                "value": "<h1>Something went wrong:</h1>"
            })

        items.append({
            "type": "hidden",
            "name": "__skipped__",
            "value": skipped
        })

        if not repre_names:
            if len(entities) == 1:
                items.append({
                    "type":
                    "label",
                    "value":
                    ("- Selected entity doesn't have components to deliver.")
                })
            else:
                items.append({
                    "type":
                    "label",
                    "value":
                    ("- Selected entities don't have common components.")
                })

        # Add message if delivery anatomies are not set
        if not new_anatomies:
            items.append({
                "type":
                "label",
                "value": ("- `\"delivery\"` anatomy key is not set in config.")
            })

        # Skip if there are any data shortcomings
        if skipped:
            return {"items": items, "title": title}

        items.append({
            "value": "<h1>Choose Components to deliver</h1>",
            "type": "label"
        })

        for repre_name in repre_names:
            items.append({
                "type": "boolean",
                "value": False,
                "label": repre_name,
                "name": repre_name
            })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Location for delivery</h2>",
            "type": "label"
        })

        items.append({
            "type":
            "label",
            "value":
            ("<i>NOTE: It is possible to replace `root` key in anatomy.</i>")
        })

        items.append({
            "type": "text",
            "name": "__location_path__",
            "empty_text": "Type location path here...(Optional)"
        })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Anatomy of delivery files</h2>",
            "type": "label"
        })

        items.append({
            "type":
            "label",
            "value": ("<p><i>NOTE: These can be set in Anatomy.yaml"
                      " within `delivery` key.</i></p>")
        })

        items.append({
            "type": "enumerator",
            "name": "__new_anatomies__",
            "data": new_anatomies,
            "value": first
        })

        return {"items": items, "title": title}

    def _get_repre_names(self, entities):
        version_ids = self._get_interest_version_ids(entities)
        repre_docs = self.db_con.find({
            "type": "representation",
            "parent": {
                "$in": version_ids
            }
        })
        return list(sorted(repre_docs.distinct("name")))

    def _get_interest_version_ids(self, entities):
        parent_ent_by_id = {}
        subset_names = set()
        version_nums = set()
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            parent_ent_by_id[parent["id"]] = parent

            subset_name = asset["name"]
            subset_names.add(subset_name)

            version = entity["version"]
            version_nums.add(version)

        asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
        subset_docs = self._get_subset_docs(asset_docs_by_ftrack_id,
                                            subset_names, entities)
        version_docs = self._get_version_docs(asset_docs_by_ftrack_id,
                                              subset_docs, version_nums,
                                              entities)

        return [version_doc["_id"] for version_doc in version_docs]

    def _get_version_docs(self, asset_docs_by_ftrack_id, subset_docs,
                          version_nums, entities):
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        version_docs = list(
            self.db_con.find({
                "type": "version",
                "parent": {
                    "$in": list(subset_docs_by_id.keys())
                },
                "name": {
                    "$in": list(version_nums)
                }
            }))
        version_docs_by_parent_id = collections.defaultdict(dict)
        for version_doc in version_docs:
            subset_doc = subset_docs_by_id[version_doc["parent"]]

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            version = version_doc["name"]
            if version_docs_by_parent_id[asset_id].get(subset_name) is None:
                version_docs_by_parent_id[asset_id][subset_name] = {}

            version_docs_by_parent_id[asset_id][subset_name][version] = (
                version_doc)

        filtered_versions = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            version_docs_by_version = subsets_by_name.get(subset_name)
            if not version_docs_by_version:
                continue

            version = entity["version"]
            version_doc = version_docs_by_version.get(version)
            if version_doc:
                filtered_versions.append(version_doc)
        return filtered_versions

    def _get_subset_docs(self, asset_docs_by_ftrack_id, subset_names,
                         entities):
        asset_doc_ids = list()
        for asset_doc in asset_docs_by_ftrack_id.values():
            asset_doc_ids.append(asset_doc["_id"])

        subset_docs = list(
            self.db_con.find({
                "type": "subset",
                "parent": {
                    "$in": asset_doc_ids
                },
                "name": {
                    "$in": list(subset_names)
                }
            }))
        subset_docs_by_parent_id = collections.defaultdict(dict)
        for subset_doc in subset_docs:
            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            subset_docs_by_parent_id[asset_id][subset_name] = subset_doc

        filtered_subsets = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            subset_doc = subsets_by_name.get(subset_name)
            if subset_doc:
                filtered_subsets.append(subset_doc)
        return filtered_subsets

    def _get_asset_docs(self, parent_ent_by_id):
        asset_docs = list(
            self.db_con.find({
                "type": "asset",
                "data.ftrackId": {
                    "$in": list(parent_ent_by_id.keys())
                }
            }))
        asset_docs_by_ftrack_id = {
            asset_doc["data"]["ftrackId"]: asset_doc
            for asset_doc in asset_docs
        }

        entities_by_mongo_id = {}
        entities_by_names = {}
        for ftrack_id, entity in parent_ent_by_id.items():
            if ftrack_id not in asset_docs_by_ftrack_id:
                parent_mongo_id = entity["custom_attributes"].get(
                    CUST_ATTR_ID_KEY)
                if parent_mongo_id:
                    entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
                else:
                    entities_by_names[entity["name"]] = entity

        expressions = []
        if entities_by_mongo_id:
            expression = {
                "type": "asset",
                "_id": {
                    "$in": list(entities_by_mongo_id.keys())
                }
            }
            expressions.append(expression)

        if entities_by_names:
            expression = {
                "type": "asset",
                "name": {
                    "$in": list(entities_by_names.keys())
                }
            }
            expressions.append(expression)

        if expressions:
            if len(expressions) == 1:
                filter = expressions[0]
            else:
                filter = {"$or": expressions}

            asset_docs = self.db_con.find(filter)
            for asset_doc in asset_docs:
                if asset_doc["_id"] in entities_by_mongo_id:
                    entity = entities_by_mongo_id[asset_doc["_id"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

                elif asset_doc["name"] in entities_by_names:
                    entity = entities_by_names[asset_doc["name"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

        return asset_docs_by_ftrack_id

    def launch(self, session, entities, event):
        if "values" not in event["data"]:
            return

        values = event["data"]["values"]
        skipped = values.pop("__skipped__")
        if skipped:
            return None

        user_id = event["source"]["user"]["id"]
        user_entity = session.query(
            "User where id is {}".format(user_id)).one()

        job = session.create(
            "Job", {
                "user": user_entity,
                "status": "running",
                "data": json.dumps({"description": "Delivery processing."})
            })
        session.commit()

        try:
            self.db_con.install()
            self.real_launch(session, entities, event)
            job["status"] = "done"

        except Exception:
            self.log.warning("Failed during processing delivery action.",
                             exc_info=True)

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()
            self.db_con.uninstall()

        if job["status"] == "failed":
            return {
                "success": False,
                "message": "Delivery failed. Check logs for more information."
            }
        return True

    def real_launch(self, session, entities, event):
        self.log.info("Delivery action just started.")
        report_items = collections.defaultdict(list)

        values = event["data"]["values"]

        location_path = values.pop("__location_path__")
        anatomy_name = values.pop("__new_anatomies__")
        project_name = values.pop("__project_name__")

        repre_names = []
        for key, value in values.items():
            if value is True:
                repre_names.append(key)

        if not repre_names:
            return {
                "success": True,
                "message": "Not selected components to deliver."
            }

        location_path = location_path.strip()
        if location_path:
            location_path = os.path.normpath(location_path)
            if not os.path.exists(location_path):
                os.makedirs(location_path)

        self.db_con.Session["AVALON_PROJECT"] = project_name

        self.log.debug("Collecting representations to process.")
        version_ids = self._get_interest_version_ids(entities)
        repres_to_deliver = list(
            self.db_con.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                },
                "name": {
                    "$in": repre_names
                }
            }))

        anatomy = Anatomy(project_name)

        format_dict = {}
        if location_path:
            location_path = location_path.replace("\\", "/")
            root_names = anatomy.root_names_from_templates(
                anatomy.templates["delivery"])
            if root_names is None:
                format_dict["root"] = location_path
            else:
                format_dict["root"] = {}
                for name in root_names:
                    format_dict["root"][name] = location_path

        datetime_data = config.get_datetime_data()
        for repre in repres_to_deliver:
            source_path = repre.get("data", {}).get("path")
            debug_msg = "Processing representation {}".format(repre["_id"])
            if source_path:
                debug_msg += " with published path {}.".format(source_path)
            self.log.debug(debug_msg)

            # Get destination repre path
            anatomy_data = copy.deepcopy(repre["context"])
            anatomy_data.update(datetime_data)
            anatomy_filled = anatomy.format_all(anatomy_data)
            test_path = anatomy_filled["delivery"][anatomy_name]

            if not test_path.solved:
                msg = ("Missing keys in Representation's context"
                       " for anatomy template \"{}\".").format(anatomy_name)

                if test_path.missing_keys:
                    keys = ", ".join(test_path.missing_keys)
                    sub_msg = (
                        "Representation: {}<br>- Missing keys: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                if test_path.invalid_types:
                    items = []
                    for key, value in test_path.invalid_types.items():
                        items.append("\"{}\" {}".format(key, str(value)))

                    keys = ", ".join(items)
                    sub_msg = ("Representation: {}<br>"
                               "- Invalid value DataType: \"{}\"<br>").format(
                                   str(repre["_id"]), keys)

                report_items[msg].append(sub_msg)
                self.log.warning(
                    "{} Representation: \"{}\" Filled: <{}>".format(
                        msg, str(repre["_id"]), str(test_path)))
                continue

            # Get source repre path
            frame = repre['context'].get('frame')

            if frame:
                repre["context"]["frame"] = len(str(frame)) * "#"

            repre_path = self.path_from_represenation(repre, anatomy)
            # TODO add backup solution where root of path from component
            # is replaced with root
            args = (repre_path, anatomy, anatomy_name, anatomy_data,
                    format_dict, report_items)
            if not frame:
                self.process_single_file(*args)
            else:
                self.process_sequence(*args)

        return self.report(report_items)

    def process_single_file(self, repre_path, anatomy, anatomy_name,
                            anatomy_data, format_dict, report_items):
        anatomy_filled = anatomy.format(anatomy_data)
        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        self.copy_file(repre_path, delivery_path)

    def process_sequence(self, repre_path, anatomy, anatomy_name, anatomy_data,
                         format_dict, report_items):
        dir_path, file_name = os.path.split(str(repre_path))

        base_name, ext = os.path.splitext(file_name)
        file_name_items = None
        if "#" in base_name:
            file_name_items = [part for part in base_name.split("#") if part]

        elif "%" in base_name:
            file_name_items = base_name.split("%")

        if not file_name_items:
            msg = "Source file was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        src_collections, remainder = clique.assemble(os.listdir(dir_path))
        src_collection = None
        for col in src_collections:
            if col.tail != ext:
                continue

            # skip if collection doesn't have the same basename
            if not col.head.startswith(file_name_items[0]):
                continue

            src_collection = col
            break

        if src_collection is None:
            # TODO log error!
            msg = "Source collection of files was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        frame_indicator = "@####@"

        anatomy_data["frame"] = frame_indicator
        anatomy_filled = anatomy.format(anatomy_data)

        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        dst_head, dst_tail = delivery_path.split(frame_indicator)
        dst_padding = src_collection.padding
        dst_collection = clique.Collection(head=dst_head,
                                           tail=dst_tail,
                                           padding=dst_padding)

        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        src_head = src_collection.head
        src_tail = src_collection.tail
        for index in src_collection.indexes:
            src_padding = src_collection.format("{padding}") % index
            src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
            src = os.path.normpath(os.path.join(dir_path, src_file_name))

            dst_padding = dst_collection.format("{padding}") % index
            dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)

            self.copy_file(src, dst)

    def path_from_represenation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return None

        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = pipeline.format_template_with_optional_keys(
                context, template)

        except KeyError:
            # Template references unavailable data
            return None

        return os.path.normpath(path)

    def copy_file(self, src_path, dst_path):
        if os.path.exists(dst_path):
            return
        try:
            filelink.create(src_path, dst_path, filelink.HARDLINK)
        except OSError:
            shutil.copyfile(src_path, dst_path)

    def report(self, report_items):
        items = []
        title = "Delivery report"
        for msg, _items in report_items.items():
            if not _items:
                continue

            if items:
                items.append({"type": "label", "value": "---"})

            items.append({"type": "label", "value": "# {}".format(msg)})
            if not isinstance(_items, (list, tuple)):
                _items = [_items]
            __items = []
            for item in _items:
                __items.append(str(item))

            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(__items))
            })

        if not items:
            return {"success": True, "message": "Delivery Finished"}

        return {
            "items": items,
            "title": title,
            "success": False,
            "message": "Delivery Finished"
        }
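# --- Illustrative sketch (not part of the Delivery action above) ---
# The action only offers "delivery" anatomy templates whose value contains
# "{root", and an optional "__location_path__" overrides the root(s) via
# `format_dict`. A hypothetical `anatomy.templates["delivery"]` could look
# roughly like this (keys and paths are made up for illustration):
example_delivery_templates = {
    "single_file": "{root}/{project[name]}/delivery/{asset}_{subset}.{ext}",
    "sequence": "{root}/{project[name]}/delivery/{asset}_{subset}.{frame}.{ext}",
}
# and with a location path entered, the root override becomes e.g.:
example_format_dict = {"root": "/mnt/client_deliveries"}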
Example #12
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.application_manager = ApplicationManager()
        self.dbcon = AvalonMongoDB()
Example #13
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dbcon = AvalonMongoDB()
        self.dbcon.install()
Example #14
class Window(QtWidgets.QDialog):
    """Main window of Standalone publisher.

    :param parent: Main widget that takes care of all GUIs
    :type parent: QtWidgets.QMainWindow
    """
    _db = AvalonMongoDB()
    _jobs = {}
    valid_family = False
    valid_components = False
    initialized = False
    WIDTH = 1100
    HEIGHT = 500

    def __init__(self, pyblish_paths, parent=None):
        super(Window, self).__init__(parent=parent)
        self._db.install()

        self.pyblish_paths = pyblish_paths

        self.setWindowTitle("Standalone Publish")
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)

        # Validators
        self.valid_parent = False

        # assets widget
        widget_assets = AssetWidget(dbcon=self._db, parent=self)

        # family widget
        widget_family = FamilyWidget(dbcon=self._db, parent=self)

        # components widget
        widget_components = ComponentsWidget(parent=self)

        # Body
        body = QtWidgets.QSplitter()
        body.setContentsMargins(0, 0, 0, 0)
        body.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                           QtWidgets.QSizePolicy.Expanding)
        body.setOrientation(QtCore.Qt.Horizontal)
        body.addWidget(widget_assets)
        body.addWidget(widget_family)
        body.addWidget(widget_components)
        body.setStretchFactor(body.indexOf(widget_assets), 2)
        body.setStretchFactor(body.indexOf(widget_family), 3)
        body.setStretchFactor(body.indexOf(widget_components), 5)

        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(body)

        self.resize(self.WIDTH, self.HEIGHT)

        # signals
        widget_assets.selection_changed.connect(self.on_asset_changed)
        widget_assets.task_changed.connect(self._on_task_change)
        widget_assets.project_changed.connect(self.on_project_change)
        widget_family.stateChanged.connect(self.set_valid_family)

        self.widget_assets = widget_assets
        self.widget_family = widget_family
        self.widget_components = widget_components

        # on start
        self.on_start()

    @property
    def db(self):
        ''' Returns DB object for MongoDB I/O
        '''
        return self._db

    def on_start(self):
        ''' Things that must be done when initialized.
        '''
        # Refresh asset input in Family widget
        self.on_asset_changed()
        self.widget_components.validation()
        # Initializing shadow widget
        self.shadow_widget = ShadowWidget(self)
        self.shadow_widget.setVisible(False)

    def resizeEvent(self, event=None):
        ''' Helps resize shadow widget
        '''
        # QWidget.move expects integer coordinates
        position_x = (self.frameGeometry().width() -
                      self.shadow_widget.frameGeometry().width()) // 2
        position_y = (self.frameGeometry().height() -
                      self.shadow_widget.frameGeometry().height()) // 2
        self.shadow_widget.move(position_x, position_y)
        w = self.frameGeometry().width()
        h = self.frameGeometry().height()
        self.shadow_widget.resize(QtCore.QSize(w, h))
        if event:
            super().resizeEvent(event)

    def get_avalon_parent(self, entity):
        ''' Avalon DB entities helper - get all parents (exclude project).
        '''
        parent_id = entity['data']['visualParent']
        parents = []
        if parent_id is not None:
            parent = self.db.find_one({'_id': parent_id})
            parents.extend(self.get_avalon_parent(parent))
            parents.append(parent['name'])
        return parents

    def on_project_change(self, project_name):
        self.widget_family.refresh()

    def on_asset_changed(self):
        '''Callback on asset selection changed

        Updates the task view.

        '''
        selected = [
            asset_id for asset_id in self.widget_assets.get_selected_assets()
            if isinstance(asset_id, ObjectId)
        ]
        if len(selected) == 1:
            self.valid_parent = True
            asset = self.db.find_one({"_id": selected[0], "type": "asset"})
            self.widget_family.change_asset(asset['name'])
        else:
            self.valid_parent = False
            self.widget_family.change_asset(None)
        self.widget_family.on_data_changed()

    def _on_task_change(self):
        self.widget_family.on_task_change()

    def keyPressEvent(self, event):
        ''' Handling Ctrl+V KeyPress event
        Can handle:
            - files/folders in clipboard (tested only on Windows OS)
            - copied path of file/folder in clipboard ('c:/path/to/folder')
        '''
        if (event.key() == QtCore.Qt.Key_V
                and event.modifiers() == QtCore.Qt.ControlModifier):
            clip = QtWidgets.QApplication.clipboard()
            self.widget_components.process_mime_data(clip)
        super().keyPressEvent(event)

    def working_start(self, msg=None):
        ''' Shows shadowed foreground with message
        :param msg: Message that will be displayed
        (set to `Please wait...` if `None` entered)
        :type msg: str
        '''
        if msg is None:
            msg = 'Please wait...'
        self.shadow_widget.message = msg
        self.shadow_widget.setVisible(True)
        self.resizeEvent()
        QtWidgets.QApplication.processEvents()

    def working_stop(self):
        ''' Hides shadowed foreground
        '''
        if self.shadow_widget.isVisible():
            self.shadow_widget.setVisible(False)
        # Refresh version
        self.widget_family.on_version_refresh()

    def set_valid_family(self, valid):
        ''' Sets `valid_family` attribute for validation

        .. note::
            if set to `False` publishing is not possible
        '''
        self.valid_family = valid
        # If widget_components not initialized yet
        if hasattr(self, 'widget_components'):
            self.widget_components.validation()

    def collect_data(self):
        ''' Collecting necessary data for pyblish from child widgets
        '''
        data = {}
        data.update(self.widget_assets.collect_data())
        data.update(self.widget_family.collect_data())
        data.update(self.widget_components.collect_data())

        return data
Example #15
    def get_data(self, context, versions_count):
        subset = context["subset"]
        asset = context["asset"]
        anatomy = Anatomy(context["project"]["name"])

        self.dbcon = AvalonMongoDB()
        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
        self.dbcon.install()

        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": [subset["_id"]]
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for _parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset["name"], subset["name"], version["name"])
            self.log.debug(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.info(msg)
            self.message(msg)
            return

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(repre, anatomy)
            if file_path is None:
                self.log.debug(
                    ("Could not format path for represenation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.debug(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        data = {
            "dir_paths": dir_paths,
            "file_paths_by_dir": file_paths_by_dir,
            "versions": versions,
            "asset": asset,
            "subset": subset,
            "archive_subset": versions_count == 0
        }

        return data
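# --- Illustrative sketch (not part of the snippet above) ---
# get_data() keeps the `versions_count` highest version numbers per subset and
# marks the rest for deletion. For example, with versions named 1..4 and
# versions_count=2, versions 4 and 3 are kept while 1 and 2 are removed:
example_versions = [{"name": 1}, {"name": 2}, {"name": 3}, {"name": 4}]
kept = sorted(example_versions, key=lambda ent: int(ent["name"]), reverse=True)[:2]
to_delete = [ent for ent in example_versions if ent not in kept]
# kept      -> versions 4 and 3
# to_delete -> versions 1 and 2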
Example #16
class UserAssigmentEvent(BaseEvent):
    """
    This script will intercept the user assignment / de-assignment event and
    run a shell script, providing as much context as possible.

    It expects a configuration file ``presets/ftrack/user_assigment_event.json``.
    In it, you define paths to scripts to be run for the user assignment event
    and for user de-assignment::
        {
            "add": [
                "/path/to/script1",
                "/path/to/script2"
            ],
            "remove": [
                "/path/to/script3",
                "/path/to/script4"
            ]
        }

    Those scripts are executed in a shell. Three arguments will be passed
    to them:
        1) user name of user (de)assigned
        2) path to workfiles of task user was (de)assigned to
        3) path to publish files of task user was (de)assigned to
    """

    db_con = AvalonMongoDB()

    def error(self, *err):
        for e in err:
            self.log.error(e)

    def _run_script(self, script, args):
        """
        Run shell script with arguments as subprocess

        :param script: script path
        :type script: str
        :param args: list of arguments passed to script
        :type args: list
        :returns: return code
        :rtype: int
        """
        # pass the script and each argument as separate argv items
        p = subprocess.call([script] + list(args))
        return p

    def _get_task_and_user(self, session, action, changes):
        """
        Get Task and User entities from Ftrack session

        :param session: ftrack session
        :type session: ftrack_api.session
        :param action: event action
        :type action: str
        :param changes: what was changed by event
        :type changes: dict
        :returns: User and Task entities
        :rtype: tuple
        """
        if not changes:
            return None, None

        if action == 'add':
            task_id = changes.get('context_id', {}).get('new')
            user_id = changes.get('resource_id', {}).get('new')

        elif action == 'remove':
            task_id = changes.get('context_id', {}).get('old')
            user_id = changes.get('resource_id', {}).get('old')

        else:
            # unknown action type
            return None, None

        if not task_id:
            return None, None

        if not user_id:
            return None, None

        task = session.query('Task where id is "{}"'.format(task_id)).one()
        user = session.query('User where id is "{}"'.format(user_id)).one()

        return task, user

    def _get_asset(self, task):
        """
        Get asset from task entity

        :param task: Task entity
        :type task: dict
        :returns: Asset entity
        :rtype: dict
        """
        parent = task['parent']
        self.db_con.install()
        self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']

        avalon_entity = None
        parent_id = parent['custom_attributes'].get(CUST_ATTR_ID_KEY)
        if parent_id:
            parent_id = ObjectId(parent_id)
            avalon_entity = self.db_con.find_one({
                '_id': parent_id,
                'type': 'asset'
            })

        if not avalon_entity:
            avalon_entity = self.db_con.find_one({
                'type': 'asset',
                'name': parent['name']
            })

        if not avalon_entity:
            self.db_con.uninstall()
            msg = 'Entity "{}" not found in avalon database'.format(
                parent['name'])
            self.error(msg)
            return {'success': False, 'message': msg}
        self.db_con.uninstall()
        return avalon_entity

    def _get_hierarchy(self, asset):
        """
        Get hierarchy from Asset entity

        :param asset: Asset entity
        :type asset: dict
        :returns: hierarchy string
        :rtype: str
        """
        return asset['data']['hierarchy']

    def _get_template_data(self, task):
        """
        Get data to fill template from task

        .. seealso:: :mod:`openpype.api.Anatomy`

        :param task: Task entity
        :type task: dict
        :returns: data for anatomy template
        :rtype: dict
        """
        project_name = task['project']['full_name']
        project_code = task['project']['name']

        # fill in template data
        asset = self._get_asset(task)
        t_data = {
            'project': {
                'name': project_name,
                'code': project_code
            },
            'asset': asset['name'],
            'task': task['name'],
            'hierarchy': self._get_hierarchy(asset)
        }

        return t_data

    def launch(self, session, event):
        if not event.get("data"):
            return

        entities_info = event["data"].get("entities")
        if not entities_info:
            return

        # load shell scripts presets
        tmp_by_project_name = {}
        for entity_info in entities_info:
            if entity_info.get('entity_type') != 'Appointment':
                continue

            task_entity, user_entity = self._get_task_and_user(
                session, entity_info.get('action'), entity_info.get('changes'))

            if not task_entity or not user_entity:
                self.log.error("Task or User was not found.")
                continue

            # format directories to pass to shell script
            project_name = task_entity["project"]["full_name"]
            project_data = tmp_by_project_name.get(project_name) or {}
            if "scripts_by_action" not in project_data:
                project_settings = get_project_settings(project_name)
                _settings = (
                    project_settings["ftrack"]["events"]["user_assignment"])
                project_data["scripts_by_action"] = _settings.get("scripts")
                tmp_by_project_name[project_name] = project_data

            scripts_by_action = project_data["scripts_by_action"]
            if not scripts_by_action:
                continue

            if "anatomy" not in project_data:
                project_data["anatomy"] = Anatomy(project_name)
                tmp_by_project_name[project_name] = project_data

            anatomy = project_data["anatomy"]
            data = self._get_template_data(task_entity)
            anatomy_filled = anatomy.format(data)
            # formatting the work dir is the easiest part as we can use the whole path
            work_dir = anatomy_filled["work"]["folder"]
            # we also need the publish dir, but not the whole path
            anatomy_filled.strict = False
            publish = anatomy_filled["publish"]["folder"]

            # now find path to {asset}
            m = re.search("(^.+?{})".format(data["asset"]), publish)

            if not m:
                msg = 'Cannot get part of publish path {}'.format(publish)
                self.log.error(msg)
                return {'success': False, 'message': msg}
            publish_dir = m.group(1)

            username = user_entity["username"]
            event_entity_action = entity_info["action"]
            for script in scripts_by_action.get(event_entity_action) or []:
                self.log.info(("[{}] : running script for user {}").format(
                    event_entity_action, username))
                self._run_script(script, [username, work_dir, publish_dir])

        return True
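# --- Illustrative sketch (not part of the event handler above) ---
# Scripts configured under "add"/"remove" receive three positional arguments:
# username, work directory and publish directory (see the class docstring).
# A minimal, hypothetical consumer script could therefore look like this:
import sys


def main():
    username, work_dir, publish_dir = sys.argv[1:4]
    print("User {} was (de)assigned. Work: {} Publish: {}".format(
        username, work_dir, publish_dir))


if __name__ == "__main__":
    main()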
Example #17
class AvalonRestApi(RestApi):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

    @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
    def get_project(self, request):
        project_name = request.url_data["project_name"]
        if not project_name:
            output = {}
            for project_name in self.dbcon.tables():
                project = self.dbcon[project_name].find_one({
                    "type": "project"
                })
                output[project_name] = project

            return CallbackResult(data=self.result_to_json(output))

        project = self.dbcon[project_name].find_one({"type": "project"})

        if project:
            return CallbackResult(data=self.result_to_json(project))

        abort(404, "Project \"{}\" was not found in database".format(
            project_name
        ))

    @RestApi.route("/projects/<project_name>/assets/<asset>", url_prefix="/avalon", methods="GET")
    def get_assets(self, request):
        _project_name = request.url_data["project_name"]
        _asset = request.url_data["asset"]

        if not self.dbcon.exist_table(_project_name):
            abort(404, "Project \"{}\" was not found in database".format(
                _project_name
            ))

        if not _asset:
            assets = self.dbcon[_project_name].find({"type": "asset"})
            output = self.result_to_json(assets)
            return CallbackResult(data=output)

        # identificator can be specified with url query (default is `name`)
        identificator = request.query.get("identificator", "name")

        asset = self.dbcon[_project_name].find_one({
            "type": "asset",
            identificator: _asset
        })
        if asset:
            id = asset["_id"]
            asset["_id"] = str(id)
            return asset

        abort(404, "Asset \"{}\" with {} was not found in project {}".format(
            _asset, identificator, _project_name
        ))

    def result_to_json(self, result):
        """Convert the result of a MongoDB query to a dict without "$oid"
        (ObjectId) keys, with the help of regex matching.

        .. note::
            This will also convert object type entries similar to ObjectId.
        """
        bson_json = bson.json_util.dumps(result)
        # Replace "{$oid: "{entity id}"}" with "{entity id}"
        regex1 = '(?P<id>{\"\$oid\": \"[^\"]+\"})'
        regex2 = '{\"\$oid\": (?P<id>\"[^\"]+\")}'
        for value in re.findall(regex1, bson_json):
            for substr in re.findall(regex2, value):
                bson_json = bson_json.replace(value, substr)

        return json.loads(bson_json)
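# --- Illustrative sketch (not part of the REST api class above) ---
# result_to_json() strips the BSON "$oid" wrapper so ids serialize as plain
# strings. With a hypothetical document the transformation is roughly:
#   '{"_id": {"$oid": "5f0c9e7a9d3e4b6a8c7d1e2f"}, "type": "asset"}'
# becomes
#   '{"_id": "5f0c9e7a9d3e4b6a8c7d1e2f", "type": "asset"}'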
Example #18
class AppplicationsAction(BaseAction):
    """Application Action class.

    Args:
        session (ftrack_api.Session): Session where action will be registered.
        label (str): A descriptive string identifying your action.
        variant (str, optional): To group actions together, give them the same
            label and specify a unique variant per action.
        identifier (str): A unique identifier for the app.
        description (str): A verbose descriptive text for your action.
        icon (str): Url path to icon which will be shown in Ftrack web.
    """

    type = "Application"
    label = "Application action"
    identifier = "pype_app.{}.".format(str(uuid4()))
    icon_url = os.environ.get("OPENPYPE_STATICS_SERVER")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.application_manager = ApplicationManager()
        self.dbcon = AvalonMongoDB()

    def construct_requirements_validations(self):
        # Override validation as this action does not need them
        return

    def register(self):
        """Registers the action, subscribing the discover and launch topics."""

        discovery_subscription = (
            "topic=ftrack.action.discover and source.user.username={0}"
        ).format(self.session.api_user)

        self.session.event_hub.subscribe(discovery_subscription,
                                         self._discover,
                                         priority=self.priority)

        launch_subscription = ("topic=ftrack.action.launch"
                               " and data.actionIdentifier={0}"
                               " and source.user.username={1}").format(
                                   self.identifier + "*",
                                   self.session.api_user)
        self.session.event_hub.subscribe(launch_subscription, self._launch)

    def _discover(self, event):
        entities = self._translate_event(event)
        items = self.discover(self.session, entities, event)
        if items:
            return {"items": items}

    def discover(self, session, entities, event):
        """Return true if we can handle the selected entities.

        Args:
            session (ftrack_api.Session): Helps to query necessary data.
            entities (list): Object of selected entities.
            event (ftrack_api.Event): Ftrack event causing discover callback.
        """

        if (len(entities) != 1 or entities[0].entity_type.lower() != "task"):
            return False

        entity = entities[0]
        if entity["parent"].entity_type.lower() == "project":
            return False

        avalon_project_apps = event["data"].get("avalon_project_apps", None)
        avalon_project_doc = event["data"].get("avalon_project_doc", None)
        if avalon_project_apps is None:
            if avalon_project_doc is None:
                ft_project = self.get_project_from_entity(entity)
                project_name = ft_project["full_name"]
                if not self.dbcon.is_installed():
                    self.dbcon.install()
                self.dbcon.Session["AVALON_PROJECT"] = project_name
                avalon_project_doc = self.dbcon.find_one({"type": "project"
                                                          }) or False
                event["data"]["avalon_project_doc"] = avalon_project_doc

            if not avalon_project_doc:
                return False

            project_apps_config = avalon_project_doc["config"].get("apps", [])
            avalon_project_apps = [app["name"]
                                   for app in project_apps_config] or False
            event["data"]["avalon_project_apps"] = avalon_project_apps

        if not avalon_project_apps:
            return False

        items = []
        for app_name in avalon_project_apps:
            app = self.application_manager.applications.get(app_name)
            if not app or not app.enabled:
                continue

            app_icon = app.icon
            if app_icon and self.icon_url:
                try:
                    app_icon = app_icon.format(self.icon_url)
                except Exception:
                    self.log.warning(
                        ("Couldn't fill icon path. Icon template: \"{}\""
                         " --- Icon url: \"{}\"").format(
                             app_icon, self.icon_url))
                    app_icon = None

            items.append({
                "label": app.group.label,
                "variant": app.label,
                "description": None,
                "actionIdentifier": self.identifier + app_name,
                "icon": app_icon
            })

        return items

    def launch(self, session, entities, event):
        """Callback method for the custom action.

        Return either a bool (True if successful or False if the action failed)
        or a dictionary with the keys `message` and `success`; the message
        should be a string and will be displayed as feedback to the user,
        success should be a bool, True if successful or False if the action
        failed.

        *session* is a `ftrack_api.Session` instance

        *entities* is a list of tuples each containing the entity type and
        the entity id. If the entity is hierarchical you will always get
        the entity type TypedContext; once retrieved through a get operation
        you will have the "real" entity type, i.e. Shot, Sequence
        or Asset Build.

        *event* the unmodified original event
        """
        identifier = event["data"]["actionIdentifier"]
        app_name = identifier[len(self.identifier):]

        entity = entities[0]

        task_name = entity["name"]
        asset_name = entity["parent"]["name"]
        project_name = entity["project"]["full_name"]
        self.log.info(
            ("Ftrack launch app: \"{}\" on Project/Asset/Task: {}/{}/{}"
             ).format(app_name, project_name, asset_name, task_name))
        try:
            self.application_manager.launch(app_name,
                                            project_name=project_name,
                                            asset_name=asset_name,
                                            task_name=task_name)

        except ApplictionExecutableNotFound as exc:
            self.log.warning(exc.exc_msg)
            return {"success": False, "message": exc.msg}

        except ApplicationLaunchFailed as exc:
            self.log.error(str(exc))
            return {"success": False, "message": str(exc)}

        except Exception:
            msg = "Unexpected failure of application launch {}".format(
                self.label)
            self.log.error(msg, exc_info=True)
            return {"success": False, "message": msg}

        return {"success": True, "message": "Launching {0}".format(self.label)}
class StoreThumbnailsToAvalon(BaseAction):
    # Action identifier
    identifier = "store.thubmnail.to.avalon"
    # Action label
    label = "OpenPype Admin"
    # Action variant
    variant = "- Store Thumbnails to avalon"
    # Action description
    description = 'Test action'
    # Action icon
    icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
    settings_key = "store_thubmnail_to_avalon"

    thumbnail_key = "AVALON_THUMBNAIL_ROOT"

    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()
        super(StoreThumbnailsToAvalon, self).__init__(*args, **kwargs)

    def discover(self, session, entities, event):
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def launch(self, session, entities, event):
        user = session.query("User where username is '{0}'".format(
            session.api_user)).one()
        action_job = session.create(
            "Job",
            {
                "user": user,
                "status": "running",
                "data": json.dumps(
                    {"description": "Storing thumbnails to avalon."}
                )
            }
        )
        session.commit()

        project = self.get_project_from_entity(entities[0])
        project_name = project["full_name"]
        anatomy = Anatomy(project_name)

        if "publish" not in anatomy.templates:
            msg = "Anatomy does not have set publish key!"

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        if "thumbnail" not in anatomy.templates["publish"]:
            msg = (
                "The \"thumbnail\" template is not set in Anatomy"
                " for project \"{}\"").format(project_name)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_roots = os.environ.get(self.thumbnail_key)
        if ("{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
                and not thumbnail_roots):
            msg = "`{}` environment is not set".format(self.thumbnail_key)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        existing_thumbnail_root = None
        for path in (thumbnail_roots or "").split(os.pathsep):
            if os.path.exists(path):
                existing_thumbnail_root = path
                break

        if existing_thumbnail_root is None:
            msg = ("Can't access paths, set in `{}` ({})").format(
                self.thumbnail_key, thumbnail_roots)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        example_template_data = {
            "_id": "ID",
            "thumbnail_root": "THUBMNAIL_ROOT",
            "thumbnail_type": "THUMBNAIL_TYPE",
            "ext": ".EXT",
            "project": {
                "name": "PROJECT_NAME",
                "code": "PROJECT_CODE"
            },
            "asset": "ASSET_NAME",
            "subset": "SUBSET_NAME",
            "version": "VERSION_NAME",
            "hierarchy": "HIERARCHY"
        }
        tmp_filled = anatomy.format_all(example_template_data)
        thumbnail_result = tmp_filled["publish"]["thumbnail"]
        if not thumbnail_result.solved:
            missing_keys = thumbnail_result.missing_keys
            invalid_types = thumbnail_result.invalid_types
            submsg = ""
            if missing_keys:
                submsg += "Missing keys: {}".format(", ".join(
                    ["\"{}\"".format(key) for key in missing_keys]))

            if invalid_types:
                items = []
                for key, value in invalid_types.items():
                    items.append("{}{}".format(str(key), str(value)))
                submsg += "Invalid types: {}".format(", ".join(items))

            msg = ("Thumbnail Anatomy template expects more keys than action"
                   " can offer. {}").format(submsg)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_template = anatomy.templates["publish"]["thumbnail"]

        self.db_con.install()

        for entity in entities:
            # Skip if entity is not an AssetVersion (should never happen, but..)
            if entity.entity_type.lower() != "assetversion":
                continue

            # Skip if AssetVersion doesn't have a thumbnail
            thumbnail_ent = entity["thumbnail"]
            if thumbnail_ent is None:
                self.log.debug(("Skipping. AssetVersion doesn't have"
                                " a thumbnail set. {}").format(entity["id"]))
                continue

            avalon_ents_result = self.get_avalon_entities_for_assetversion(
                entity, self.db_con)
            version_full_path = ("Asset: \"{project_name}/{asset_path}\""
                                 " | Subset: \"{subset_name}\""
                                 " | Version: \"{version_name}\"").format(
                                     **avalon_ents_result)

            version = avalon_ents_result["version"]
            if not version:
                self.log.warning(
                    ("AssetVersion does not have version in avalon. {}"
                     ).format(version_full_path))
                continue

            thumbnail_id = version["data"].get("thumbnail_id")
            if thumbnail_id:
                self.log.info(
                    ("AssetVersion skipped, already has a thumbnail set. {}"
                     ).format(version_full_path))
                continue

            # Get thumbnail extension
            file_ext = thumbnail_ent["file_type"]
            if not file_ext.startswith("."):
                file_ext = ".{}".format(file_ext)

            avalon_project = avalon_ents_result["project"]
            avalon_asset = avalon_ents_result["asset"]
            hierarchy = ""
            parents = avalon_asset["data"].get("parents") or []
            if parents:
                hierarchy = "/".join(parents)

            # Prepare anatomy template fill data
            # 1. Create new id for thumbnail entity
            thumbnail_id = ObjectId()

            template_data = {
                "_id": str(thumbnail_id),
                "thumbnail_root": existing_thumbnail_root,
                "thumbnail_type": "thumbnail",
                "ext": file_ext,
                "project": {
                    "name": avalon_project["name"],
                    "code": avalon_project["data"].get("code")
                },
                "asset": avalon_ents_result["asset_name"],
                "subset": avalon_ents_result["subset_name"],
                "version": avalon_ents_result["version_name"],
                "hierarchy": hierarchy
            }

            anatomy_filled = anatomy.format(template_data)
            thumbnail_path = anatomy_filled["publish"]["thumbnail"]
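            # Collapse a possible double dot (the template may already contain
            # a dot before the {ext} placeholder while file_ext starts with ".")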
            thumbnail_path = thumbnail_path.replace("..", ".")
            thumbnail_path = os.path.normpath(thumbnail_path)

            downloaded = False
            for loc in (thumbnail_ent.get("component_locations") or []):
                res_id = loc.get("resource_identifier")
                if not res_id:
                    continue

                thumbnail_url = self.get_thumbnail_url(res_id)
                if self.download_file(thumbnail_url, thumbnail_path):
                    downloaded = True
                    break

            if not downloaded:
                self.log.warning("Could not download thumbnail for {}".format(
                    version_full_path))
                continue

            # Clean template data from keys that are dynamic
            template_data.pop("_id")
            template_data.pop("thumbnail_root")

            thumbnail_entity = {
                "_id": thumbnail_id,
                "type": "thumbnail",
                "schema": "openpype:thumbnail-1.0",
                "data": {
                    "template": thumbnail_template,
                    "template_data": template_data
                }
            }

            # Create thumbnail entity
            self.db_con.insert_one(thumbnail_entity)
            self.log.debug("Creating entity in database {}".format(
                str(thumbnail_entity)))

            # Set thumbnail id for version
            self.db_con.update_one(
                {"_id": version["_id"]},
                {"$set": {
                    "data.thumbnail_id": thumbnail_id
                }})

            self.db_con.update_one(
                {"_id": avalon_asset["_id"]},
                {"$set": {
                    "data.thumbnail_id": thumbnail_id
                }})

        action_job["status"] = "done"
        session.commit()

        return True

    def get_thumbnail_url(self, resource_identifier, size=None):
        # TODO use ftrack_api method rather (find way how to use it)
        url_string = (u'{url}/component/thumbnail?id={id}&username={username}'
                      u'&apiKey={apiKey}')
        url = url_string.format(url=self.session.server_url,
                                id=resource_identifier,
                                username=self.session.api_user,
                                apiKey=self.session.api_key)
        if size:
            url += u'&size={0}'.format(size)

        return url

    def download_file(self, source_url, dst_file_path):
        dir_path = os.path.dirname(dst_file_path)
        try:
            os.makedirs(dir_path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                self.log.warning(
                    "Could not create folder: \"{}\"".format(dir_path))
                return False

        self.log.debug("Downloading file \"{}\" -> \"{}\"".format(
            source_url, dst_file_path))
        file_open = open(dst_file_path, "wb")
        try:
            file_open.write(requests.get(source_url).content)
        except Exception:
            self.log.warning(
                "Download of image `{}` failed.".format(source_url))
            return False
        finally:
            file_open.close()
        return True

    def get_avalon_entities_for_assetversion(self, asset_version, db_con):
        output = {
            "success": True,
            "message": None,
            "project": None,
            "project_name": None,
            "asset": None,
            "asset_name": None,
            "asset_path": None,
            "subset": None,
            "subset_name": None,
            "version": None,
            "version_name": None,
            "representations": None
        }

        db_con.install()

        ft_asset = asset_version["asset"]
        subset_name = ft_asset["name"]
        version = asset_version["version"]
        parent = ft_asset["parent"]
        ent_path = "/".join([ent["name"] for ent in parent["link"]])
        project = self.get_project_from_entity(asset_version)
        project_name = project["full_name"]

        output["project_name"] = project_name
        output["asset_name"] = parent["name"]
        output["asset_path"] = ent_path
        output["subset_name"] = subset_name
        output["version_name"] = version

        db_con.Session["AVALON_PROJECT"] = project_name

        avalon_project = db_con.find_one({"type": "project"})
        output["project"] = avalon_project

        if not avalon_project:
            output["success"] = False
            output["message"] = (
                "Project not synchronized to avalon `{}`".format(project_name))
            return output

        asset_ent = None
        asset_mongo_id = parent["custom_attributes"].get(CUST_ATTR_ID_KEY)
        if asset_mongo_id:
            try:
                asset_mongo_id = ObjectId(asset_mongo_id)
                asset_ent = db_con.find_one({
                    "type": "asset",
                    "_id": asset_mongo_id
                })
            except Exception:
                pass

        if not asset_ent:
            asset_ent = db_con.find_one({
                "type": "asset",
                "data.ftrackId": parent["id"]
            })

        output["asset"] = asset_ent

        if not asset_ent:
            output["success"] = False
            output["message"] = (
                "Not synchronized entity to avalon `{}`".format(ent_path))
            return output

        asset_mongo_id = asset_ent["_id"]

        subset_ent = db_con.find_one({
            "type": "subset",
            "parent": asset_mongo_id,
            "name": subset_name
        })

        output["subset"] = subset_ent

        if not subset_ent:
            output["success"] = False
            output["message"] = (
                "Subset `{}` does not exist under Asset `{}`").format(
                    subset_name, ent_path)
            return output

        version_ent = db_con.find_one({
            "type": "version",
            "name": version,
            "parent": subset_ent["_id"]
        })

        output["version"] = version_ent

        if not version_ent:
            output["success"] = False
            output["message"] = (
                "Version `{}` does not exist under Subset `{}` | Asset `{}`"
            ).format(version, subset_name, ent_path)
            return output

        repre_ents = list(
            db_con.find({
                "type": "representation",
                "parent": version_ent["_id"]
            }))

        output["representations"] = repre_ents
        return output
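
A minimal usage sketch of `get_avalon_entities_for_assetversion`, mirroring the checks done in `launch` above; `action`, `asset_version` and `db_con` are hypothetical placeholder names for an action instance, an ftrack AssetVersion entity and an installed AvalonMongoDB connection:

# Hypothetical usage of the helper above; variable names are placeholders.
result = action.get_avalon_entities_for_assetversion(asset_version, db_con)
if not result["success"]:
    # Some level (project/asset/subset/version) is not synchronized to avalon.
    print(result["message"])
else:
    version_doc = result["version"]
    already_has_thumbnail = bool(version_doc["data"].get("thumbnail_id"))
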
Example #20
0
    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()

        super(Delivery, self).__init__(*args, **kwargs)
Example #21
0
class AdobeRestApi(RestApi):
    dbcon = AvalonMongoDB()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dbcon.install()

    @route("/available", "/adobe")
    def available(self):
        return CallbackResult()

    @route("/presets/<project_name>", "/adobe")
    def get_presets(self, request):
        project_name = request.url_data["project_name"]
        return CallbackResult(data=config.get_presets(project_name))

    @route("/publish", "/adobe", "POST")
    def publish(self, request):
        """Triggers publishing script in subprocess.

        The subprocess freeze process and during publishing is not possible to
        handle other requests and is possible that freeze main application.

        TODO: Freezing issue may be fixed with socket communication.

        Example url:
        http://localhost:8021/adobe/publish (POST)
        """
        try:
            publish_env = self._prepare_publish_environments(
                request.request_data
            )
        except Exception as exc:
            log.warning(
                "Failed to prepare environments for publishing.",
                exc_info=True
            )
            abort(400, str(exc))

        output_data_path = publish_env["AC_PUBLISH_OUTPATH"]

        log.info("Pyblish is running")
        try:
            # Trigger subprocess
            # QUESTION should we check returncode?
            returncode = execute(
                [sys.executable, PUBLISH_SCRIPT_PATH],
                env=publish_env
            )

            # Check if output file exists
            if returncode != 0 or not os.path.exists(output_data_path):
                abort(500, "Publishing failed")

            log.info("Pyblish have stopped")

            return CallbackResult(
                data={"return_data_path": output_data_path}
            )

        except Exception:
            log.warning("Publishing failed", exc_info=True)
            abort(500, "Publishing failed")

    def _prepare_publish_environments(self, data):
        """Prepares environments based on request data."""
        env = copy.deepcopy(os.environ)

        project_name = data["project"]
        asset_name = data["asset"]

        project_doc = self.dbcon[project_name].find_one({
            "type": "project"
        })
        av_asset = self.dbcon[project_name].find_one({
            "type": "asset",
            "name": asset_name
        })
        parents = av_asset["data"]["parents"]
        hierarchy = ""
        if parents:
            hierarchy = "/".join(parents)

        env["AVALON_PROJECT"] = project_name
        env["AVALON_ASSET"] = asset_name
        env["AVALON_TASK"] = data["task"]
        env["AVALON_WORKDIR"] = data["workdir"]
        env["AVALON_HIERARCHY"] = hierarchy
        env["AVALON_PROJECTCODE"] = project_doc["data"].get("code", "")
        env["AVALON_APP"] = data["AVALON_APP"]
        env["AVALON_APP_NAME"] = data["AVALON_APP_NAME"]

        env["PYBLISH_HOSTS"] = data["AVALON_APP"]

        env["PUBLISH_PATHS"] = os.pathsep.join(PUBLISH_PATHS)

        # Input and Output paths where source data and result data will be
        # stored
        env["AC_PUBLISH_INPATH"] = data["adobePublishJsonPathSend"]
        env["AC_PUBLISH_OUTPATH"] = data["adobePublishJsonPathGet"]

        return env
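
A minimal client-side sketch of triggering the publish endpoint shown above. The URL comes from the docstring example; the payload keys mirror the fields read in `_prepare_publish_environments`, but the concrete values are hypothetical and the exact request schema is an assumption, not confirmed by the source:

import requests

# Hypothetical payload; keys mirror what _prepare_publish_environments reads,
# values are examples only.
payload = {
    "project": "MyProject",
    "asset": "sh010",
    "task": "compositing",
    "workdir": "/path/to/workdir",
    "AVALON_APP": "aftereffects",
    "AVALON_APP_NAME": "aftereffects/2022",
    "adobePublishJsonPathSend": "/tmp/publish_send.json",
    "adobePublishJsonPathGet": "/tmp/publish_get.json",
}

# POST to the endpoint from the docstring example (assumes the local REST
# server is running on port 8021).
response = requests.post("http://localhost:8021/adobe/publish", json=payload)
print(response.status_code, response.text)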