Ejemplo n.º 1
0
def get_app_environments_for_context(project_name,
                                     asset_name,
                                     task_name,
                                     app_name,
                                     env=None):
    """Prepare environment variables by context.

    Args:
        project_name (str): Name of project.
        asset_name (str): Name of asset.
        task_name (str): Name of task.
        app_name (str): Name of application that is launched and can be found
            by ApplicationManager.
        env (dict): Initial environment variables. `os.environ` is used when
            not passed.

    Returns:
        dict: Environments for passed context and application.
    """
    from avalon.api import AvalonMongoDB

    # Avalon database connection
    dbcon = AvalonMongoDB()
    dbcon.Session["AVALON_PROJECT"] = project_name
    dbcon.install()

    # Ensure the connection is discarded even if preparation fails,
    # otherwise an exception below would leak the installed connection.
    try:
        # Project and asset documents for the passed context
        project_doc = dbcon.find_one({"type": "project"})
        asset_doc = dbcon.find_one({"type": "asset", "name": asset_name})

        # Prepare app object which can be obtained only from ApplicationManager
        app_manager = ApplicationManager()
        app = app_manager.applications[app_name]

        # Project's anatomy
        anatomy = Anatomy(project_name)

        data = EnvironmentPrepData({
            "project_name": project_name,
            "asset_name": asset_name,
            "task_name": task_name,
            "app_name": app_name,
            "app": app,
            "dbcon": dbcon,
            "project_doc": project_doc,
            "asset_doc": asset_doc,
            "anatomy": anatomy,
            "env": env
        })

        prepare_host_environments(data)
        prepare_context_environments(data)

    finally:
        # Discard avalon connection
        dbcon.uninstall()

    return data["env"]
Ejemplo n.º 2
0
class Delivery(BaseAction):
    """Ftrack action delivering published component files to a client.

    The user selects AssetVersion entities, chooses representation names
    and one of the project anatomy "delivery" templates (optionally with a
    custom root location). Matching published files are then hardlinked or
    copied to the formatted delivery paths.
    """

    identifier = "delivery.action"
    label = "Delivery"
    description = "Deliver data to client"
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = statics_icon("ftrack", "action_icons", "Delivery.svg")
    settings_key = "delivery_action"

    def __init__(self, *args, **kwargs):
        # Avalon database connection shared by all methods of the action
        self.db_con = AvalonMongoDB()

        super(Delivery, self).__init__(*args, **kwargs)

    def discover(self, session, entities, event):
        """Show the action only when selection contains an AssetVersion."""
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def interface(self, session, entities, event):
        """Build dialog items (components, location, anatomy) for the user."""
        if event["data"].get("values", {}):
            return

        title = "Delivery data to Client"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        project_entity = self.get_project_from_entity(entities[0])
        project_name = project_entity["full_name"]
        self.db_con.install()
        self.db_con.Session["AVALON_PROJECT"] = project_name
        project_doc = self.db_con.find_one({"type": "project"})
        if not project_doc:
            return {
                "success":
                False,
                "message":
                ("Didn't found project \"{}\" in avalon.").format(project_name)
            }

        repre_names = self._get_repre_names(entities)
        self.db_con.uninstall()

        items.append({
            "type": "hidden",
            "name": "__project_name__",
            "value": project_name
        })

        # Prepare anatomy data
        anatomy = Anatomy(project_name)
        new_anatomies = []
        first = None
        for key, template in (anatomy.templates.get("delivery") or {}).items():
            # Use only keys with `{root}` or `{root[*]}` in value
            if isinstance(template, str) and "{root" in template:
                new_anatomies.append({"label": key, "value": key})
                if first is None:
                    first = key

        skipped = False
        # Add message if there are any common components
        if not repre_names or not new_anatomies:
            skipped = True
            items.append({
                "type": "label",
                "value": "<h1>Something went wrong:</h1>"
            })

        items.append({
            "type": "hidden",
            "name": "__skipped__",
            "value": skipped
        })

        if not repre_names:
            if len(entities) == 1:
                items.append({
                    "type":
                    "label",
                    "value":
                    ("- Selected entity doesn't have components to deliver.")
                })
            else:
                items.append({
                    "type":
                    "label",
                    "value":
                    ("- Selected entities don't have common components.")
                })

        # Add message if delivery anatomies are not set
        if not new_anatomies:
            items.append({
                "type":
                "label",
                "value": ("- `\"delivery\"` anatomy key is not set in config.")
            })

        # Skip if there are any data shortcomings
        if skipped:
            return {"items": items, "title": title}

        items.append({
            "value": "<h1>Choose Components to deliver</h1>",
            "type": "label"
        })

        for repre_name in repre_names:
            items.append({
                "type": "boolean",
                "value": False,
                "label": repre_name,
                "name": repre_name
            })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Location for delivery</h2>",
            "type": "label"
        })

        items.append({
            "type":
            "label",
            "value":
            ("<i>NOTE: It is possible to replace `root` key in anatomy.</i>")
        })

        items.append({
            "type": "text",
            "name": "__location_path__",
            "empty_text": "Type location path here...(Optional)"
        })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Anatomy of delivery files</h2>",
            "type": "label"
        })

        items.append({
            "type":
            "label",
            "value": ("<p><i>NOTE: These can be set in Anatomy.yaml"
                      " within `delivery` key.</i></p>")
        })

        items.append({
            "type": "enumerator",
            "name": "__new_anatomies__",
            "data": new_anatomies,
            "value": first
        })

        return {"items": items, "title": title}

    def _get_repre_names(self, entities):
        """Return sorted representation names for all interest versions."""
        version_ids = self._get_interest_version_ids(entities)
        repre_docs = self.db_con.find({
            "type": "representation",
            "parent": {
                "$in": version_ids
            }
        })
        return list(sorted(repre_docs.distinct("name")))

    def _get_interest_version_ids(self, entities):
        """Map selected ftrack AssetVersions to avalon version `_id`s."""
        parent_ent_by_id = {}
        subset_names = set()
        version_nums = set()
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            parent_ent_by_id[parent["id"]] = parent

            subset_name = asset["name"]
            subset_names.add(subset_name)

            version = entity["version"]
            version_nums.add(version)

        asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
        subset_docs = self._get_subset_docs(asset_docs_by_ftrack_id,
                                            subset_names, entities)
        version_docs = self._get_version_docs(asset_docs_by_ftrack_id,
                                              subset_docs, version_nums,
                                              entities)

        return [version_doc["_id"] for version_doc in version_docs]

    def _get_version_docs(self, asset_docs_by_ftrack_id, subset_docs,
                          version_nums, entities):
        """Query version documents and filter them per selected entity."""
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        version_docs = list(
            self.db_con.find({
                "type": "version",
                "parent": {
                    "$in": list(subset_docs_by_id.keys())
                },
                "name": {
                    "$in": list(version_nums)
                }
            }))
        # Nested mapping: asset id -> subset name -> version number -> doc
        version_docs_by_parent_id = collections.defaultdict(dict)
        for version_doc in version_docs:
            subset_doc = subset_docs_by_id[version_doc["parent"]]

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            version = version_doc["name"]
            if version_docs_by_parent_id[asset_id].get(subset_name) is None:
                version_docs_by_parent_id[asset_id][subset_name] = {}

            version_docs_by_parent_id[asset_id][subset_name][version] = (
                version_doc)

        # Keep only version docs matching an actually selected entity
        filtered_versions = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            version_docs_by_version = subsets_by_name.get(subset_name)
            if not version_docs_by_version:
                continue

            version = entity["version"]
            version_doc = version_docs_by_version.get(version)
            if version_doc:
                filtered_versions.append(version_doc)
        return filtered_versions

    def _get_subset_docs(self, asset_docs_by_ftrack_id, subset_names,
                         entities):
        """Query subset documents and filter them per selected entity."""
        asset_doc_ids = list()
        for asset_doc in asset_docs_by_ftrack_id.values():
            asset_doc_ids.append(asset_doc["_id"])

        subset_docs = list(
            self.db_con.find({
                "type": "subset",
                "parent": {
                    "$in": asset_doc_ids
                },
                "name": {
                    "$in": list(subset_names)
                }
            }))
        subset_docs_by_parent_id = collections.defaultdict(dict)
        for subset_doc in subset_docs:
            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            subset_docs_by_parent_id[asset_id][subset_name] = subset_doc

        filtered_subsets = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            subset_doc = subsets_by_name.get(subset_name)
            if subset_doc:
                filtered_subsets.append(subset_doc)
        return filtered_subsets

    def _get_asset_docs(self, parent_ent_by_id):
        """Match ftrack parent entities to avalon asset documents.

        Matching is tried by stored ftrack id first, then by mongo id
        custom attribute, finally by entity name.
        """
        asset_docs = list(
            self.db_con.find({
                "type": "asset",
                "data.ftrackId": {
                    "$in": list(parent_ent_by_id.keys())
                }
            }))
        asset_docs_by_ftrack_id = {
            asset_doc["data"]["ftrackId"]: asset_doc
            for asset_doc in asset_docs
        }

        # Entities without a direct ftrack id match - fall back to
        # mongo id custom attribute or name lookup
        entities_by_mongo_id = {}
        entities_by_names = {}
        for ftrack_id, entity in parent_ent_by_id.items():
            if ftrack_id not in asset_docs_by_ftrack_id:
                parent_mongo_id = entity["custom_attributes"].get(
                    CUST_ATTR_ID_KEY)
                if parent_mongo_id:
                    entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
                else:
                    entities_by_names[entity["name"]] = entity

        expressions = []
        if entities_by_mongo_id:
            expression = {
                "type": "asset",
                "_id": {
                    "$in": list(entities_by_mongo_id.keys())
                }
            }
            expressions.append(expression)

        if entities_by_names:
            expression = {
                "type": "asset",
                "name": {
                    "$in": list(entities_by_names.keys())
                }
            }
            expressions.append(expression)

        if expressions:
            # NOTE: renamed local from `filter` to avoid shadowing builtin
            if len(expressions) == 1:
                filter_query = expressions[0]
            else:
                filter_query = {"$or": expressions}

            asset_docs = self.db_con.find(filter_query)
            for asset_doc in asset_docs:
                if asset_doc["_id"] in entities_by_mongo_id:
                    entity = entities_by_mongo_id[asset_doc["_id"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

                elif asset_doc["name"] in entities_by_names:
                    entity = entities_by_names[asset_doc["name"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

        return asset_docs_by_ftrack_id

    def launch(self, session, entities, event):
        """Entry point: run delivery inside an ftrack job wrapper."""
        if "values" not in event["data"]:
            return

        values = event["data"]["values"]
        skipped = values.pop("__skipped__")
        if skipped:
            return None

        user_id = event["source"]["user"]["id"]
        user_entity = session.query(
            "User where id is {}".format(user_id)).one()

        job = session.create(
            "Job", {
                "user": user_entity,
                "status": "running",
                "data": json.dumps({"description": "Delivery processing."})
            })
        session.commit()

        try:
            self.db_con.install()
            self.real_launch(session, entities, event)
            job["status"] = "done"

        except Exception:
            self.log.warning("Failed during processing delivery action.",
                             exc_info=True)

        finally:
            # Any non-"done" outcome (including exceptions) marks failure
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()
            self.db_con.uninstall()

        if job["status"] == "failed":
            return {
                "success": False,
                "message": "Delivery failed. Check logs for more information."
            }
        return True

    def real_launch(self, session, entities, event):
        """Collect representations and deliver them to the chosen location."""
        self.log.info("Delivery action just started.")
        report_items = collections.defaultdict(list)

        values = event["data"]["values"]

        location_path = values.pop("__location_path__")
        anatomy_name = values.pop("__new_anatomies__")
        project_name = values.pop("__project_name__")

        # Remaining boolean values are the user-checked representation names
        repre_names = []
        for key, value in values.items():
            if value is True:
                repre_names.append(key)

        if not repre_names:
            return {
                "success": True,
                "message": "Not selected components to deliver."
            }

        location_path = location_path.strip()
        if location_path:
            location_path = os.path.normpath(location_path)
            if not os.path.exists(location_path):
                os.makedirs(location_path)

        self.db_con.Session["AVALON_PROJECT"] = project_name

        self.log.debug("Collecting representations to process.")
        version_ids = self._get_interest_version_ids(entities)
        repres_to_deliver = list(
            self.db_con.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                },
                "name": {
                    "$in": repre_names
                }
            }))

        anatomy = Anatomy(project_name)

        # Optional root override entered by the user
        format_dict = {}
        if location_path:
            location_path = location_path.replace("\\", "/")
            root_names = anatomy.root_names_from_templates(
                anatomy.templates["delivery"])
            if root_names is None:
                format_dict["root"] = location_path
            else:
                format_dict["root"] = {}
                for name in root_names:
                    format_dict["root"][name] = location_path

        datetime_data = config.get_datetime_data()
        for repre in repres_to_deliver:
            source_path = repre.get("data", {}).get("path")
            debug_msg = "Processing representation {}".format(repre["_id"])
            if source_path:
                debug_msg += " with published path {}.".format(source_path)
            self.log.debug(debug_msg)

            # Get destination repre path
            anatomy_data = copy.deepcopy(repre["context"])
            anatomy_data.update(datetime_data)
            anatomy_filled = anatomy.format_all(anatomy_data)
            test_path = anatomy_filled["delivery"][anatomy_name]

            if not test_path.solved:
                msg = ("Missing keys in Representation's context"
                       " for anatomy template \"{}\".").format(anatomy_name)

                # Initialize so the append below can not hit an unbound
                # variable when neither missing keys nor invalid types
                # are reported; append (not overwrite) so both issue
                # kinds are kept when present at once.
                sub_msg = ""
                if test_path.missing_keys:
                    keys = ", ".join(test_path.missing_keys)
                    sub_msg += (
                        "Representation: {}<br>- Missing keys: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                if test_path.invalid_types:
                    items = []
                    for key, value in test_path.invalid_types.items():
                        items.append("\"{}\" {}".format(key, str(value)))

                    keys = ", ".join(items)
                    sub_msg += ("Representation: {}<br>"
                                "- Invalid value DataType: \"{}\"<br>").format(
                                    str(repre["_id"]), keys)

                report_items[msg].append(sub_msg)
                self.log.warning(
                    "{} Representation: \"{}\" Filled: <{}>".format(
                        msg, str(repre["_id"]), str(test_path)))
                continue

            # Get source repre path
            frame = repre['context'].get('frame')

            if frame:
                # Replace frame value with padding placeholders ("####")
                repre["context"]["frame"] = len(str(frame)) * "#"

            repre_path = self.path_from_represenation(repre, anatomy)
            # TODO add backup solution where root of path from component
            # is replaced with root
            args = (repre_path, anatomy, anatomy_name, anatomy_data,
                    format_dict, report_items)
            if not frame:
                self.process_single_file(*args)
            else:
                self.process_sequence(*args)

        return self.report(report_items)

    def process_single_file(self, repre_path, anatomy, anatomy_name,
                            anatomy_data, format_dict, report_items):
        """Deliver a single (non-sequence) representation file."""
        anatomy_filled = anatomy.format(anatomy_data)
        if format_dict:
            # Apply user root override on the rootless template result
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        self.copy_file(repre_path, delivery_path)

    def process_sequence(self, repre_path, anatomy, anatomy_name, anatomy_data,
                         format_dict, report_items):
        """Deliver a frame sequence representation file by file."""
        dir_path, file_name = os.path.split(str(repre_path))

        base_name, ext = os.path.splitext(file_name)
        file_name_items = None
        if "#" in base_name:
            file_name_items = [part for part in base_name.split("#") if part]

        elif "%" in base_name:
            file_name_items = base_name.split("%")

        if not file_name_items:
            msg = "Source file was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        # Find the on-disk collection matching the representation pattern
        src_collections, remainder = clique.assemble(os.listdir(dir_path))
        src_collection = None
        for col in src_collections:
            if col.tail != ext:
                continue

            # skip if collection don't have same basename
            if not col.head.startswith(file_name_items[0]):
                continue

            src_collection = col
            break

        if src_collection is None:
            # TODO log error!
            msg = "Source collection of files was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        # Placeholder used to split destination path into head/tail
        frame_indicator = "@####@"

        anatomy_data["frame"] = frame_indicator
        anatomy_filled = anatomy.format(anatomy_data)

        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        dst_head, dst_tail = delivery_path.split(frame_indicator)
        dst_padding = src_collection.padding
        dst_collection = clique.Collection(head=dst_head,
                                           tail=dst_tail,
                                           padding=dst_padding)

        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        src_head = src_collection.head
        src_tail = src_collection.tail
        for index in src_collection.indexes:
            src_padding = src_collection.format("{padding}") % index
            src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
            src = os.path.normpath(os.path.join(dir_path, src_file_name))

            dst_padding = dst_collection.format("{padding}") % index
            dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)

            self.copy_file(src, dst)

    def path_from_represenation(self, representation, anatomy):
        """Format representation template to a normalized source path.

        Returns None when the template or required context keys are
        missing.
        """
        try:
            template = representation["data"]["template"]

        except KeyError:
            return None

        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = pipeline.format_template_with_optional_keys(
                context, template)

        except KeyError:
            # Template references unavailable data
            return None

        return os.path.normpath(path)

    def copy_file(self, src_path, dst_path):
        """Hardlink src to dst, falling back to a real copy; skip existing."""
        if os.path.exists(dst_path):
            return
        try:
            filelink.create(src_path, dst_path, filelink.HARDLINK)
        except OSError:
            # Hardlink not possible (e.g. cross-device) - copy instead
            shutil.copyfile(src_path, dst_path)

    def report(self, report_items):
        """Convert collected report items into an ftrack result dict."""
        items = []
        title = "Delivery report"
        for msg, _items in report_items.items():
            if not _items:
                continue

            if items:
                items.append({"type": "label", "value": "---"})

            items.append({"type": "label", "value": "# {}".format(msg)})
            if not isinstance(_items, (list, tuple)):
                _items = [_items]
            __items = []
            for item in _items:
                __items.append(str(item))

            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(__items))
            })

        if not items:
            return {"success": True, "message": "Delivery Finished"}

        return {
            "items": items,
            "title": title,
            "success": False,
            "message": "Delivery Finished"
        }
Ejemplo n.º 3
0
class DeleteOldVersions(api.Loader):

    representations = ["*"]
    families = ["*"]

    label = "Delete Old Versions"
    icon = "trash"
    color = "#d8d8d8"

    options = [
        qargparse.Integer("versions_to_keep",
                          default=2,
                          min=0,
                          help="Versions to keep:"),
        qargparse.Boolean("remove_publish_folder",
                          help="Remove publish folder:")
    ]

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    def delete_whole_dir_paths(self, dir_paths, delete=True):
        """Remove whole directory trees and report how many bytes they held.

        Args:
            dir_paths (iterable): Directory paths to process.
            delete (bool): When False nothing is removed; only the total
                file size is computed (dry run).

        Returns:
            int: Summed size in bytes of all files found under the paths.
        """
        size = 0

        for dir_path in dir_paths:
            # Delete all files and folders in dir path.
            # topdown=False walks leaves first so child dirs are empty
            # by the time their parent is visited.
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    file_path = os.path.join(root, name)
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                for name in dirs:
                    if delete:
                        os.rmdir(os.path.join(root, name))

            if not delete:
                continue

            # Delete even the folder and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    # Path already gone - climb to its parent and re-check
                    dir_path = os.path.dirname(dir_path)
                    continue

                # Stop at the first non-empty ancestor
                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(os.path.join(dir_path))

        return size

    def path_from_representation(self, representation, anatomy):
        """Format a representation's template into concrete filesystem paths.

        Returns:
            tuple: (path, sequence_path). ``sequence_path`` is filled only
                when the context contains a "frame" key (its frame replaced
                by ``self.sequence_splitter``); ``(None, None)`` when the
                template or required context keys are missing.
        """
        try:
            tmpl = representation["data"]["template"]
        except KeyError:
            # No template stored on the representation
            return (None, None)

        seq_path = None
        try:
            fill_data = representation["context"]
            fill_data["root"] = anatomy.roots
            filled = avalon.pipeline.format_template_with_optional_keys(
                fill_data, tmpl)
            if "frame" in fill_data:
                # Re-format with splitter token in place of the frame so
                # the whole sequence can be matched later
                fill_data["frame"] = self.sequence_splitter
                seq_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        fill_data, tmpl))
        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(filled), seq_path)

    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
        """Remove only representation files (and emptied parent folders).

        Args:
            dir_paths (dict): Mapping of dir id -> directory path.
            file_paths (dict): Mapping of dir id -> list of
                ``[file_path, sequence_path]`` pairs; sequence_path is None
                for single files.
            delete (bool): When False nothing is removed; only total file
                size is computed (dry run).

        Returns:
            int: Summed size in bytes of the files found (and removed).
        """
        size = 0

        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            # Group directory content into frame collections + single files
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove file if `frame` key was not in context or
                # filled path is in remainders (single file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.debug(
                            "File was not found: {}".format(file_path))
                        continue

                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                    remainders.remove(file_path_base)
                    continue

                # Sequence: match collection by identical head and tail
                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):

                            size += os.path.getsize(_file_path)

                            if delete:
                                os.remove(_file_path)
                                self.log.debug(
                                    "Removed file: {}".format(_file_path))

                    _seq_path = final_col.format("{head}{padding}{tail}")
                    self.log.debug("Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    # No matching collection - fall back to the single file
                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))
                else:
                    self.log.debug("File was not found: {}".format(file_path))

        # Delete as much as possible parent folders
        if not delete:
            return size

        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    # Path already gone - climb to its parent and re-check
                    dir_path = os.path.dirname(dir_path)
                    continue

                # Stop at the first non-empty ancestor
                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

        return size

    def message(self, text):
        """Show *text* in a blocking, frameless, styled message box."""
        dialog = QtWidgets.QMessageBox()
        dialog.setWindowFlags(
            dialog.windowFlags() | QtCore.Qt.FramelessWindowHint
        )
        dialog.setStyleSheet(style.load_stylesheet())
        dialog.setText(text)
        dialog.exec_()

    def get_data(self, context, versions_count):
        """Collect versions, representations and file paths to delete.

        Queries the avalon database for all versions of the context's
        subset, keeps the ``versions_count`` highest version numbers and
        prepares the remaining versions (plus their representation file
        paths on disk) for deletion.

        Args:
            context (dict): Loader context containing "project", "asset"
                and "subset" documents.
            versions_count (int): How many of the latest versions to keep.
                Value ``0`` means the whole subset will be archived.

        Returns:
            dict or None: Data consumed by :meth:`main`, or ``None`` when
                there is nothing to delete.
        """
        subset = context["subset"]
        asset = context["asset"]
        anatomy = Anatomy(context["project"]["name"])

        # Connection is stored on the instance so `main` can reuse it.
        self.dbcon = AvalonMongoDB()
        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
        self.dbcon.install()

        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": [subset["_id"]]
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            # Version documents store their number in "name".
            return int(ent["name"])

        # Collect the `versions_count` highest versions per parent subset.
        all_last_versions = []
        for _parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions (those are kept, not deleted)
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset["name"], subset["name"], version["name"])
            self.log.debug(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.info(msg)
            self.message(msg)
            # BUGFIX: discard the database connection on early exit too.
            # `main`, which normally uninstalls it, will not run for this
            # return path and the connection would leak.
            self.dbcon.uninstall()
            return None

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        # Group file paths by their parent directory. Each directory gets
        # a generated id so it can be referenced from both mappings.
        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(repre, anatomy)
            if file_path is None:
                self.log.debug(
                    ("Could not format path for represenation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        # Drop directories which do not exist on disk (nothing to delete
        # there) from both mappings.
        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.debug(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        data = {
            "dir_paths": dir_paths,
            "file_paths_by_dir": file_paths_by_dir,
            "versions": versions,
            "asset": asset,
            "subset": subset,
            "archive_subset": versions_count == 0
        }

        return data

    def main(self, data, remove_publish_folder):
        """Delete version files, tag versions as deleted and update ftrack.

        Args:
            data (dict): Output of :meth:`get_data` (must not be ``None``).
            remove_publish_folder (bool): When True whole publish
                directories are removed, otherwise only representation
                files.
        """
        # Total size of removed files in bytes.
        size = 0

        if remove_publish_folder:
            size = self.delete_whole_dir_paths(data["dir_paths"].values())
        else:
            size = self.delete_only_repre_files(data["dir_paths"],
                                                data["file_paths_by_dir"])

        # Tag every deleted version with "deleted" so it is skipped on
        # subsequent runs (see `get_data`).
        mongo_changes_bulk = []
        for version in data["versions"]:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            # Already tagged - nothing to update for this version.
            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if data["archive_subset"]:
            mongo_changes_bulk.append(
                UpdateOne({
                    "_id": data["subset"]["_id"],
                    "type": "subset"
                }, {"$set": {
                    "type": "archived_subset"
                }}))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        session = ftrack_api.Session()
        try:
            query = ("AssetVersion where asset.parent.id is \"{}\""
                     " and asset.name is \"{}\""
                     " and version is \"{}\"")
            for v in data["versions"]:
                try:
                    ftrack_version = session.query(
                        query.format(data["asset"]["data"]["ftrackId"],
                                     data["subset"]["name"], v["name"])).one()
                except ftrack_api.exception.NoResultFoundError:
                    continue

                ftrack_version["is_published"] = False

            try:
                session.commit()

            except Exception:
                msg = ("Could not set `is_published` attribute to `False`"
                       " for selected AssetVersions.")
                self.log.error(msg)
                self.message(msg)
        finally:
            # BUGFIX: the session was never closed which leaked the ftrack
            # API connection on every run.
            session.close()

        msg = "Total size of files: " + self.sizeof_fmt(size)
        self.log.info(msg)
        self.message(msg)

    def load(self, context, name=None, namespace=None, options=None):
        """Entry point of the action.

        Args:
            context (dict): Loader context with project/asset/subset docs.
            name (str, optional): Unused, kept for loader interface.
            namespace (str, optional): Unused, kept for loader interface.
            options (dict, optional): May override "versions_to_keep"
                (default 2) and "remove_publish_folder" (default False).
        """
        try:
            versions_to_keep = 2
            remove_publish_folder = False
            if options:
                versions_to_keep = options.get("versions_to_keep",
                                               versions_to_keep)
                remove_publish_folder = options.get("remove_publish_folder",
                                                    remove_publish_folder)

            data = self.get_data(context, versions_to_keep)

            # BUGFIX: `get_data` returns None when there is nothing to
            # delete; calling `main` with None raised TypeError which was
            # logged as a misleading "Failed to delete versions." error.
            if data:
                self.main(data, remove_publish_folder)

        except Exception:
            self.log.error("Failed to delete versions.", exc_info=True)