class ThumbToChildren(BaseAction):
    '''Push the selected entity's thumbnail to all of its children.'''

    # Action identifier
    identifier = 'thumb.to.children'
    # Action label
    label = 'Thumbnail'
    # Action variant
    variant = " to Children"
    # Action icon
    icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")

    def discover(self, session, entities, event):
        ''' Validation '''

        if (len(entities) != 1 or entities[0].entity_type in ['Project']):
            return False

        return True

    def launch(self, session, entities, event):
        '''Callback method for action.'''

        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Push thumbnails to children'
            })
        })
        session.commit()
        try:
            for entity in entities:
                thumbid = entity['thumbnail_id']
                if thumbid:
                    for child in entity['children']:
                        child['thumbnail_id'] = thumbid

            # inform the user that the job is done
            job['status'] = 'done'
        except Exception as exc:
            session.rollback()
            # fail the job if something goes wrong
            job['status'] = 'failed'
            raise exc
        finally:
            session.commit()

        return {
            'success': True,
            'message': 'Created job for updating thumbnails!'
        }
Example #2
class ComponentOpen(BaseAction):
    '''Open the folder of a file component in the system file browser.'''

    # Action identifier
    identifier = 'component.open'
    # Action label
    label = 'Open File'
    # Action icon
    icon = statics_icon("ftrack", "action_icons", "ComponentOpen.svg")

    def discover(self, session, entities, event):
        ''' Validation '''
        if len(entities) != 1 or entities[0].entity_type != 'FileComponent':
            return False

        return True

    def launch(self, session, entities, event):

        entity = entities[0]

        # Return error if component is on ftrack server
        location_name = entity['component_locations'][0]['location']['name']
        if location_name == 'ftrack.server':
            return {
                'success': False,
                'message': "This component is stored on ftrack server!"
            }

        # Get component filepath
        # TODO with locations it will be different???
        fpath = entity['component_locations'][0]['resource_identifier']
        fpath = os.path.normpath(os.path.dirname(fpath))

        if os.path.isdir(fpath):
            if sys.platform.startswith('win'):  # windows
                subprocess.Popen('explorer "%s"' % fpath)
            elif sys.platform == 'darwin':  # macOS
                subprocess.Popen(['open', fpath])
            else:  # linux
                try:
                    subprocess.Popen(['xdg-open', fpath])
                except OSError:
                    raise OSError('Failed to open the folder with xdg-open.')
        else:
            return {'success': False, 'message': "Did not find file: " + fpath}

        return {'success': True, 'message': 'Component folder opened'}
Example #3
class TestAction(BaseAction):
    """Action for testing purpose or as base for new actions."""

    ignore_me = True

    identifier = 'test.action'
    label = 'Test action'
    description = 'Test action'
    priority = 10000
    role_list = ['Pypeclub']
    icon = statics_icon("ftrack", "action_icons", "TestAction.svg")

    def discover(self, session, entities, event):
        return True

    def launch(self, session, entities, event):
        self.log.info(event)

        return True
Example #4
class ActionAskWhereIRun(BaseAction):
    """ Sometimes user forget where pipeline with his credentials is running.
    - this action triggers `ActionShowWhereIRun`
    """
    ignore_me = True
    identifier = 'ask.where.i.run'
    label = 'Ask where I run'
    description = 'Triggers an action that shows PC info where the user has Pype running'
    icon = statics_icon("ftrack", "action_icons", "ActionAskWhereIRun.svg")

    def discover(self, session, entities, event):
        """ Hide by default - Should be enabled only if you want to run.
        - best practise is to create another action that triggers this one
        """

        return True

    def launch(self, session, entities, event):
        more_data = {"event_hub_id": session.event_hub.id}
        self.trigger_action(
            "show.where.i.run", event, additional_event_data=more_data
        )

        return True
class SyncToAvalonLocal(BaseAction):
    """
    Synchronizing data action - from Ftrack to Avalon DB

    Stores all information about entity.
    - Name(string) - Most important information = identifier of entity
    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
    - Data(dictionary):
        - VisualParent(ObjectId) - Avalon Id of parent asset
        - Parents(array of string) - All parent names except project
        - Tasks(array of string) - Tasks on asset
        - FtrackId(string)
        - entityType(string) - entity's type on Ftrack
        * All Custom attributes in group 'Avalon'
            - custom attributes that start with 'avalon_' are skipped

    * These information are stored for entities in whole project.

    Avalon ID of asset is stored to Ftrack
        - Custom attribute 'avalon_mongo_id'.
    - action IS NOT creating this Custom attribute if doesn't exist
        - run 'Create Custom Attributes' action
        - or do it manually (Not recommended)
    """

    #: Action identifier.
    identifier = "sync.to.avalon.local"
    #: Action label.
    label = "Pype Admin"
    #: Action variant
    variant = "- Sync To Avalon (Local)"
    #: Action description.
    description = "Send data from Ftrack to Avalon"
    #: priority
    priority = 200
    #: Action icon.
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    settings_key = "sync_to_avalon_local"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.entities_factory = SyncEntitiesFactory(self.log, self.session)

    def discover(self, session, entities, event):
        """ Validate selection. """
        is_valid = False
        for ent in event["data"]["selection"]:
            # Ignore entities that are not tasks or projects
            if ent["entityType"].lower() in ["show", "task"]:
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def launch(self, session, in_entities, event):
        time_start = time.time()

        self.show_message(event, "Synchronization - Preparing data", True)
        # Get ftrack project
        if in_entities[0].entity_type.lower() == "project":
            ft_project_name = in_entities[0]["full_name"]
        else:
            ft_project_name = in_entities[0]["project"]["full_name"]

        try:
            output = self.entities_factory.launch_setup(ft_project_name)
            if output is not None:
                return output

            time_1 = time.time()

            self.entities_factory.set_cutom_attributes()
            time_2 = time.time()

            # This must happen before all filtering!!!
            self.entities_factory.prepare_avalon_entities(ft_project_name)
            time_3 = time.time()

            self.entities_factory.filter_by_ignore_sync()
            time_4 = time.time()

            self.entities_factory.duplicity_regex_check()
            time_5 = time.time()

            self.entities_factory.prepare_ftrack_ent_data()
            time_6 = time.time()

            self.entities_factory.synchronize()
            time_7 = time.time()

            self.log.debug(
                "*** Synchronization finished ***"
            )
            self.log.debug(
                "preparation <{}>".format(time_1 - time_start)
            )
            self.log.debug(
                "set_cutom_attributes <{}>".format(time_2 - time_1)
            )
            self.log.debug(
                "prepare_avalon_entities <{}>".format(time_3 - time_2)
            )
            self.log.debug(
                "filter_by_ignore_sync <{}>".format(time_4 - time_3)
            )
            self.log.debug(
                "duplicity_regex_check <{}>".format(time_5 - time_4)
            )
            self.log.debug(
                "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
            )
            self.log.debug(
                "synchronize <{}>".format(time_7 - time_6)
            )
            self.log.debug(
                "* Total time: {}".format(time_7 - time_start)
            )

            report = self.entities_factory.report()
            if report and report.get("items"):
                default_title = "Synchronization report ({}):".format(
                    ft_project_name
                )
                self.show_interface(
                    items=report["items"],
                    title=report.get("title", default_title),
                    event=event
                )
            return {
                "success": True,
                "message": "Synchronization Finished"
            }

        except Exception:
            self.log.error(
                "Synchronization failed due to code error", exc_info=True
            )
            msg = "An error occurred during synchronization"
            title = "Synchronization report ({}):".format(ft_project_name)
            items = []
            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            items.append({
                "type": "label",
                "value": "## Traceback of the error"
            })
            items.append({
                "type": "label",
                "value": "<p>{}</p>".format(
                    str(traceback.format_exc()).replace(
                        "\n", "<br>").replace(
                        " ", "&nbsp;"
                    )
                )
            })

            report = {"items": []}
            try:
                report = self.entities_factory.report()
            except Exception:
                pass

            _items = report.get("items", [])
            if _items:
                items.append(self.entities_factory.report_splitter)
                items.extend(_items)

            self.show_interface(items, title, event)

            return {"success": True, "message": msg}

        finally:
            try:
                self.entities_factory.dbcon.uninstall()
            except Exception:
                pass

            try:
                self.entities_factory.session.close()
            except Exception:
                pass
Example #6
class Delivery(BaseAction):

    identifier = "delivery.action"
    label = "Delivery"
    description = "Deliver data to client"
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = statics_icon("ftrack", "action_icons", "Delivery.svg")

    db_con = AvalonMongoDB()

    def discover(self, session, entities, event):
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                return True

        return False

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return

        title = "Delivery data to Client"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        project_entity = self.get_project_from_entity(entities[0])
        project_name = project_entity["full_name"]
        self.db_con.install()
        self.db_con.Session["AVALON_PROJECT"] = project_name
        project_doc = self.db_con.find_one({"type": "project"})
        if not project_doc:
            return {
                "success": False,
                "message": (
                    "Didn't found project \"{}\" in avalon."
                ).format(project_name)
            }

        repre_names = self._get_repre_names(entities)
        self.db_con.uninstall()

        items.append({
            "type": "hidden",
            "name": "__project_name__",
            "value": project_name
        })
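        # Hidden items come back in event["data"]["values"], which is how
        # interface() passes state (project name, skip flag) to launch().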

        # Prepare anatomy data
        anatomy = Anatomy(project_name)
        new_anatomies = []
        first = None
        for key, template in (anatomy.templates.get("delivery") or {}).items():
            # Use only keys with `{root}` or `{root[*]}` in value
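            # e.g. a template value like
            # "{root[delivery]}/{project[name]}/{asset}/{representation}"
            # (illustrative; actual templates come from the project Anatomy)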
            if isinstance(template, str) and "{root" in template:
                new_anatomies.append({
                    "label": key,
                    "value": key
                })
                if first is None:
                    first = key

        skipped = False
        # Add a message if there are no common components or delivery templates
        if not repre_names or not new_anatomies:
            skipped = True
            items.append({
                "type": "label",
                "value": "<h1>Something went wrong:</h1>"
            })

        items.append({
            "type": "hidden",
            "name": "__skipped__",
            "value": skipped
        })

        if not repre_names:
            if len(entities) == 1:
                items.append({
                    "type": "label",
                    "value": (
                        "- Selected entity doesn't have components to deliver."
                    )
                })
            else:
                items.append({
                    "type": "label",
                    "value": (
                        "- Selected entities don't have common components."
                    )
                })

        # Add message if delivery anatomies are not set
        if not new_anatomies:
            items.append({
                "type": "label",
                "value": (
                    "- `\"delivery\"` anatomy key is not set in config."
                )
            })

        # Skip if there are any data shortcomings
        if skipped:
            return {
                "items": items,
                "title": title
            }

        items.append({
            "value": "<h1>Choose Components to deliver</h1>",
            "type": "label"
        })

        for repre_name in repre_names:
            items.append({
                "type": "boolean",
                "value": False,
                "label": repre_name,
                "name": repre_name
            })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Location for delivery</h2>",
            "type": "label"
        })

        items.append({
            "type": "label",
            "value": (
                "<i>NOTE: It is possible to replace `root` key in anatomy.</i>"
            )
        })

        items.append({
            "type": "text",
            "name": "__location_path__",
            "empty_text": "Type location path here...(Optional)"
        })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Anatomy of delivery files</h2>",
            "type": "label"
        })

        items.append({
            "type": "label",
            "value": (
                "<p><i>NOTE: These can be set in Anatomy.yaml"
                " within `delivery` key.</i></p>"
            )
        })

        items.append({
            "type": "enumerator",
            "name": "__new_anatomies__",
            "data": new_anatomies,
            "value": first
        })

        return {
            "items": items,
            "title": title
        }

    def _get_repre_names(self, entities):
        version_ids = self._get_interest_version_ids(entities)
        repre_docs = self.db_con.find({
            "type": "representation",
            "parent": {"$in": version_ids}
        })
        return list(sorted(repre_docs.distinct("name")))

    def _get_interest_version_ids(self, entities):
        parent_ent_by_id = {}
        subset_names = set()
        version_nums = set()
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            parent_ent_by_id[parent["id"]] = parent

            subset_name = asset["name"]
            subset_names.add(subset_name)

            version = entity["version"]
            version_nums.add(version)

        asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
        subset_docs = self._get_subset_docs(
            asset_docs_by_ftrack_id, subset_names, entities
        )
        version_docs = self._get_version_docs(
            asset_docs_by_ftrack_id, subset_docs, version_nums, entities
        )

        return [version_doc["_id"] for version_doc in version_docs]

    def _get_version_docs(
        self, asset_docs_by_ftrack_id, subset_docs, version_nums, entities
    ):
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        version_docs = list(self.db_con.find({
            "type": "version",
            "parent": {"$in": list(subset_docs_by_id.keys())},
            "name": {"$in": list(version_nums)}
        }))
        version_docs_by_parent_id = collections.defaultdict(dict)
        for version_doc in version_docs:
            subset_doc = subset_docs_by_id[version_doc["parent"]]

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            version = version_doc["name"]
            if version_docs_by_parent_id[asset_id].get(subset_name) is None:
                version_docs_by_parent_id[asset_id][subset_name] = {}

            version_docs_by_parent_id[asset_id][subset_name][version] = (
                version_doc
            )

        filtered_versions = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            version_docs_by_version = subsets_by_name.get(subset_name)
            if not version_docs_by_version:
                continue

            version = entity["version"]
            version_doc = version_docs_by_version.get(version)
            if version_doc:
                filtered_versions.append(version_doc)
        return filtered_versions

    def _get_subset_docs(
        self, asset_docs_by_ftrack_id, subset_names, entities
    ):
        asset_doc_ids = list()
        for asset_doc in asset_docs_by_ftrack_id.values():
            asset_doc_ids.append(asset_doc["_id"])

        subset_docs = list(self.db_con.find({
            "type": "subset",
            "parent": {"$in": asset_doc_ids},
            "name": {"$in": list(subset_names)}
        }))
        subset_docs_by_parent_id = collections.defaultdict(dict)
        for subset_doc in subset_docs:
            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            subset_docs_by_parent_id[asset_id][subset_name] = subset_doc

        filtered_subsets = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            subset_doc = subsets_by_name.get(subset_name)
            if subset_doc:
                filtered_subsets.append(subset_doc)
        return filtered_subsets

    def _get_asset_docs(self, parent_ent_by_id):
        asset_docs = list(self.db_con.find({
            "type": "asset",
            "data.ftrackId": {"$in": list(parent_ent_by_id.keys())}
        }))
        asset_docs_by_ftrack_id = {
            asset_doc["data"]["ftrackId"]: asset_doc
            for asset_doc in asset_docs
        }

        entities_by_mongo_id = {}
        entities_by_names = {}
        for ftrack_id, entity in parent_ent_by_id.items():
            if ftrack_id not in asset_docs_by_ftrack_id:
                parent_mongo_id = entity["custom_attributes"].get(
                    CUST_ATTR_ID_KEY
                )
                if parent_mongo_id:
                    entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
                else:
                    entities_by_names[entity["name"]] = entity

        expressions = []
        if entities_by_mongo_id:
            expression = {
                "type": "asset",
                "_id": {"$in": list(entities_by_mongo_id.keys())}
            }
            expressions.append(expression)

        if entities_by_names:
            expression = {
                "type": "asset",
                "name": {"$in": list(entities_by_names.keys())}
            }
            expressions.append(expression)

        if expressions:
            if len(expressions) == 1:
                filter = expressions[0]
            else:
                filter = {"$or": expressions}

            asset_docs = self.db_con.find(filter)
            for asset_doc in asset_docs:
                if asset_doc["_id"] in entities_by_mongo_id:
                    entity = entities_by_mongo_id[asset_doc["_id"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

                elif asset_doc["name"] in entities_by_names:
                    entity = entities_by_names[asset_doc["name"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

        return asset_docs_by_ftrack_id

    def launch(self, session, entities, event):
        if "values" not in event["data"]:
            return

        values = event["data"]["values"]
        skipped = values.pop("__skipped__")
        if skipped:
            return None

        user_id = event["source"]["user"]["id"]
        user_entity = session.query(
            "User where id is {}".format(user_id)
        ).one()

        job = session.create("Job", {
            "user": user_entity,
            "status": "running",
            "data": json.dumps({
                "description": "Delivery processing."
            })
        })
        session.commit()

        try:
            self.db_con.install()
            self.real_launch(session, entities, event)
            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Failed during processing delivery action.",
                exc_info=True
            )

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()
            self.db_con.uninstall()

    def real_launch(self, session, entities, event):
        self.log.info("Delivery action just started.")
        report_items = collections.defaultdict(list)

        values = event["data"]["values"]

        location_path = values.pop("__location_path__")
        anatomy_name = values.pop("__new_anatomies__")
        project_name = values.pop("__project_name__")

        repre_names = []
        for key, value in values.items():
            if value is True:
                repre_names.append(key)

        if not repre_names:
            return {
                "success": True,
                "message": "Not selected components to deliver."
            }

        location_path = location_path.strip()
        if location_path:
            location_path = os.path.normpath(location_path)
            if not os.path.exists(location_path):
                os.makedirs(location_path)

        self.db_con.Session["AVALON_PROJECT"] = project_name

        self.log.debug("Collecting representations to process.")
        version_ids = self._get_interest_version_ids(entities)
        repres_to_deliver = list(self.db_con.find({
            "type": "representation",
            "parent": {"$in": version_ids},
            "name": {"$in": repre_names}
        }))

        anatomy = Anatomy(project_name)

        format_dict = {}
        if location_path:
            location_path = location_path.replace("\\", "/")
            root_names = anatomy.root_names_from_templates(
                anatomy.templates["delivery"]
            )
            if root_names is None:
                format_dict["root"] = location_path
            else:
                format_dict["root"] = {}
                for name in root_names:
                    format_dict["root"][name] = location_path

        datetime_data = config.get_datetime_data()
        for repre in repres_to_deliver:
            source_path = repre.get("data", {}).get("path")
            debug_msg = "Processing representation {}".format(repre["_id"])
            if source_path:
                debug_msg += " with published path {}.".format(source_path)
            self.log.debug(debug_msg)

            # Get destination repre path
            anatomy_data = copy.deepcopy(repre["context"])
            anatomy_data.update(datetime_data)
            anatomy_filled = anatomy.format_all(anatomy_data)
            test_path = anatomy_filled["delivery"][anatomy_name]

            if not test_path.solved:
                msg = (
                    "Missing keys in Representation's context"
                    " for anatomy template \"{}\"."
                ).format(anatomy_name)

                if test_path.missing_keys:
                    keys = ", ".join(test_path.missing_keys)
                    sub_msg = (
                        "Representation: {}<br>- Missing keys: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                if test_path.invalid_types:
                    items = []
                    for key, value in test_path.invalid_types.items():
                        items.append("\"{}\" {}".format(key, str(value)))

                    keys = ", ".join(items)
                    sub_msg = (
                        "Representation: {}<br>"
                        "- Invalid value DataType: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                report_items[msg].append(sub_msg)
                self.log.warning(
                    "{} Representation: \"{}\" Filled: <{}>".format(
                        msg, str(repre["_id"]), str(test_path)
                    )
                )
                continue

            # Get source repre path
            frame = repre['context'].get('frame')

            if frame:
                repre["context"]["frame"] = len(str(frame)) * "#"

            repre_path = self.path_from_represenation(repre, anatomy)
            # TODO add backup solution where root of path from component
            # is replaced with root
            args = (
                repre_path,
                anatomy,
                anatomy_name,
                anatomy_data,
                format_dict,
                report_items
            )
            if not frame:
                self.process_single_file(*args)
            else:
                self.process_sequence(*args)

        return self.report(report_items)

    def process_single_file(
        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
        report_items
    ):
        anatomy_filled = anatomy.format(anatomy_data)
        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        self.copy_file(repre_path, delivery_path)

    def process_sequence(
        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
        report_items
    ):
        dir_path, file_name = os.path.split(str(repre_path))

        base_name, ext = os.path.splitext(file_name)
        file_name_items = None
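        # Split the base name around the frame placeholder, e.g. (illustrative)
        # "shot010.####" -> ["shot010."] or "shot010.%04d" -> ["shot010.", "04d"]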
        if "#" in base_name:
            file_name_items = [part for part in base_name.split("#") if part]

        elif "%" in base_name:
            file_name_items = base_name.split("%")

        if not file_name_items:
            msg = "Source file was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        src_collections, remainder = clique.assemble(os.listdir(dir_path))
        src_collection = None
        for col in src_collections:
            if col.tail != ext:
                continue

            # skip if the collection doesn't have the same basename
            if not col.head.startswith(file_name_items[0]):
                continue

            src_collection = col
            break

        if src_collection is None:
            # TODO log error!
            msg = "Source collection of files was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        frame_indicator = "@####@"

        anatomy_data["frame"] = frame_indicator
        anatomy_filled = anatomy.format(anatomy_data)

        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        dst_head, dst_tail = delivery_path.split(frame_indicator)
        dst_padding = src_collection.padding
        dst_collection = clique.Collection(
            head=dst_head,
            tail=dst_tail,
            padding=dst_padding
        )

        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        src_head = src_collection.head
        src_tail = src_collection.tail
        for index in src_collection.indexes:
            src_padding = src_collection.format("{padding}") % index
            src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
            src = os.path.normpath(
                os.path.join(dir_path, src_file_name)
            )

            dst_padding = dst_collection.format("{padding}") % index
            dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)

            self.copy_file(src, dst)

    def path_from_represenation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return None

        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = pipeline.format_template_with_optional_keys(
                context, template
            )

        except KeyError:
            # Template references unavailable data
            return None

        return os.path.normpath(path)

    def copy_file(self, src_path, dst_path):
        if os.path.exists(dst_path):
            return
        try:
            filelink.create(
                src_path,
                dst_path,
                filelink.HARDLINK
            )
        except OSError:
            shutil.copyfile(src_path, dst_path)

    def report(self, report_items):
        items = []
        title = "Delivery report"
        for msg, _items in report_items.items():
            if not _items:
                continue

            if items:
                items.append({"type": "label", "value": "---"})

            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            if not isinstance(_items, (list, tuple)):
                _items = [_items]
            __items = []
            for item in _items:
                __items.append(str(item))

            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(__items))
            })

        if not items:
            return {
                "success": True,
                "message": "Delivery Finished"
            }

        return {
            "items": items,
            "title": title,
            "success": False,
            "message": "Delivery Finished"
        }
Example #7
class DJVViewAction(BaseAction):
    """Launch DJVView action."""
    identifier = "djvview-launch-action"
    label = "DJV View"
    description = "DJV View Launcher"
    icon = statics_icon("app_icons", "djvView.png")

    type = 'Application'

    def __init__(self, session, plugins_presets):
        '''Expects a ftrack_api.Session instance'''
        super().__init__(session, plugins_presets)
        self.djv_path = None

        self.config_data = config.get_presets()['djv_view']['config']
        self.set_djv_path()

        if self.djv_path is None:
            return

        self.allowed_types = self.config_data.get('file_ext',
                                                  ["img", "mov", "exr"])

    def preregister(self):
        if self.djv_path is None:
            return ('DJV View is not installed'
                    ' or paths in presets are not set correctly')
        return True

    def discover(self, session, entities, event):
        """Return available actions based on *event*. """
        selection = event["data"].get("selection", [])
        if len(selection) != 1:
            return False

        entityType = selection[0].get("entityType", None)
        if entityType in ["assetversion", "task"]:
            return True
        return False

    def set_djv_path(self):
        for path in self.config_data.get("djv_paths", []):
            if os.path.exists(path):
                self.djv_path = path
                break

    def interface(self, session, entities, event):
        if event['data'].get('values', {}):
            return

        entity = entities[0]
        versions = []

        entity_type = entity.entity_type.lower()
        if entity_type == "assetversion":
            if (entity['components'][0]['file_type'][1:]
                    in self.allowed_types):
                versions.append(entity)
        else:
            master_entity = entity
            if entity_type == "task":
                master_entity = entity['parent']

            for asset in master_entity['assets']:
                for version in asset['versions']:
                    # Get only AssetVersion of selected task
                    if (entity_type == "task"
                            and version['task']['id'] != entity['id']):
                        continue
                    # Get only components with allowed type
                    filetype = version['components'][0]['file_type']
                    if filetype[1:] in self.allowed_types:
                        versions.append(version)

        if len(versions) < 1:
            return {
                'success': False,
                'message': 'There are no Asset Versions to open.'
            }

        items = []
        base_label = "v{0} - {1} - {2}"
        default_component = self.config_data.get('default_component', None)
        last_available = None
        select_value = None
        for version in versions:
            for component in version['components']:
                label = base_label.format(
                    str(version['version']).zfill(3),
                    version['asset']['type']['name'], component['name'])

                try:
                    location = component['component_locations'][0]['location']
                    file_path = location.get_filesystem_path(component)
                except Exception:
                    file_path = component['component_locations'][0][
                        'resource_identifier']

                if os.path.isdir(os.path.dirname(file_path)):
                    last_available = file_path
                    if component['name'] == default_component:
                        select_value = file_path
                    items.append({'label': label, 'value': file_path})

        if len(items) == 0:
            return {
                'success': False,
                'message':
                ('There are no Asset Versions with accessible path.')
            }

        item = {
            'label': 'Items to view',
            'type': 'enumerator',
            'name': 'path',
            'data': sorted(items, key=itemgetter('label'), reverse=True)
        }
        if select_value is not None:
            item['value'] = select_value
        else:
            item['value'] = last_available

        return {'items': [item]}

    def launch(self, session, entities, event):
        """Callback method for DJVView action."""

        # Launching application
        if "values" not in event["data"]:
            return
        filename = event['data']['values']['path']

        fps = entities[0].get('custom_attributes', {}).get('fps', None)

        cmd = []
        # DJV path
        cmd.append(os.path.normpath(self.djv_path))
        # DJV Options Start ##############################################
        # '''layer name'''
        # cmd.append('-file_layer (value)')
        # ''' Proxy scale: 1/2, 1/4, 1/8'''
        # cmd.append('-file_proxy 1/2')
        # ''' Cache: True, False.'''
        # cmd.append('-file_cache True')
        # ''' Start in full screen '''
        # cmd.append('-window_fullscreen')
        # ''' Toolbar controls: False, True.'''
        # cmd.append("-window_toolbar False")
        # ''' Window controls: False, True.'''
        # cmd.append("-window_playbar False")
        # ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
        # cmd.append("-view_grid None")
        # ''' Heads up display: True, False.'''
        # cmd.append("-view_hud True")
        # Playback: Stop, Forward, Reverse.
        cmd.append("-playback Forward")
        # ''' Frame.'''
        # cmd.append("-playback_frame (value)")
        if fps is not None:
            cmd.append("-playback_speed {}".format(int(fps)))
        # ''' Timer: Sleep, Timeout. Value: Sleep.'''
        # cmd.append("-playback_timer (value)")
        # ''' Timer resolution (seconds): 0.001.'''
        # cmd.append("-playback_timer_resolution (value)")
        # Time units: Timecode, Frames.
        cmd.append("-time_units Frames")
        # DJV Options End ################################################

        # PATH TO COMPONENT
        cmd.append(os.path.normpath(filename))

        try:
            # Run DJV with these commands
            subprocess.Popen(' '.join(cmd))
        except FileNotFoundError:
            return {
                'success':
                False,
                'message':
                'File "{}" was not found.'.format(os.path.basename(filename))
            }

        return True
Example #8
class CustomAttributes(BaseAction):
    '''Create or update Avalon custom attributes in Ftrack.'''

    #: Action identifier.
    identifier = 'create.update.attributes'
    #: Action label.
    label = "Pype Admin"
    variant = '- Create/Update Avalon Attributes'
    #: Action description.
    description = 'Creates or updates Avalon custom attributes'
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
    settings_key = "create_update_attributes"

    required_keys = ("key", "label", "type")

    presetable_keys = ("default", "write_security_roles",
                       "read_security_roles")
    hierarchical_key = "is_hierarchical"

    type_posibilities = ("text", "boolean", "date", "enumerator",
                         "dynamic enumerator", "number")

    def discover(self, session, entities, event):
        '''
        Validation
        - action is only for Administrators
        '''
        return self.valid_roles(session, entities, event)

    def launch(self, session, entities, event):
        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create(
            'Job', {
                'user': user,
                'status': 'running',
                'data': json.dumps(
                    {'description': 'Custom Attribute creation.'})
            })
        session.commit()

        self.app_manager = ApplicationManager()

        try:
            self.prepare_global_data(session)
            self.avalon_mongo_id_attributes(session, event)
            self.applications_attribute(event)
            self.tools_attribute(event)
            self.intent_attribute(event)
            self.custom_attributes_from_file(event)

            job['status'] = 'done'
            session.commit()

        except Exception:
            session.rollback()
            job["status"] = "failed"
            session.commit()
            self.log.error("Creating custom attributes failed ({})",
                           exc_info=True)

        return True

    def prepare_global_data(self, session):
        self.types_per_name = {
            attr_type["name"].lower(): attr_type
            for attr_type in session.query("CustomAttributeType").all()
        }

        self.security_roles = {
            role["name"].lower(): role
            for role in session.query("SecurityRole").all()
        }

        object_types = session.query("ObjectType").all()
        self.object_types_per_id = {
            object_type["id"]: object_type
            for object_type in object_types
        }
        self.object_types_per_name = {
            object_type["name"].lower(): object_type
            for object_type in object_types
        }

        self.groups = {}

        self.ftrack_settings = get_system_settings()["modules"]["ftrack"]
        self.attrs_settings = self.prepare_attribute_settings()

    def prepare_attribute_settings(self):
        output = {}
        attr_settings = self.ftrack_settings["custom_attributes"]
        for entity_type, attr_data in attr_settings.items():
            # Lower entity type
            entity_type = entity_type.lower()
            # Just store if entity type is not "task"
            if entity_type != "task":
                output[entity_type] = attr_data
                continue

            # Prepare empty dictionary for entity type if not set yet
            if entity_type not in output:
                output[entity_type] = {}

            # Store presets per lowered object type
            for obj_type, _preset in attr_data.items():
                output[entity_type][obj_type.lower()] = _preset

        return output

    def avalon_mongo_id_attributes(self, session, event):
        self.create_hierarchical_mongo_attr(session, event)

        hierarchical_attr, object_type_attrs = (
            self.mongo_id_custom_attributes(session))
        if object_type_attrs:
            self.convert_mongo_id_to_hierarchical(hierarchical_attr,
                                                  object_type_attrs, session,
                                                  event)

    def mongo_id_custom_attributes(self, session):
        cust_attrs_query = (
            "select id, entity_type, object_type_id, is_hierarchical, default"
            " from CustomAttributeConfiguration"
            " where key = \"{}\"").format(CUST_ATTR_ID_KEY)

        mongo_id_avalon_attr = session.query(cust_attrs_query).all()
        hierarchical_attr = None
        object_type_attrs = []
        for cust_attr in mongo_id_avalon_attr:
            if cust_attr["is_hierarchical"]:
                hierarchical_attr = cust_attr

            else:
                object_type_attrs.append(cust_attr)

        return hierarchical_attr, object_type_attrs

    def create_hierarchical_mongo_attr(self, session, event):
        # Set security roles for attribute
        data = {
            "key": CUST_ATTR_ID_KEY,
            "label": "Avalon/Mongo ID",
            "type": "text",
            "default": "",
            "group": CUST_ATTR_GROUP,
            "is_hierarchical": True,
            "config": {
                "markdown": False
            }
        }

        self.process_attr_data(data, event)

    def convert_mongo_id_to_hierarchical(self, hierarchical_attr,
                                         object_type_attrs, session, event):
        user_msg = "Converting old custom attributes. This may take some time."
        self.show_message(event, user_msg, True)
        self.log.info(user_msg)

        object_types_per_id = {
            object_type["id"]: object_type
            for object_type in session.query("ObjectType").all()
        }

        cust_attr_query = (
            "select value, entity_id from ContextCustomAttributeValue "
            "where configuration_id is {}")
        for attr_def in object_type_attrs:
            attr_ent_type = attr_def["entity_type"]
            if attr_ent_type == "show":
                entity_type_label = "Project"
            elif attr_ent_type == "task":
                entity_type_label = (
                    object_types_per_id[attr_def["object_type_id"]]["name"])
            else:
                self.log.warning(
                    "Unsupported entity type: \"{}\". Skipping.".format(
                        attr_ent_type))
                continue

            self.log.debug(
                ("Converting Avalon MongoID attr for Entity type \"{}\"."
                 ).format(entity_type_label))

            call_expr = [{
                "action": "query",
                "expression": cust_attr_query.format(attr_def["id"])
            }]
            if hasattr(session, "call"):
                [values] = session.call(call_expr)
            else:
                [values] = session._call(call_expr)

            for value in values["data"]:
                table_values = collections.OrderedDict({
                    "configuration_id":
                    hierarchical_attr["id"],
                    "entity_id":
                    value["entity_id"]
                })

                session.recorded_operations.push(
                    ftrack_api.operation.UpdateEntityOperation(
                        "ContextCustomAttributeValue", table_values, "value",
                        ftrack_api.symbol.NOT_SET, value["value"]))

            try:
                session.commit()

            except Exception:
                session.rollback()
                self.log.warning(("Couldn't transfer Avalon Mongo ID"
                                  " attribute for entity type \"{}\"."
                                  ).format(entity_type_label),
                                 exc_info=True)

            try:
                session.delete(attr_def)
                session.commit()

            except Exception:
                session.rollback()
                self.log.warning(("Couldn't delete Avalon Mongo ID"
                                  " attribute for entity type \"{}\"."
                                  ).format(entity_type_label),
                                 exc_info=True)

    def app_defs_from_app_manager(self):
        app_definitions = []
        for app_name, app in self.app_manager.applications.items():
            if app.enabled and app.is_host:
                app_definitions.append({app_name: app.full_label})

        if not app_definitions:
            app_definitions.append({"empty": "< Empty >"})
        return app_definitions

    def applications_attribute(self, event):
        apps_data = self.app_defs_from_app_manager()

        applications_custom_attr_data = {
            "label": "Applications",
            "key": "applications",
            "type": "enumerator",
            "entity_type": "show",
            "group": CUST_ATTR_GROUP,
            "config": {
                "multiselect": True,
                "data": apps_data
            }
        }
        self.process_attr_data(applications_custom_attr_data, event)

    def tools_attribute(self, event):
        tools_data = []
        for tool_name in self.app_manager.tools.keys():
            tools_data.append({tool_name: tool_name})

        # Make sure there is at least one item
        if not tools_data:
            tools_data.append({"empty": "< Empty >"})

        tools_custom_attr_data = {
            "label": "Tools",
            "key": "tools_env",
            "type": "enumerator",
            "is_hierarchical": True,
            "group": CUST_ATTR_GROUP,
            "config": {
                "multiselect": True,
                "data": tools_data
            }
        }
        self.process_attr_data(tools_custom_attr_data, event)

    def intent_attribute(self, event):
        intent_key_values = self.ftrack_settings["intent"]["items"]

        intent_values = []
        for key, label in intent_key_values.items():
            if not key or not label:
                self.log.info(
                    ("Skipping intent row: {{\"{}\": \"{}\"}}"
                     " because of empty key or label.").format(key, label))
                continue

            intent_values.append({key: label})

        if not intent_values:
            return

        intent_custom_attr_data = {
            "label": "Intent",
            "key": "intent",
            "type": "enumerator",
            "entity_type": "assetversion",
            "group": CUST_ATTR_GROUP,
            "config": {
                "multiselect": False,
                "data": intent_values
            }
        }
        self.process_attr_data(intent_custom_attr_data, event)

    def custom_attributes_from_file(self, event):
        # Load json with custom attributes configurations
        cust_attr_def = default_custom_attributes_definition()
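        # Assumed (illustrative) structure of the definition file:
        # {
        #     "is_hierarchical": {"<attr key>": {...}},
        #     "show": {"<attr key>": {...}},
        #     "task": {"<object type>": {"<attr key>": {...}}}
        # }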
        attrs_data = []

        # Prepare data of hierarchical attributes
        hierarchical_attrs = cust_attr_def.pop(self.hierarchical_key, {})
        for key, cust_attr_data in hierarchical_attrs.items():
            cust_attr_data["key"] = key
            cust_attr_data["is_hierarchical"] = True
            attrs_data.append(cust_attr_data)

        # Prepare data of entity specific attributes
        for entity_type, cust_attr_datas in cust_attr_def.items():
            if entity_type.lower() != "task":
                for key, cust_attr_data in cust_attr_datas.items():
                    cust_attr_data["key"] = key
                    cust_attr_data["entity_type"] = entity_type
                    attrs_data.append(cust_attr_data)
                continue

            # Task should have nested level for object type
            for object_type, _cust_attr_datas in cust_attr_datas.items():
                for key, cust_attr_data in _cust_attr_datas.items():
                    cust_attr_data["key"] = key
                    cust_attr_data["entity_type"] = entity_type
                    cust_attr_data["object_type"] = object_type
                    attrs_data.append(cust_attr_data)

        # Process prepared data
        for cust_attr_data in attrs_data:
            # Add group
            cust_attr_data["group"] = CUST_ATTR_GROUP
            self.process_attr_data(cust_attr_data, event)

    def presets_for_attr_data(self, attr_data):
        output = {}

        attr_key = attr_data["key"]
        if attr_data.get("is_hierarchical"):
            entity_key = self.hierarchical_key
        else:
            entity_key = attr_data["entity_type"]

        entity_settings = self.attrs_settings.get(entity_key) or {}
        if entity_key.lower() == "task":
            object_type = attr_data["object_type"]
            entity_settings = entity_settings.get(object_type.lower()) or {}

        key_settings = entity_settings.get(attr_key) or {}
        for key, value in key_settings.items():
            if key in self.presetable_keys and value:
                output[key] = value
        return output

    def process_attr_data(self, cust_attr_data, event):
        attr_settings = self.presets_for_attr_data(cust_attr_data)
        cust_attr_data.update(attr_settings)

        try:
            data = {}
            # Get key, label, type
            data.update(self.get_required(cust_attr_data))
            # Get hierarchical / entity_type / object_id
            data.update(self.get_entity_type(cust_attr_data))
            # Get group, default, security roles
            data.update(self.get_optional(cust_attr_data))
            # Process data
            self.process_attribute(data)

        except CustAttrException as cae:
            cust_attr_name = cust_attr_data.get("label", cust_attr_data["key"])

            if cust_attr_name:
                msg = 'Custom attribute error "{}" - {}'.format(
                    cust_attr_name, str(cae))
            else:
                msg = 'Custom attribute error - {}'.format(str(cae))
            self.log.warning(msg, exc_info=True)
            self.show_message(event, msg)

    def process_attribute(self, data):
        existing_attrs = self.session.query(
            "CustomAttributeConfiguration").all()
        matching = []
        for attr in existing_attrs:
            if (attr["key"] != data["key"]
                    or attr["type"]["name"] != data["type"]["name"]):
                continue

            if data.get("is_hierarchical") is True:
                if attr["is_hierarchical"] is True:
                    matching.append(attr)
            elif "object_type_id" in data:
                if (attr["entity_type"] == data["entity_type"]
                        and attr["object_type_id"] == data["object_type_id"]):
                    matching.append(attr)
            else:
                if attr["entity_type"] == data["entity_type"]:
                    matching.append(attr)

        if len(matching) == 0:
            self.session.create("CustomAttributeConfiguration", data)
            self.session.commit()
            self.log.debug("Custom attribute \"{}\" created".format(
                data["label"]))

        elif len(matching) == 1:
            attr_update = matching[0]
            for key in data:
                if key not in ("is_hierarchical", "entity_type",
                               "object_type_id"):
                    attr_update[key] = data[key]

            self.session.commit()
            self.log.debug("Custom attribute \"{}\" updated".format(
                data["label"]))

        else:
            raise CustAttrException(
                ("Custom attribute is duplicated. Key: \"{}\" Type: \"{}\""
                 ).format(data["key"], data["type"]["name"]))

    def get_required(self, attr):
        output = {}
        for key in self.required_keys:
            if key not in attr:
                raise CustAttrException(
                    "BUG: Key \"{}\" is required".format(key))

        if attr['type'].lower() not in self.type_posibilities:
            raise CustAttrException('Type {} is not valid'.format(
                attr['type']))

        output['key'] = attr['key']
        output['label'] = attr['label']

        type_name = attr['type'].lower()
        output['type'] = self.types_per_name[type_name]

        config = None
        if type_name == 'number':
            config = self.get_number_config(attr)
        elif type_name == 'text':
            config = self.get_text_config(attr)
        elif type_name == 'enumerator':
            config = self.get_enumerator_config(attr)

        if config is not None:
            output['config'] = config

        return output

    def get_number_config(self, attr):
        if 'config' in attr and 'isdecimal' in attr['config']:
            isdecimal = attr['config']['isdecimal']
        else:
            isdecimal = False

        config = json.dumps({'isdecimal': isdecimal})

        return config

    def get_text_config(self, attr):
        if 'config' in attr and 'markdown' in attr['config']:
            markdown = attr['config']['markdown']
        else:
            markdown = False
        config = json.dumps({'markdown': markdown})

        return config

    def get_enumerator_config(self, attr):
        if 'config' not in attr:
            raise CustAttrException('Missing config with data')
        if 'data' not in attr['config']:
            raise CustAttrException('Missing data in config')

        data = []
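        # Each item is expected to be a single-key dict mapping the stored
        # value to its menu label, e.g. {"maya": "Autodesk Maya"} (illustrative).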
        for item in attr['config']['data']:
            item_data = {}
            for key in item:
                # TODO key check by regex
                item_data['menu'] = item[key]
                item_data['value'] = key
                data.append(item_data)

        multiSelect = False
        for k in attr['config']:
            if k.lower() == 'multiselect':
                if isinstance(attr['config'][k], bool):
                    multiSelect = attr['config'][k]
                else:
                    raise CustAttrException('Multiselect must be boolean')
                break

        config = json.dumps({
            'multiSelect': multiSelect,
            'data': json.dumps(data)
        })

        return config

    def get_group(self, attr):
        if isinstance(attr, dict):
            group_name = attr['group'].lower()
        else:
            group_name = attr
        if group_name in self.groups:
            return self.groups[group_name]

        query = 'CustomAttributeGroup where name is "{}"'.format(group_name)
        groups = self.session.query(query).all()

        if len(groups) == 1:
            group = groups[0]
            self.groups[group_name] = group

            return group

        elif len(groups) < 1:
            group = self.session.create('CustomAttributeGroup', {
                'name': group_name,
            })
            self.session.commit()

            return group

        else:
            raise CustAttrException(
                'Found more than one group "{}"'.format(group_name))

    def get_security_roles(self, security_roles):
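        """Translate a list of role names into SecurityRole entities.

        An empty list or the value "all" returns every known role. If the
        first item is "except", all roles except the listed ones are
        returned. Otherwise each name must match an existing role
        (case insensitive).
        """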
        security_roles_lowered = tuple(name.lower() for name in security_roles)
        if (len(security_roles_lowered) == 0
                or "all" in security_roles_lowered):
            return list(self.security_roles.values())

        output = []
        if security_roles_lowered[0] == "except":
            excepts = security_roles_lowered[1:]
            for role_name, role in self.security_roles.items():
                if role_name not in excepts:
                    output.append(role)

        else:
            for role_name in security_roles_lowered:
                if role_name in self.security_roles:
                    output.append(self.security_roles[role_name])
                else:
                    raise CustAttrException(
                        ("Securit role \"{}\" was not found in Ftrack."
                         ).format(role_name))
        return output

    def get_default(self, attr):
        type = attr['type']
        default = attr['default']
        if default is None:
            return default
        err_msg = 'Default value is not'
        if type == 'number':
            if isinstance(default, str) and default.isnumeric():
                default = float(default)

            if not isinstance(default, (float, int)):
                raise CustAttrException('{} number'.format(err_msg))
        elif type == 'text':
            if not isinstance(default, str):
                raise CustAttrException('{} string'.format(err_msg))
        elif type == 'boolean':
            if not isinstance(default, bool):
                raise CustAttrException('{} boolean'.format(err_msg))
        elif type == 'enumerator':
            if not isinstance(default, list):
                raise CustAttrException(
                    '{} array with strings'.format(err_msg))
            # TODO check if multiSelect is available
            # and if default is one of data menu
            if not isinstance(default[0], str):
                raise CustAttrException('{} array of strings'.format(err_msg))
        elif type == 'date':
            date_items = default.split(' ')
            try:
                if len(date_items) == 1:
                    default = arrow.get(default, 'YY.M.D')
                elif len(date_items) == 2:
                    default = arrow.get(default, 'YY.M.D H:m:s')
                else:
                    raise Exception
            except Exception:
                raise CustAttrException('Date is not in proper format')
        elif type == 'dynamic enumerator':
            raise CustAttrException('Dynamic enumerator can\'t have default')

        return default

    def get_optional(self, attr):
        output = {}
        if "group" in attr:
            output["group"] = self.get_group(attr)
        if "default" in attr:
            output["default"] = self.get_default(attr)

        roles_read = []
        roles_write = []
        if "read_security_roles" in attr:
            roles_read = attr["read_security_roles"]
        if "write_security_roles" in attr:
            roles_write = attr["write_security_roles"]

        output["read_security_roles"] = self.get_security_roles(roles_read)
        output["write_security_roles"] = self.get_security_roles(roles_write)
        return output

    def get_entity_type(self, attr):
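        """Resolve entity type data for the attribute.

        Hierarchical attributes default to entity_type "show". Attributes for
        "task" entities must define an object_type, which is translated to
        its object_type_id.
        """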
        if attr.get("is_hierarchical", False):
            return {
                "is_hierarchical": True,
                "entity_type": attr.get("entity_type") or "show"
            }

        if 'entity_type' not in attr:
            raise CustAttrException('Missing entity_type')

        if attr['entity_type'].lower() != 'task':
            return {'entity_type': attr['entity_type']}

        if 'object_type' not in attr:
            raise CustAttrException('Missing object_type')

        object_type_name = attr['object_type']
        object_type_name_low = object_type_name.lower()
        object_type = self.object_types_per_name.get(object_type_name_low)
        if not object_type:
            raise CustAttrException(('Object type with name "{}" doesn\'t exist'
                                     ).format(object_type_name))

        return {
            'entity_type': attr['entity_type'],
            'object_type_id': object_type["id"]
        }


class CreateProjectFolders(BaseAction):
    """Action create folder structure and may create hierarchy in Ftrack.

    Creation of folder structure and hierarchy in Ftrack is based on presets.
    These presets are located in:
    `~/pype-config/presets/tools/project_folder_structure.json`

    Example of content:
    ```json
    {
        "__project_root__": {
            "prod" : {},
            "resources" : {
              "footage": {
                "plates": {},
                "offline": {}
              },
              "audio": {},
              "art_dept": {}
            },
            "editorial" : {},
            "assets[ftrack.Library]": {
              "characters[ftrack]": {},
              "locations[ftrack]": {}
            },
            "shots[ftrack.Sequence]": {
              "scripts": {},
              "editorial[ftrack.Folder]": {}
            }
        }
    }
    ```
    Key "__project_root__" indicates root folder (or entity). Each key in
    dictionary represents folder name. Value may contain another dictionary
    with subfolders.

    Identifier `[ftrack]` in name says that this should be also created in
    Ftrack hierarchy. It is possible to specify entity type of item with "." .
    If key is `assets[ftrack.Library]` then in ftrack will be created entity
    with name "assets" and entity type "Library". It is expected Library entity
    type exist in Ftrack.
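
    For example, with the preset above and a single project root, the action
    would create folders such as "<root>/<project>/resources/footage/plates"
    on disk and, in Ftrack, a Library entity "assets" with Folder children
    "characters" and "locations".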
    """

    identifier = "create.project.structure"
    label = "Create Project Structure"
    description = "Creates folder structure"
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    icon = statics_icon("ftrack", "action_icons", "CreateProjectFolders.svg")

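    # Regexes for parsing "name[ftrack.EntityType]" markers in preset keys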
    pattern_array = re.compile(r"\[.*\]")
    pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
    pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")
    project_root_key = "__project_root__"

    def discover(self, session, entities, event):
        if len(entities) != 1:
            return False

        if entities[0].entity_type.lower() != "project":
            return False

        return True

    def launch(self, session, entities, event):
        entity = entities[0]
        project = self.get_project_from_entity(entity)
        project_folder_presets = (config.get_presets().get(
            "tools", {}).get("project_folder_structure"))
        if not project_folder_presets:
            return {
                "success": False,
                "message": "Project structure presets are not set."
            }

        try:
            # Get paths based on presets
            basic_paths = self.get_path_items(project_folder_presets)
            anatomy = Anatomy(project["full_name"])
            self.create_folders(basic_paths, entity, project, anatomy)
            self.create_ftrack_entities(basic_paths, project)

        except Exception as exc:
            session.rollback()
            return {"success": False, "message": str(exc)}

        return True

    def get_ftrack_paths(self, paths_items):
        all_ftrack_paths = []
        for path_items in paths_items:
            ftrack_path_items = []
            is_ftrack = False
            for item in reversed(path_items):
                if item == self.project_root_key:
                    continue
                if is_ftrack:
                    ftrack_path_items.append(item)
                elif re.match(self.pattern_ftrack, item):
                    ftrack_path_items.append(item)
                    is_ftrack = True
            ftrack_path_items = list(reversed(ftrack_path_items))
            if ftrack_path_items:
                all_ftrack_paths.append(ftrack_path_items)
        return all_ftrack_paths

    def compute_ftrack_items(self, in_list, keys):
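        """Merge one key path into a nested list-of-dicts tree in place."""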
        if len(keys) == 0:
            return in_list
        key = keys[0]
        exist = None
        for index, subdict in enumerate(in_list):
            if key in subdict:
                exist = index
                break
        if exist is not None:
            in_list[exist][key] = self.compute_ftrack_items(
                in_list[exist][key], keys[1:])
        else:
            in_list.append({key: self.compute_ftrack_items([], keys[1:])})
        return in_list

    def translate_ftrack_items(self, paths_items):
        main = []
        for path_items in paths_items:
            main = self.compute_ftrack_items(main, path_items)
        return main

    def create_ftrack_entities(self, basic_paths, project_ent):
        only_ftrack_items = self.get_ftrack_paths(basic_paths)
        ftrack_paths = self.translate_ftrack_items(only_ftrack_items)

        for separation in ftrack_paths:
            parent = project_ent
            self.trigger_creation(separation, parent)

    def trigger_creation(self, separation, parent):
        for item, subvalues in separation.items():
            matches = re.findall(self.pattern_array, item)
            ent_type = "Folder"
            if len(matches) == 0:
                name = item
            else:
                match = matches[0]
                name = item.replace(match, "")
                ent_type_match = re.findall(self.pattern_ent_ftrack, match)
                if len(ent_type_match) > 0:
                    ent_type_split = ent_type_match[0].split(".")
                    if len(ent_type_split) == 2:
                        ent_type = ent_type_split[1]
            new_parent = self.create_ftrack_entity(name, ent_type, parent)
            if subvalues:
                for subvalue in subvalues:
                    self.trigger_creation(subvalue, new_parent)

    def create_ftrack_entity(self, name, ent_type, parent):
        for children in parent["children"]:
            if children["name"] == name:
                return children
        data = {"name": name, "parent_id": parent["id"]}
        if parent.entity_type.lower() == "project":
            data["project_id"] = parent["id"]
        else:
            data["project_id"] = parent["project"]["id"]

        existing_entity = self.session.query(
            ("TypedContext where name is \"{}\" and "
             "parent_id is \"{}\" and project_id is \"{}\"").format(
                 name, data["parent_id"], data["project_id"])).first()
        if existing_entity:
            return existing_entity

        new_ent = self.session.create(ent_type, data)
        self.session.commit()
        return new_ent

    def get_path_items(self, in_dict):
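        """Flatten the preset dict into key paths, one per leaf folder."""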
        output = []
        for key, value in in_dict.items():
            if not value:
                output.append(key)
            else:
                paths = self.get_path_items(value)
                for path in paths:
                    if not isinstance(path, (list, tuple)):
                        path = [path]

                    output.append([key, *path])

        return output

    def compute_paths(self, basic_paths_items, project_root):
        output = []
        for path_items in basic_paths_items:
            clean_items = []
            for path_item in path_items:
                matches = re.findall(self.pattern_array, path_item)
                if len(matches) > 0:
                    path_item = path_item.replace(matches[0], "")
                if path_item == self.project_root_key:
                    path_item = project_root
                clean_items.append(path_item)
            output.append(os.path.normpath(os.path.sep.join(clean_items)))
        return output

    def create_folders(self, basic_paths, entity, project, anatomy):
        roots_paths = []
        if isinstance(anatomy.roots, dict):
            for root in anatomy.roots.values():
                roots_paths.append(root.value)
        else:
            roots_paths.append(anatomy.roots.value)

        for root_path in roots_paths:
            project_root = os.path.join(root_path, project["full_name"])
            full_paths = self.compute_paths(basic_paths, project_root)
            # Create folders
            for path in full_paths:
                if os.path.exists(path):
                    continue
                os.makedirs(path.format(project_root=project_root))
Exemple #10
class StoreThumbnailsToAvalon(BaseAction):
    # Action identifier
    identifier = "store.thubmnail.to.avalon"
    # Action label
    label = "Pype Admin"
    # Action variant
    variant = "- Store Thumbnails to avalon"
    # Action description
    description = 'Test action'
    # roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    thumbnail_key = "AVALON_THUMBNAIL_ROOT"
    db_con = DbConnector()

    def discover(self, session, entities, event):
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                return True
        return False

    def launch(self, session, entities, event):
        # DEBUG LINE
        # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails"

        user = session.query("User where username is '{0}'".format(
            session.api_user)).one()
        action_job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps(
                {"description": "Storing thumbnails to avalon."})
        })
        session.commit()

        project = self.get_project_from_entity(entities[0])
        project_name = project["full_name"]
        anatomy = Anatomy(project_name)

        if "publish" not in anatomy.templates:
            msg = "Anatomy does not have set publish key!"

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        if "thumbnail" not in anatomy.templates["publish"]:
            msg = (
                "The \"thumbnail\" template is not set"
                " in Anatomy for project \"{}\"").format(project_name)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_roots = os.environ.get(self.thumbnail_key)
        if ("{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
                and not thumbnail_roots):
            msg = "`{}` environment is not set".format(self.thumbnail_key)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        existing_thumbnail_root = None
        for path in thumbnail_roots.split(os.pathsep):
            if os.path.exists(path):
                existing_thumbnail_root = path
                break

        if existing_thumbnail_root is None:
            msg = ("Can't access paths, set in `{}` ({})").format(
                self.thumbnail_key, thumbnail_roots)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

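        # Dummy values used only to verify that the "thumbnail" template can
        # be solved with the keys this action is able to provide.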
        example_template_data = {
            "_id": "ID",
            "thumbnail_root": "THUBMNAIL_ROOT",
            "thumbnail_type": "THUMBNAIL_TYPE",
            "ext": ".EXT",
            "project": {
                "name": "PROJECT_NAME",
                "code": "PROJECT_CODE"
            },
            "asset": "ASSET_NAME",
            "subset": "SUBSET_NAME",
            "version": "VERSION_NAME",
            "hierarchy": "HIERARCHY"
        }
        tmp_filled = anatomy.format_all(example_template_data)
        thumbnail_result = tmp_filled["publish"]["thumbnail"]
        if not thumbnail_result.solved:
            missing_keys = thumbnail_result.missing_keys
            invalid_types = thumbnail_result.invalid_types
            submsg = ""
            if missing_keys:
                submsg += "Missing keys: {}".format(", ".join(
                    ["\"{}\"".format(key) for key in missing_keys]))

            if invalid_types:
                items = []
                for key, value in invalid_types.items():
                    items.append("{}{}".format(str(key), str(value)))
                submsg += "Invalid types: {}".format(", ".join(items))

            msg = ("Thumbnail Anatomy template expects more keys than action"
                   " can offer. {}").format(submsg)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_template = anatomy.templates["publish"]["thumbnail"]

        self.db_con.install()

        for entity in entities:
            # Skip if entity is not AssetVersion (should never happen, but...)
            if entity.entity_type.lower() != "assetversion":
                continue

            # Skip if AssetVersion doesn't have a thumbnail
            thumbnail_ent = entity["thumbnail"]
            if thumbnail_ent is None:
                self.log.debug(("Skipping. AssetVersion doesn't "
                                "have a thumbnail set. {}").format(entity["id"]))
                continue

            avalon_ents_result = self.get_avalon_entities_for_assetversion(
                entity, self.db_con)
            version_full_path = ("Asset: \"{project_name}/{asset_path}\""
                                 " | Subset: \"{subset_name}\""
                                 " | Version: \"{version_name}\"").format(
                                     **avalon_ents_result)

            version = avalon_ents_result["version"]
            if not version:
                self.log.warning(
                    ("AssetVersion does not have version in avalon. {}"
                     ).format(version_full_path))
                continue

            thumbnail_id = version["data"].get("thumbnail_id")
            if thumbnail_id:
                self.log.info(
                    ("AssetVersion skipped, already has thubmanil set. {}"
                     ).format(version_full_path))
                continue

            # Get thumbnail extension
            file_ext = thumbnail_ent["file_type"]
            if not file_ext.startswith("."):
                file_ext = ".{}".format(file_ext)

            avalon_project = avalon_ents_result["project"]
            avalon_asset = avalon_ents_result["asset"]
            hierarchy = ""
            parents = avalon_asset["data"].get("parents") or []
            if parents:
                hierarchy = "/".join(parents)

            # Prepare anatomy template fill data
            # 1. Create new id for thumbnail entity
            thumbnail_id = ObjectId()

            template_data = {
                "_id": str(thumbnail_id),
                "thumbnail_root": existing_thumbnail_root,
                "thumbnail_type": "thumbnail",
                "ext": file_ext,
                "project": {
                    "name": avalon_project["name"],
                    "code": avalon_project["data"].get("code")
                },
                "asset": avalon_ents_result["asset_name"],
                "subset": avalon_ents_result["subset_name"],
                "version": avalon_ents_result["version_name"],
                "hierarchy": hierarchy
            }

            anatomy_filled = anatomy.format(template_data)
            thumbnail_path = anatomy_filled["publish"]["thumbnail"]
            thumbnail_path = thumbnail_path.replace("..", ".")
            thumbnail_path = os.path.normpath(thumbnail_path)

            downloaded = False
            for loc in (thumbnail_ent.get("component_locations") or []):
                res_id = loc.get("resource_identifier")
                if not res_id:
                    continue

                thumbnail_url = self.get_thumbnail_url(res_id)
                if self.download_file(thumbnail_url, thumbnail_path):
                    downloaded = True
                    break

            if not downloaded:
                self.log.warning("Could not download thumbnail for {}".format(
                    version_full_path))
                continue

            # Clean template data from keys that are dynamic
            template_data.pop("_id")
            template_data.pop("thumbnail_root")

            thumbnail_entity = {
                "_id": thumbnail_id,
                "type": "thumbnail",
                "schema": "pype:thumbnail-1.0",
                "data": {
                    "template": thumbnail_template,
                    "template_data": template_data
                }
            }

            # Create thumbnail entity
            self.db_con.insert_one(thumbnail_entity)
            self.log.debug("Creating entity in database {}".format(
                str(thumbnail_entity)))

            # Set thumbnail id for version
            self.db_con.update_one(
                {"_id": version["_id"]},
                {"$set": {
                    "data.thumbnail_id": thumbnail_id
                }})

            self.db_con.update_one(
                {"_id": avalon_asset["_id"]},
                {"$set": {
                    "data.thumbnail_id": thumbnail_id
                }})

        action_job["status"] = "done"
        session.commit()

        return True

    def get_thumbnail_url(self, resource_identifier, size=None):
        # TODO use ftrack_api method rather (find way how to use it)
        url_string = (u'{url}/component/thumbnail?id={id}&username={username}'
                      u'&apiKey={apiKey}')
        url = url_string.format(url=self.session.server_url,
                                id=resource_identifier,
                                username=self.session.api_user,
                                apiKey=self.session.api_key)
        if size:
            url += u'&size={0}'.format(size)

        return url

    def download_file(self, source_url, dst_file_path):
        dir_path = os.path.dirname(dst_file_path)
        try:
            os.makedirs(dir_path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                self.log.warning(
                    "Could not create folder: \"{}\"".format(dir_path))
                return False

        self.log.debug("Downloading file \"{}\" -> \"{}\"".format(
            source_url, dst_file_path))
        file_open = open(dst_file_path, "wb")
        try:
            file_open.write(requests.get(source_url).content)
        except Exception:
            self.log.warning(
                "Download of image `{}` failed.".format(source_url))
            return False
        finally:
            file_open.close()
        return True

    def get_avalon_entities_for_assetversion(self, asset_version, db_con):
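        """Collect avalon documents related to an ftrack AssetVersion.

        Fills project, asset, subset, version and representation documents
        (plus their names) into the output. "success" is set to False as soon
        as one of the levels is not found in the database.
        """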
        output = {
            "success": True,
            "message": None,
            "project": None,
            "project_name": None,
            "asset": None,
            "asset_name": None,
            "asset_path": None,
            "subset": None,
            "subset_name": None,
            "version": None,
            "version_name": None,
            "representations": None
        }

        db_con.install()

        ft_asset = asset_version["asset"]
        subset_name = ft_asset["name"]
        version = asset_version["version"]
        parent = ft_asset["parent"]
        ent_path = "/".join([ent["name"] for ent in parent["link"]])
        project = self.get_project_from_entity(asset_version)
        project_name = project["full_name"]

        output["project_name"] = project_name
        output["asset_name"] = parent["name"]
        output["asset_path"] = ent_path
        output["subset_name"] = subset_name
        output["version_name"] = version

        db_con.Session["AVALON_PROJECT"] = project_name

        avalon_project = db_con.find_one({"type": "project"})
        output["project"] = avalon_project

        if not avalon_project:
            output["success"] = False
            output["message"] = (
                "Project not synchronized to avalon `{}`".format(project_name))
            return output

        asset_ent = None
        asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
        if asset_mongo_id:
            try:
                asset_mongo_id = ObjectId(asset_mongo_id)
                asset_ent = db_con.find_one({
                    "type": "asset",
                    "_id": asset_mongo_id
                })
            except Exception:
                pass

        if not asset_ent:
            asset_ent = db_con.find_one({
                "type": "asset",
                "data.ftrackId": parent["id"]
            })

        output["asset"] = asset_ent

        if not asset_ent:
            output["success"] = False
            output["message"] = (
                "Not synchronized entity to avalon `{}`".format(ent_path))
            return output

        asset_mongo_id = asset_ent["_id"]

        subset_ent = db_con.find_one({
            "type": "subset",
            "parent": asset_mongo_id,
            "name": subset_name
        })

        output["subset"] = subset_ent

        if not subset_ent:
            output["success"] = False
            output["message"] = (
                "Subset `{}` does not exist under Asset `{}`").format(
                    subset_name, ent_path)
            return output

        version_ent = db_con.find_one({
            "type": "version",
            "name": version,
            "parent": subset_ent["_id"]
        })

        output["version"] = version_ent

        if not version_ent:
            output["success"] = False
            output["message"] = (
                "Version `{}` does not exist under Subset `{}` | Asset `{}`"
            ).format(version, subset_name, ent_path)
            return output

        repre_ents = list(
            db_con.find({
                "type": "representation",
                "parent": version_ent["_id"]
            }))

        output["representations"] = repre_ents
        return output
Exemple #11
class RVAction(BaseAction):
    """ Launch RV action """
    ignore_me = "rv" not in config.get_presets()
    identifier = "rv.launch.action"
    label = "rv"
    description = "rv Launcher"
    icon = statics_icon("ftrack", "action_icons", "RV.png")

    type = 'Application'

    def __init__(self, session, plugins_presets):
        """ Constructor

            :param session: ftrack Session
            :type session: :class:`ftrack_api.Session`
        """
        super().__init__(session, plugins_presets)
        self.rv_path = None
        self.config_data = None

        # RV_HOME should be set if properly installed
        if os.environ.get('RV_HOME'):
            self.rv_path = os.path.join(os.environ.get('RV_HOME'), 'bin', 'rv')
        else:
            # if not, fallback to config file location
            if "rv" in config.get_presets():
                self.config_data = config.get_presets()['rv']['config']
                self.set_rv_path()

        if self.rv_path is None:
            return

        self.allowed_types = ["img", "mov", "exr"]
        if self.config_data:
            self.allowed_types = self.config_data.get(
                'file_ext', self.allowed_types)

    def discover(self, session, entities, event):
        """Return available actions based on *event*. """
        return True

    def set_rv_path(self):
        self.rv_path = self.config_data.get("rv_path")

    def preregister(self):
        if self.rv_path is None:
            return (
                'RV is not installed or paths in presets are not set correctly'
            )
        return True

    def get_components_from_entity(self, session, entity, components):
        """Get components from various entity types.

        The components dictionary is modified in place, so nothing is returned.

            Args:
                entity (Ftrack entity)
                components (dict)
        """

        if entity.entity_type.lower() == "assetversion":
            for component in entity["components"]:
                if component["file_type"][1:] not in self.allowed_types:
                    continue

                try:
                    components[entity["asset"]["parent"]["name"]].append(
                        component)
                except KeyError:
                    components[entity["asset"]["parent"]["name"]] = [component]

            return

        if entity.entity_type.lower() == "task":
            query = "AssetVersion where task_id is '{0}'".format(entity["id"])
            for assetversion in session.query(query):
                self.get_components_from_entity(session, assetversion,
                                                components)

            return

        if entity.entity_type.lower() == "shot":
            query = "AssetVersion where asset.parent.id is '{0}'".format(
                entity["id"])
            for assetversion in session.query(query):
                self.get_components_from_entity(session, assetversion,
                                                components)

            return

        raise NotImplementedError(
            "\"{}\" entity type is not implemented yet.".format(
                entity.entity_type))

    def interface(self, session, entities, event):
        if event['data'].get('values', {}):
            return

        user = session.query("User where username is '{0}'".format(
            os.environ["FTRACK_API_USER"])).one()
        job = session.create(
            "Job", {
                "user": user,
                "status": "running",
                "data": json.dumps(
                    {"description": "RV: Collecting components."})
            })
        # Commit to feedback to user.
        session.commit()

        items = []
        try:
            items = self.get_interface_items(session, entities)
        except Exception:
            self.log.error(traceback.format_exc())
            job["status"] = "failed"
        else:
            job["status"] = "done"

        # Commit to end job.
        session.commit()

        return {"items": items}

    def get_interface_items(self, session, entities):

        components = {}
        for entity in entities:
            self.get_components_from_entity(session, entity, components)

        # Sort by version
        for parent_name, entities in components.items():
            version_mapping = {}
            for entity in entities:
                try:
                    version_mapping[entity["version"]["version"]].append(
                        entity)
                except KeyError:
                    version_mapping[entity["version"]["version"]] = [entity]

            # Sort same versions by date.
            for version, entities in version_mapping.items():
                version_mapping[version] = sorted(
                    entities, key=lambda x: x["version"]["date"], reverse=True)

            components[parent_name] = []
            for version in reversed(sorted(version_mapping.keys())):
                components[parent_name].extend(version_mapping[version])

        # Items to present to user.
        items = []
        label = "{} - v{} - {}"
        for parent_name, entities in components.items():
            data = []
            for entity in entities:
                data.append({
                    "label": label.format(
                        entity["version"]["asset"]["name"],
                        str(entity["version"]["version"]).zfill(3),
                        entity["file_type"][1:]
                    ),
                    "value": entity["id"]
                })

            items.append({
                "label": parent_name,
                "type": "enumerator",
                "name": parent_name,
                "data": data,
                "value": data[0]["value"]
            })

        return items

    def launch(self, session, entities, event):
        """Callback method for RV action."""
        # Launching application
        if "values" not in event["data"]:
            return

        user = session.query("User where username is '{0}'".format(
            os.environ["FTRACK_API_USER"])).one()
        job = session.create(
            "Job", {
                "user": user,
                "status": "running",
                "data": json.dumps(
                    {"description": "RV: Collecting file paths."})
            })
        # Commit to feedback to user.
        session.commit()

        paths = []
        try:
            paths = self.get_file_paths(session, event)
        except Exception:
            self.log.error(traceback.format_exc())
            job["status"] = "failed"
        else:
            job["status"] = "done"

        # Commit to end job.
        session.commit()

        args = [os.path.normpath(self.rv_path)]

        fps = entities[0].get("custom_attributes", {}).get("fps", None)
        if fps is not None:
            args.extend(["-fps", str(fps)])

        args.extend(paths)

        self.log.info("Running rv: {}".format(args))

        subprocess.Popen(args)

        return True

    def get_file_paths(self, session, event):
        """Get file paths from selected components."""

        link = session.get("Component",
                           list(event["data"]["values"].values())
                           [0])["version"]["asset"]["parent"]["link"][0]
        project = session.get(link["type"], link["id"])
        os.environ["AVALON_PROJECT"] = project["name"]
        api.Session["AVALON_PROJECT"] = project["name"]
        io.install()

        location = ftrack_api.Session().pick_location()

        paths = []
        for parent_name in sorted(event["data"]["values"].keys()):
            component = session.get("Component",
                                    event["data"]["values"][parent_name])

            # Newer publishes have the source referenced in Ftrack.
            online_source = False
            for neighbour_component in component["version"]["components"]:
                if neighbour_component["name"] != "ftrackreview-mp4_src":
                    continue

                paths.append(location.get_filesystem_path(neighbour_component))
                online_source = True

            if online_source:
                continue

            asset = io.find_one({"type": "asset", "name": parent_name})
            subset = io.find_one({
                "type": "subset",
                "name": component["version"]["asset"]["name"],
                "parent": asset["_id"]
            })
            version = io.find_one({
                "type": "version",
                "name": component["version"]["version"],
                "parent": subset["_id"]
            })
            representation = io.find_one({
                "type": "representation",
                "parent": version["_id"],
                "name": component["file_type"][1:]
            })
            if representation is None:
                representation = io.find_one({
                    "type": "representation",
                    "parent": version["_id"],
                    "name": "preview"
                })
            paths.append(api.get_representation_path(representation))

        return paths


class ThumbToParent(BaseAction):
    '''Custom action.'''

    # Action identifier
    identifier = 'thumb.to.parent'
    # Action label
    label = 'Thumbnail'
    # Action variant
    variant = " to Parent"
    # Action icon
    icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")

    def discover(self, session, entities, event):
        '''Return action config if triggered on asset versions.'''

        if len(entities) <= 0 or entities[0].entity_type in ['Project']:
            return False

        return True

    def launch(self, session, entities, event):
        '''Callback method for action.'''

        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create(
            'Job', {
                'user': user,
                'status': 'running',
                'data': json.dumps(
                    {'description': 'Push thumbnails to parents'})
            })
        session.commit()
        try:
            for entity in entities:
                parent = None
                thumbid = None
                if entity.entity_type.lower() == 'assetversion':
                    parent = entity['task']

                    if parent is None:
                        par_ent = entity['link'][-2]
                        parent = session.get(par_ent['type'], par_ent['id'])
                else:
                    try:
                        parent = entity['parent']
                    except Exception as e:
                        msg = ("During Action 'Thumb to Parent'"
                               " went something wrong")
                        self.log.error(msg)
                        raise e
                thumbid = entity['thumbnail_id']

                if parent and thumbid:
                    parent['thumbnail_id'] = thumbid
                    status = 'done'
                else:
                    raise Exception(
                        "Parent or thumbnail id not found. Parent: {}. "
                        "Thumbnail id: {}".format(parent, thumbid))

            # inform the user that the job is done
            job['status'] = status or 'done'

        except Exception as exc:
            session.rollback()
            # fail the job if something goes wrong
            job['status'] = 'failed'
            raise exc

        finally:
            session.commit()

        return {
            'success': True,
            'message': 'Created job for updating thumbnails!'
        }
Exemple #13
class SeedDebugProject(BaseAction):
    '''Seed a project with debug entities.'''

    #: Action identifier.
    identifier = "seed.debug.project"
    #: Action label.
    label = "Seed Debug Project"
    #: Action description.
    description = "Description"
    #: priority
    priority = 100
    #: roles that are allowed to register this action
    role_list = ["Pypeclub"]
    icon = statics_icon("ftrack", "action_icons", "SeedProject.svg")

    # Asset names which will be created in `Assets` entity
    assets = [
        "Addax", "Alpaca", "Ant", "Antelope", "Aye", "Badger", "Bear", "Bee",
        "Beetle", "Bluebird", "Bongo", "Bontebok", "Butterflie", "Caiman",
        "Capuchin", "Capybara", "Cat", "Caterpillar", "Coyote", "Crocodile",
        "Cuckoo", "Deer", "Dragonfly", "Duck", "Eagle", "Egret", "Elephant",
        "Falcon", "Fossa", "Fox", "Gazelle", "Gecko", "Gerbil",
        "GiantArmadillo", "Gibbon", "Giraffe", "Goose", "Gorilla",
        "Grasshoper", "Hare", "Hawk", "Hedgehog", "Heron", "Hog",
        "Hummingbird", "Hyena", "Chameleon", "Cheetah", "Iguana", "Jackal",
        "Jaguar", "Kingfisher", "Kinglet", "Kite", "Komodo", "Lemur",
        "Leopard", "Lion", "Lizard", "Macaw", "Malachite", "Mandrill",
        "Mantis", "Marmoset", "Meadowlark", "Meerkat", "Mockingbird",
        "Mongoose", "Monkey", "Nyal", "Ocelot", "Okapi", "Oribi", "Oriole",
        "Otter", "Owl", "Panda", "Parrot", "Pelican", "Pig", "Porcupine",
        "Reedbuck", "Rhinocero", "Sandpiper", "Servil", "Skink", "Sloth",
        "Snake", "Spider", "Squirrel", "Sunbird", "Swallow", "Swift", "Tiger",
        "Sylph", "Tanager", "Vulture", "Warthog", "Waterbuck", "Woodpecker",
        "Zebra"
    ]

    # Tasks which will be created for Assets
    asset_tasks = ["Modeling", "Lookdev", "Rigging"]
    # Tasks which will be created for Shots
    shot_tasks = ["Animation", "Lighting", "Compositing", "FX"]

    # Define how many sequences will be created
    default_seq_count = 5
    # Define how many shots will be created for each sequence
    default_shots_count = 10

    max_entities_created_at_one_commit = 50

    existing_projects = None
    new_project_item = "< New Project >"
    current_project_item = "< Current Project >"

    def discover(self, session, entities, event):
        ''' Validation '''
        return True

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return

        title = "Select Project where you want to create seed data"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        description_label = {
            "type": "label",
            "value": (
                "WARNING: Action does NOT check if entities already exist !!!")
        }
        items.append(description_label)

        all_projects = session.query("select full_name from Project").all()
        self.existing_projects = [proj["full_name"] for proj in all_projects]
        projects_items = [{
            "label": proj,
            "value": proj
        } for proj in self.existing_projects]

        data_items = []

        data_items.append({
            "label": self.new_project_item,
            "value": self.new_project_item
        })

        data_items.append({
            "label": self.current_project_item,
            "value": self.current_project_item
        })

        data_items.extend(
            sorted(projects_items, key=itemgetter("label"), reverse=False))
        projects_item = {
            "label": "Choose Project",
            "type": "enumerator",
            "name": "project_name",
            "data": data_items,
            "value": self.current_project_item
        }
        items.append(projects_item)
        items.append(item_splitter)

        items.append({
            "label": "Number of assets",
            "type": "number",
            "name": "asset_count",
            "value": len(self.assets)
        })
        items.append({
            "label": "Number of sequences",
            "type": "number",
            "name": "seq_count",
            "value": self.default_seq_count
        })
        items.append({
            "label": "Number of shots",
            "type": "number",
            "name": "shots_count",
            "value": self.default_shots_count
        })
        items.append(item_splitter)

        note_label = {
            "type": "label",
            "value": (
                "<p><i>NOTE: Enter a project name and choose a schema if you "
                "chose `\"< New Project >\"` (code is optional)</i></p>")
        }
        items.append(note_label)
        items.append({
            "label": "Project name",
            "name": "new_project_name",
            "type": "text",
            "value": ""
        })

        project_schemas = [
            sch["name"] for sch in self.session.query("ProjectSchema").all()
        ]
        schemas_item = {
            "label": "Choose Schema",
            "type": "enumerator",
            "name": "new_schema_name",
            "data": [{
                "label": sch,
                "value": sch
            } for sch in project_schemas],
            "value": project_schemas[0]
        }
        items.append(schemas_item)

        items.append({
            "label": "*Project code",
            "name": "new_project_code",
            "type": "text",
            "value": "",
            "empty_text": "Optional..."
        })

        return {"items": items, "title": title}

    def launch(self, session, in_entities, event):
        if "values" not in event["data"]:
            return

        # THIS IS THE PROJECT PART
        values = event["data"]["values"]
        selected_project = values["project_name"]
        if selected_project == self.new_project_item:
            project_name = values["new_project_name"]
            if project_name in self.existing_projects:
                msg = "Project \"{}\" already exist".format(project_name)
                self.log.error(msg)
                return {"success": False, "message": msg}

            project_code = values["new_project_code"]
            project_schema_name = values["new_schema_name"]
            if not project_code:
                project_code = project_name
            project_code = project_code.lower().replace(" ", "_").strip()
            _project = session.query(
                "Project where name is \"{}\"".format(project_code)).first()
            if _project:
                msg = "Project with code \"{}\" already exist".format(
                    project_code)
                self.log.error(msg)
                return {"success": False, "message": msg}

            project_schema = session.query(
                "ProjectSchema where name is \"{}\"".format(
                    project_schema_name)).one()
            # Create the project with the chosen schema.
            self.log.debug(
                ("*** Creating Project: name <{}>, code <{}>, schema <{}>"
                 ).format(project_name, project_code, project_schema_name))
            project = session.create(
                "Project", {
                    "name": project_code,
                    "full_name": project_name,
                    "project_schema": project_schema
                })
            session.commit()

        elif selected_project == self.current_project_item:
            entity = in_entities[0]
            if entity.entity_type.lower() == "project":
                project = entity
            else:
                if "project" in entity:
                    project = entity["project"]
                else:
                    project = entity["parent"]["project"]
            project_schema = project["project_schema"]
            self.log.debug((
                "*** Using Project: name <{}>, code <{}>, schema <{}>").format(
                    project["full_name"], project["name"],
                    project_schema["name"]))
        else:
            project = session.query("Project where full_name is \"{}\"".format(
                selected_project)).one()
            project_schema = project["project_schema"]
            self.log.debug((
                "*** Using Project: name <{}>, code <{}>, schema <{}>").format(
                    project["full_name"], project["name"],
                    project_schema["name"]))

        # THIS IS THE MAGIC PART
        task_types = {}
        for _type in project_schema["_task_type_schema"]["types"]:
            if _type["name"] not in task_types:
                task_types[_type["name"]] = _type
        self.task_types = task_types

        asset_count = values.get("asset_count") or len(self.assets)
        seq_count = values.get("seq_count") or self.default_seq_count
        shots_count = values.get("shots_count") or self.default_shots_count

        self.create_assets(project, asset_count)
        self.create_shots(project, seq_count, shots_count)

        return True

    def create_assets(self, project, asset_count):
        self.log.debug("*** Creating assets:")

        try:
            asset_count = int(asset_count)
        except ValueError:
            asset_count = 0

        if asset_count <= 0:
            self.log.debug("No assets to create")
            return

        main_entity = self.session.create("Folder", {
            "name": "Assets",
            "parent": project
        })
        self.log.debug("- Assets")
        available_assets = len(self.assets)
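        # Ceiling division: how many numbered copies of each asset name are
        # needed to reach the requested asset count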
        repetitive_times = (int(asset_count / available_assets) +
                            (asset_count % available_assets > 0))

        index = 0
        created_entities = 0
        to_create_length = asset_count + (asset_count * len(self.asset_tasks))
        for _asset_name in self.assets:
            if created_entities >= to_create_length:
                break
            for asset_num in range(1, repetitive_times + 1):
                if created_entities >= asset_count:
                    break
                asset_name = "%s_%02d" % (_asset_name, asset_num)
                asset = self.session.create("AssetBuild", {
                    "name": asset_name,
                    "parent": main_entity
                })
                self.log.debug("- Assets/{}".format(asset_name))

                created_entities += 1
                index += 1
                if self.temp_commit(index, created_entities, to_create_length):
                    index = 0

                for task_name in self.asset_tasks:
                    self.session.create(
                        "Task", {
                            "name": task_name,
                            "parent": asset,
                            "type": self.task_types[task_name]
                        })
                    self.log.debug("- Assets/{}/{}".format(
                        asset_name, task_name))

                    created_entities += 1
                    index += 1
                    if self.temp_commit(index, created_entities,
                                        to_create_length):
                        index = 0

        self.log.debug("*** Commiting Assets")
        self.log.debug("Commiting entities. {}/{}".format(
            created_entities, to_create_length))
        self.session.commit()

    def create_shots(self, project, seq_count, shots_count):
        self.log.debug("*** Creating shots:")

        # Convert counts to integers
        try:
            seq_count = int(seq_count)
        except ValueError:
            seq_count = 0

        try:
            shots_count = int(shots_count)
        except ValueError:
            shots_count = 0

        # Check if both are higher than 0
        missing = []
        if seq_count <= 0:
            missing.append("sequences")

        if shots_count <= 0:
            missing.append("shots")

        if missing:
            self.log.debug("No {} to create".format(" and ".join(missing)))
            return

        # Create Folder "Shots"
        main_entity = self.session.create("Folder", {
            "name": "Shots",
            "parent": project
        })
        self.log.debug("- Shots")

        index = 0
        created_entities = 0
        to_create_length = (seq_count + (seq_count * shots_count) +
                            (seq_count * shots_count * len(self.shot_tasks)))
        for seq_num in range(1, seq_count + 1):
            seq_name = "sq%03d" % seq_num
            seq = self.session.create("Sequence", {
                "name": seq_name,
                "parent": main_entity
            })
            self.log.debug("- Shots/{}".format(seq_name))

            created_entities += 1
            index += 1
            if self.temp_commit(index, created_entities, to_create_length):
                index = 0

            for shot_num in range(1, shots_count + 1):
                shot_name = "%ssh%04d" % (seq_name, (shot_num * 10))
                shot = self.session.create("Shot", {
                    "name": shot_name,
                    "parent": seq
                })
                self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))

                created_entities += 1
                index += 1
                if self.temp_commit(index, created_entities, to_create_length):
                    index = 0

                for task_name in self.shot_tasks:
                    self.session.create(
                        "Task", {
                            "name": task_name,
                            "parent": shot,
                            "type": self.task_types[task_name]
                        })
                    self.log.debug("- Shots/{}/{}/{}".format(
                        seq_name, shot_name, task_name))

                    created_entities += 1
                    index += 1
                    if self.temp_commit(index, created_entities,
                                        to_create_length):
                        index = 0

        self.log.debug("*** Commiting Shots")
        self.log.debug("Commiting entities. {}/{}".format(
            created_entities, to_create_length))
        self.session.commit()

    def temp_commit(self, index, created_entities, to_create_length):
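        """Commit in batches once `index` reaches the configured maximum."""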
        if index < self.max_entities_created_at_one_commit:
            return False
        self.log.debug("Commiting {} entities. {}/{}".format(
            index, created_entities, to_create_length))
        self.session.commit()
        return True


class CustomAttributes(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'create.update.attributes'
    #: Action label.
    label = "Pype Admin"
    variant = '- Create/Update Avalon Attributes'
    #: Action description.
    description = 'Creates Avalon/Mongo ID for double check'
    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator']
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    required_keys = ['key', 'label', 'type']
    type_posibilities = [
        'text', 'boolean', 'date', 'enumerator', 'dynamic enumerator', 'number'
    ]

    def discover(self, session, entities, event):
        '''
        Validation
        - action is only for Administrators
        '''
        return True

    def launch(self, session, entities, event):
        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create(
            'Job', {
                'user': user,
                'status': 'running',
                'data': json.dumps(
                    {'description': 'Custom Attribute creation.'})
            })
        session.commit()
        try:
            self.prepare_global_data(session)
            self.avalon_mongo_id_attributes(session, event)
            self.custom_attributes_from_file(session, event)

            job['status'] = 'done'
            session.commit()

        except Exception as exc:
            session.rollback()
            job['status'] = 'failed'
            session.commit()
            self.log.error(
                'Creating custom attributes failed ({})'.format(exc),
                exc_info=True)

        return True

    def prepare_global_data(self, session):
        self.types_per_name = {
            attr_type["name"].lower(): attr_type
            for attr_type in session.query("CustomAttributeType").all()
        }

        self.security_roles = {
            role["name"].lower(): role
            for role in session.query("SecurityRole").all()
        }

        object_types = session.query("ObjectType").all()
        self.object_types_per_id = {
            object_type["id"]: object_type
            for object_type in object_types
        }
        self.object_types_per_name = {
            object_type["name"].lower(): object_type
            for object_type in object_types
        }

        self.groups = {}

    def avalon_mongo_id_attributes(self, session, event):
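        """Ensure the hierarchical Avalon/Mongo ID attribute exists.

        Creates it when missing and converts values from older per
        object-type attributes to the hierarchical one.
        """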
        hierarchical_attr, object_type_attrs = (
            self.mongo_id_custom_attributes(session))

        if hierarchical_attr is None:
            self.create_hierarchical_mongo_attr(session)
            hierarchical_attr, object_type_attrs = (
                self.mongo_id_custom_attributes(session))

        if hierarchical_attr is None:
            return

        if object_type_attrs:
            self.convert_mongo_id_to_hierarchical(hierarchical_attr,
                                                  object_type_attrs, session,
                                                  event)

    def mongo_id_custom_attributes(self, session):
        cust_attrs_query = (
            "select id, entity_type, object_type_id, is_hierarchical, default"
            " from CustomAttributeConfiguration"
            " where key = \"{}\"").format(CustAttrIdKey)

        mongo_id_avalon_attr = session.query(cust_attrs_query).all()
        hierarchical_attr = None
        object_type_attrs = []
        for cust_attr in mongo_id_avalon_attr:
            if cust_attr["is_hierarchical"]:
                hierarchical_attr = cust_attr

            else:
                object_type_attrs.append(cust_attr)

        return hierarchical_attr, object_type_attrs

    def create_hierarchical_mongo_attr(self, session):
        # Attribute Name and Label
        cust_attr_label = "Avalon/Mongo ID"

        # Set security roles for attribute
        role_list = ("API", "Administrator", "Pypeclub")
        roles = self.get_security_roles(role_list)
        # Set Text type of Attribute
        custom_attribute_type = self.types_per_name["text"]
        # Set group to 'avalon'
        group = self.get_group("avalon")

        data = {
            "key": CustAttrIdKey,
            "label": cust_attr_label,
            "type": custom_attribute_type,
            "default": "",
            "write_security_roles": roles,
            "read_security_roles": roles,
            "group": group,
            "is_hierarchical": True,
            "entity_type": "show",
            "config": json.dumps({"markdown": False})
        }

        self.process_attribute(data)

    def convert_mongo_id_to_hierarchical(self, hierarchical_attr,
                                         object_type_attrs, session, event):
        user_msg = "Converting old custom attributes. This may take some time."
        self.show_message(event, user_msg, True)
        self.log.info(user_msg)

        object_types_per_id = {
            object_type["id"]: object_type
            for object_type in session.query("ObjectType").all()
        }

        cust_attr_query = (
            "select value, entity_id from ContextCustomAttributeValue "
            "where configuration_id is {}")
        for attr_def in object_type_attrs:
            attr_ent_type = attr_def["entity_type"]
            if attr_ent_type == "show":
                entity_type_label = "Project"
            elif attr_ent_type == "task":
                entity_type_label = (
                    object_types_per_id[attr_def["object_type_id"]]["name"])
            else:
                self.log.warning(
                    "Unsupported entity type: \"{}\". Skipping.".format(
                        attr_ent_type))
                continue

            self.log.debug(
                ("Converting Avalon MongoID attr for Entity type \"{}\"."
                 ).format(entity_type_label))

            call_expr = [{
                "action": "query",
                "expression": cust_attr_query.format(attr_def["id"])
            }]
            if hasattr(session, "call"):
                [values] = session.call(call_expr)
            else:
                [values] = session._call(call_expr)

            for value in values["data"]:
                table_values = collections.OrderedDict({
                    "configuration_id": hierarchical_attr["id"],
                    "entity_id": value["entity_id"]
                })

                session.recorded_operations.push(
                    ftrack_api.operation.UpdateEntityOperation(
                        "ContextCustomAttributeValue", table_values, "value",
                        ftrack_api.symbol.NOT_SET, value["value"]))

            try:
                session.commit()

            except Exception:
                session.rollback()
                self.log.warning(("Couldn't transfer Avalon Mongo ID"
                                  " attribute for entity type \"{}\"."
                                  ).format(entity_type_label),
                                 exc_info=True)

            try:
                session.delete(attr_def)
                session.commit()

            except Exception:
                session.rollback()
                self.log.warning(("Couldn't delete Avalon Mongo ID"
                                  " attribute for entity type \"{}\"."
                                  ).format(entity_type_label),
                                 exc_info=True)

    def custom_attributes_from_file(self, session, event):
        presets = config.get_presets()['ftrack']['ftrack_custom_attributes']

        for cust_attr_data in presets:
            cust_attr_name = cust_attr_data.get('label',
                                                cust_attr_data.get('key'))
            try:
                data = {}
                # Get key, label, type
                data.update(self.get_required(cust_attr_data))
                # Get is_hierarchical / entity_type / object_type_id
                data.update(self.get_entity_type(cust_attr_data))
                # Get group, default, security roles
                data.update(self.get_optional(cust_attr_data))
                # Process data
                self.process_attribute(data)

            except CustAttrException as cae:
                if cust_attr_name:
                    msg = 'Custom attribute error "{}" - {}'.format(
                        cust_attr_name, str(cae))
                else:
                    msg = 'Custom attribute error - {}'.format(str(cae))
                self.log.warning(msg, exc_info=True)
                self.show_message(event, msg)

        return True

    def process_attribute(self, data):
        existing_attrs = self.session.query(
            'CustomAttributeConfiguration').all()
        matching = []
        for attr in existing_attrs:
            if (attr['key'] != data['key']
                    or attr['type']['name'] != data['type']['name']):
                continue

            if data.get('is_hierarchical', False) is True:
                if attr['is_hierarchical'] is True:
                    matching.append(attr)
            elif 'object_type_id' in data:
                if (attr['entity_type'] == data['entity_type']
                        and attr['object_type_id'] == data['object_type_id']):
                    matching.append(attr)
            else:
                if attr['entity_type'] == data['entity_type']:
                    matching.append(attr)

        if len(matching) == 0:
            self.session.create('CustomAttributeConfiguration', data)
            self.session.commit()
            self.log.debug('{}: "{}" created'.format(self.label,
                                                     data['label']))

        elif len(matching) == 1:
            attr_update = matching[0]
            for key in data:
                if (key not in [
                        'is_hierarchical', 'entity_type', 'object_type_id'
                ]):
                    attr_update[key] = data[key]

            self.log.debug('{}: "{}" updated'.format(self.label,
                                                     data['label']))
            self.session.commit()

        else:
            raise CustAttrException('Custom attribute is duplicated')

    def get_required(self, attr):
        output = {}
        for key in self.required_keys:
            if key not in attr:
                raise CustAttrException(
                    'Key {} is required - please set'.format(key))

        if attr['type'].lower() not in self.type_possibilities:
            raise CustAttrException('Type {} is not valid'.format(
                attr['type']))

        output['key'] = attr['key']
        output['label'] = attr['label']

        type_name = attr['type'].lower()
        output['type'] = self.types_per_name[type_name]

        config = None
        if type_name == 'number':
            config = self.get_number_config(attr)
        elif type_name == 'text':
            config = self.get_text_config(attr)
        elif type_name == 'enumerator':
            config = self.get_enumerator_config(attr)

        if config is not None:
            output['config'] = config

        return output

    def get_number_config(self, attr):
        if 'config' in attr and 'isdecimal' in attr['config']:
            isdecimal = attr['config']['isdecimal']
        else:
            isdecimal = False

        config = json.dumps({'isdecimal': isdecimal})

        return config

    def get_text_config(self, attr):
        if 'config' in attr and 'markdown' in attr['config']:
            markdown = attr['config']['markdown']
        else:
            markdown = False
        config = json.dumps({'markdown': markdown})

        return config

    def get_enumerator_config(self, attr):
        if 'config' not in attr:
            raise CustAttrException('Missing config with data')
        if 'data' not in attr['config']:
            raise CustAttrException('Missing data in config')

        data = []
        for item in attr['config']['data']:
            for key, value in item.items():
                # TODO key check by regex
                data.append({
                    'menu': value,
                    'value': key
                })

        multiSelect = False
        for k in attr['config']:
            if k.lower() == 'multiselect':
                if isinstance(attr['config'][k], bool):
                    multiSelect = attr['config'][k]
                else:
                    raise CustAttrException('Multiselect must be boolean')
                break

        config = json.dumps({
            'multiSelect': multiSelect,
            'data': json.dumps(data)
        })

        return config

    def get_group(self, attr):
        if isinstance(attr, dict):
            group_name = attr['group'].lower()
        else:
            group_name = attr
        if group_name in self.groups:
            return self.groups[group_name]

        query = 'CustomAttributeGroup where name is "{}"'.format(group_name)
        groups = self.session.query(query).all()

        if len(groups) == 1:
            group = groups[0]
            self.groups[group_name] = group

            return group

        elif len(groups) < 1:
            group = self.session.create('CustomAttributeGroup', {
                'name': group_name,
            })
            self.session.commit()

            return group

        else:
            raise CustAttrException(
                'Found more than one group "{}"'.format(group_name))

    def get_security_roles(self, security_roles):
        security_roles_lowered = tuple(name.lower() for name in security_roles)
        if (len(security_roles_lowered) == 0
                or "all" in security_roles_lowered):
            return list(self.security_roles.values())

        output = []
        if security_roles_lowered[0] == "except":
            excepts = security_roles_lowered[1:]
            for role_name, role in self.security_roles.items():
                if role_name not in excepts:
                    output.append(role)

        else:
            for role_name in security_roles_lowered:
                if role_name in self.security_roles:
                    output.append(self.security_roles[role_name])
                else:
                    raise CustAttrException(
                        ("Securit role \"{}\" was not found in Ftrack."
                         ).format(role_name))
        return output

    def get_default(self, attr):
        type = attr['type']
        default = attr['default']
        if default is None:
            return default
        err_msg = 'Default value is not'
        if type == 'number':
            if not isinstance(default, (float, int)):
                raise CustAttrException('{} integer'.format(err_msg))
        elif type == 'text':
            if not isinstance(default, str):
                raise CustAttrException('{} string'.format(err_msg))
        elif type == 'boolean':
            if not isinstance(default, bool):
                raise CustAttrException('{} boolean'.format(err_msg))
        elif type == 'enumerator':
            if not isinstance(default, list):
                raise CustAttrException(
                    '{} array with strings'.format(err_msg))
            # TODO check if multiSelect is available
            # and if default is one of data menu
            if not isinstance(default[0], str):
                raise CustAttrException('{} array of strings'.format(err_msg))
        elif type == 'date':
            date_items = default.split(' ')
            try:
                if len(date_items) == 1:
                    default = arrow.get(default, 'YY.M.D')
                elif len(date_items) == 2:
                    default = arrow.get(default, 'YY.M.D H:m:s')
                else:
                    raise Exception
            except Exception:
                raise CustAttrException('Date is not in proper format')
        elif type == 'dynamic enumerator':
            raise CustAttrException('Dynamic enumerator can\'t have default')

        return default

    def get_optional(self, attr):
        output = {}
        if 'group' in attr:
            output['group'] = self.get_group(attr)
        if 'default' in attr:
            output['default'] = self.get_default(attr)

        roles_read = []
        roles_write = []
        if 'read_security_roles' in attr:
            roles_read = attr['read_security_roles']
        if 'write_security_roles' in attr:
            roles_write = attr['write_security_roles']
        output['read_security_roles'] = self.get_security_roles(roles_read)
        output['write_security_roles'] = self.get_security_roles(roles_write)

        return output

    def get_entity_type(self, attr):
        if attr.get("is_hierarchical", False):
            return {
                "is_hierarchical": True,
                "entity_type": attr.get("entity_type") or "show"
            }

        if 'entity_type' not in attr:
            raise CustAttrException('Missing entity_type')

        if attr['entity_type'].lower() != 'task':
            return {'entity_type': attr['entity_type']}

        if 'object_type' not in attr:
            raise CustAttrException('Missing object_type')

        object_type_name = attr['object_type']
        object_type_name_low = object_type_name.lower()
        object_type = self.object_types_per_name.get(object_type_name_low)
        if not object_type:
            raise CustAttrException(('Object type with name "{}" doesn\'t'
                                     ' exist').format(object_type_name))

        return {
            'entity_type': attr['entity_type'],
            'object_type_id': object_type["id"]
        }
Exemple #15
0
class CreateFolders(BaseAction):
    identifier = "create.folders"
    label = "Create Folders"
    icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg")

    def discover(self, session, entities, event):
        if len(entities) != 1:
            return False

        not_allowed = ["assetversion", "project"]
        if entities[0].entity_type.lower() in not_allowed:
            return False

        return True

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return
        entity = entities[0]
        without_interface = True
        for child in entity["children"]:
            if child["object_type"]["name"].lower() != "task":
                without_interface = False
                break
        self.without_interface = without_interface
        if without_interface:
            return
        title = "Create folders"

        entity_name = entity["name"]
        msg = ("<h2>Do you want create folders also"
               " for all children of \"{}\"?</h2>")
        if entity.entity_type.lower() == "project":
            entity_name = entity["full_name"]
            msg = msg.replace(" also", "")
            msg += "<h3>(Project root won't be created if not checked)</h3>"
        items = []
        item_msg = {"type": "label", "value": msg.format(entity_name)}
        item_label = {"type": "label", "value": "With all chilren entities"}
        item = {"name": "children_included", "type": "boolean", "value": False}
        items.append(item_msg)
        items.append(item_label)
        items.append(item)

        return {"items": items, "title": title}

    def launch(self, session, entities, event):
        '''Callback method for custom action.'''
        with_childrens = True
        if self.without_interface is False:
            if "values" not in event["data"]:
                return
            with_childrens = event["data"]["values"]["children_included"]

        entity = entities[0]
        if entity.entity_type.lower() == "project":
            proj = entity
        else:
            proj = entity["project"]
        project_name = proj["full_name"]
        project_code = proj["name"]

        if entity.entity_type.lower() == 'project' and with_childrens is False:
            return {'success': True, 'message': 'Nothing was created'}

        all_entities = []
        all_entities.append(entity)
        if with_childrens:
            all_entities = self.get_notask_children(entity)

        anatomy = Anatomy(project_name)
        project_settings = get_project_settings(project_name)

        work_keys = ["work", "folder"]
        work_template = anatomy.templates
        for key in work_keys:
            work_template = work_template[key]
        work_has_apps = "{app" in work_template

        publish_keys = ["publish", "folder"]
        publish_template = anatomy.templates
        for key in publish_keys:
            publish_template = publish_template[key]
        publish_has_apps = "{app" in publish_template

        tools_settings = project_settings["global"]["tools"]
        app_presets = tools_settings["Workfiles"]["sw_folders"]
        app_manager_apps = None
        if app_presets and (work_has_apps or publish_has_apps):
            app_manager_apps = ApplicationManager().applications

        cached_apps = {}
        collected_paths = []
        for entity in all_entities:
            if entity.entity_type.lower() == "project":
                continue
            ent_data = {
                "project": {
                    "name": project_name,
                    "code": project_code
                }
            }

            ent_data["asset"] = entity["name"]

            parents = entity["link"][1:-1]
            hierarchy_names = [p["name"] for p in parents]
            hierarchy = ""
            if hierarchy_names:
                hierarchy = os.path.sep.join(hierarchy_names)
            ent_data["hierarchy"] = hierarchy

            tasks_created = False
            for child in entity["children"]:
                if child["object_type"]["name"].lower() != "task":
                    continue
                tasks_created = True
                task_type_name = child["type"]["name"].lower()
                task_data = ent_data.copy()
                task_data["task"] = child["name"]

                apps = []
                if app_manager_apps:
                    possible_apps = app_presets.get(task_type_name) or []
                    for app_name in possible_apps:

                        if app_name in cached_apps:
                            apps.append(cached_apps[app_name])
                            continue

                        app_def = app_manager_apps.get(app_name)
                        if app_def and app_def.is_host:
                            app_dir = app_def.host_name
                        else:
                            app_dir = app_name
                        cached_apps[app_name] = app_dir
                        apps.append(app_dir)

                # Template work
                if work_has_apps:
                    app_data = task_data.copy()
                    for app in apps:
                        app_data["app"] = app
                        collected_paths.append(
                            self.compute_template(anatomy, app_data,
                                                  work_keys))
                else:
                    collected_paths.append(
                        self.compute_template(anatomy, task_data, work_keys))

                # Template publish
                if publish_has_apps:
                    app_data = task_data.copy()
                    for app in apps:
                        app_data["app"] = app
                        collected_paths.append(
                            self.compute_template(anatomy, app_data,
                                                  publish_keys))
                else:
                    collected_paths.append(
                        self.compute_template(anatomy, task_data,
                                              publish_keys))

            if not tasks_created:
                # create path for entity
                collected_paths.append(
                    self.compute_template(anatomy, ent_data, work_keys))
                collected_paths.append(
                    self.compute_template(anatomy, ent_data, publish_keys))

        if len(collected_paths) == 0:
            return {
                "success": True,
                "message": "No project folders to create."
            }

        self.log.info("Creating folders:")

        for path in set(collected_paths):
            self.log.info(path)
            if not os.path.exists(path):
                os.makedirs(path)

        return {
            "success": True,
            "message": "Successfully created project folders."
        }

    def get_notask_children(self, entity):
        output = []
        if entity.entity_type.lower() == "task":
            return output

        output.append(entity)
        for child in entity["children"]:
            output.extend(self.get_notask_children(child))
        return output

    def compute_template(self, anatomy, data, anatomy_keys):
        filled_template = anatomy.format_all(data)
        for key in anatomy_keys:
            filled_template = filled_template[key]

        if filled_template.solved:
            return os.path.normpath(filled_template)

        self.log.warning("Template \"{}\" was not fully filled \"{}\"".format(
            filled_template.template, filled_template))
        return os.path.normpath(filled_template.split("{")[0])
Exemple #16
0
class JobKiller(BaseAction):
    '''Kill selected running jobs.'''

    #: Action identifier.
    identifier = 'job.killer'
    #: Action label.
    label = "Pype Admin"
    variant = '- Job Killer'
    #: Action description.
    description = 'Killing selected running jobs'
    #: Action icon.
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
    settings_key = "job_killer"

    def discover(self, session, entities, event):
        ''' Validation '''
        return self.valid_roles(session, entities, event)

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            title = 'Select jobs to kill'

            jobs = session.query(
                'select id, status from Job'
                ' where status in ("queued", "running")').all()

            items = []

            item_splitter = {'type': 'label', 'value': '---'}
            for job in jobs:
                try:
                    data = json.loads(job['data'])
                    description = data['description']
                except Exception:
                    description = '*No description*'
                user = job['user']['username']
                created = job['created_at'].strftime('%d.%m.%Y %H:%M:%S')
                label = '{} - {} - {}'.format(description, created, user)
                item_label = {'type': 'label', 'value': label}
                item = {'name': job['id'], 'type': 'boolean', 'value': False}
                if len(items) > 0:
                    items.append(item_splitter)
                items.append(item_label)
                items.append(item)

            if len(items) == 0:
                return {
                    'success': False,
                    'message': 'Didn\'t find any running jobs'
                }
            else:
                return {'items': items, 'title': title}

    def launch(self, session, entities, event):
        """ GET JOB """
        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return {'success': True, 'message': 'No jobs to kill!'}
        jobs = []
        job_ids = []

        for k, v in values.items():
            if v is True:
                job_ids.append(k)

        for id in job_ids:
            query = 'Job where id is "{}"'.format(id)
            jobs.append(session.query(query).one())
        # Update all the queried jobs, setting the status to failed.
        for job in jobs:
            try:
                origin_status = job["status"]
                job['status'] = 'failed'
                session.commit()
                self.log.debug(
                    ('Changing Job ({}) status: {} -> failed').format(
                        job['id'], origin_status))
            except Exception:
                session.rollback()
                self.log.warning(
                    ('Changing Job ({}) status failed').format(job['id']))

        self.log.info('Selected jobs were killed successfully!')
        return {
            'success': True,
            'message': 'Selected jobs were killed successfully!'
        }
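

# --- Illustrative sketch, not part of the original code above ---
# Shape of event["data"]["values"] that JobKiller.launch() expects after the
# interface form is submitted: each key is a Job id rendered as a checkbox,
# each value is its checked state. The ids below are invented placeholders.
EXAMPLE_JOB_KILLER_VALUES = {
    "0a1b2c3d-0000-0000-0000-000000000001": True,   # will be set to "failed"
    "0a1b2c3d-0000-0000-0000-000000000002": False   # left untouched
}

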
class DeleteOldVersions(BaseAction):

    identifier = "delete.old.versions"
    label = "Pype Admin"
    variant = "- Delete old versions"
    description = ("Delete files from older publishes so project can be"
                   " archived with only lates versions.")
    role_list = ["Pypeclub", "Project Manager", "Administrator"]
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    dbcon = DbConnector()

    inteface_title = "Choose your preferences"
    splitter_item = {"type": "label", "value": "---"}
    sequence_splitter = "__sequence_splitter__"

    def discover(self, session, entities, event):
        ''' Validation '''
        selection = event["data"].get("selection") or []
        for entity in selection:
            entity_type = (entity.get("entityType") or "").lower()
            if entity_type == "assetversion":
                return True
        return False

    def interface(self, session, entities, event):
        # TODO Add roots existence validation
        items = []
        values = event["data"].get("values")
        if values:
            versions_count = int(values["last_versions_count"])
            if versions_count >= 1:
                return
            items.append({
                "type": "label",
                "value": ("# You have to keep at least 1 version!")
            })

        items.append({
            "type":
            "label",
            "value":
            ("<i><b>WARNING:</b> This will remove published files of older"
             " versions from disk so we don't recommend use"
             " this action on \"live\" project.</i>")
        })

        items.append(self.splitter_item)

        # How many versions to keep
        items.append({
            "type": "label",
            "value": "## Choose how many versions you want to keep:"
        })
        items.append({
            "type":
            "label",
            "value":
            ("<i><b>NOTE:</b> We do recommend to keep 2 versions.</i>")
        })
        items.append({
            "type": "number",
            "name": "last_versions_count",
            "label": "Versions",
            "value": 2
        })

        items.append(self.splitter_item)

        items.append({
            "type":
            "label",
            "value": ("## Remove publish folder even if there"
                      " are other than published files:")
        })
        items.append({
            "type":
            "label",
            "value":
            ("<i><b>WARNING:</b> This may remove more than you want.</i>")
        })
        items.append({
            "type": "boolean",
            "name": "force_delete_publish_folder",
            "label": "Are You sure?",
            "value": False
        })

        return {"items": items, "title": self.inteface_title}

    def launch(self, session, entities, event):
        values = event["data"].get("values")
        if not values:
            return

        versions_count = int(values["last_versions_count"])
        force_to_remove = values["force_delete_publish_folder"]

        _val1 = "OFF"
        if force_to_remove:
            _val1 = "ON"

        _val3 = "s"
        if versions_count == 1:
            _val3 = ""

        self.log.debug(
            ("Process started. Force to delete publish folder is set to [{0}]"
             " and will keep {1} latest version{2}.").format(
                 _val1, versions_count, _val3))

        self.dbcon.install()

        project = None
        avalon_asset_names = []
        asset_versions_by_parent_id = collections.defaultdict(list)
        subset_names_by_asset_name = collections.defaultdict(list)

        ftrack_assets_by_name = {}
        for entity in entities:
            ftrack_asset = entity["asset"]

            parent_ent = ftrack_asset["parent"]
            parent_ftrack_id = parent_ent["id"]
            parent_name = parent_ent["name"]

            if parent_name not in avalon_asset_names:
                avalon_asset_names.append(parent_name)

            # Group asset versions by parent entity
            asset_versions_by_parent_id[parent_ftrack_id].append(entity)

            # Get project
            if project is None:
                project = parent_ent["project"]

            # Collect subset names per asset
            subset_name = ftrack_asset["name"]
            subset_names_by_asset_name[parent_name].append(subset_name)

            if subset_name not in ftrack_assets_by_name:
                ftrack_assets_by_name[subset_name] = ftrack_asset

        # Set Mongo collection
        project_name = project["full_name"]
        anatomy = Anatomy(project_name)
        self.dbcon.Session["AVALON_PROJECT"] = project_name
        self.log.debug("Project is set to {}".format(project_name))

        # Get Assets from avalon database
        assets = list(
            self.dbcon.find({
                "type": "asset",
                "name": {
                    "$in": avalon_asset_names
                }
            }))
        asset_id_to_name_map = {
            asset["_id"]: asset["name"]
            for asset in assets
        }
        asset_ids = list(asset_id_to_name_map.keys())

        self.log.debug("Collected assets ({})".format(len(asset_ids)))

        # Get Subsets
        subsets = list(
            self.dbcon.find({
                "type": "subset",
                "parent": {
                    "$in": asset_ids
                }
            }))
        subsets_by_id = {}
        subset_ids = []
        for subset in subsets:
            asset_id = subset["parent"]
            asset_name = asset_id_to_name_map[asset_id]
            available_subsets = subset_names_by_asset_name[asset_name]

            if subset["name"] not in available_subsets:
                continue

            subset_ids.append(subset["_id"])
            subsets_by_id[subset["_id"]] = subset

        self.log.debug("Collected subsets ({})".format(len(subset_ids)))

        # Get Versions
        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": subset_ids
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            subset = subsets_by_id[version["parent"]]
            asset_id = subset["parent"]
            asset_name = asset_id_to_name_map[asset_id]
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset_name, subset["name"], version["name"])
            self.log.warning(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.debug(msg)
            return {"success": True, "message": msg}

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(
                repre, anatomy)
            if file_path is None:
                self.log.warning(
                    ("Could not format path for representation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.warning(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        if force_to_remove:
            self.delete_whole_dir_paths(dir_paths.values())
        else:
            self.delete_only_repre_files(dir_paths, file_paths_by_dir)

        mongo_changes_bulk = []
        for version in versions:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        for subset_id, _versions in versions_by_parent.items():
            subset_name = None
            for subset in subsets:
                if subset["_id"] == subset_id:
                    subset_name = subset["name"]
                    break

            if subset_name is None:
                self.log.warning("Subset with ID `{}` was not found.".format(
                    str(subset_id)))
                continue

            ftrack_asset = ftrack_assets_by_name.get(subset_name)
            if not ftrack_asset:
                self.log.warning(("Could not find Ftrack asset with name `{}`"
                                  ).format(subset_name))
                continue

            version_numbers = [int(ver["name"]) for ver in _versions]
            for version in ftrack_asset["versions"]:
                if int(version["version"]) in version_numbers:
                    version["is_published"] = False

        try:
            session.commit()

        except Exception:
            msg = ("Could not set `is_published` attribute to `False`"
                   " for selected AssetVersions.")
            self.log.warning(msg, exc_info=True)

            return {"success": False, "message": msg}

        return True

    def delete_whole_dir_paths(self, dir_paths):
        for dir_path in dir_paths:
            # Delete all files and folders in dir path
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    os.remove(os.path.join(root, name))

                for name in dirs:
                    os.rmdir(os.path.join(root, name))

            # Delete even the folder and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(dir_path)

    def delete_only_repre_files(self, dir_paths, file_paths):
        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove file if `frame` key was not in context or
                # filled path is in remainders (single file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.warning(
                            "File was not found: {}".format(file_path))
                        continue
                    os.remove(file_path)
                    self.log.debug("Removed file: {}".format(file_path))
                    remainders.remove(file_path_base)
                    continue

                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):
                            os.remove(_file_path)
                    _seq_path = final_col.format("{head}{padding}{tail}")
                    self.log.debug("Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    os.remove(file_path)
                    self.log.debug("Removed file: {}".format(file_path))

                else:
                    self.log.warning(
                        "File was not found: {}".format(file_path))

        # Delete as much as possible parent folders
        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

    def path_from_representation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return (None, None)

        sequence_path = None
        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = avalon.pipeline.format_template_with_optional_keys(
                context, template)
            if "frame" in context:
                context["frame"] = self.sequence_splitter
                sequence_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        context, template))

        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(path), sequence_path)
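

# --- Illustrative sketch, not part of the original code above ---
# How the "__sequence_splitter__" marker defined on DeleteOldVersions is used:
# the "frame" key of a representation context is replaced by the marker, the
# resulting file name is split into head/tail and matched against clique
# collections assembled from the files on disk. The template below is invented
# for illustration.
_example_name = "render.{frame}.exr".format(frame="__sequence_splitter__")
_head, _tail = _example_name.split("__sequence_splitter__")
# _head == "render."  and  _tail == ".exr"

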
class DeleteAssetSubset(BaseAction):
    '''Delete asset/subset action.'''

    #: Action identifier.
    identifier = "delete.asset.subset"
    #: Action label.
    label = "Delete Asset/Subsets"
    #: Action description.
    description = "Removes from Avalon with all childs and asset from Ftrack"
    icon = statics_icon("ftrack", "action_icons", "DeleteAsset.svg")
    #: roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    #: Db connection
    dbcon = DbConnector()

    splitter = {"type": "label", "value": "---"}
    action_data_by_id = {}
    asset_prefix = "asset:"
    subset_prefix = "subset:"

    def discover(self, session, entities, event):
        """ Validation """
        task_ids = []
        for ent_info in event["data"]["selection"]:
            entType = ent_info.get("entityType", "")
            if entType == "task":
                task_ids.append(ent_info["entityId"])

        for entity in entities:
            ftrack_id = entity["id"]
            if ftrack_id not in task_ids:
                continue
            if entity.entity_type.lower() != "task":
                return True
        return False

    def _launch(self, event):
        try:
            entities = self._translate_event(event)
            if "values" not in event["data"]:
                self.dbcon.install()
                return self._interface(self.session, entities, event)

            confirmation = self.confirm_delete(entities, event)
            if confirmation:
                return confirmation

            self.dbcon.install()
            response = self.launch(self.session, entities, event)
        finally:
            self.dbcon.uninstall()

        return self._handle_result(response)

    def interface(self, session, entities, event):
        self.show_message(event, "Preparing data...", True)
        items = []
        title = "Choose items to delete"

        # Filter selection and get ftrack ids
        selection = event["data"].get("selection") or []
        ftrack_ids = []
        project_in_selection = False
        for entity in selection:
            entity_type = (entity.get("entityType") or "").lower()
            if entity_type != "task":
                if entity_type == "show":
                    project_in_selection = True
                continue

            ftrack_id = entity.get("entityId")
            if not ftrack_id:
                continue

            ftrack_ids.append(ftrack_id)

        if project_in_selection:
            msg = "It is not possible to use this action on project entity."
            self.show_message(event, msg, True)

        # Filter event even more (skip task entities)
        # - task entities are not relevant for avalon
        entity_mapping = {}
        for entity in entities:
            ftrack_id = entity["id"]
            if ftrack_id not in ftrack_ids:
                continue

            if entity.entity_type.lower() == "task":
                ftrack_ids.remove(ftrack_id)

            entity_mapping[ftrack_id] = entity

        if not ftrack_ids:
            # It is bug if this happens!
            return {
                "success": False,
                "message": "Invalid selection for this action (Bug)"
            }

        if entities[0].entity_type.lower() == "project":
            project = entities[0]
        else:
            project = entities[0]["project"]

        project_name = project["full_name"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        selected_av_entities = list(
            self.dbcon.find({
                "type": "asset",
                "data.ftrackId": {
                    "$in": ftrack_ids
                }
            }))
        found_without_ftrack_id = {}
        if len(selected_av_entities) != len(ftrack_ids):
            found_ftrack_ids = [
                ent["data"]["ftrackId"] for ent in selected_av_entities
            ]
            for ftrack_id, entity in entity_mapping.items():
                if ftrack_id in found_ftrack_ids:
                    continue

                av_ents_by_name = list(
                    self.dbcon.find({
                        "type": "asset",
                        "name": entity["name"]
                    }))
                if not av_ents_by_name:
                    continue

                ent_path_items = [ent["name"] for ent in entity["link"]]
                parents = ent_path_items[1:-1]
                # TODO we should say to user that
                # few of them are missing in avalon
                for av_ent in av_ents_by_name:
                    if av_ent["data"]["parents"] != parents:
                        continue

                    # TODO we should say to user that found entity
                    # with same name does not match same ftrack id?
                    if "ftrackId" not in av_ent["data"]:
                        selected_av_entities.append(av_ent)
                        found_without_ftrack_id[str(av_ent["_id"])] = ftrack_id
                        break

        if not selected_av_entities:
            return {
                "success": False,
                "message": "Didn't found entities in avalon"
            }

        # Remove cached action older than 2 minutes
        old_action_ids = []
        for id, data in self.action_data_by_id.items():
            created_at = data.get("created_at")
            if not created_at:
                old_action_ids.append(id)
                continue
            cur_time = datetime.now()
            existing_in_sec = (cur_time - created_at).total_seconds()
            if existing_in_sec > 60 * 2:
                old_action_ids.append(id)

        for id in old_action_ids:
            self.action_data_by_id.pop(id, None)

        # Store data for action id
        action_id = str(uuid.uuid1())
        self.action_data_by_id[action_id] = {
            "attempt": 1,
            "created_at": datetime.now(),
            "project_name": project_name,
            "subset_ids_by_name": {},
            "subset_ids_by_parent": {},
            "without_ftrack_id": found_without_ftrack_id
        }

        id_item = {"type": "hidden", "name": "action_id", "value": action_id}

        items.append(id_item)
        asset_ids = [ent["_id"] for ent in selected_av_entities]
        subsets_for_selection = self.dbcon.find({
            "type": "subset",
            "parent": {
                "$in": asset_ids
            }
        })

        asset_ending = ""
        if len(selected_av_entities) > 1:
            asset_ending = "s"

        asset_title = {
            "type": "label",
            "value": "# Delete asset{}:".format(asset_ending)
        }
        asset_note = {
            "type":
            "label",
            "value": ("<p><i>NOTE: Action will delete checked entities"
                      " in Ftrack and Avalon with all children entities and"
                      " published content.</i></p>")
        }

        items.append(asset_title)
        items.append(asset_note)

        asset_items = collections.defaultdict(list)
        for asset in selected_av_entities:
            ent_path_items = [project_name]
            ent_path_items.extend(asset.get("data", {}).get("parents") or [])
            ent_path_to_parent = "/".join(ent_path_items) + "/"
            asset_items[ent_path_to_parent].append(asset)

        for asset_parent_path, assets in sorted(asset_items.items()):
            items.append({
                "type": "label",
                "value": "## <b>- {}</b>".format(asset_parent_path)
            })
            for asset in assets:
                items.append({
                    "label":
                    asset["name"],
                    "name":
                    "{}{}".format(self.asset_prefix, str(asset["_id"])),
                    "type":
                    'boolean',
                    "value":
                    False
                })

        subset_ids_by_name = collections.defaultdict(list)
        subset_ids_by_parent = collections.defaultdict(list)
        for subset in subsets_for_selection:
            subset_id = subset["_id"]
            name = subset["name"]
            parent_id = subset["parent"]
            subset_ids_by_name[name].append(subset_id)
            subset_ids_by_parent[parent_id].append(subset_id)

        if not subset_ids_by_name:
            return {"items": items, "title": title}

        subset_ending = ""
        if len(subset_ids_by_name.keys()) > 1:
            subset_ending = "s"

        subset_title = {
            "type": "label",
            "value": "# Subset{} to delete:".format(subset_ending)
        }
        subset_note = {
            "type":
            "label",
            "value": ("<p><i>WARNING: Subset{} will be removed"
                      " for all <b>selected</b> entities.</i></p>"
                      ).format(subset_ending)
        }

        items.append(self.splitter)
        items.append(subset_title)
        items.append(subset_note)

        for name in subset_ids_by_name:
            items.append({
                "label": "<b>{}</b>".format(name),
                "name": "{}{}".format(self.subset_prefix, name),
                "type": "boolean",
                "value": False
            })

        self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
            subset_ids_by_parent)
        self.action_data_by_id[action_id]["subset_ids_by_name"] = (
            subset_ids_by_name)

        return {"items": items, "title": title}

    def confirm_delete(self, entities, event):
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        # Process Delete confirmation
        delete_key = values.get("delete_key")
        if delete_key:
            delete_key = delete_key.lower().strip()
            # Go to launch part if user entered `delete`
            if delete_key == "delete":
                return
            # Skip whole process if user didn't enter any text
            elif delete_key == "":
                self.action_data_by_id.pop(action_id, None)
                return {
                    "success": True,
                    "message": "Deleting cancelled (delete entry was empty)"
                }
            # Get data to show again
            to_delete = spec_data["to_delete"]

        else:
            to_delete = collections.defaultdict(list)
            for key, value in values.items():
                if not value:
                    continue
                if key.startswith(self.asset_prefix):
                    _key = key.replace(self.asset_prefix, "")
                    to_delete["assets"].append(_key)

                elif key.startswith(self.subset_prefix):
                    _key = key.replace(self.subset_prefix, "")
                    to_delete["subsets"].append(_key)

            self.action_data_by_id[action_id]["to_delete"] = to_delete

        asset_to_delete = len(to_delete.get("assets") or []) > 0
        subset_to_delete = len(to_delete.get("subsets") or []) > 0

        if not asset_to_delete and not subset_to_delete:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": True,
                "message": "Nothing was selected to delete"
            }

        attempt = spec_data["attempt"]
        if attempt > 3:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": False,
                "message": "You didn't enter \"DELETE\" properly 3 times!"
            }

        self.action_data_by_id[action_id]["attempt"] += 1

        title = "Confirmation of deleting"

        if asset_to_delete:
            asset_len = len(to_delete["assets"])
            asset_ending = ""
            if asset_len > 1:
                asset_ending = "s"
            title += " {} Asset{}".format(asset_len, asset_ending)
            if subset_to_delete:
                title += " and"

        if subset_to_delete:
            sub_len = len(to_delete["subsets"])
            type_ending = ""
            sub_ending = ""
            if sub_len == 1:
                subset_ids_by_name = spec_data["subset_ids_by_name"]
                if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
                    sub_ending = "s"

            elif sub_len > 1:
                type_ending = "s"
                sub_ending = "s"

            title += " {} type{} of subset{}".format(sub_len, type_ending,
                                                     sub_ending)

        items = []

        id_item = {"type": "hidden", "name": "action_id", "value": action_id}
        delete_label = {
            'type': 'label',
            'value': '# Please enter "DELETE" to confirm #'
        }
        delete_item = {
            "name": "delete_key",
            "type": "text",
            "value": "",
            "empty_text": "Type Delete here..."
        }

        items.append(id_item)
        items.append(delete_label)
        items.append(delete_item)

        return {"items": items, "title": title}

    def launch(self, session, entities, event):
        self.show_message(event, "Processing...", True)
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        report_messages = collections.defaultdict(list)

        project_name = spec_data["project_name"]
        to_delete = spec_data["to_delete"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        assets_to_delete = to_delete.get("assets") or []
        subsets_to_delete = to_delete.get("subsets") or []

        # Convert asset ids to ObjectId obj
        assets_to_delete = [ObjectId(id) for id in assets_to_delete if id]

        subset_ids_by_parent = spec_data["subset_ids_by_parent"]
        subset_ids_by_name = spec_data["subset_ids_by_name"]

        subset_ids_to_archive = []
        asset_ids_to_archive = []
        ftrack_ids_to_delete = []
        if len(assets_to_delete) > 0:
            map_av_ftrack_id = spec_data["without_ftrack_id"]
            # Prepare data when deleting whole avalon asset
            avalon_assets = self.dbcon.find({"type": "asset"})
            avalon_assets_by_parent = collections.defaultdict(list)
            for asset in avalon_assets:
                asset_id = asset["_id"]
                parent_id = asset["data"]["visualParent"]
                avalon_assets_by_parent[parent_id].append(asset)
                if asset_id in assets_to_delete:
                    ftrack_id = map_av_ftrack_id.get(str(asset_id))
                    if not ftrack_id:
                        ftrack_id = asset["data"].get("ftrackId")

                    if not ftrack_id:
                        continue
                    ftrack_ids_to_delete.append(ftrack_id)

            children_queue = Queue()
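            # Breadth-first walk over the asset hierarchy so descendants of
            # every selected asset, and their subsets, are archived as well.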
            for mongo_id in assets_to_delete:
                children_queue.put(mongo_id)

            while not children_queue.empty():
                mongo_id = children_queue.get()
                if mongo_id in asset_ids_to_archive:
                    continue

                asset_ids_to_archive.append(mongo_id)
                for subset_id in subset_ids_by_parent.get(mongo_id, []):
                    if subset_id not in subset_ids_to_archive:
                        subset_ids_to_archive.append(subset_id)

                children = avalon_assets_by_parent.get(mongo_id)
                if not children:
                    continue

                for child in children:
                    child_id = child["_id"]
                    if child_id not in asset_ids_to_archive:
                        children_queue.put(child_id)

        # Prepare names of assets in ftrack and ids of subsets in mongo
        asset_names_to_delete = []
        if len(subsets_to_delete) > 0:
            for name in subsets_to_delete:
                asset_names_to_delete.append(name)
                for subset_id in subset_ids_by_name[name]:
                    if subset_id in subset_ids_to_archive:
                        continue
                    subset_ids_to_archive.append(subset_id)

        # Get ftrack ids of entities where only the asset will be deleted
        not_deleted_entities_id = []
        ftrack_id_name_map = {}
        if asset_names_to_delete:
            for entity in entities:
                ftrack_id = entity["id"]
                ftrack_id_name_map[ftrack_id] = entity["name"]
                if ftrack_id not in ftrack_ids_to_delete:
                    not_deleted_entities_id.append(ftrack_id)

        mongo_proc_txt = "MongoProcessing: "
        ftrack_proc_txt = "Ftrack processing: "
        if asset_ids_to_archive:
            self.log.debug("{}Archiving assets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in asset_ids_to_archive])))
            self.dbcon.update_many(
                {"_id": {"$in": asset_ids_to_archive}, "type": "asset"},
                {"$set": {"type": "archived_asset"}}
            )

        if subset_ids_to_archive:
            self.log.debug("{}Archiving subsets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in subset_ids_to_archive])))
            self.dbcon.update_many(
                {"_id": {"$in": subset_ids_to_archive}, "type": "subset"},
                {"$set": {"type": "archived_subset"}}
            )

        if ftrack_ids_to_delete:
            self.log.debug("{}Deleting Ftrack Entities <{}>".format(
                ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)))

            ftrack_ents_to_delete = self._filter_entities_to_delete(
                ftrack_ids_to_delete, session
            )
            for entity in ftrack_ents_to_delete:
                session.delete(entity)
                try:
                    session.commit()
                except Exception:
                    ent_path = "/".join(
                        [ent["name"] for ent in entity["link"]])
                    msg = "Failed to delete entity"
                    report_messages[msg].append(ent_path)
                    session.rollback()
                    self.log.warning("{} <{}>".format(msg, ent_path),
                                     exc_info=True)

        if not_deleted_entities_id:
            joined_not_deleted = ", ".join([
                "\"{}\"".format(ftrack_id)
                for ftrack_id in not_deleted_entities_id
            ])
            joined_asset_names = ", ".join(
                ["\"{}\"".format(name) for name in asset_names_to_delete])
            # Find assets of selected entities with names of checked subsets
            assets = session.query(
                ("select id from Asset where"
                 " context_id in ({}) and name in ({})").format(
                     joined_not_deleted, joined_asset_names)).all()

            self.log.debug("{}Deleting Ftrack Assets <{}>".format(
                ftrack_proc_txt, ", ".join([asset["id"] for asset in assets])))
            for asset in assets:
                session.delete(asset)
                try:
                    session.commit()
                except Exception:
                    session.rollback()
                    msg = "Failed to delete asset"
                    report_messages[msg].append(asset["id"])
                    self.log.warning("Asset: {} <{}>".format(
                        asset["name"], asset["id"]),
                                     exc_info=True)

        return self.report_handle(report_messages, project_name, event)

    def _filter_entities_to_delete(self, ftrack_ids_to_delete, session):
        """Filter children entities to avoid CircularDependencyError."""
        joined_ids_to_delete = ", ".join(
            ["\"{}\"".format(id) for id in ftrack_ids_to_delete])
        to_delete_entities = session.query(
            "select id, link from TypedContext where id in ({})".format(
                joined_ids_to_delete)).all()
        filtered = to_delete_entities[:]
        while True:
            changed = False
            _filtered = filtered[:]
            for entity in filtered:
                entity_id = entity["id"]

                for _entity in tuple(_filtered):
                    if entity_id == _entity["id"]:
                        continue

                    for _link in _entity["link"]:
                        if entity_id == _link["id"] and _entity in _filtered:
                            _filtered.remove(_entity)
                            changed = True
                            break

            filtered = _filtered

            if not changed:
                break

        return filtered

    def report_handle(self, report_messages, project_name, event):
        if not report_messages:
            return {"success": True, "message": "Deletion was successful!"}

        title = "Delete report ({}):".format(project_name)
        items = []
        items.append({
            "type": "label",
            "value": "# Deleting was not completely successful"
        })
        items.append({
            "type": "label",
            "value": "<p><i>Check logs for more information</i></p>"
        })
        for msg, _items in report_messages.items():
            if not _items or not msg:
                continue

            items.append({"type": "label", "value": "# {}".format(msg)})

            if isinstance(_items, str):
                _items = [_items]
            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(_items))
            })
            items.append(self.splitter)

        self.show_interface(items, title, event)

        return {
            "success": False,
            "message": "Deleting finished. Read report messages."
        }
Exemple #19
class SyncClocifyLocal(BaseAction):
    '''Synchronise project names and task types.'''

    #: Action identifier.
    identifier = 'clockify.sync.local'
    #: Action label.
    label = 'Sync To Clockify (local)'
    #: Action description.
    description = 'Synchronise data to Clockify workspace'
    #: roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator", "project Manager"]
    #: icon
    icon = statics_icon("app_icons", "clockify-white.png")

    def __init__(self, *args, **kwargs):
        super(SyncClocifyLocal, self).__init__(*args, **kwargs)
        #: ClockifyAPI instance.
        self.clockapi = ClockifyAPI()

    def discover(self, session, entities, event):
        if (len(entities) == 1
                and entities[0].entity_type.lower() == "project"):
            return True
        return False

    def launch(self, session, entities, event):
        self.clockapi.set_api()
        if self.clockapi.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if self.clockapi.validate_workspace_perm() is False:
            return {
                "success": False,
                "message": "Missing permissions for this action!"
            }

        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create(
            'Job', {
                'user': user,
                'status': 'running',
                'data': json.dumps({'description': 'Sync Ftrack to Clockify'})
            })
        session.commit()

        project_entity = entities[0]
        if project_entity.entity_type.lower() != "project":
            project_entity = self.get_project_from_entity(project_entity)

        project_name = project_entity["full_name"]
        self.log.info(
            "Synchronization of project \"{}\" to Clockify begins.".format(
                project_name))
        task_types = (
            project_entity["project_schema"]["_task_type_schema"]["types"])
        task_type_names = [task_type["name"] for task_type in task_types]
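        # The Clockify workspace mirrors the ftrack project: the project is
        # created if missing and each task type becomes a Clockify tag.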
        try:
            clockify_projects = self.clockapi.get_projects()
            if project_name not in clockify_projects:
                response = self.clockapi.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(
                            project_name, response))
                    return {
                        "success": False,
                        "message": (
                            "Can't create Clockify project \"{}\"."
                            " Unexpected error."
                        ).format(project_name)
                    }

            clockify_workspace_tags = self.clockapi.get_tags()
            for task_type_name in task_type_names:
                if task_type_name in clockify_workspace_tags:
                    self.log.debug(
                        "Task \"{}\" already exists".format(task_type_name))
                    continue

                response = self.clockapi.add_tag(task_type_name)
                if "id" not in response:
                    self.log.warning(
                        "Task \"{}\" can't be created. Response: {}".format(
                            task_type_name, response))

            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Synchronization to Clockify failed.", exc_info=True
            )

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()

        return True
Exemple #20
class CleanHierarchicalAttrsAction(BaseAction):
    identifier = "clean.hierarchical.attr"
    label = "Pype Admin"
    variant = "- Clean hierarchical custom attributes"
    description = "Unset empty hierarchical attribute values."
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    all_project_entities_query = (
        "select id, name, parent_id, link"
        " from TypedContext where project_id is \"{}\"")
    cust_attr_query = (
        "select value, entity_id from CustomAttributeValue "
        "where entity_id in ({}) and configuration_id is \"{}\"")

    def discover(self, session, entities, event):
        """Show only on project entity."""
        if len(entities) == 1 and entities[0].entity_type.lower() == "project":
            return True
        return False

    def launch(self, session, entities, event):
        project = entities[0]

        user_message = "This may take some time"
        self.show_message(event, user_message, result=True)
        self.log.debug("Preparing entities for cleanup.")

        all_entities = session.query(
            self.all_project_entities_query.format(project["id"])).all()

        all_entities_ids = [
            "\"{}\"".format(entity["id"]) for entity in all_entities
            if entity.entity_type.lower() != "task"
        ]
        self.log.debug("Collected {} entities to process.".format(
            len(all_entities_ids)))
        entity_ids_joined = ", ".join(all_entities_ids)

        attrs, hier_attrs = get_avalon_attr(session)

        for attr in hier_attrs:
            configuration_key = attr["key"]
            self.log.debug(
                "Looking for cleanup of custom attribute \"{}\"".format(
                    configuration_key))
            configuration_id = attr["id"]
            call_expr = [{
                "action": "query",
                "expression": self.cust_attr_query.format(
                    entity_ids_joined, configuration_id
                )
            }]

            [values] = self.session.call(call_expr)

            data = {}
            for item in values["data"]:
                value = item["value"]
                if value is None:
                    data[item["entity_id"]] = value

            if not data:
                self.log.debug(
                    "Nothing to clean for \"{}\".".format(configuration_key))
                continue

            self.log.debug("Cleaning up {} values for \"{}\".".format(
                len(data), configuration_key))
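            # Deleting the CustomAttributeValue row is what unsets the value
            # for an entity; the operations are batched and committed at once.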
            for entity_id, value in data.items():
                entity_key = collections.OrderedDict({
                    "configuration_id": configuration_id,
                    "entity_id": entity_id
                })
                session.recorded_operations.push(
                    ftrack_api.operation.DeleteEntityOperation(
                        "CustomAttributeValue", entity_key))
            session.commit()

        return True
Exemple #21
class DJVViewAction(BaseAction):
    """Launch DJVView action."""
    identifier = "djvview-launch-action"
    label = "DJV View"
    description = "DJV View Launcher"
    icon = statics_icon("app_icons", "djvView.png")

    type = 'Application'

    allowed_types = [
        "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
        "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
        "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
        "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img"
    ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.djv_path = self.find_djv_path()

    def preregister(self):
        if self.djv_path is None:
            return ('DJV View is not installed'
                    ' or paths in presets are not set correctly')
        return True

    def discover(self, session, entities, event):
        """Return available actions based on *event*. """
        selection = event["data"].get("selection", [])
        if len(selection) != 1:
            return False

        entityType = selection[0].get("entityType", None)
        if entityType in ["assetversion", "task"]:
            return True
        return False

    def find_djv_path(self):
        for path in (os.environ.get("DJV_PATH") or "").split(os.pathsep):
            if os.path.exists(path):
                return path

    def interface(self, session, entities, event):
        if event['data'].get('values', {}):
            return

        entity = entities[0]
        versions = []

        entity_type = entity.entity_type.lower()
        if entity_type == "assetversion":
            if (entity['components'][0]['file_type'][1:]
                    in self.allowed_types):
                versions.append(entity)
        else:
            master_entity = entity
            if entity_type == "task":
                master_entity = entity['parent']

            for asset in master_entity['assets']:
                for version in asset['versions']:
                    # Get only AssetVersion of selected task
                    if (entity_type == "task"
                            and version['task']['id'] != entity['id']):
                        continue
                    # Get only components with allowed type
                    filetype = version['components'][0]['file_type']
                    if filetype[1:] in self.allowed_types:
                        versions.append(version)

        if len(versions) < 1:
            return {
                'success': False,
                'message': 'There are no Asset Versions to open.'
            }

        items = []
        base_label = "v{0} - {1} - {2}"
        default_component = None
        last_available = None
        select_value = None
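        # "select_value" is only filled when a component name matches
        # "default_component"; otherwise the enumerator falls back to
        # "last_available", the last component with an accessible path.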
        for version in versions:
            for component in version['components']:
                label = base_label.format(
                    str(version['version']).zfill(3),
                    version['asset']['type']['name'], component['name'])

                try:
                    location = component['component_locations'][0]['location']
                    file_path = location.get_filesystem_path(component)
                except Exception:
                    file_path = component['component_locations'][0][
                        'resource_identifier']

                if os.path.isdir(os.path.dirname(file_path)):
                    last_available = file_path
                    if component['name'] == default_component:
                        select_value = file_path
                    items.append({'label': label, 'value': file_path})

        if len(items) == 0:
            return {
                'success': False,
                'message': 'There are no Asset Versions with accessible paths.'
            }

        item = {
            'label': 'Items to view',
            'type': 'enumerator',
            'name': 'path',
            'data': sorted(items, key=itemgetter('label'), reverse=True)
        }
        if select_value is not None:
            item['value'] = select_value
        else:
            item['value'] = last_available

        return {'items': [item]}

    def launch(self, session, entities, event):
        """Callback method for DJVView action."""

        # Launching application
        if "values" not in event["data"]:
            return
        filepath = event['data']['values']['path']

        cmd = [
            # DJV path
            os.path.normpath(self.djv_path),
            # Path to the component to open
            os.path.normpath(filepath)
        ]

        try:
            # Run DJV with these commands
            subprocess.Popen(cmd)
        except FileNotFoundError:
            return {
                'success': False,
                'message': 'File "{}" was not found.'.format(
                    os.path.basename(filepath)
                )
            }

        return True


class MultipleNotes(BaseAction):
    '''Add the same note to multiple AssetVersions.'''

    #: Action identifier.
    identifier = 'multiple.notes'
    #: Action label.
    label = 'Multiple Notes'
    #: Action description.
    description = 'Add same note to multiple Asset Versions'
    icon = statics_icon("ftrack", "action_icons", "MultipleNotes.svg")

    def discover(self, session, entities, event):
        ''' Validation '''
        valid = True
        for entity in entities:
            if entity.entity_type.lower() != 'assetversion':
                valid = False
                break
        return valid

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            note_label = {
                'type': 'label',
                'value': '# Enter note: #'
            }

            note_value = {
                'name': 'note',
                'type': 'textarea'
            }

            category_label = {
                'type': 'label',
                'value': '## Category: ##'
            }

            category_data = []
            category_data.append({
                'label': '- None -',
                'value': 'none'
            })
            all_categories = session.query('NoteCategory').all()
            for cat in all_categories:
                category_data.append({
                    'label': cat['name'],
                    'value': cat['id']
                })
            category_value = {
                'type': 'enumerator',
                'name': 'category',
                'data': category_data,
                'value': 'none'
            }

            splitter = {
                'type': 'label',
                'value': '{}'.format(200*"-")
            }

            items = []
            items.append(note_label)
            items.append(note_value)
            items.append(splitter)
            items.append(category_label)
            items.append(category_value)
            return items

    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0 or 'note' not in values:
            return False
        # Get Note text
        note_value = values['note']
        if note_value.lower().strip() == '':
            return False
        # Get User
        user = session.query(
            'User where username is "{}"'.format(session.api_user)
        ).one()
        # Base note data
        note_data = {
            'content': note_value,
            'author': user
        }
        # Get category
        category_value = values['category']
        if category_value != 'none':
            category = session.query(
                'NoteCategory where id is "{}"'.format(category_value)
            ).one()
            note_data['category'] = category
        # Create notes for entities
        for entity in entities:
            new_note = session.create('Note', note_data)
            entity['notes'].append(new_note)
            session.commit()
        return True


class PrepareProject(BaseAction):
    '''Set basic attributes on the project.'''

    #: Action identifier.
    identifier = 'prepare.project'
    #: Action label.
    label = 'Prepare Project'
    #: Action description.
    description = 'Set basic attributes on the project'
    #: Action icon.
    icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg")

    settings_key = "prepare_project"

    # Key to store info about triggering create folder structure
    create_project_structure_key = "create_folder_structure"
    item_splitter = {'type': 'label', 'value': '---'}

    def discover(self, session, entities, event):
        ''' Validation '''
        if (
            len(entities) != 1
            or entities[0].entity_type.lower() != "project"
        ):
            return False

        return self.valid_roles(session, entities, event)

    def interface(self, session, entities, event):
        if event['data'].get('values', {}):
            return

        # Inform user that this may take a while
        self.show_message(event, "Preparing data... Please wait", True)
        self.log.debug("Preparing data which will be shown")

        self.log.debug("Loading custom attributes")

        project_name = entities[0]["full_name"]

        project_defaults = (
            config.get_presets(project_name)
            .get("ftrack", {})
            .get("project_defaults", {})
        )

        anatomy = Anatomy(project_name)
        if not anatomy.roots:
            return {
                "success": False,
                "message": (
                    "Have issues with loading Roots for project \"{}\"."
                ).format(anatomy.project_name)
            }

        root_items = self.prepare_root_items(anatomy)

        ca_items, multiselect_enumerators = (
            self.prepare_custom_attribute_items(project_defaults)
        )

        self.log.debug("Heavy items are ready. Preparing last items group.")

        title = "Prepare Project"
        items = []

        # Add root items
        items.extend(root_items)
        items.append(self.item_splitter)

        # Ask whether to trigger the Create Folder Structure action
        items.append({
            "type": "label",
            "value": "<h3>Want to create basic Folder Structure?</h3>"
        })
        items.append({
            "name": self.create_project_structure_key,
            "type": "boolean",
            "value": False,
            "label": "Check if Yes"
        })

        items.append(self.item_splitter)
        items.append({
            "type": "label",
            "value": "<h3>Set basic Attributes:</h3>"
        })

        items.extend(ca_items)

        # This item will be last (before enumerators)
        # - sets value of auto synchronization
        auto_sync_name = "avalon_auto_sync"
        auto_sync_item = {
            "name": auto_sync_name,
            "type": "boolean",
            "value": project_defaults.get(auto_sync_name, False),
            "label": "AutoSync to Avalon"
        }
        # Add autosync attribute
        items.append(auto_sync_item)

        # Add enumerator items at the end
        for item in multiselect_enumerators:
            items.append(item)

        return {
            "items": items,
            "title": title
        }

    def prepare_root_items(self, anatomy):
        root_items = []
        self.log.debug("Root items preparation begins.")

        root_names = anatomy.root_names()
        roots = anatomy.roots

        root_items.append({
            "type": "label",
            "value": "<h3>Check your Project root settings</h3>"
        })
        root_items.append({
            "type": "label",
            "value": (
                "<p><i>NOTE: Roots are <b>crutial</b> for path filling"
                " (and creating folder structure).</i></p>"
            )
        })
        root_items.append({
            "type": "label",
            "value": (
                "<p><i>WARNING: Do not change roots on running project,"
                " that <b>will cause workflow issues</b>.</i></p>"
            )
        })

        default_roots = anatomy.roots
        while isinstance(default_roots, dict):
            key = tuple(default_roots.keys())[0]
            default_roots = default_roots[key]

        empty_text = "Enter root path here..."

        # root_names is None when anatomy templates contain plain "{root}"
        all_platforms = ["windows", "linux", "darwin"]
        if root_names is None:
            root_items.append(self.item_splitter)
            # Add one root path input per platform
            for platform in all_platforms:
                value = default_roots.raw_data.get(platform) or ""
                root_items.append({
                    "label": platform,
                    "name": "__root__{}".format(platform),
                    "type": "text",
                    "value": value,
                    "empty_text": empty_text
                })
            return root_items

        root_name_data = {}
        missing_roots = []
        for root_name in root_names:
            root_name_data[root_name] = {}
            if not isinstance(roots, dict):
                missing_roots.append(root_name)
                continue

            root_item = roots.get(root_name)
            if not root_item:
                missing_roots.append(root_name)
                continue

            for platform in all_platforms:
                root_name_data[root_name][platform] = (
                    root_item.raw_data.get(platform) or ""
                )

        if missing_roots:
            default_values = {}
            for platform in all_platforms:
                default_values[platform] = (
                    default_roots.raw_data.get(platform) or ""
                )

            for root_name in missing_roots:
                root_name_data[root_name] = default_values

        root_names = list(root_name_data.keys())
        root_items.append({
            "type": "hidden",
            "name": "__rootnames__",
            "value": json.dumps(root_names)
        })
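        # The hidden JSON list of root names lets launch() pair the
        # "__root__<name><platform>" text fields back to their roots.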

        for root_name, values in root_name_data.items():
            root_items.append(self.item_splitter)
            root_items.append({
                "type": "label",
                "value": "Root: \"{}\"".format(root_name)
            })
            for platform, value in values.items():
                root_items.append({
                    "label": platform,
                    "name": "__root__{}{}".format(root_name, platform),
                    "type": "text",
                    "value": value,
                    "empty_text": empty_text
                })

        self.log.debug("Root items preparation ended.")
        return root_items

    def _attributes_to_set(self, project_defaults):
        attributes_to_set = {}

        cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)

        for attr in hier_cust_attrs:
            key = attr["key"]
            if key.startswith("avalon_"):
                continue
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
                "default": project_defaults.get(key)
            }

        for attr in cust_attrs:
            if attr["entity_type"].lower() != "show":
                continue
            key = attr["key"]
            if key.startswith("avalon_"):
                continue
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
                "default": project_defaults.get(key)
            }

        # Sort by label
        attributes_to_set = dict(sorted(
            attributes_to_set.items(),
            key=lambda x: x[1]["label"]
        ))
        return attributes_to_set

    def prepare_custom_attribute_items(self, project_defaults):
        items = []
        multiselect_enumerators = []
        attributes_to_set = self._attributes_to_set(project_defaults)

        self.log.debug("Preparing interface for keys: \"{}\"".format(
            str([key for key in attributes_to_set])
        ))

        for key, in_data in attributes_to_set.items():
            attr = in_data["object"]

            # initial item definition
            item = {
                "name": key,
                "label": in_data["label"]
            }

            # cust attr type - may have different visualization
            type_name = attr["type"]["name"].lower()
            easy_types = ["text", "boolean", "date", "number"]

            easy_type = False
            if type_name in easy_types:
                easy_type = True

            elif type_name == "enumerator":

                attr_config = json.loads(attr["config"])
                attr_config_data = json.loads(attr_config["data"])

                if attr_config["multiSelect"] is True:
                    multiselect_enumerators.append(self.item_splitter)
                    multiselect_enumerators.append({
                        "type": "label",
                        "value": in_data["label"]
                    })

                    default = in_data["default"]
                    names = []
                    for option in sorted(
                        attr_config_data, key=lambda x: x["menu"]
                    ):
                        name = option["value"]
                        new_name = "__{}__{}".format(key, name)
                        names.append(new_name)
                        item = {
                            "name": new_name,
                            "type": "boolean",
                            "label": "- {}".format(option["menu"])
                        }
                        if default:
                            if isinstance(default, (list, tuple)):
                                if name in default:
                                    item["value"] = True
                            else:
                                if name == default:
                                    item["value"] = True

                        multiselect_enumerators.append(item)

                    multiselect_enumerators.append({
                        "type": "hidden",
                        "name": "__hidden__{}".format(key),
                        "value": json.dumps(names)
                    })
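                    # Each option is rendered as its own boolean item; this
                    # hidden JSON list of the generated names lets launch()
                    # collect the checked options back under the original key.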
                else:
                    easy_type = True
                    item["data"] = attr_config_data

            else:
                self.log.warning((
                    "Custom attribute \"{}\" has type \"{}\","
                    " which this interface can't handle."
                ).format(key, type_name))
                items.append({
                    "type": "label",
                    "value": (
                        "!!! Can't handle Custom attribute type \"{}\""
                        " (key: \"{}\")"
                    ).format(type_name, key)
                })

            if easy_type:
                item["type"] = type_name

                # default value in interface
                default = in_data["default"]
                if default is not None:
                    item["value"] = default

                items.append(item)

        return items, multiselect_enumerators

    def launch(self, session, entities, event):
        if not event['data'].get('values', {}):
            return

        in_data = event['data']['values']
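        # Illustrative shape of the incoming values, using the key conventions
        # built in interface() (attribute names here are hypothetical):
        #   {
        #       "__rootnames__": '["work"]',
        #       "__root__workwindows": "P:/projects",
        #       "__hidden__apps": '["__apps__maya"]',
        #       "__apps__maya": True,
        #       "avalon_auto_sync": True,
        #       "create_folder_structure": False,
        #       "fps": 25
        #   }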

        root_values = {}
        root_key = "__root__"
        for key in tuple(in_data.keys()):
            if key.startswith(root_key):
                _key = key[len(root_key):]
                root_values[_key] = in_data.pop(key)

        root_names = in_data.pop("__rootnames__", None)
        root_data = {}
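        # Rebuild {root_name: {platform: path}} by matching the collected
        # "__root__" values against the root names from the hidden item.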
        if root_names:
            for root_name in json.loads(root_names):
                root_data[root_name] = {}
                for key, value in tuple(root_values.items()):
                    if key.startswith(root_name):
                        _key = key[len(root_name):]
                        root_data[root_name][_key] = value

        else:
            for key, value in root_values.items():
                root_data[key] = value

        # TODO implement creating of anatomy for new projects
        # project_name = entities[0]["full_name"]
        # anatomy = Anatomy(project_name)

        # pop out info about creating project structure
        create_proj_struct = in_data.pop(self.create_project_structure_key)

        # Find hidden items for multiselect enumerators
        keys_to_process = []
        for key in in_data:
            if key.startswith("__hidden__"):
                keys_to_process.append(key)

        self.log.debug("Preparing data for Multiselect Enumerators")
        enumerators = {}
        for key in keys_to_process:
            new_key = key.replace("__hidden__", "")
            enumerator_items = in_data.pop(key)
            enumerators[new_key] = json.loads(enumerator_items)

        # find values set for multiselect enumerator
        for key, enumerator_items in enumerators.items():
            in_data[key] = []

            name = "__{}__".format(key)

            for item in enumerator_items:
                value = in_data.pop(item)
                if value is True:
                    new_key = item.replace(name, "")
                    in_data[key].append(new_key)

        self.log.debug("Setting Custom Attribute values:")
        entity = entities[0]
        for key, value in in_data.items():
            entity["custom_attributes"][key] = value
            self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))

        session.commit()

        # Create project structure
        self.create_project_specific_config(entities[0]["full_name"], in_data)

        # Trigger Create Project Structure action
        if create_proj_struct is True:
            self.trigger_action("create.project.structure", event)

        return True

    def create_project_specific_config(self, project_name, json_data):
        self.log.debug("*** Creating project specifig configs ***")
        project_specific_path = project_overrides_dir_path(project_name)
        if not os.path.exists(project_specific_path):
            os.makedirs(project_specific_path)
            self.log.debug((
                "Project specific config folder for project \"{}\" created."
            ).format(project_name))

        # Presets ####################################
        self.log.debug("--- Processing Presets Begins: ---")

        project_defaults_dir = os.path.normpath(os.path.join(
            project_specific_path, "presets", "ftrack"
        ))
        project_defaults_path = os.path.normpath(os.path.join(
            project_defaults_dir, "project_defaults.json"
        ))
        # Create folder if not exist
        if not os.path.exists(project_defaults_dir):
            self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
                project_defaults_dir
            ))
            os.makedirs(project_defaults_dir)

        with open(project_defaults_path, 'w') as file_stream:
            json.dump(json_data, file_stream, indent=4)

        self.log.debug("*** Creating project specifig configs Finished ***")