class AvalonRestApiResource:
    def __init__(self, avalon_module, server_manager):
        self.module = avalon_module
        self.server_manager = server_manager

        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

        self.prefix = "/avalon"
        self.endpoint_defs = (
            (
                "GET",
                "/projects",
                AvalonProjectsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}",
                AvalonProjectEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets",
                AvalonAssetsEndpoint(self)
            ),
            (
                "GET",
                "/projects/{project_name}/assets/{asset_name}",
                AvalonAssetEndpoint(self)
            )
        )

        self.register()

    def register(self):
        for methods, url, endpoint in self.endpoint_defs:
            final_url = self.prefix + url
            self.server_manager.add_route(
                methods, final_url, endpoint.dispatch
            )

    @staticmethod
    def json_dump_handler(value):
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        if isinstance(value, ObjectId):
            return str(value)
        raise TypeError(value)

    @classmethod
    def encode(cls, data):
        return json.dumps(
            data,
            indent=4,
            default=cls.json_dump_handler
        ).encode("utf-8")
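
# Hypothetical usage sketch (not part of the original module): once the
# resource is registered on a running server manager, the routes can be
# queried over plain HTTP. The host, port and project name are placeholders.
import requests

base_url = "http://localhost:8111/avalon"
projects = requests.get(base_url + "/projects").json()
assets = requests.get(base_url + "/projects/MyProject/assets").json()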
def get_app_environments_for_context(
    project_name, asset_name, task_name, app_name, env=None
):
    """Prepare environment variables by context.

    Args:
        project_name (str): Name of project.
        asset_name (str): Name of asset.
        task_name (str): Name of task.
        app_name (str): Name of application that is launched and can be found
            by ApplicationManager.
        env (dict): Initial environment variables. `os.environ` is used when
            not passed.

    Returns:
        dict: Environments for passed context and application.
    """
    from avalon.api import AvalonMongoDB

    # Avalon database connection
    dbcon = AvalonMongoDB()
    dbcon.Session["AVALON_PROJECT"] = project_name
    dbcon.install()

    # Project and asset documents
    project_doc = dbcon.find_one({"type": "project"})
    asset_doc = dbcon.find_one({
        "type": "asset",
        "name": asset_name
    })

    # Prepare app object which can be obtained only from ApplicationManager
    app_manager = ApplicationManager()
    app = app_manager.applications[app_name]

    # Project's anatomy
    anatomy = Anatomy(project_name)

    data = EnvironmentPrepData({
        "project_name": project_name,
        "asset_name": asset_name,
        "task_name": task_name,
        "app_name": app_name,

        "app": app,

        "dbcon": dbcon,
        "project_doc": project_doc,
        "asset_doc": asset_doc,

        "anatomy": anatomy,

        "env": env
    })

    prepare_host_environments(data)
    prepare_context_environments(data)

    # Discard avalon connection
    dbcon.uninstall()

    return data["env"]
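
# Hypothetical usage sketch: the project/asset/task/app names below are
# placeholders, and the environment keys checked at the end are assumptions
# about what prepare_context_environments() fills in.
env = get_app_environments_for_context(
    "MyProject", "sh010", "animation", "maya/2022"
)
print(env.get("AVALON_PROJECT"))  # expected: "MyProject"
print(env.get("AVALON_TASK"))     # expected: "animation"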
def start_timer(self, project_name, asset_name, task_name, hierarchy):
    """Start timer for 'project_name', 'asset_name' and 'task_name'.

    Called from REST api by hosts.

    Args:
        project_name (string)
        asset_name (string)
        task_name (string)
        hierarchy (string)
    """
    dbconn = AvalonMongoDB()
    dbconn.install()
    dbconn.Session["AVALON_PROJECT"] = project_name

    asset_doc = dbconn.find_one({"type": "asset", "name": asset_name})
    if not asset_doc:
        raise ValueError("Unknown asset {}".format(asset_name))

    task_type = ''
    try:
        task_type = asset_doc["data"]["tasks"][task_name]["type"]
    except KeyError:
        self.log.warning(
            "Couldn't find task_type for {}".format(task_name))

    hierarchy = hierarchy.split("\\")
    hierarchy.append(asset_name)

    data = {
        "project_name": project_name,
        "task_name": task_name,
        "task_type": task_type,
        "hierarchy": hierarchy
    }
    self.timer_started(None, data)
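
# Illustrative sketch of the hierarchy handling above (values are made up):
# the backslash-delimited string is split and the asset name is appended.
hierarchy = "shots\\sq01"
parts = hierarchy.split("\\")
parts.append("sh010")
assert parts == ["shots", "sq01", "sh010"]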
class Delivery(BaseAction):
    identifier = "delivery.action"
    label = "Delivery"
    description = "Deliver data to client"
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = statics_icon("ftrack", "action_icons", "Delivery.svg")
    settings_key = "delivery_action"

    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()
        super(Delivery, self).__init__(*args, **kwargs)

    def discover(self, session, entities, event):
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return

        title = "Delivery data to Client"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        project_entity = self.get_project_from_entity(entities[0])
        project_name = project_entity["full_name"]
        self.db_con.install()
        self.db_con.Session["AVALON_PROJECT"] = project_name
        project_doc = self.db_con.find_one({"type": "project"})
        if not project_doc:
            return {
                "success": False,
                "message": (
                    "Didn't find project \"{}\" in avalon."
                ).format(project_name)
            }

        repre_names = self._get_repre_names(entities)
        self.db_con.uninstall()

        items.append({
            "type": "hidden",
            "name": "__project_name__",
            "value": project_name
        })

        # Prepare anatomy data
        anatomy = Anatomy(project_name)
        new_anatomies = []
        first = None
        for key, template in (anatomy.templates.get("delivery") or {}).items():
            # Use only keys with `{root}` or `{root[*]}` in value
            if isinstance(template, str) and "{root" in template:
                new_anatomies.append({
                    "label": key,
                    "value": key
                })
                if first is None:
                    first = key

        skipped = False
        # Add a message if there are no common components
        # or delivery templates
        if not repre_names or not new_anatomies:
            skipped = True
            items.append({
                "type": "label",
                "value": "<h1>Something went wrong:</h1>"
            })

        items.append({
            "type": "hidden",
            "name": "__skipped__",
            "value": skipped
        })

        if not repre_names:
            if len(entities) == 1:
                items.append({
                    "type": "label",
                    "value": (
                        "- Selected entity doesn't have components to deliver."
                    )
                })
            else:
                items.append({
                    "type": "label",
                    "value": (
                        "- Selected entities don't have common components."
                    )
                })

        # Add message if delivery anatomies are not set
        if not new_anatomies:
            items.append({
                "type": "label",
                "value": (
                    "- `\"delivery\"` anatomy key is not set in config."
                )
            })

        # Skip if there are any data shortcomings
        if skipped:
            return {
                "items": items,
                "title": title
            }

        items.append({
            "value": "<h1>Choose Components to deliver</h1>",
            "type": "label"
        })

        for repre_name in repre_names:
            items.append({
                "type": "boolean",
                "value": False,
                "label": repre_name,
                "name": repre_name
            })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Location for delivery</h2>",
            "type": "label"
        })

        items.append({
            "type": "label",
            "value": (
                "<i>NOTE: It is possible to replace `root` key in anatomy.</i>"
            )
        })

        items.append({
            "type": "text",
            "name": "__location_path__",
            "empty_text": "Type location path here...(Optional)"
        })

        items.append(item_splitter)

        items.append({
            "value": "<h2>Anatomy of delivery files</h2>",
            "type": "label"
        })

        items.append({
            "type": "label",
            "value": (
                "<p><i>NOTE: These can be set in Anatomy.yaml"
                " within `delivery` key.</i></p>"
            )
        })

        items.append({
            "type": "enumerator",
            "name": "__new_anatomies__",
            "data": new_anatomies,
            "value": first
        })

        return {
            "items": items,
            "title": title
        }

    def _get_repre_names(self, entities):
        version_ids = self._get_interest_version_ids(entities)
        repre_docs = self.db_con.find({
            "type": "representation",
            "parent": {"$in": version_ids}
        })
        return list(sorted(repre_docs.distinct("name")))

    def _get_interest_version_ids(self, entities):
        parent_ent_by_id = {}
        subset_names = set()
        version_nums = set()
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            parent_ent_by_id[parent["id"]] = parent

            subset_name = asset["name"]
            subset_names.add(subset_name)

            version = entity["version"]
            version_nums.add(version)

        asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
        subset_docs = self._get_subset_docs(
            asset_docs_by_ftrack_id, subset_names, entities
        )
        version_docs = self._get_version_docs(
            asset_docs_by_ftrack_id, subset_docs, version_nums, entities
        )

        return [version_doc["_id"] for version_doc in version_docs]

    def _get_version_docs(
        self, asset_docs_by_ftrack_id, subset_docs, version_nums, entities
    ):
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        version_docs = list(self.db_con.find({
            "type": "version",
            "parent": {"$in": list(subset_docs_by_id.keys())},
            "name": {"$in": list(version_nums)}
        }))
        version_docs_by_parent_id = collections.defaultdict(dict)
        for version_doc in version_docs:
            subset_doc = subset_docs_by_id[version_doc["parent"]]

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            version = version_doc["name"]
            if version_docs_by_parent_id[asset_id].get(subset_name) is None:
                version_docs_by_parent_id[asset_id][subset_name] = {}

            version_docs_by_parent_id[asset_id][subset_name][version] = (
                version_doc
            )

        filtered_versions = []
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            version_docs_by_version = subsets_by_name.get(subset_name)
            if not version_docs_by_version:
                continue

            version = entity["version"]
            version_doc = version_docs_by_version.get(version)
            if version_doc:
                filtered_versions.append(version_doc)
        return filtered_versions

    def _get_subset_docs(
        self, asset_docs_by_ftrack_id, subset_names, entities
    ):
        asset_doc_ids = list()
        for asset_doc in asset_docs_by_ftrack_id.values():
            asset_doc_ids.append(asset_doc["_id"])

        subset_docs = list(self.db_con.find({
            "type": "subset",
            "parent": {"$in": asset_doc_ids},
            "name": {"$in": list(subset_names)}
        }))
        subset_docs_by_parent_id = collections.defaultdict(dict)
        for subset_doc in subset_docs:
            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            subset_docs_by_parent_id[asset_id][subset_name] = subset_doc

        filtered_subsets = []
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            subset_doc = subsets_by_name.get(subset_name)
            if subset_doc:
                filtered_subsets.append(subset_doc)
        return filtered_subsets

    def _get_asset_docs(self, parent_ent_by_id):
        asset_docs = list(self.db_con.find({
            "type": "asset",
            "data.ftrackId": {"$in": list(parent_ent_by_id.keys())}
        }))
        asset_docs_by_ftrack_id = {
            asset_doc["data"]["ftrackId"]: asset_doc
            for asset_doc in asset_docs
        }

        entities_by_mongo_id = {}
        entities_by_names = {}
        for ftrack_id, entity in parent_ent_by_id.items():
            if ftrack_id not in asset_docs_by_ftrack_id:
                parent_mongo_id = entity["custom_attributes"].get(
                    CUST_ATTR_ID_KEY
                )
                if parent_mongo_id:
                    entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
                else:
                    entities_by_names[entity["name"]] = entity

        expressions = []
        if entities_by_mongo_id:
            expression = {
                "type": "asset",
                "_id": {"$in": list(entities_by_mongo_id.keys())}
            }
            expressions.append(expression)

        if entities_by_names:
            expression = {
                "type": "asset",
                "name": {"$in": list(entities_by_names.keys())}
            }
            expressions.append(expression)

        if expressions:
            if len(expressions) == 1:
                filter = expressions[0]
            else:
                filter = {"$or": expressions}

            asset_docs = self.db_con.find(filter)
            for asset_doc in asset_docs:
                if asset_doc["_id"] in entities_by_mongo_id:
                    entity = entities_by_mongo_id[asset_doc["_id"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

                elif asset_doc["name"] in entities_by_names:
                    entity = entities_by_names[asset_doc["name"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

        return asset_docs_by_ftrack_id

    def launch(self, session, entities, event):
        if "values" not in event["data"]:
            return

        values = event["data"]["values"]
        skipped = values.pop("__skipped__")
        if skipped:
            return None

        user_id = event["source"]["user"]["id"]
        user_entity = session.query(
            "User where id is {}".format(user_id)
        ).one()

        job = session.create("Job", {
            "user": user_entity,
            "status": "running",
            "data": json.dumps({
                "description": "Delivery processing."
            })
        })
        session.commit()

        try:
            self.db_con.install()
            self.real_launch(session, entities, event)
            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Failed during processing delivery action.",
                exc_info=True
            )

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()
            self.db_con.uninstall()

        if job["status"] == "failed":
            return {
                "success": False,
                "message": "Delivery failed. Check logs for more information."
            }
        return True

    def real_launch(self, session, entities, event):
        self.log.info("Delivery action just started.")
        report_items = collections.defaultdict(list)

        values = event["data"]["values"]

        location_path = values.pop("__location_path__")
        anatomy_name = values.pop("__new_anatomies__")
        project_name = values.pop("__project_name__")

        repre_names = []
        for key, value in values.items():
            if value is True:
                repre_names.append(key)

        if not repre_names:
            return {
                "success": True,
                "message": "No components were selected to deliver."
            }

        location_path = location_path.strip()
        if location_path:
            location_path = os.path.normpath(location_path)
            if not os.path.exists(location_path):
                os.makedirs(location_path)

        self.db_con.Session["AVALON_PROJECT"] = project_name

        self.log.debug("Collecting representations to process.")
        version_ids = self._get_interest_version_ids(entities)
        repres_to_deliver = list(self.db_con.find({
            "type": "representation",
            "parent": {"$in": version_ids},
            "name": {"$in": repre_names}
        }))

        anatomy = Anatomy(project_name)

        format_dict = {}
        if location_path:
            location_path = location_path.replace("\\", "/")
            root_names = anatomy.root_names_from_templates(
                anatomy.templates["delivery"]
            )
            if root_names is None:
                format_dict["root"] = location_path
            else:
                format_dict["root"] = {}
                for name in root_names:
                    format_dict["root"][name] = location_path

        datetime_data = config.get_datetime_data()
        for repre in repres_to_deliver:
            source_path = repre.get("data", {}).get("path")
            debug_msg = "Processing representation {}".format(repre["_id"])
            if source_path:
                debug_msg += " with published path {}.".format(source_path)
            self.log.debug(debug_msg)

            # Get destination repre path
            anatomy_data = copy.deepcopy(repre["context"])
            anatomy_data.update(datetime_data)
            anatomy_filled = anatomy.format_all(anatomy_data)
            test_path = anatomy_filled["delivery"][anatomy_name]

            if not test_path.solved:
                msg = (
                    "Missing keys in Representation's context"
                    " for anatomy template \"{}\"."
                ).format(anatomy_name)

                if test_path.missing_keys:
                    keys = ", ".join(test_path.missing_keys)
                    sub_msg = (
                        "Representation: {}<br>- Missing keys: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                if test_path.invalid_types:
                    items = []
                    for key, value in test_path.invalid_types.items():
                        items.append("\"{}\" {}".format(key, str(value)))

                    keys = ", ".join(items)
                    sub_msg = (
                        "Representation: {}<br>"
                        "- Invalid value DataType: \"{}\"<br>"
                    ).format(str(repre["_id"]), keys)

                report_items[msg].append(sub_msg)
                self.log.warning(
                    "{} Representation: \"{}\" Filled: <{}>".format(
                        msg, str(repre["_id"]), str(test_path)
                    )
                )
                continue

            # Get source repre path
            frame = repre['context'].get('frame')

            if frame:
                repre["context"]["frame"] = len(str(frame)) * "#"

            repre_path = self.path_from_represenation(repre, anatomy)
            # TODO add backup solution where root of path from component
            # is replaced with root
            args = (
                repre_path,
                anatomy,
                anatomy_name,
                anatomy_data,
                format_dict,
                report_items
            )
            if not frame:
                self.process_single_file(*args)
            else:
                self.process_sequence(*args)

        return self.report(report_items)

    def process_single_file(
        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
        report_items
    ):
        anatomy_filled = anatomy.format(anatomy_data)
        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        self.copy_file(repre_path, delivery_path)

    def process_sequence(
        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
        report_items
    ):
        dir_path, file_name = os.path.split(str(repre_path))

        base_name, ext = os.path.splitext(file_name)
        file_name_items = None
        if "#" in base_name:
            file_name_items = [part for part in base_name.split("#") if part]
        elif "%" in base_name:
            file_name_items = base_name.split("%")

        if not file_name_items:
            msg = "Source file was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        src_collections, remainder = clique.assemble(os.listdir(dir_path))
        src_collection = None
        for col in src_collections:
            if col.tail != ext:
                continue
            # skip if collection doesn't have the same basename
            if not col.head.startswith(file_name_items[0]):
                continue

            src_collection = col
            break

        if src_collection is None:
            # TODO log error!
            msg = "Source collection of files was not found"
            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return

        frame_indicator = "@####@"

        anatomy_data["frame"] = frame_indicator
        anatomy_filled = anatomy.format(anatomy_data)

        if format_dict:
            template_result = anatomy_filled["delivery"][anatomy_name]
            delivery_path = template_result.rootless.format(**format_dict)
        else:
            delivery_path = anatomy_filled["delivery"][anatomy_name]

        delivery_folder = os.path.dirname(delivery_path)
        dst_head, dst_tail = delivery_path.split(frame_indicator)
        dst_padding = src_collection.padding
        dst_collection = clique.Collection(
            head=dst_head,
            tail=dst_tail,
            padding=dst_padding
        )

        if not os.path.exists(delivery_folder):
            os.makedirs(delivery_folder)

        src_head = src_collection.head
        src_tail = src_collection.tail
        for index in src_collection.indexes:
            src_padding = src_collection.format("{padding}") % index
            src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
            src = os.path.normpath(
                os.path.join(dir_path, src_file_name)
            )

            dst_padding = dst_collection.format("{padding}") % index
            dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)

            self.copy_file(src, dst)

    def path_from_represenation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]
        except KeyError:
            return None

        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = pipeline.format_template_with_optional_keys(
                context, template
            )
        except KeyError:
            # Template references unavailable data
            return None

        return os.path.normpath(path)

    def copy_file(self, src_path, dst_path):
        if os.path.exists(dst_path):
            return
        try:
            filelink.create(
                src_path,
                dst_path,
                filelink.HARDLINK
            )
        except OSError:
            shutil.copyfile(src_path, dst_path)

    def report(self, report_items):
        items = []
        title = "Delivery report"
        for msg, _items in report_items.items():
            if not _items:
                continue

            if items:
                items.append({"type": "label", "value": "---"})

            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            if not isinstance(_items, (list, tuple)):
                _items = [_items]
            __items = []
            for item in _items:
                __items.append(str(item))

            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(__items))
            })

        if not items:
            return {
                "success": True,
                "message": "Delivery Finished"
            }

        return {
            "items": items,
            "title": title,
            "success": False,
            "message": "Delivery Finished"
        }
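
# Illustrative sketch of the clique usage relied on by process_sequence()
# above (the file names are made up): clique.assemble() groups files that
# differ only by a frame number into Collection objects whose head, tail and
# padding can be reused to build destination file names.
import clique

files = ["render.0001.exr", "render.0002.exr", "render.0003.exr", "notes.txt"]
collections, remainder = clique.assemble(files)
col = collections[0]
print(col.head, col.tail, col.padding)  # "render." ".exr" 4
for index in col.indexes:
    frame = col.format("{padding}") % index
    print("{}{}{}".format(col.head, frame, col.tail))  # render.0001.exr, ...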
class AppplicationsAction(BaseAction):
    """Application Action class.

    Args:
        session (ftrack_api.Session): Session where action will be registered.
        label (str): A descriptive string identifying your action.
        variant (str, optional): To group actions together, give them the same
            label and specify a unique variant per action.
        identifier (str): An unique identifier for app.
        description (str): A verbose descriptive text for your action.
        icon (str): Url path to icon which will be shown in Ftrack web.
    """

    type = "Application"
    label = "Application action"

    identifier = "pype_app.{}.".format(str(uuid4()))
    icon_url = os.environ.get("OPENPYPE_STATICS_SERVER")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.application_manager = ApplicationManager()
        self.dbcon = AvalonMongoDB()

    def construct_requirements_validations(self):
        # Override validation as this action does not need them
        return

    def register(self):
        """Registers the action, subscribing the discover and launch topics."""
        discovery_subscription = (
            "topic=ftrack.action.discover and source.user.username={0}"
        ).format(self.session.api_user)

        self.session.event_hub.subscribe(
            discovery_subscription,
            self._discover,
            priority=self.priority
        )

        launch_subscription = (
            "topic=ftrack.action.launch"
            " and data.actionIdentifier={0}"
            " and source.user.username={1}"
        ).format(
            self.identifier + "*",
            self.session.api_user
        )
        self.session.event_hub.subscribe(
            launch_subscription,
            self._launch
        )

    def _discover(self, event):
        entities = self._translate_event(event)
        items = self.discover(self.session, entities, event)
        if items:
            return {"items": items}

    def discover(self, session, entities, event):
        """Return true if we can handle the selected entities.

        Args:
            session (ftrack_api.Session): Helps to query necessary data.
            entities (list): Object of selected entities.
            event (ftrack_api.Event): Ftrack event causing discover callback.
        """
        if (
            len(entities) != 1
            or entities[0].entity_type.lower() != "task"
        ):
            return False

        entity = entities[0]
        if entity["parent"].entity_type.lower() == "project":
            return False

        avalon_project_apps = event["data"].get("avalon_project_apps", None)
        avalon_project_doc = event["data"].get("avalon_project_doc", None)
        if avalon_project_apps is None:
            if avalon_project_doc is None:
                ft_project = self.get_project_from_entity(entity)
                project_name = ft_project["full_name"]
                if not self.dbcon.is_installed():
                    self.dbcon.install()
                self.dbcon.Session["AVALON_PROJECT"] = project_name
                avalon_project_doc = self.dbcon.find_one({
                    "type": "project"
                }) or False
                event["data"]["avalon_project_doc"] = avalon_project_doc

            if not avalon_project_doc:
                return False

            project_apps_config = avalon_project_doc["config"].get("apps", [])
            avalon_project_apps = [
                app["name"] for app in project_apps_config
            ] or False
            event["data"]["avalon_project_apps"] = avalon_project_apps

        if not avalon_project_apps:
            return False

        items = []
        for app_name in avalon_project_apps:
            app = self.application_manager.applications.get(app_name)
            if not app or not app.enabled:
                continue

            app_icon = app.icon
            if app_icon and self.icon_url:
                try:
                    app_icon = app_icon.format(self.icon_url)
                except Exception:
                    self.log.warning((
                        "Couldn't fill icon path. Icon template: \"{}\""
                        " --- Icon url: \"{}\""
                    ).format(app_icon, self.icon_url))
                    app_icon = None

            items.append({
                "label": app.group.label,
                "variant": app.label,
                "description": None,
                "actionIdentifier": self.identifier + app_name,
                "icon": app_icon
            })

        return items

    def launch(self, session, entities, event):
        """Callback method for the custom action.

        Returns either a bool (True if successful or False if the action
        failed) or a dictionary with the keys `message` and `success`; the
        message should be a string and will be displayed as feedback to the
        user, success should be a bool, True if successful or False if the
        action failed.

        *session* is a `ftrack_api.Session` instance

        *entities* is a list of tuples each containing the entity type and
        the entity id. If the entity is hierarchical you will always get the
        entity type TypedContext; once retrieved through a get operation you
        will have the "real" entity type, e.g. Shot, Sequence or Asset Build.

        *event* the unmodified original event
        """
        identifier = event["data"]["actionIdentifier"]
        app_name = identifier[len(self.identifier):]

        entity = entities[0]

        task_name = entity["name"]
        asset_name = entity["parent"]["name"]
        project_name = entity["project"]["full_name"]
        self.log.info((
            "Ftrack launch app: \"{}\" on Project/Asset/Task: {}/{}/{}"
        ).format(app_name, project_name, asset_name, task_name))
        try:
            self.application_manager.launch(
                app_name,
                project_name=project_name,
                asset_name=asset_name,
                task_name=task_name
            )

        except ApplictionExecutableNotFound as exc:
            self.log.warning(exc.exc_msg)
            return {
                "success": False,
                "message": exc.msg
            }

        except ApplicationLaunchFailed as exc:
            self.log.error(str(exc))
            return {
                "success": False,
                "message": str(exc)
            }

        except Exception:
            msg = "Unexpected failure of application launch {}".format(
                self.label
            )
            self.log.error(msg, exc_info=True)
            return {
                "success": False,
                "message": msg
            }

        return {
            "success": True,
            "message": "Launching {0}".format(self.label)
        }
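
# Hypothetical usage sketch of ApplicationManager outside of ftrack; the
# import path and the app/project/asset/task names are assumptions, not part
# of the original module.
from openpype.lib import ApplicationManager

app_manager = ApplicationManager()
app_manager.launch(
    "maya/2022",
    project_name="MyProject",
    asset_name="sh010",
    task_name="animation"
)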
class DeleteOldVersions(api.Loader):

    representations = ["*"]
    families = ["*"]

    label = "Delete Old Versions"
    icon = "trash"
    color = "#d8d8d8"

    options = [
        qargparse.Integer(
            "versions_to_keep", default=2, min=0, help="Versions to keep:"
        ),
        qargparse.Boolean(
            "remove_publish_folder", help="Remove publish folder:"
        )
    ]

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    def delete_whole_dir_paths(self, dir_paths, delete=True):
        size = 0

        for dir_path in dir_paths:
            # Delete all files and folders in dir path
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    file_path = os.path.join(root, name)
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                for name in dirs:
                    if delete:
                        os.rmdir(os.path.join(root, name))

            if not delete:
                continue

            # Delete the folder and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(dir_path)

        return size

    def path_from_representation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]
        except KeyError:
            return (None, None)

        sequence_path = None
        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = avalon.pipeline.format_template_with_optional_keys(
                context, template
            )
            if "frame" in context:
                context["frame"] = self.sequence_splitter
                sequence_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        context, template
                    )
                )

        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(path), sequence_path)

    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
        size = 0

        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove file if `frame` key was not in context or
                # filled path is in remainders (single file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.debug(
                            "File was not found: {}".format(file_path)
                        )
                        continue

                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                    remainders.remove(file_path_base)
                    continue

                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):
                            size += os.path.getsize(_file_path)
                            if delete:
                                os.remove(_file_path)
                                self.log.debug(
                                    "Removed file: {}".format(_file_path)
                                )

                    _seq_path = final_col.format("{head}{padding}{tail}")
                    self.log.debug("Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))
                else:
                    self.log.debug(
                        "File was not found: {}".format(file_path)
                    )

        # Delete as many parent folders as possible
        if not delete:
            return size

        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

        return size

    def message(self, text):
        msgBox = QtWidgets.QMessageBox()
        msgBox.setText(text)
        msgBox.setStyleSheet(style.load_stylesheet())
        msgBox.setWindowFlags(
            msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
        )
        msgBox.exec_()

    def get_data(self, context, versions_count):
        subset = context["subset"]
        asset = context["asset"]
        anatomy = Anatomy(context["project"]["name"])

        self.dbcon = AvalonMongoDB()
        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
        self.dbcon.install()

        versions = list(self.dbcon.find({
            "type": "version",
            "parent": {"$in": [subset["_id"]]}
        }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for _parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                sorted(_versions, key=sort_func, reverse=True)
            ):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter latest versions
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset["name"], subset["name"], version["name"]
            )
            self.log.debug((
                "Skipping version. Already tagged as `deleted`. < {} >"
            ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug(
            "Filtered versions to delete ({})".format(len(version_ids))
        )

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.info(msg)
            self.message(msg)
            return

        repres = list(self.dbcon.find({
            "type": "representation",
            "parent": {"$in": version_ids}
        }))

        self.log.debug(
            "Collected representations to remove ({})".format(len(repres))
        )

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(
                repre, anatomy
            )
            if file_path is None:
                self.log.debug((
                    "Could not format path for representation \"{}\""
                ).format(str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join([
                "'{}'".format(path[0].replace("\\", "/")) for path in paths
            ])
            self.log.debug((
                "Folder does not exist. Deleting its files skipped: {}"
            ).format(paths_msg))

        data = {
            "dir_paths": dir_paths,
            "file_paths_by_dir": file_paths_by_dir,
            "versions": versions,
            "asset": asset,
            "subset": subset,
            "archive_subset": versions_count == 0
        }

        return data

    def main(self, data, remove_publish_folder):
        # Size of files.
        size = 0

        if remove_publish_folder:
            size = self.delete_whole_dir_paths(data["dir_paths"].values())
        else:
            size = self.delete_only_repre_files(
                data["dir_paths"], data["file_paths_by_dir"]
            )

        mongo_changes_bulk = []
        for version in data["versions"]:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if data["archive_subset"]:
            mongo_changes_bulk.append(UpdateOne(
                {
                    "_id": data["subset"]["_id"],
                    "type": "subset"
                },
                {"$set": {"type": "archived_subset"}}
            ))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        session = ftrack_api.Session()
        query = (
            "AssetVersion where asset.parent.id is \"{}\""
            " and asset.name is \"{}\""
            " and version is \"{}\""
        )
        for v in data["versions"]:
            try:
                ftrack_version = session.query(
                    query.format(
                        data["asset"]["data"]["ftrackId"],
                        data["subset"]["name"],
                        v["name"]
                    )
                ).one()
            except ftrack_api.exception.NoResultFoundError:
                continue

            ftrack_version["is_published"] = False

        try:
            session.commit()

        except Exception:
            msg = (
                "Could not set `is_published` attribute to `False`"
                " for selected AssetVersions."
            )
            self.log.error(msg)
            self.message(msg)

        msg = "Total size of files: " + self.sizeof_fmt(size)
        self.log.info(msg)
        self.message(msg)

    def load(self, context, name=None, namespace=None, options=None):
        try:
            versions_to_keep = 2
            remove_publish_folder = False
            if options:
                versions_to_keep = options.get(
                    "versions_to_keep", versions_to_keep
                )
                remove_publish_folder = options.get(
                    "remove_publish_folder", remove_publish_folder
                )

            data = self.get_data(context, versions_to_keep)

            self.main(data, remove_publish_folder)

        except Exception:
            self.log.error("Failed to delete versions.", exc_info=True)
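
# Illustrative sketch of the sizeof_fmt() helper above: it repeatedly divides
# by 1024 and picks the matching binary unit. `self` is unused, so the method
# can be exercised directly; the byte counts are made up.
print(DeleteOldVersions.sizeof_fmt(None, 1536))           # 1.5KiB
print(DeleteOldVersions.sizeof_fmt(None, 3 * 1024 ** 3))  # 3.0GiB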
class StoreThumbnailsToAvalon(BaseAction):
    # Action identifier
    identifier = "store.thubmnail.to.avalon"
    # Action label
    label = "OpenPype Admin"
    # Action variant
    variant = "- Store Thumbnails to avalon"
    # Action description
    description = 'Test action'
    # roles that are allowed to register this action
    icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")

    settings_key = "store_thubmnail_to_avalon"

    thumbnail_key = "AVALON_THUMBNAIL_ROOT"

    def __init__(self, *args, **kwargs):
        self.db_con = AvalonMongoDB()
        super(StoreThumbnailsToAvalon, self).__init__(*args, **kwargs)

    def discover(self, session, entities, event):
        is_valid = False
        for entity in entities:
            if entity.entity_type.lower() == "assetversion":
                is_valid = True
                break

        if is_valid:
            is_valid = self.valid_roles(session, entities, event)
        return is_valid

    def launch(self, session, entities, event):
        user = session.query(
            "User where username is '{0}'".format(session.api_user)
        ).one()

        action_job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps({
                "description": "Storing thumbnails to avalon."
            })
        })
        session.commit()

        project = self.get_project_from_entity(entities[0])
        project_name = project["full_name"]
        anatomy = Anatomy(project_name)

        if "publish" not in anatomy.templates:
            msg = "Anatomy does not have set publish key!"

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        if "thumbnail" not in anatomy.templates["publish"]:
            msg = (
                "There is not set \"thumbnail\""
                " template in Anatomy for project \"{}\""
            ).format(project_name)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_roots = os.environ.get(self.thumbnail_key)
        if (
            "{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
            and not thumbnail_roots
        ):
            msg = "`{}` environment is not set".format(self.thumbnail_key)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        existing_thumbnail_root = None
        for path in thumbnail_roots.split(os.pathsep):
            if os.path.exists(path):
                existing_thumbnail_root = path
                break

        if existing_thumbnail_root is None:
            msg = "Can't access paths, set in `{}` ({})".format(
                self.thumbnail_key, thumbnail_roots
            )

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        example_template_data = {
            "_id": "ID",
            "thumbnail_root": "THUMBNAIL_ROOT",
            "thumbnail_type": "THUMBNAIL_TYPE",
            "ext": ".EXT",
            "project": {
                "name": "PROJECT_NAME",
                "code": "PROJECT_CODE"
            },
            "asset": "ASSET_NAME",
            "subset": "SUBSET_NAME",
            "version": "VERSION_NAME",
            "hierarchy": "HIERARCHY"
        }
        tmp_filled = anatomy.format_all(example_template_data)
        thumbnail_result = tmp_filled["publish"]["thumbnail"]
        if not thumbnail_result.solved:
            missing_keys = thumbnail_result.missing_keys
            invalid_types = thumbnail_result.invalid_types
            submsg = ""
            if missing_keys:
                submsg += "Missing keys: {}".format(", ".join([
                    "\"{}\"".format(key) for key in missing_keys
                ]))

            if invalid_types:
                items = []
                for key, value in invalid_types.items():
                    items.append("{}{}".format(str(key), str(value)))
                submsg += "Invalid types: {}".format(", ".join(items))

            msg = (
                "Thumbnail Anatomy template expects more keys than action"
                " can offer. {}"
            ).format(submsg)

            action_job["status"] = "failed"
            session.commit()

            self.log.warning(msg)

            return {"success": False, "message": msg}

        thumbnail_template = anatomy.templates["publish"]["thumbnail"]

        self.db_con.install()

        for entity in entities:
            # Skip if entity is not AssetVersion (should never happen, but..)
            if entity.entity_type.lower() != "assetversion":
                continue

            # Skip if AssetVersion doesn't have a thumbnail
            thumbnail_ent = entity["thumbnail"]
            if thumbnail_ent is None:
                self.log.debug((
                    "Skipping. AssetVersion doesn't have a thumbnail set. {}"
                ).format(entity["id"]))
                continue

            avalon_ents_result = self.get_avalon_entities_for_assetversion(
                entity, self.db_con
            )
            version_full_path = (
                "Asset: \"{project_name}/{asset_path}\""
                " | Subset: \"{subset_name}\""
                " | Version: \"{version_name}\""
            ).format(**avalon_ents_result)

            version = avalon_ents_result["version"]
            if not version:
                self.log.warning((
                    "AssetVersion does not have version in avalon. {}"
                ).format(version_full_path))
                continue

            thumbnail_id = version["data"].get("thumbnail_id")
            if thumbnail_id:
                self.log.info((
                    "AssetVersion skipped, already has thumbnail set. {}"
                ).format(version_full_path))
                continue

            # Get thumbnail extension
            file_ext = thumbnail_ent["file_type"]
            if not file_ext.startswith("."):
                file_ext = ".{}".format(file_ext)

            avalon_project = avalon_ents_result["project"]
            avalon_asset = avalon_ents_result["asset"]
            hierarchy = ""
            parents = avalon_asset["data"].get("parents") or []
            if parents:
                hierarchy = "/".join(parents)

            # Prepare anatomy template fill data
            # 1. Create new id for thumbnail entity
            thumbnail_id = ObjectId()

            template_data = {
                "_id": str(thumbnail_id),
                "thumbnail_root": existing_thumbnail_root,
                "thumbnail_type": "thumbnail",
                "ext": file_ext,
                "project": {
                    "name": avalon_project["name"],
                    "code": avalon_project["data"].get("code")
                },
                "asset": avalon_ents_result["asset_name"],
                "subset": avalon_ents_result["subset_name"],
                "version": avalon_ents_result["version_name"],
                "hierarchy": hierarchy
            }

            anatomy_filled = anatomy.format(template_data)
            thumbnail_path = anatomy_filled["publish"]["thumbnail"]
            thumbnail_path = thumbnail_path.replace("..", ".")
            thumbnail_path = os.path.normpath(thumbnail_path)

            downloaded = False
            for loc in (thumbnail_ent.get("component_locations") or []):
                res_id = loc.get("resource_identifier")
                if not res_id:
                    continue

                thumbnail_url = self.get_thumbnail_url(res_id)
                if self.download_file(thumbnail_url, thumbnail_path):
                    downloaded = True
                    break

            if not downloaded:
                self.log.warning(
                    "Could not download thumbnail for {}".format(
                        version_full_path
                    )
                )
                continue

            # Clean template data from keys that are dynamic
            template_data.pop("_id")
            template_data.pop("thumbnail_root")

            thumbnail_entity = {
                "_id": thumbnail_id,
                "type": "thumbnail",
                "schema": "openpype:thumbnail-1.0",
                "data": {
                    "template": thumbnail_template,
                    "template_data": template_data
                }
            }

            # Create thumbnail entity
            self.db_con.insert_one(thumbnail_entity)
            self.log.debug(
                "Creating entity in database {}".format(str(thumbnail_entity))
            )

            # Set thumbnail id for version and its asset
            self.db_con.update_one(
                {"_id": version["_id"]},
                {"$set": {"data.thumbnail_id": thumbnail_id}}
            )

            self.db_con.update_one(
                {"_id": avalon_asset["_id"]},
                {"$set": {"data.thumbnail_id": thumbnail_id}}
            )

        action_job["status"] = "done"
        session.commit()

        return True

    def get_thumbnail_url(self, resource_identifier, size=None):
        # TODO use ftrack_api method rather (find way how to use it)
        url_string = (
            u'{url}/component/thumbnail?id={id}&username={username}'
            u'&apiKey={apiKey}'
        )
        url = url_string.format(
            url=self.session.server_url,
            id=resource_identifier,
            username=self.session.api_user,
            apiKey=self.session.api_key
        )
        if size:
            url += u'&size={0}'.format(size)

        return url

    def download_file(self, source_url, dst_file_path):
        dir_path = os.path.dirname(dst_file_path)
        try:
            os.makedirs(dir_path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                self.log.warning(
                    "Could not create folder: \"{}\"".format(dir_path)
                )
                return False

        self.log.debug(
            "Downloading file \"{}\" -> \"{}\"".format(
                source_url, dst_file_path
            )
        )
        file_open = open(dst_file_path, "wb")
        try:
            file_open.write(requests.get(source_url).content)
        except Exception:
            self.log.warning(
                "Download of image `{}` failed.".format(source_url)
            )
            return False
        finally:
            file_open.close()
        return True

    def get_avalon_entities_for_assetversion(self, asset_version, db_con):
        output = {
            "success": True,
            "message": None,
            "project": None,
            "project_name": None,
            "asset": None,
            "asset_name": None,
            "asset_path": None,
            "subset": None,
            "subset_name": None,
            "version": None,
            "version_name": None,
            "representations": None
        }

        db_con.install()

        ft_asset = asset_version["asset"]
        subset_name = ft_asset["name"]
        version = asset_version["version"]
        parent = ft_asset["parent"]
        ent_path = "/".join([ent["name"] for ent in parent["link"]])
        project = self.get_project_from_entity(asset_version)
        project_name = project["full_name"]

        output["project_name"] = project_name
        output["asset_name"] = parent["name"]
        output["asset_path"] = ent_path
        output["subset_name"] = subset_name
        output["version_name"] = version

        db_con.Session["AVALON_PROJECT"] = project_name

        avalon_project = db_con.find_one({"type": "project"})
        output["project"] = avalon_project

        if not avalon_project:
            output["success"] = False
            output["message"] = (
                "Project not synchronized to avalon `{}`".format(project_name)
            )
            return output

        asset_ent = None
        asset_mongo_id = parent["custom_attributes"].get(CUST_ATTR_ID_KEY)
        if asset_mongo_id:
            try:
                asset_mongo_id = ObjectId(asset_mongo_id)
                asset_ent = db_con.find_one({
                    "type": "asset",
                    "_id": asset_mongo_id
                })
            except Exception:
                pass

        if not asset_ent:
            asset_ent = db_con.find_one({
                "type": "asset",
                "data.ftrackId": parent["id"]
            })

        output["asset"] = asset_ent

        if not asset_ent:
            output["success"] = False
            output["message"] = (
                "Not synchronized entity to avalon `{}`".format(ent_path)
            )
            return output

        asset_mongo_id = asset_ent["_id"]

        subset_ent = db_con.find_one({
            "type": "subset",
            "parent": asset_mongo_id,
            "name": subset_name
        })

        output["subset"] = subset_ent

        if not subset_ent:
            output["success"] = False
            output["message"] = (
                "Subset `{}` does not exist under Asset `{}`"
            ).format(subset_name, ent_path)
            return output

        version_ent = db_con.find_one({
            "type": "version",
            "name": version,
            "parent": subset_ent["_id"]
        })

        output["version"] = version_ent

        if not version_ent:
            output["success"] = False
            output["message"] = (
                "Version `{}` does not exist under Subset `{}` | Asset `{}`"
            ).format(version, subset_name, ent_path)
            return output

        repre_ents = list(db_con.find({
            "type": "representation",
            "parent": version_ent["_id"]
        }))

        output["representations"] = repre_ents
        return output
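
# Hypothetical sketch of how a stored thumbnail document can be resolved back
# to a file path later: format the saved template with the saved
# template_data plus the currently valid thumbnail root and id. All values
# below are made up, not taken from the original module.
thumbnail_doc = {
    "data": {
        "template": (
            "{thumbnail_root}/{project[name]}/{thumbnail_type}/{_id}{ext}"
        ),
        "template_data": {
            "thumbnail_type": "thumbnail",
            "ext": ".png",
            "project": {"name": "MyProject", "code": "mp"},
            "asset": "sh010",
            "subset": "reviewMain",
            "version": 3,
            "hierarchy": "shots/sq01"
        }
    }
}
fill_data = dict(thumbnail_doc["data"]["template_data"])
fill_data["thumbnail_root"] = "/mnt/thumbnails"
fill_data["_id"] = "604f0c3a9a1d4b2f8c000001"
path = thumbnail_doc["data"]["template"].format(**fill_data)
print(path)
# /mnt/thumbnails/MyProject/thumbnail/604f0c3a9a1d4b2f8c000001.png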
class AvalonRestApi(RestApi):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

    @RestApi.route(
        "/projects/<project_name>", url_prefix="/avalon", methods="GET"
    )
    def get_project(self, request):
        project_name = request.url_data["project_name"]
        if not project_name:
            output = {}
            for project_name in self.dbcon.tables():
                project = self.dbcon[project_name].find_one({
                    "type": "project"
                })
                output[project_name] = project
            return CallbackResult(data=self.result_to_json(output))

        project = self.dbcon[project_name].find_one({"type": "project"})

        if project:
            return CallbackResult(data=self.result_to_json(project))

        abort(404, "Project \"{}\" was not found in database".format(
            project_name
        ))

    @RestApi.route(
        "/projects/<project_name>/assets/<asset>",
        url_prefix="/avalon",
        methods="GET"
    )
    def get_assets(self, request):
        _project_name = request.url_data["project_name"]
        _asset = request.url_data["asset"]

        if not self.dbcon.exist_table(_project_name):
            abort(404, "Project \"{}\" was not found in database".format(
                _project_name
            ))

        if not _asset:
            assets = self.dbcon[_project_name].find({"type": "asset"})
            output = self.result_to_json(assets)
            return CallbackResult(data=output)

        # identificator can be specified with url query (default is `name`)
        identificator = request.query.get("identificator", "name")

        asset = self.dbcon[_project_name].find_one({
            "type": "asset",
            identificator: _asset
        })
        if asset:
            id = asset["_id"]
            asset["_id"] = str(id)
            return asset

        abort(404, "Asset \"{}\" with {} was not found in project {}".format(
            _asset, identificator, _project_name
        ))

    def result_to_json(self, result):
        """Convert result of MongoDB query to dict without $oid (ObjectId)
        keys with help of regex matching.

        ..note:
            This will convert object type entries similar to ObjectId.
        """
        bson_json = bson.json_util.dumps(result)
        # Replace "{$oid: "{entity id}"}" with "{entity id}"
        regex1 = '(?P<id>{\"\$oid\": \"[^\"]+\"})'
        regex2 = '{\"\$oid\": (?P<id>\"[^\"]+\")}'
        for value in re.findall(regex1, bson_json):
            for substr in re.findall(regex2, value):
                bson_json = bson_json.replace(value, substr)

        return json.loads(bson_json)
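
# Illustrative sketch of what result_to_json() does to a MongoDB document
# (the document below is made up): bson.json_util renders ObjectId fields as
# {"$oid": "..."} wrappers, which the regexes collapse to plain id strings.
import json
import re

import bson.json_util
from bson.objectid import ObjectId

doc = {"_id": ObjectId("604f0c3a9a1d4b2f8c000001"), "type": "asset"}
bson_json = bson.json_util.dumps(doc)
# '{"_id": {"$oid": "604f0c3a9a1d4b2f8c000001"}, "type": "asset"}'
regex1 = '(?P<id>{\"\$oid\": \"[^\"]+\"})'
regex2 = '{\"\$oid\": (?P<id>\"[^\"]+\")}'
for value in re.findall(regex1, bson_json):
    for substr in re.findall(regex2, value):
        bson_json = bson_json.replace(value, substr)
print(json.loads(bson_json))
# {'_id': '604f0c3a9a1d4b2f8c000001', 'type': 'asset'}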