def get_anatomy_filled(self):
    """Collect project/asset documents and return fully filled anatomy data.

    Reads project, asset and task names from the session (``self._S``),
    queries the corresponding documents from the avalon database, builds
    the template data dictionary and returns the solved anatomy templates.

    Returns:
        Solved anatomy data (result of ``Anatomy.format_all(...).get_solved()``).
    """
    root_path = api.registered_root()
    project_name = self._S["AVALON_PROJECT"]
    asset_name = self._S["AVALON_ASSET"]

    # Make sure the avalon io connection is installed before querying.
    io.install()
    project_entity = io.find_one({
        "type": "project",
        "name": project_name
    })
    # NOTE(review): assert-based validation is stripped under `python -O`;
    # consider raising an explicit exception instead.
    assert project_entity, (
        "Project '{0}' was not found."
    ).format(project_name)
    log.debug("Collected Project \"{}\"".format(project_entity))

    asset_entity = io.find_one({
        "type": "asset",
        "name": asset_name,
        "parent": project_entity["_id"]
    })
    assert asset_entity, (
        "No asset found by the name '{0}' in project '{1}'"
    ).format(asset_name, project_name)

    project_name = project_entity["name"]
    log.info(
        "Anatomy object collected for project \"{}\".".format(project_name)
    )

    # Hierarchy is the asset's parent folders joined into a single path.
    hierarchy_items = asset_entity["data"]["parents"]
    hierarchy = ""
    if hierarchy_items:
        hierarchy = os.path.join(*hierarchy_items)

    template_data = {
        "root": root_path,
        "project": {
            "name": project_name,
            "code": project_entity["data"].get("code")
        },
        "asset": asset_entity["name"],
        # Anatomy templates expect forward slashes.
        "hierarchy": hierarchy.replace("\\", "/"),
        "task": self._S["AVALON_TASK"],
        "ext": self.workfile_ext,
        "version": 1,
        "username": os.getenv("PYPE_USERNAME", "").strip()
    }

    # Optionally add application directory when an app name is set.
    avalon_app_name = os.environ.get("AVALON_APP_NAME")
    if avalon_app_name:
        application_def = lib.get_application(avalon_app_name)
        app_dir = application_def.get("application_dir")
        if app_dir:
            template_data["app"] = app_dir

    anatomy = Anatomy(project_name)
    anatomy_filled = anatomy.format_all(template_data).get_solved()

    return anatomy_filled
def CreateNukeWorkfile(nodes=None,
                       nodes_effects=None,
                       to_timeline=False,
                       filepath=None,
                       representations=None,
                       **kwargs):
    ''' Creating nuke workfile with particular version with given nodes
    Also it is creating timeline track items as precomps.

    Arguments:
        nodes(list of dict): each key in dict is knob order is important
        to_timeline(type): will build trackItem with metadata
        filepath(str): destination path of the new workfile; must not exist
        representations(list): representations the workfile is built from

    Returns:
        bool: True if done

    Raises:
        ValueError: if `filepath` is not given
        FileExistsError: if `filepath` already exists
        AttributeError: if `representations` is empty
    '''
    import hiero.core
    from avalon.nuke import imprint
    from pype.hosts.nuke import (
        lib as nklib
    )

    # FIX: the original body referenced `filepath` and `representations`
    # without ever defining them (guaranteed NameError). They are now
    # explicit keyword arguments, which stays backward compatible because
    # callers previously could only have passed them through **kwargs.
    if filepath is None:
        raise ValueError("Missing `filepath` argument")

    # check if the file exists if does then Raise "File exists!"
    if os.path.exists(filepath):
        raise FileExistsError("File already exists: `{}`".format(filepath))

    # if no representations matching then
    # Raise "no representations to be build"
    # (truthiness check also covers `None`, unlike the old `len(...) == 0`)
    if not representations:
        raise AttributeError("Missing list of `representations`")

    # check nodes input (default `nodes=None` used to crash `len(None)`)
    if not nodes:
        log.warning("Missing list of `nodes`")

    # create temp nk file
    nuke_script = hiero.core.nuke.ScriptWriter()

    # create root node and save all metadata
    root_node = hiero.core.nuke.RootNode()

    anatomy = Anatomy(os.environ["AVALON_PROJECT"])
    work_template = anatomy.templates["work"]["path"]
    root_path = anatomy.root_value_for_template(work_template)

    nuke_script.addNode(root_node)

    # here to call pype.hosts.nuke.lib.BuildWorkfile
    script_builder = nklib.BuildWorkfile(
        root_node=root_node,
        root_path=root_path,
        nodes=nuke_script.getNodes(),
        **kwargs
    )
def launch(self, session, event):
    """Run preset shell scripts when an ftrack 'Appointment' changes.

    For each Appointment entity in the event, resolves the affected task
    and user, formats the work and publish directories from anatomy and
    passes them (with the username) to every configured script.
    """
    # load shell scripts presets
    presets = config.get_presets()['ftrack'].get("user_assigment_event")
    if not presets:
        return

    for entity in event.get('data', {}).get('entities', []):
        # Only assignment changes are of interest here.
        if entity.get('entity_type') != 'Appointment':
            continue

        task, user = self._get_task_and_user(session,
                                             entity.get('action'),
                                             entity.get('changes'))

        if not task or not user:
            self.log.error('Task or User was not found.')
            continue

        data = self._get_template_data(task)

        # format directories to pass to shell script
        anatomy = Anatomy(data["project"]["name"])
        anatomy_filled = anatomy.format(data)

        # formatting work dir is easiest part as we can use whole path
        work_dir = anatomy_filled["work"]["folder"]

        # we also need publish but not whole path; disable strict solving
        # so partially-filled templates do not raise
        anatomy_filled.strict = False
        publish = anatomy_filled["publish"]["folder"]

        # now find path up to and including the {asset} part
        m = re.search("(^.+?{})".format(data['asset']), publish)
        if not m:
            msg = 'Cannot get part of publish path {}'.format(publish)
            self.log.error(msg)
            return {
                'success': False,
                'message': msg
            }
        publish_dir = m.group(1)

        # Run every script configured for this action type.
        for script in presets.get(entity.get('action')):
            self.log.info(
                '[{}] : running script for user {}'.format(
                    entity.get('action'), user["username"]))
            self._run_script(script, [user["username"], work_dir, publish_dir])

    return True
def prepare_global_data(self):
    """Prepare global objects to `data` that will be used for sure."""
    # A project name is mandatory for everything below.
    project_name = self.data.get("project_name")
    if not project_name:
        self.log.info(
            "Skipping global data preparation."
            " Key `project_name` was not found in launch context."
        )
        return

    self.log.debug("Project name is set to \"{}\"".format(project_name))

    # Anatomy for the project
    self.data["anatomy"] = Anatomy(project_name)

    # Mongo connection bound to the project
    mongo_con = avalon.api.AvalonMongoDB()
    mongo_con.Session["AVALON_PROJECT"] = project_name
    mongo_con.install()
    self.data["dbcon"] = mongo_con

    # Project document
    self.data["project_doc"] = mongo_con.find_one({"type": "project"})

    # Asset document is optional — only queried when a name is known.
    asset_name = self.data.get("asset_name")
    if not asset_name:
        self.log.warning(
            "Asset name was not set. Skipping asset document query."
        )
        return

    self.data["asset_doc"] = mongo_con.find_one(
        {"type": "asset", "name": asset_name}
    )
def sync_avalon_data_to_workfile():
    """Synchronize avalon project data to the active hiero project.

    Sets the hiero project directory, output format (resolution, pixel
    aspect, name) and frame rate from the avalon project document.
    """
    # import session to get project dir
    project_name = avalon.Session["AVALON_PROJECT"]

    anatomy = Anatomy(project_name)
    work_template = anatomy.templates["work"]["path"]
    work_root = anatomy.root_value_for_template(work_template)
    active_project_root = os.path.join(
        work_root, project_name).replace("\\", "/")

    # getting project
    project = get_current_project()

    if "Tag Presets" in project.name():
        return

    log.debug("Synchronizing Pype metadata to project: {}".format(
        project.name()))

    # set project root with backward compatibility
    try:
        project.setProjectDirectory(active_project_root)
    except Exception:
        # old way of setting it
        project.setProjectRoot(active_project_root)

    # get project data from avalon db
    project_doc = avalon.io.find_one({"type": "project"})
    project_data = project_doc["data"]

    log.debug("project_data: {}".format(project_data))

    # get format and fps property from avalon db on project
    width = project_data["resolutionWidth"]
    height = project_data["resolutionHeight"]
    pixel_aspect = project_data["pixelAspect"]
    fps = project_data["fps"]
    format_name = project_data["code"]

    # FIX: renamed local `format` -> `output_format` so the builtin
    # `format` is not shadowed.
    output_format = hiero.core.Format(width, height, pixel_aspect, format_name)
    project.setOutputFormat(output_format)

    # set fps to hiero project
    project.setFramerate(fps)

    # TODO: add auto colorspace set from project drop
    log.info("Project property has been synchronised with Avalon db")
def process(self, context):
    """Collect project Anatomy into ``context.data["anatomy"]``.

    Raises:
        AssertionError: When the `AVALON_PROJECT` environment variable
            is not set (kept as AssertionError for backward compatibility).
    """
    project_name = os.environ.get("AVALON_PROJECT")
    if project_name is None:
        # FIX: the two concatenated literals were missing a separating
        # space ("...is not set.Could not...").
        raise AssertionError(
            "Environment `AVALON_PROJECT` is not set."
            " Could not initialize project's Anatomy."
        )

    context.data["anatomy"] = Anatomy(project_name)

    self.log.info(
        "Anatomy object collected for project \"{}\".".format(project_name)
    )
def _get_destination_path(self, asset, project):
    """Resolve the anatomy 'texture' path for the given asset document."""
    project_name = project["name"]

    # Hierarchy is the joined chain of the asset's parent names.
    parent_names = asset["data"]["parents"]
    hierarchy = os.path.join(*parent_names) if parent_names else ""

    fill_data = {
        "project": {
            "name": project_name,
            "code": project["data"]["code"]
        },
        "silo": asset.get("silo"),
        "asset": asset["name"],
        "family": "texture",
        "subset": "Main",
        "hierarchy": hierarchy
    }
    filled = Anatomy(project_name).format(fill_data)
    return filled["texture"]["path"]
def create_folders(self, basic_paths, project):
    """Create the folder structure under every configured anatomy root."""
    anatomy = Anatomy(project["full_name"])

    # Anatomy roots may be one root object or a dict of named roots.
    roots = anatomy.roots
    if isinstance(roots, dict):
        root_values = [root.value for root in roots.values()]
    else:
        root_values = [roots.value]

    for root_value in root_values:
        project_root = os.path.join(root_value, project["full_name"])
        full_paths = self.compute_paths(basic_paths, project_root)
        # Create folders
        for template_path in full_paths:
            full_path = template_path.format(project_root=project_root)
            if os.path.exists(full_path):
                self.log.debug(
                    "Folder already exists: {}".format(full_path)
                )
            else:
                self.log.debug("Creating folder: {}".format(full_path))
                os.makedirs(full_path)
def launch(self, session, entities, event):
    """Create the preset project folder structure and ftrack entities.

    Returns True on success, or a failure dict when presets are missing
    or any step raises.
    """
    entity = entities[0]
    project = self.get_project_from_entity(entity)
    project_folder_presets = (
        config.get_presets()
        .get("tools", {})
        .get("project_folder_structure")
    )
    if not project_folder_presets:
        return {
            "success": False,
            "message": "Project structure presets are not set."
        }

    try:
        # Get paths based on presets
        basic_paths = self.get_path_items(project_folder_presets)
        anatomy = Anatomy(project["full_name"])
        # NOTE(review): passes (basic_paths, entity, project, anatomy) —
        # confirm this matches the signature of `self.create_folders`;
        # a sibling `create_folders` elsewhere takes fewer arguments.
        self.create_folders(basic_paths, entity, project, anatomy)
        self.create_ftrack_entities(basic_paths, project)
    except Exception as exc:
        # Roll back the ftrack session so no partial entities remain.
        session.rollback()
        return {"success": False, "message": str(exc)}

    return True
def launch(self, session, entities, event):
    '''Callback method for custom action.

    Computes work and publish folder paths for every selected entity
    (and optionally its children) from anatomy templates and creates
    them on disk.
    '''
    with_childrens = True
    if self.without_interface is False:
        if "values" not in event["data"]:
            return
        with_childrens = event["data"]["values"]["children_included"]

    entity = entities[0]
    if entity.entity_type.lower() == "project":
        proj = entity
    else:
        proj = entity["project"]
    project_name = proj["full_name"]
    project_code = proj["name"]

    # Project-only selection without children means nothing to create.
    if entity.entity_type.lower() == 'project' and with_childrens is False:
        return {'success': True, 'message': 'Nothing was created'}

    all_entities = []
    all_entities.append(entity)
    if with_childrens:
        all_entities = self.get_notask_children(entity)

    anatomy = Anatomy(project_name)
    project_settings = get_project_settings(project_name)

    # Resolve work/publish folder templates and check whether they
    # contain an `{app...}` placeholder.
    work_keys = ["work", "folder"]
    work_template = anatomy.templates
    for key in work_keys:
        work_template = work_template[key]
    work_has_apps = "{app" in work_template

    publish_keys = ["publish", "folder"]
    publish_template = anatomy.templates
    for key in publish_keys:
        publish_template = publish_template[key]
    publish_has_apps = "{app" in publish_template

    tools_settings = project_settings["global"]["tools"]
    app_presets = tools_settings["Workfiles"]["sw_folders"]
    app_manager_apps = None
    # Applications are only needed when a template uses `{app}`.
    if app_presets and (work_has_apps or publish_has_apps):
        app_manager_apps = ApplicationManager().applications

    cached_apps = {}
    collected_paths = []
    for entity in all_entities:
        if entity.entity_type.lower() == "project":
            continue

        ent_data = {
            "project": {
                "name": project_name,
                "code": project_code
            }
        }

        ent_data["asset"] = entity["name"]

        # `link` includes project and the entity itself — strip both.
        parents = entity["link"][1:-1]
        hierarchy_names = [p["name"] for p in parents]
        hierarchy = ""
        if hierarchy_names:
            hierarchy = os.path.sep.join(hierarchy_names)
        ent_data["hierarchy"] = hierarchy

        tasks_created = False
        for child in entity["children"]:
            if child["object_type"]["name"].lower() != "task":
                continue
            tasks_created = True
            task_type_name = child["type"]["name"].lower()
            task_data = ent_data.copy()
            task_data["task"] = child["name"]

            # Resolve application folder names for this task type,
            # caching the lookup per app name.
            apps = []
            if app_manager_apps:
                possible_apps = app_presets.get(task_type_name) or []
                for app_name in possible_apps:
                    if app_name in cached_apps:
                        apps.append(cached_apps[app_name])
                        continue

                    app_def = app_manager_apps.get(app_name)
                    if app_def and app_def.is_host:
                        app_dir = app_def.host_name
                    else:
                        app_dir = app_name
                    cached_apps[app_name] = app_dir
                    apps.append(app_dir)

            # Template work
            if work_has_apps:
                app_data = task_data.copy()
                for app in apps:
                    app_data["app"] = app
                    collected_paths.append(
                        self.compute_template(anatomy, app_data, work_keys))
            else:
                collected_paths.append(
                    self.compute_template(anatomy, task_data, work_keys))

            # Template publish
            if publish_has_apps:
                app_data = task_data.copy()
                for app in apps:
                    app_data["app"] = app
                    collected_paths.append(
                        self.compute_template(anatomy, app_data, publish_keys))
            else:
                collected_paths.append(
                    self.compute_template(anatomy, task_data, publish_keys))

        if not tasks_created:
            # create path for entity
            collected_paths.append(
                self.compute_template(anatomy, ent_data, work_keys))
            collected_paths.append(
                self.compute_template(anatomy, ent_data, publish_keys))

    if len(collected_paths) == 0:
        return {
            "success": True,
            "message": "No project folders to create."
        }

    self.log.info("Creating folders:")
    # `set()` deduplicates paths shared by multiple tasks.
    for path in set(collected_paths):
        self.log.info(path)
        if not os.path.exists(path):
            os.makedirs(path)

    return {
        "success": True,
        "message": "Successfully created project folders."
    }
def get_anatomy(**kwargs):
    """Return a default :class:`Anatomy` instance.

    Keyword arguments are accepted for backward compatibility but are
    not used.
    """
    return Anatomy()
def real_launch(self, session, entities, event):
    """Deliver selected representations to a (optionally overridden) location.

    Reads the user's dialog values from the event, collects matching
    representation documents, fills the chosen delivery anatomy template
    for each and hands single files / sequences to the process helpers.
    """
    self.log.info("Delivery action just started.")
    report_items = collections.defaultdict(list)

    values = event["data"]["values"]
    location_path = values.pop("__location_path__")
    anatomy_name = values.pop("__new_anatomies__")
    project_name = values.pop("__project_name__")

    # Remaining boolean values are per-representation checkboxes.
    repre_names = []
    for key, value in values.items():
        if value is True:
            repre_names.append(key)

    if not repre_names:
        return {
            "success": True,
            "message": "Not selected components to deliver."
        }

    location_path = location_path.strip()
    if location_path:
        location_path = os.path.normpath(location_path)
        if not os.path.exists(location_path):
            os.makedirs(location_path)

    self.db_con.Session["AVALON_PROJECT"] = project_name

    self.log.debug("Collecting representations to process.")
    version_ids = self._get_interest_version_ids(entities)
    repres_to_deliver = list(self.db_con.find({
        "type": "representation",
        "parent": {"$in": version_ids},
        "name": {"$in": repre_names}
    }))
    anatomy = Anatomy(project_name)

    # When a location override was typed, replace the delivery root(s).
    format_dict = {}
    if location_path:
        location_path = location_path.replace("\\", "/")
        root_names = anatomy.root_names_from_templates(
            anatomy.templates["delivery"]
        )
        if root_names is None:
            format_dict["root"] = location_path
        else:
            format_dict["root"] = {}
            for name in root_names:
                format_dict["root"][name] = location_path

    datetime_data = config.get_datetime_data()
    for repre in repres_to_deliver:
        source_path = repre.get("data", {}).get("path")
        debug_msg = "Processing representation {}".format(repre["_id"])
        if source_path:
            debug_msg += " with published path {}.".format(source_path)
        self.log.debug(debug_msg)

        # Get destination repre path
        anatomy_data = copy.deepcopy(repre["context"])
        anatomy_data.update(datetime_data)
        anatomy_filled = anatomy.format_all(anatomy_data)
        test_path = anatomy_filled["delivery"][anatomy_name]

        if not test_path.solved:
            msg = (
                "Missing keys in Representation's context"
                " for anatomy template \"{}\"."
            ).format(anatomy_name)

            # NOTE(review): if neither `missing_keys` nor `invalid_types`
            # is set, `sub_msg` below is unbound — confirm `solved` being
            # False always implies one of them.
            if test_path.missing_keys:
                keys = ", ".join(test_path.missing_keys)
                sub_msg = (
                    "Representation: {}<br>- Missing keys: \"{}\"<br>"
                ).format(str(repre["_id"]), keys)

            if test_path.invalid_types:
                items = []
                for key, value in test_path.invalid_types.items():
                    items.append("\"{}\" {}".format(key, str(value)))
                keys = ", ".join(items)
                sub_msg = (
                    "Representation: {}<br>"
                    "- Invalid value DataType: \"{}\"<br>"
                ).format(str(repre["_id"]), keys)

            report_items[msg].append(sub_msg)
            self.log.warning(
                "{} Representation: \"{}\" Filled: <{}>".format(
                    msg, str(repre["_id"]), str(test_path)
                )
            )
            continue

        # Get source repre path; replace the frame number with `#` padding
        # so sequence paths can be matched.
        frame = repre['context'].get('frame')
        if frame:
            repre["context"]["frame"] = len(str(frame)) * "#"
        repre_path = self.path_from_represenation(repre, anatomy)
        # TODO add backup solution where root of path from component
        # is replaced with root
        args = (
            repre_path,
            anatomy,
            anatomy_name,
            anatomy_data,
            format_dict,
            report_items
        )
        if not frame:
            self.process_single_file(*args)
        else:
            self.process_sequence(*args)

    return self.report(report_items)
def launch(self, session, entities, event):
    """Save project roots/templates and custom attributes from dialog values.

    Splits the submitted form values into root overrides, multiselect
    enumerator values and plain custom attributes, persists them and
    optionally triggers the create-project-structure action.
    """
    if not event['data'].get('values', {}):
        return

    in_data = event['data']['values']

    # Collect all values whose key starts with the root prefix.
    root_values = {}
    root_key = "__root__"
    for key in tuple(in_data.keys()):
        if key.startswith(root_key):
            _key = key[len(root_key):]
            root_values[_key] = in_data.pop(key)

    # Regroup root values per root name when multiple roots exist.
    root_names = in_data.pop("__rootnames__", None)
    root_data = {}
    if root_names:
        for root_name in json.loads(root_names):
            root_data[root_name] = {}
            for key, value in tuple(root_values.items()):
                if key.startswith(root_name):
                    _key = key[len(root_name):]
                    root_data[root_name][_key] = value
    else:
        for key, value in root_values.items():
            root_data[key] = value

    project_name = entities[0]["full_name"]
    anatomy = Anatomy(project_name)
    # Persist template and root overrides for this project, then reload.
    anatomy.templates_obj.save_project_overrides(project_name)
    anatomy.roots_obj.save_project_overrides(
        project_name, root_data, override=True)
    anatomy.reset()

    # pop out info about creating project structure
    create_proj_struct = in_data.pop(self.create_project_structure_key)

    # Find hidden items for multiselect enumerators
    keys_to_process = []
    for key in in_data:
        if key.startswith("__hidden__"):
            keys_to_process.append(key)

    self.log.debug("Preparing data for Multiselect Enumerators")
    enumerators = {}
    for key in keys_to_process:
        new_key = key.replace("__hidden__", "")
        enumerator_items = in_data.pop(key)
        enumerators[new_key] = json.loads(enumerator_items)

    # find values set for multiselect enumerator
    for key, enumerator_items in enumerators.items():
        in_data[key] = []
        name = "__{}__".format(key)
        for item in enumerator_items:
            value = in_data.pop(item)
            if value is True:
                new_key = item.replace(name, "")
                in_data[key].append(new_key)

    # Remaining values are plain custom attributes.
    self.log.debug("Setting Custom Attribute values:")
    entity = entities[0]
    for key, value in in_data.items():
        entity["custom_attributes"][key] = value
        self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))

    session.commit()

    # Create project structure
    self.create_project_specific_config(entities[0]["full_name"], in_data)

    # Trigger Create Project Structure action
    if create_proj_struct is True:
        self.trigger_action("create.project.structure", event)

    return True
def interface(self, session, entities, event):
    """Build the "Delivery data to Client" dialog items.

    Collects common representation names and delivery anatomy templates
    for the selected entities and returns the ftrack form definition.
    """
    if event["data"].get("values", {}):
        return

    title = "Delivery data to Client"
    items = []
    item_splitter = {"type": "label", "value": "---"}

    project_entity = self.get_project_from_entity(entities[0])
    project_name = project_entity["full_name"]
    self.db_con.install()
    self.db_con.Session["AVALON_PROJECT"] = project_name
    project_doc = self.db_con.find_one({"type": "project"})
    if not project_doc:
        return {
            "success": False,
            "message": (
                "Didn't found project \"{}\" in avalon."
            ).format(project_name)
        }

    repre_names = self._get_repre_names(entities)
    self.db_con.uninstall()

    items.append({
        "type": "hidden",
        "name": "__project_name__",
        "value": project_name
    })

    # Prepare anatomy data
    anatomy = Anatomy(project_name)
    new_anatomies = []
    first = None
    for key, template in (anatomy.templates.get("delivery") or {}).items():
        # Use only keys with `{root}` or `{root[*]}` in value
        if isinstance(template, str) and "{root" in template:
            new_anatomies.append({
                "label": key,
                "value": key
            })
            if first is None:
                first = key

    skipped = False
    # Add error heading when there are no common components or templates
    if not repre_names or not new_anatomies:
        skipped = True
        items.append({
            "type": "label",
            "value": "<h1>Something went wrong:</h1>"
        })

    items.append({
        "type": "hidden",
        "name": "__skipped__",
        "value": skipped
    })

    if not repre_names:
        if len(entities) == 1:
            items.append({
                "type": "label",
                "value": (
                    "- Selected entity doesn't have components to deliver."
                )
            })
        else:
            items.append({
                "type": "label",
                "value": (
                    "- Selected entities don't have common components."
                )
            })

    # Add message if delivery anatomies are not set
    if not new_anatomies:
        items.append({
            "type": "label",
            "value": (
                "- `\"delivery\"` anatomy key is not set in config."
            )
        })

    # Skip if there are any data shortcomings
    if skipped:
        return {
            "items": items,
            "title": title
        }

    items.append({
        "value": "<h1>Choose Components to deliver</h1>",
        "type": "label"
    })
    # One checkbox per available representation name.
    for repre_name in repre_names:
        items.append({
            "type": "boolean",
            "value": False,
            "label": repre_name,
            "name": repre_name
        })

    items.append(item_splitter)
    items.append({
        "value": "<h2>Location for delivery</h2>",
        "type": "label"
    })
    items.append({
        "type": "label",
        "value": (
            "<i>NOTE: It is possible to replace `root` key in anatomy.</i>"
        )
    })
    items.append({
        "type": "text",
        "name": "__location_path__",
        "empty_text": "Type location path here...(Optional)"
    })

    items.append(item_splitter)
    items.append({
        "value": "<h2>Anatomy of delivery files</h2>",
        "type": "label"
    })
    items.append({
        "type": "label",
        "value": (
            "<p><i>NOTE: These can be set in Anatomy.yaml"
            " within `delivery` key.</i></p>"
        )
    })
    items.append({
        "type": "enumerator",
        "name": "__new_anatomies__",
        "data": new_anatomies,
        "value": first
    })

    return {
        "items": items,
        "title": title
    }
def interface(self, session, entities, event):
    """Build the "Prepare Project" dialog items.

    Prepares root override fields, custom attribute inputs and the
    auto-sync toggle for the selected project.
    """
    if event['data'].get('values', {}):
        return

    # Inform user that this may take a while
    self.show_message(event, "Preparing data... Please wait", True)
    self.log.debug("Preparing data which will be shown")

    self.log.debug("Loading custom attributes")

    project_name = entities[0]["full_name"]

    project_defaults = (
        config.get_presets(project_name)
        .get("ftrack", {})
        .get("project_defaults", {})
    )

    anatomy = Anatomy(project_name)
    if not anatomy.roots:
        return {
            "success": False,
            "message": (
                "Have issues with loading Roots for project \"{}\"."
            ).format(anatomy.project_name)
        }

    root_items = self.prepare_root_items(anatomy)

    ca_items, multiselect_enumerators = (
        self.prepare_custom_attribute_items(project_defaults)
    )

    self.log.debug("Heavy items are ready. Preparing last items group.")

    title = "Prepare Project"
    items = []

    # Add root items
    items.extend(root_items)
    items.append(self.item_splitter)

    # Ask if want to trigger Action Create Folder Structure
    items.append({
        "type": "label",
        "value": "<h3>Want to create basic Folder Structure?</h3>"
    })
    items.append({
        "name": self.create_project_structure_key,
        "type": "boolean",
        "value": False,
        "label": "Check if Yes"
    })

    items.append(self.item_splitter)

    items.append({
        "type": "label",
        "value": "<h3>Set basic Attributes:</h3>"
    })

    items.extend(ca_items)

    # This item will be last (before enumerators)
    # - sets value of auto synchronization
    auto_sync_name = "avalon_auto_sync"
    auto_sync_item = {
        "name": auto_sync_name,
        "type": "boolean",
        "value": project_defaults.get(auto_sync_name, False),
        "label": "AutoSync to Avalon"
    }
    # Add autosync attribute
    items.append(auto_sync_item)

    # Add enumerator items at the end
    for item in multiselect_enumerators:
        items.append(item)

    return {
        "items": items,
        "title": title
    }
def launch(self, session, entities, event):
    """Callback method for the custom action.

    return either a bool (True if successful or False if the action failed)
    or a dictionary with the keys `message` and `success`, the message
    should be a string and will be displayed as feedback to the user,
    success should be a bool, True if successful or False if the action
    failed.

    *session* is a `ftrack_api.Session` instance

    *entities* is a list of tuples each containing the entity type and
    the entity id. If the entity is hierarchical you will always get the
    entity type TypedContext, once retrieved through a get operation you
    will have the "real" entity type ie. example Shot, Sequence
    or Asset Build.

    *event* the unmodified original event
    """
    entity = entities[0]
    project_name = entity["project"]["full_name"]

    database = pypelib.get_avalon_database()

    # Get the asset document of the task's parent.
    asset_name = entity["parent"]["name"]
    asset_document = database[project_name].find_one({
        "type": "asset",
        "name": asset_name
    })

    hierarchy = ""
    asset_doc_parents = asset_document["data"].get("parents")
    if len(asset_doc_parents) > 0:
        hierarchy = os.path.join(*asset_doc_parents)

    application = avalon.lib.get_application(self.identifier)

    data = {
        "project": {
            "name": entity["project"]["full_name"],
            "code": entity["project"]["name"]
        },
        "task": entity["name"],
        "asset": asset_name,
        "app": application["application_dir"],
        "hierarchy": hierarchy
    }

    # Resolve the work directory from anatomy templates.
    try:
        anatomy = Anatomy(project_name)
        anatomy_filled = anatomy.format(data)
        workdir = os.path.normpath(anatomy_filled["work"]["folder"])
    except Exception as exc:
        msg = "Error in anatomy.format: {}".format(str(exc))
        self.log.error(msg, exc_info=True)
        return {
            "success": False,
            "message": msg
        }

    try:
        os.makedirs(workdir)
    except FileExistsError:
        pass

    # set environments for Avalon
    prep_env = copy.deepcopy(os.environ)
    prep_env.update({
        "AVALON_PROJECT": project_name,
        "AVALON_ASSET": asset_name,
        "AVALON_TASK": entity["name"],
        "AVALON_APP": self.identifier.split("_")[0],
        "AVALON_APP_NAME": self.identifier,
        "AVALON_HIERARCHY": hierarchy,
        "AVALON_WORKDIR": workdir
    })
    prep_env.update(anatomy.roots_obj.root_environments())

    # collect all parents from the task
    parents = []
    for item in entity['link']:
        parents.append(session.get(item['type'], item['id']))

    # collect all the 'environment' attributes from parents
    tools_attr = [prep_env["AVALON_APP"], prep_env["AVALON_APP_NAME"]]
    tools_env = asset_document["data"].get("tools_env") or []
    tools_attr.extend(tools_env)

    # Compose the final launch environment from tool definitions.
    tools_env = acre.get_tools(tools_attr)
    env = acre.compute(tools_env)
    env = acre.merge(env, current_env=dict(prep_env))
    env = acre.append(dict(prep_env), env)

    # Get path to execute
    st_temp_path = os.environ["PYPE_CONFIG"]
    os_plat = platform.system().lower()

    # Path to folder with launchers
    path = os.path.join(st_temp_path, "launchers", os_plat)
    # Full path to executable launcher
    execfile = None

    # Optional pre-launch hook; abort when it reports failure.
    if application.get("launch_hook"):
        hook = application.get("launch_hook")
        self.log.info("launching hook: {}".format(hook))
        ret_val = pypelib.execute_hook(
            application.get("launch_hook"), env=env)
        if not ret_val:
            return {
                'success': False,
                'message': "Hook didn't finish successfully {0}"
                .format(self.label)
            }

    if sys.platform == "win32":
        # Try every extension from PATHEXT to find the launcher.
        for ext in os.environ["PATHEXT"].split(os.pathsep):
            fpath = os.path.join(path.strip('"'), self.executable + ext)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                execfile = fpath
                break

        # Run SW if was found executable
        if execfile is None:
            return {
                "success": False,
                "message": "We didn't find launcher for {0}".format(
                    self.label
                )
            }
        popen = avalon.lib.launch(
            executable=execfile, args=[], environment=env
        )
    elif (sys.platform.startswith("linux")
            or sys.platform.startswith("darwin")):
        execfile = os.path.join(path.strip('"'), self.executable)
        if not os.path.isfile(execfile):
            msg = "Launcher doesn't exist - {}".format(execfile)
            self.log.error(msg)
            return {
                "success": False,
                "message": msg
            }

        # Probe read access; only the open/close result is used.
        try:
            fp = open(execfile)
        except PermissionError as perm_exc:
            msg = "Access denied on launcher {} - {}".format(
                execfile, perm_exc
            )
            self.log.exception(msg, exc_info=True)
            return {
                "success": False,
                "message": msg
            }
        fp.close()

        # check executable permission
        if not os.access(execfile, os.X_OK):
            msg = "No executable permission - {}".format(execfile)
            self.log.error(msg)
            return {
                "success": False,
                "message": msg
            }

        # Run SW if was found executable
        if execfile is None:
            return {
                "success": False,
                "message": "We didn't found launcher for {0}".format(
                    self.label
                )
            }
        popen = avalon.lib.launch(  # noqa: F841
            "/usr/bin/env", args=["bash", execfile], environment=env
        )

    # Change status of task to In progress
    presets = config.get_presets()["ftrack"]["ftrack_config"]

    if "status_update" in presets:
        statuses = presets["status_update"]

        actual_status = entity["status"]["name"].lower()
        already_tested = []
        ent_path = "/".join(
            [ent["name"] for ent in entity["link"]]
        )
        # Walk the status mapping until a valid next status is applied
        # or no candidate remains.
        while True:
            next_status_name = None
            for key, value in statuses.items():
                if key in already_tested:
                    continue
                if actual_status in value or "_any_" in value:
                    if key != "_ignore_":
                        next_status_name = key
                        already_tested.append(key)
                    break
                already_tested.append(key)

            if next_status_name is None:
                break

            try:
                query = "Status where name is \"{}\"".format(
                    next_status_name
                )
                status = session.query(query).one()

                entity["status"] = status
                session.commit()
                self.log.debug("Changing status to \"{}\" <{}>".format(
                    next_status_name, ent_path
                ))
                break

            except Exception:
                # Status name from presets does not exist in ftrack —
                # roll back and try the next candidate.
                session.rollback()
                msg = (
                    "Status \"{}\" in presets wasn't found"
                    " on Ftrack entity type \"{}\""
                ).format(next_status_name, entity.entity_type)
                self.log.warning(msg)

    return {
        "success": True,
        "message": "Launching {0}".format(self.label)
    }
def launch(self, session, entities, event):
    """Delete old published versions for selected ftrack AssetVersions.

    Keeps the N latest versions per subset, removes files (or whole
    folders when forced) of the rest, tags them `deleted` in mongo and
    unsets `is_published` on matching ftrack AssetVersions.
    """
    values = event["data"].get("values")
    if not values:
        return

    versions_count = int(values["last_versions_count"])
    force_to_remove = values["force_delete_publish_folder"]

    # Human-readable pieces for the start log message.
    _val1 = "OFF"
    if force_to_remove:
        _val1 = "ON"
    _val3 = "s"
    if versions_count == 1:
        _val3 = ""
    self.log.debug(
        ("Process started. Force to delete publish folder is set to [{0}]"
         " and will keep {1} latest version{2}.").format(
            _val1, versions_count, _val3))

    self.dbcon.install()

    project = None
    avalon_asset_names = []
    asset_versions_by_parent_id = collections.defaultdict(list)
    subset_names_by_asset_name = collections.defaultdict(list)

    ftrack_assets_by_name = {}
    for entity in entities:
        ftrack_asset = entity["asset"]

        parent_ent = ftrack_asset["parent"]
        parent_ftrack_id = parent_ent["id"]
        parent_name = parent_ent["name"]

        if parent_name not in avalon_asset_names:
            avalon_asset_names.append(parent_name)

        # Group asset versions by parent entity
        asset_versions_by_parent_id[parent_ftrack_id].append(entity)

        # Get project
        if project is None:
            project = parent_ent["project"]

        # Collect subset names per asset
        subset_name = ftrack_asset["name"]
        subset_names_by_asset_name[parent_name].append(subset_name)

        if subset_name not in ftrack_assets_by_name:
            ftrack_assets_by_name[subset_name] = ftrack_asset

    # Set Mongo collection
    project_name = project["full_name"]
    anatomy = Anatomy(project_name)
    self.dbcon.Session["AVALON_PROJECT"] = project_name
    self.log.debug("Project is set to {}".format(project_name))

    # Get Assets from avalon database
    assets = list(
        self.dbcon.find({
            "type": "asset",
            "name": {
                "$in": avalon_asset_names
            }
        }))
    asset_id_to_name_map = {
        asset["_id"]: asset["name"] for asset in assets
    }
    asset_ids = list(asset_id_to_name_map.keys())

    self.log.debug("Collected assets ({})".format(len(asset_ids)))

    # Get Subsets
    subsets = list(
        self.dbcon.find({
            "type": "subset",
            "parent": {
                "$in": asset_ids
            }
        }))
    # Keep only subsets whose names were selected in ftrack.
    subsets_by_id = {}
    subset_ids = []
    for subset in subsets:
        asset_id = subset["parent"]
        asset_name = asset_id_to_name_map[asset_id]
        available_subsets = subset_names_by_asset_name[asset_name]

        if subset["name"] not in available_subsets:
            continue

        subset_ids.append(subset["_id"])
        subsets_by_id[subset["_id"]] = subset

    self.log.debug("Collected subsets ({})".format(len(subset_ids)))

    # Get Versions
    versions = list(
        self.dbcon.find({
            "type": "version",
            "parent": {
                "$in": subset_ids
            }
        }))

    versions_by_parent = collections.defaultdict(list)
    for ent in versions:
        versions_by_parent[ent["parent"]].append(ent)

    def sort_func(ent):
        # Version documents are sorted by their numeric name.
        return int(ent["name"])

    # Collect the `versions_count` newest versions of each subset.
    all_last_versions = []
    for parent_id, _versions in versions_by_parent.items():
        for idx, version in enumerate(
                sorted(_versions, key=sort_func, reverse=True)):
            if idx >= versions_count:
                break
            all_last_versions.append(version)

    self.log.debug("Collected versions ({})".format(len(versions)))

    # Filter latest versions
    for version in all_last_versions:
        versions.remove(version)

    # Update versions_by_parent without filtered versions
    versions_by_parent = collections.defaultdict(list)
    for ent in versions:
        versions_by_parent[ent["parent"]].append(ent)

    # Filter already deleted versions
    versions_to_pop = []
    for version in versions:
        version_tags = version["data"].get("tags")
        if version_tags and "deleted" in version_tags:
            versions_to_pop.append(version)

    for version in versions_to_pop:
        subset = subsets_by_id[version["parent"]]
        asset_id = subset["parent"]
        asset_name = asset_id_to_name_map[asset_id]
        msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
            asset_name, subset["name"], version["name"])
        self.log.warning(
            ("Skipping version. Already tagged as `deleted`. < {} >"
             ).format(msg))
        versions.remove(version)

    version_ids = [ent["_id"] for ent in versions]

    self.log.debug("Filtered versions to delete ({})".format(
        len(version_ids)))

    if not version_ids:
        msg = "Skipping processing. Nothing to delete."
        self.log.debug(msg)
        return {"success": True, "message": msg}

    repres = list(
        self.dbcon.find({
            "type": "representation",
            "parent": {
                "$in": version_ids
            }
        }))

    self.log.debug("Collected representations to remove ({})".format(
        len(repres)))

    # Group representation file paths by their parent directory so the
    # same directory is only handled once.
    dir_paths = {}
    file_paths_by_dir = collections.defaultdict(list)
    for repre in repres:
        file_path, seq_path = self.path_from_represenation(repre, anatomy)
        if file_path is None:
            self.log.warning(
                ("Could not format path for represenation \"{}\"").format(
                    str(repre)))
            continue

        dir_path = os.path.dirname(file_path)
        dir_id = None
        for _dir_id, _dir_path in dir_paths.items():
            if _dir_path == dir_path:
                dir_id = _dir_id
                break

        if dir_id is None:
            dir_id = uuid.uuid4()
            dir_paths[dir_id] = dir_path

        file_paths_by_dir[dir_id].append([file_path, seq_path])

    # Collect directories that no longer exist on disk.
    dir_ids_to_pop = []
    for dir_id, dir_path in dir_paths.items():
        if os.path.exists(dir_path):
            continue

        dir_ids_to_pop.append(dir_id)

    # Pop dirs from both dictionaries
    for dir_id in dir_ids_to_pop:
        dir_paths.pop(dir_id)
        paths = file_paths_by_dir.pop(dir_id)
        # TODO report of missing directories?
        paths_msg = ", ".join(
            ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
        self.log.warning(
            ("Folder does not exist. Deleting it's files skipped: {}"
             ).format(paths_msg))

    if force_to_remove:
        self.delete_whole_dir_paths(dir_paths.values())
    else:
        self.delete_only_repre_files(dir_paths, file_paths_by_dir)

    # Tag the removed versions as `deleted` in one bulk write.
    mongo_changes_bulk = []
    for version in versions:
        orig_version_tags = version["data"].get("tags") or []
        version_tags = [tag for tag in orig_version_tags]
        if "deleted" not in version_tags:
            version_tags.append("deleted")

        if version_tags == orig_version_tags:
            continue

        update_query = {"_id": version["_id"]}
        update_data = {"$set": {"data.tags": version_tags}}
        mongo_changes_bulk.append(UpdateOne(update_query, update_data))

    if mongo_changes_bulk:
        self.dbcon.bulk_write(mongo_changes_bulk)

    self.dbcon.uninstall()

    # Set attribute `is_published` to `False` on ftrack AssetVersions
    for subset_id, _versions in versions_by_parent.items():
        subset_name = None
        for subset in subsets:
            if subset["_id"] == subset_id:
                subset_name = subset["name"]
                break

        if subset_name is None:
            self.log.warning("Subset with ID `{}` was not found.".format(
                str(subset_id)))
            continue

        ftrack_asset = ftrack_assets_by_name.get(subset_name)
        if not ftrack_asset:
            self.log.warning(("Could not find Ftrack asset with name `{}`"
                              ).format(subset_name))
            continue

        version_numbers = [int(ver["name"]) for ver in _versions]
        for version in ftrack_asset["versions"]:
            if int(version["version"]) in version_numbers:
                version["is_published"] = False

    try:
        session.commit()

    except Exception:
        msg = ("Could not set `is_published` attribute to `False`"
               " for selected AssetVersions.")
        self.log.warning(msg, exc_info=True)
        return {"success": False, "message": msg}

    return True
def launch(self, session, entities, event):
    """Download Ftrack AssetVersion thumbnails and register them in avalon.

    For every selected AssetVersion that has a thumbnail, the thumbnail
    file is downloaded under the project's "publish/thumbnail" anatomy
    template, a thumbnail entity is created in the avalon database and
    its id is stored on both the matching version and asset documents.
    Progress is reported through a Ftrack "Job" entity.

    Args:
        session: Ftrack api session.
        entities (list): Selected Ftrack entities (AssetVersions expected).
        event: Source Ftrack event (unused directly).

    Returns:
        True on success, or an action error dict {"success", "message"}.
    """
    # Create a running Ftrack job so the user can see progress/result.
    user = session.query("User where username is '{0}'".format(
        session.api_user)).one()
    action_job = session.create("Job", {
        "user": user,
        "status": "running",
        "data": json.dumps(
            {"description": "Storing thumbnails to avalon."})
    })
    session.commit()

    def _fail(msg):
        # Mark the Ftrack job failed and build the action error payload.
        # Same side-effect order as the original inline blocks:
        # set status, commit, warn, return.
        action_job["status"] = "failed"
        session.commit()
        self.log.warning(msg)
        return {"success": False, "message": msg}

    project = self.get_project_from_entity(entities[0])
    project_name = project["full_name"]
    anatomy = Anatomy(project_name)

    if "publish" not in anatomy.templates:
        return _fail("Anatomy does not have set publish key!")

    if "thumbnail" not in anatomy.templates["publish"]:
        return _fail((
            "There is not set \"thumbnail\""
            " template in Antomy for project \"{}\"").format(project_name))

    thumbnail_roots = os.environ.get(self.thumbnail_key)
    if (
        "{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
        and not thumbnail_roots
    ):
        return _fail(
            "`{}` environment is not set".format(self.thumbnail_key))

    # BUGFIX: guard against an unset environment variable. The original
    # called `thumbnail_roots.split(os.pathsep)` unconditionally and
    # raised AttributeError when the template did not contain
    # "{thumbnail_root}" and the env var was missing; now that case
    # falls through to the graceful "Can't access paths" failure below.
    existing_thumbnail_root = None
    for path in (thumbnail_roots or "").split(os.pathsep):
        if path and os.path.exists(path):
            existing_thumbnail_root = path
            break

    if existing_thumbnail_root is None:
        return _fail(("Can't access paths, set in `{}` ({})").format(
            self.thumbnail_key, thumbnail_roots))

    # Dry-run the template with placeholder values to detect template
    # keys this action cannot provide before touching any entity.
    example_template_data = {
        "_id": "ID",
        "thumbnail_root": "THUBMNAIL_ROOT",
        "thumbnail_type": "THUMBNAIL_TYPE",
        "ext": ".EXT",
        "project": {
            "name": "PROJECT_NAME",
            "code": "PROJECT_CODE"
        },
        "asset": "ASSET_NAME",
        "subset": "SUBSET_NAME",
        "version": "VERSION_NAME",
        "hierarchy": "HIERARCHY"
    }
    tmp_filled = anatomy.format_all(example_template_data)
    thumbnail_result = tmp_filled["publish"]["thumbnail"]
    if not thumbnail_result.solved:
        missing_keys = thumbnail_result.missing_keys
        invalid_types = thumbnail_result.invalid_types
        submsg = ""
        if missing_keys:
            submsg += "Missing keys: {}".format(", ".join(
                ["\"{}\"".format(key) for key in missing_keys]))

        if invalid_types:
            items = []
            for key, value in invalid_types.items():
                items.append("{}{}".format(str(key), str(value)))
            submsg += "Invalid types: {}".format(", ".join(items))

        return _fail((
            "Thumbnail Anatomy template expects more keys than action"
            " can offer. {}").format(submsg))

    thumbnail_template = anatomy.templates["publish"]["thumbnail"]

    self.db_con.install()

    for entity in entities:
        # Skip if entity is not AssetVersion (never should happend, but..)
        if entity.entity_type.lower() != "assetversion":
            continue

        # Skip if AssetVersion don't have thumbnail
        thumbnail_ent = entity["thumbnail"]
        if thumbnail_ent is None:
            self.log.debug((
                "Skipping. AssetVersion don't "
                "have set thumbnail. {}").format(entity["id"]))
            continue

        avalon_ents_result = self.get_avalon_entities_for_assetversion(
            entity, self.db_con)
        version_full_path = (
            "Asset: \"{project_name}/{asset_path}\""
            " | Subset: \"{subset_name}\""
            " | Version: \"{version_name}\"").format(**avalon_ents_result)

        version = avalon_ents_result["version"]
        if not version:
            self.log.warning(
                ("AssetVersion does not have version in avalon. {}"
                ).format(version_full_path))
            continue

        thumbnail_id = version["data"].get("thumbnail_id")
        if thumbnail_id:
            self.log.info(
                ("AssetVersion skipped, already has thubmanil set. {}"
                ).format(version_full_path))
            continue

        # Get thumbnail extension (Ftrack stores it without a dot
        # sometimes; normalize to ".ext")
        file_ext = thumbnail_ent["file_type"]
        if not file_ext.startswith("."):
            file_ext = ".{}".format(file_ext)

        avalon_project = avalon_ents_result["project"]
        avalon_asset = avalon_ents_result["asset"]
        hierarchy = ""
        parents = avalon_asset["data"].get("parents") or []
        if parents:
            hierarchy = "/".join(parents)

        # Prepare anatomy template fill data
        # 1. Create new id for thumbnail entity
        thumbnail_id = ObjectId()

        template_data = {
            "_id": str(thumbnail_id),
            "thumbnail_root": existing_thumbnail_root,
            "thumbnail_type": "thumbnail",
            "ext": file_ext,
            "project": {
                "name": avalon_project["name"],
                "code": avalon_project["data"].get("code")
            },
            "asset": avalon_ents_result["asset_name"],
            "subset": avalon_ents_result["subset_name"],
            "version": avalon_ents_result["version_name"],
            "hierarchy": hierarchy
        }

        anatomy_filled = anatomy.format(template_data)
        thumbnail_path = anatomy_filled["publish"]["thumbnail"]
        # Collapse doubled dots produced when "{ext}" in the template is
        # preceded by a dot and `file_ext` already starts with one.
        thumbnail_path = thumbnail_path.replace("..", ".")
        thumbnail_path = os.path.normpath(thumbnail_path)

        # Try each component location until one download succeeds.
        downloaded = False
        for loc in (thumbnail_ent.get("component_locations") or []):
            res_id = loc.get("resource_identifier")
            if not res_id:
                continue

            thubmnail_url = self.get_thumbnail_url(res_id)
            if self.download_file(thubmnail_url, thumbnail_path):
                downloaded = True
                break

        if not downloaded:
            self.log.warning("Could not download thumbnail for {}".format(
                version_full_path))
            continue

        # Clean template data from keys that are dynamic
        template_data.pop("_id")
        template_data.pop("thumbnail_root")

        thumbnail_entity = {
            "_id": thumbnail_id,
            "type": "thumbnail",
            "schema": "pype:thumbnail-1.0",
            "data": {
                "template": thumbnail_template,
                "template_data": template_data
            }
        }

        # Create thumbnail entity
        self.db_con.insert_one(thumbnail_entity)
        self.log.debug("Creating entity in database {}".format(
            str(thumbnail_entity)))

        # Set thumbnail id on the version and its asset
        self.db_con.update_one(
            {"_id": version["_id"]},
            {"$set": {"data.thumbnail_id": thumbnail_id}}
        )
        self.db_con.update_one(
            {"_id": avalon_asset["_id"]},
            {"$set": {"data.thumbnail_id": thumbnail_id}}
        )

    action_job["status"] = "done"
    session.commit()

    return True
def launch(self, session, event):
    """Run configured shell scripts when a task assignment changes.

    Reacts to Ftrack "Appointment" entity changes (user assigned to /
    removed from a task), resolves the task's work and publish
    directories from project Anatomy and passes them, together with the
    username, to every script configured for the event action.

    Args:
        session: Ftrack api session.
        event: Ftrack event with "data.entities" change payload.

    Returns:
        True when processing finished, an error dict when the publish
        path could not be parsed, or None when there is nothing to do.
    """
    if not event.get("data"):
        return

    entities_info = event["data"].get("entities")
    if not entities_info:
        return

    # Cache per-project settings and Anatomy so multiple entities in one
    # event do not re-query/re-build them.
    tmp_by_project_name = {}
    for entity_info in entities_info:
        if entity_info.get('entity_type') != 'Appointment':
            continue

        task_entity, user_entity = self._get_task_and_user(
            session,
            entity_info.get('action'),
            entity_info.get('changes'))

        if not task_entity or not user_entity:
            self.log.error("Task or User was not found.")
            continue

        # format directories to pass to shell script
        project_name = task_entity["project"]["full_name"]
        project_data = tmp_by_project_name.get(project_name) or {}
        if "scripts_by_action" not in project_data:
            project_settings = get_project_settings(project_name)
            _settings = (
                project_settings["ftrack"]["events"]["user_assignment"])
            project_data["scripts_by_action"] = _settings.get("scripts")
            tmp_by_project_name[project_name] = project_data

        scripts_by_action = project_data["scripts_by_action"]
        if not scripts_by_action:
            continue

        if "anatomy" not in project_data:
            project_data["anatomy"] = Anatomy(project_name)
            tmp_by_project_name[project_name] = project_data

        anatomy = project_data["anatomy"]
        data = self._get_template_data(task_entity)
        anatomy_filled = anatomy.format(data)

        # formatting work dir is easiest part as we can use whole path
        work_dir = anatomy_filled["work"]["folder"]

        # we also need publish but not whole
        anatomy_filled.strict = False
        publish = anatomy_filled["publish"]["folder"]

        # now find path to {asset}
        # BUGFIX: the asset name is regex-escaped; the original injected
        # it verbatim, so names containing regex metacharacters (".",
        # "+", "(", ...) corrupted or broke the match.
        m = re.search(
            "(^.+?{})".format(re.escape(data["asset"])), publish)
        if not m:
            msg = 'Cannot get part of publish path {}'.format(publish)
            self.log.error(msg)
            return {'success': False, 'message': msg}
        publish_dir = m.group(1)

        username = user_entity["username"]
        event_entity_action = entity_info["action"]

        # BUGFIX: `or []` — the original iterated `.get(...)` directly
        # and raised TypeError when no scripts were configured for this
        # particular action key.
        for script in scripts_by_action.get(event_entity_action) or []:
            self.log.info(("[{}] : running script for user {}").format(
                event_entity_action, username))
            self._run_script(script, [username, work_dir, publish_dir])

    return True
def get_data(self, context, versions_count):
    """Collect versions, representations and paths prepared for deletion.

    The newest `versions_count` versions of the subset are preserved,
    versions already tagged "deleted" are skipped, and the remaining
    representations are resolved to directories/files on disk.

    Args:
        context (dict): Context with "project", "asset" and "subset".
        versions_count (int): How many latest versions to keep.

    Returns:
        dict with "dir_paths", "file_paths_by_dir", "versions", "asset",
        "subset" and "archive_subset", or None when nothing to delete.
    """
    subset = context["subset"]
    asset = context["asset"]
    anatomy = Anatomy(context["project"]["name"])

    self.dbcon = AvalonMongoDB()
    self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
    self.dbcon.install()

    versions = list(self.dbcon.find({
        "type": "version",
        "parent": {"$in": [subset["_id"]]}
    }))

    versions_by_parent = collections.defaultdict(list)
    for version_doc in versions:
        versions_by_parent[version_doc["parent"]].append(version_doc)

    # Newest `versions_count` versions per parent are kept untouched.
    all_last_versions = []
    for parent_versions in versions_by_parent.values():
        ordered = sorted(
            parent_versions,
            key=lambda doc: int(doc["name"]),
            reverse=True)
        all_last_versions.extend(ordered[:versions_count])

    self.log.debug("Collected versions ({})".format(len(versions)))

    # Filter latest versions
    for version_doc in all_last_versions:
        versions.remove(version_doc)

    # Update versions_by_parent without filtered versions
    versions_by_parent = collections.defaultdict(list)
    for version_doc in versions:
        versions_by_parent[version_doc["parent"]].append(version_doc)

    # Filter already deleted versions
    already_deleted = [
        version_doc
        for version_doc in versions
        if "deleted" in (version_doc["data"].get("tags") or [])
    ]
    for version_doc in already_deleted:
        msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
            asset["name"], subset["name"], version_doc["name"])
        self.log.debug(
            ("Skipping version. Already tagged as `deleted`. < {} >"
            ).format(msg))
        versions.remove(version_doc)

    version_ids = [version_doc["_id"] for version_doc in versions]

    self.log.debug("Filtered versions to delete ({})".format(
        len(version_ids)))

    if not version_ids:
        msg = "Skipping processing. Nothing to delete."
        self.log.info(msg)
        self.message(msg)
        return

    repres = list(self.dbcon.find({
        "type": "representation",
        "parent": {"$in": version_ids}
    }))

    self.log.debug("Collected representations to remove ({})".format(
        len(repres)))

    dir_paths = {}
    file_paths_by_dir = collections.defaultdict(list)
    for repre in repres:
        file_path, seq_path = self.path_from_representation(repre, anatomy)
        if file_path is None:
            self.log.debug(
                ("Could not format path for represenation \"{}\"").format(
                    str(repre)))
            continue

        dir_path = os.path.dirname(file_path)
        # Reuse the id of an already-seen directory, otherwise mint one.
        dir_id = next(
            (
                known_id
                for known_id, known_path in dir_paths.items()
                if known_path == dir_path
            ),
            None)
        if dir_id is None:
            dir_id = uuid.uuid4()
            dir_paths[dir_id] = dir_path
        file_paths_by_dir[dir_id].append([file_path, seq_path])

    # Drop directories missing on disk from both mappings.
    missing_dir_ids = [
        dir_id
        for dir_id, dir_path in dir_paths.items()
        if not os.path.exists(dir_path)
    ]
    for dir_id in missing_dir_ids:
        dir_paths.pop(dir_id)
        paths = file_paths_by_dir.pop(dir_id)
        # TODO report of missing directories?
        paths_msg = ", ".join(
            ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
        self.log.debug(
            ("Folder does not exist. Deleting it's files skipped: {}"
            ).format(paths_msg))

    return {
        "dir_paths": dir_paths,
        "file_paths_by_dir": file_paths_by_dir,
        "versions": versions,
        "asset": asset,
        "subset": subset,
        "archive_subset": versions_count == 0
    }