def test_update():
    """Updating works

    Publishes three versions of a simple cube model, then loads
    version 2 into a fresh scene and updates the container to
    version 3.
    """
    # Build the instance content: a cube grouped under ROOT, selected
    # so the creator uses the selection.
    transform, generator = cmds.polyCube(name="body_PLY")
    group = cmds.group(transform, name="ROOT")

    cmds.select(group, replace=True)
    maya.create(
        name="modelDefault",
        asset=ASSET_NAME,
        family="ava.model",
        options={"useSelection": True})

    # Comply with save validator
    cmds.file(save=True)

    publish()
    publish()
    publish()  # Version 3

    # Start from an empty scene before loading
    cmds.file(new=True, force=True)

    # Walk the hierarchy: asset -> subset -> version 2 -> "ma" representation
    asset = io.find_one({
        "type": "asset",
        "name": ASSET_NAME
    })

    subset = io.find_one({
        "parent": asset["_id"],
        "type": "subset",
        "name": "modelDefault"
    })

    version = io.find_one({
        "parent": subset["_id"],
        "type": "version",
        "name": 2
    })

    assert version

    representation = io.find_one({
        "parent": version["_id"],
        "type": "representation",
        "name": "ma"
    })

    # Load version 2, then update the resulting container to version 3
    maya.load(representation["_id"])

    container = next(maya.ls())
    maya.update(container, 3)
def get_avalon_parent(self, entity):
    """Return names of all visual ancestors of *entity*, outermost first."""
    names = []
    current = entity
    # Walk up the visualParent chain until the root (None) is reached
    while current['data']['visualParent'] is not None:
        current = io.find_one({'_id': current['data']['visualParent']})
        names.append(current['name'])
    # Collected innermost-first; callers expect outermost-first
    names.reverse()
    return names
def _copy_representations(self, representation_id): """Copy all documents and files of representation and dependencies""" # Representation representation = self._find_one({"_id": representation_id}) if not representation: representation = io.find_one({"_id": representation_id}) self._insert_one(representation) # Version version = io.find_one({"_id": representation["parent"]}) if not self._find_one({"_id": version["_id"]}): self._insert_one(version) # Subset subset = io.find_one({"_id": version["parent"]}) if not self._find_one({"_id": subset["_id"]}): self._insert_one(subset) # Asset asset = io.find_one({"_id": subset["parent"]}) if not self._find_one({"_id": asset["_id"]}): asset["parent"] = self._project["_id"] self._insert_one(asset) # Asset Visual Parent parent_id = asset["data"]["visualParent"] if parent_id: parent_id = io.ObjectId(parent_id) if not self._find_one({"_id": parent_id}): parent_asset = io.find_one({"_id": parent_id}) parent_asset["parent"] = self._project["_id"] self._insert_one(parent_asset) # Dependencies for dependency_id in version["data"]["dependencies"]: dependency_id = io.ObjectId(dependency_id) for representation_ in io.find({"parent": dependency_id}): self._copy_representations(representation_["_id"]) # Copy package parents = io.parenthood(representation) src_package = get_representation_path_(representation, parents) parents = parents[:-1] + [self._project] representation["data"]["reprRoot"] = self._project["data"].get("root") dst_package = get_representation_path_(representation, parents) self._copy_dir(src_package, dst_package)
def update(self, container, representation):
    """Update the Loader's path

    Nuke automatically tries to reset some variables when changing
    the loader's path to a new file. These automatic changes are to
    its inputs, so the relevant knobs are preserved across the swap.

    Args:
        container (dict): container data holding the Reader tool.
        representation (dict): representation document to switch to.
    """
    from avalon.nuke import (
        viewer_update_and_undo_stop,
        ls_img_sequence,
        update_container
    )

    log.info("this i can see")
    node = container["_tool"]
    # TODO: prepare also for other readers img/geo/camera
    assert node.Class() == "Reader", "Must be Reader"

    root = api.get_representation_path(representation)
    file = ls_img_sequence(os.path.dirname(root), one=True)

    # Get start frame from version data
    version = io.find_one({"type": "version",
                           "_id": representation["parent"]})
    start = version["data"].get("startFrame")
    if start is None:
        # NOTE: added the missing space so the message does not read
        # "versionassuming"
        log.warning("Missing start frame for updated version, "
                    "assuming starts at frame 0 for: "
                    "{} ({})".format(node['name'].value(), representation))
        start = 0

    with viewer_update_and_undo_stop():
        # Update the loader's path whilst preserving some values
        with preserve_trim(node):
            with preserve_inputs(node,
                                 knobs=["file",
                                        "first",
                                        "last",
                                        "originfirst",
                                        "originlast",
                                        "frame_mode",
                                        "frame"]):
                node["file"] = file["path"]

        # Set the global in to the start frame of the sequence
        global_in_changed = loader_shift(node, start, relative=False)
        if global_in_changed:
            # Log this change to the user
            log.debug("Changed '{}' global in:"
                      " {:d}".format(node['name'].value(), start))

        # Update the imprinted representation
        update_container(
            node,
            {"representation": str(representation["_id"])}
        )
def create_asset(data):
    """Create asset

    Requires:
        {"name": "uniquecode",
         "silo": "assets"}

    Optional:
        {"data": {}}

    Raises:
        RuntimeError: when no project exists, or an asset with the
            same name already exists.
    """
    # Work on a copy; `pop` below mutates the dict
    data = data.copy()

    project = io.find_one({"type": "project"})
    if project is None:
        raise RuntimeError("Project must exist prior to creating assets")

    # Link to parent by id if provided, otherwise parent to the project
    visual_parent = data.pop("visualParent", None)

    # Remaining keys in `data` become the asset's "data" payload
    asset = {
        "schema": "avalon-core:asset-2.0",
        "parent": project['_id'],
        "name": data.pop("name"),
        "silo": data.pop("silo"),
        "visualParent": visual_parent,
        "type": "asset",
        "data": data
    }

    # Asset *must* have a name and silo
    assert asset['name'], "Asset has no name"
    assert asset['silo'], "Asset has no silo"

    # Ensure it has a unique name
    asset_doc = io.find_one({
        "name": asset['name'],
        "type": "asset",
    })

    if asset_doc is not None:
        raise RuntimeError("Asset named {} already "
                           "exists.".format(asset['name']))

    schema.validate(asset)
    io.insert_one(asset)
def get_latest_version(asset_name, subset_name, family):
    """Retrieve latest files concerning extendFrame feature.

    Args:
        asset_name (str): name of the asset document.
        subset_name (str): name of the subset document.
        family (str): family the version's data must list.

    Returns:
        dict: highest version document of the subset (projected to
            name, frame range data and parent).
    """
    # Get asset
    # NOTE: previously this document was bound to a local also named
    # `asset_name`, shadowing the str parameter; renamed for clarity.
    asset = io.find_one({
        "type": "asset",
        "name": asset_name
    }, projection={"name": True})
    assert asset, "Asset `%s` not found, this is a bug" % asset_name

    subset = io.find_one(
        {
            "type": "subset",
            "name": subset_name,
            "parent": asset["_id"]
        },
        projection={
            "_id": True,
            "name": True
        },
    )

    # Check if subsets actually exists (pre-run check)
    assert subset, "No subsets found, please publish with `extendFrames` off"

    # Get version
    version_projection = {
        "name": True,
        "data.startFrame": True,
        "data.endFrame": True,
        "parent": True,
    }

    # Highest name first => latest version
    version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"],
            "data.families": family
        },
        projection=version_projection,
        sort=[("name", -1)],
    )

    assert version, "No version found, this is a bug"

    return version
def check_parent_hierarchical(self, entityId, attr):
    """Look up *attr* in the entity's data, climbing parents if absent."""
    if entityId is None:
        return None

    entity = io.find_one({"_id": entityId})
    data = entity['data']

    if attr not in data:
        # Not set here; try the parent entity
        return self.check_parent_hierarchical(entity['parent'], attr)

    self.log.info(attr)
    return data[attr]
def on_container_picked(self, container):
    """Point the widgets at the picked container's asset and subset."""
    if container is None:
        return

    asset_id = io.ObjectId(container["assetId"])
    asset = io.find_one({"_id": asset_id})
    if asset is None:
        main_logger.error("Asset not found.")
        return

    self.widget["silo"].setCurrentText(asset["silo"])
    self.widget["asset"].setCurrentText(asset["name"])

    subset_id = io.ObjectId(container["subsetId"])
    subset = io.find_one({"_id": subset_id})
    if subset is None:
        main_logger.error("Subset not found.")
        return

    self.widget["subset"].setCurrentText(subset["name"])
def get_project_code():
    """
    Obtain project code from database

    Returns:
        string: project code
    """
    project_doc = io.find_one({"type": "project"})
    return project_doc["data"].get("code", '')
def __init__(self, root_node=None, nodes=None, **kwargs):
    """Gather project/asset context for the given Nuke nodes.

    Args:
        root_node: Nuke root node; defaults to ``nuke.root()``.
        nodes (list, optional): nodes passed through to ``get_nodes``.
        **kwargs: optional "project" (project document) and
            "asset_name" overrides; the whole dict is kept on ``data``.
    """
    # Project document: explicit override, otherwise active project
    self._project = kwargs.get("project") or io.find_one(
        {"type": "project"})
    # Asset name: explicit override, otherwise the session's asset
    self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"]
    self._asset_entity = pype.get_asset(self._asset)
    self._root_node = root_node or nuke.root()
    self._nodes = self.get_nodes(nodes=nodes)

    self.data = kwargs
def update_package_version(container, version): """ Update package by version number Args: container (dict): container data of the container node version (int): the new version number of the package Returns: None """ # Versioning (from `core.maya.pipeline`) current_representation = io.find_one( {"_id": io.ObjectId(container["representation"])}) assert current_representation is not None, "This is a bug" version_, subset, asset, project = io.parenthood(current_representation) if version == -1: new_version = io.find_one({ "type": "version", "parent": subset["_id"] }, sort=[("name", -1)]) else: new_version = io.find_one({ "type": "version", "parent": subset["_id"], "name": version, }) assert new_version is not None, "This is a bug" # Get the new representation (new file) new_representation = io.find_one({ "type": "representation", "parent": new_version["_id"], "name": current_representation["name"] }) update_package(container, new_representation)
def create_items_from_nodes(nodes):
    """Create an item for the view based the container and content of it

    It fetches the look document based on the asset ID found in the content.
    The item will contain all important information for the tool to work.

    If there is an asset ID which is not registered in the project's
    collection it will log a warning message.

    Args:
        nodes (list): list of maya nodes

    Returns:
        list of dicts
    """
    asset_view_items = []

    # Group the nodes by the asset id they carry
    id_hashes = create_asset_id_hash(nodes)
    if not id_hashes:
        return asset_view_items

    for _id, id_nodes in id_hashes.items():
        try:
            database_id = io.ObjectId(_id)
        except io.InvalidId:
            # Malformed id on the node; report and move on
            log.warning("Invalid ObjectId '%s' on nodes: %s"
                        % (_id, id_nodes))
            continue

        asset = io.find_one({"_id": database_id},
                            projection={"name": True})

        # Skip if asset id is not found
        if not asset:
            log.warning("Id not found in the database, skipping '%s'." % _id)
            log.warning("Nodes: %s" % id_nodes)
            continue

        # Collect available look subsets for this asset
        looks = cblib.list_looks(asset["_id"])

        # Collect namespaces the asset is found in
        namespaces = set()
        for node in id_nodes:
            namespace = get_namespace_from_node(node)
            namespaces.add(namespace)

        asset_view_items.append({
            "label": asset["name"],
            "asset": asset,
            "looks": looks,
            "namespaces": namespaces
        })

    return asset_view_items
def process(self, session, **kwargs):
    """Implement the behavior for when the action is triggered

    Args:
        session (dict): environment dictionary

    Returns:
        Popen instance of newly spawned process
    """
    with pype.modified_environ(**session):
        # Get executable by name
        app = lib.get_application(self.name)
        executable = lib.which(app["executable"])

        # Run as server
        arguments = []

        # Resolve the tool environment for this application
        tools_env = acre.get_tools([self.name])
        env = acre.compute(tools_env)
        env = acre.merge(env, current_env=dict(os.environ))

        if not env.get('AVALON_WORKDIR', None):
            # Work directory not set yet: derive it from the project
            # anatomy and the current task/asset context.
            project_name = env.get("AVALON_PROJECT")
            anatomy = Anatomy(project_name)
            os.environ['AVALON_PROJECT'] = project_name
            io.Session['AVALON_PROJECT'] = project_name

            task_name = os.environ.get("AVALON_TASK",
                                       io.Session["AVALON_TASK"])
            asset_name = os.environ.get("AVALON_ASSET",
                                        io.Session["AVALON_ASSET"])
            application = lib.get_application(
                os.environ["AVALON_APP_NAME"])

            project_doc = io.find_one({"type": "project"})
            data = {
                "task": task_name,
                "asset": asset_name,
                "project": {
                    "name": project_doc["name"],
                    "code": project_doc["data"].get("code", '')
                },
                "hierarchy": pype.get_hierarchy(),
                "app": application["application_dir"]
            }
            # Fill the "work" template to get the working directory
            anatomy_filled = anatomy.format(data)
            workdir = anatomy_filled["work"]["folder"]

            os.environ["AVALON_WORKDIR"] = workdir

        # Include everything set above in the launch environment
        env.update(dict(os.environ))
        lib.launch(executable=executable,
                   args=arguments,
                   environment=env)
        return
def get_task_icons(self):
    """Cache a qtawesome icon for every task that configures one."""
    # Get the project configured icons from database
    project = io.find_one({"type": "project"})
    for task in project["config"].get("tasks", []):
        icon_name = task.get("icon", None)
        if not icon_name:
            continue
        self._icons[task["name"]] = qtawesome.icon(
            "fa.{}".format(icon_name),
            color=style.colors.default
        )
def fetch_looks(asset_ids):
    """Get all looks based on the asset id from the cbId attributes

    Use the given asset ID from the attribute which matches an ID from the
    database to use

    Args:
        asset_ids (list): list of unique asset IDs

    Returns:
        looks (list): looks per asset {asset_name : [look_data, look_data]}

    Raises:
        ValueError: when an asset id does not exist in the database.
    """
    publish_looks = list()
    for asset_id in asset_ids:
        # Get asset name for sorting
        object_id = io.ObjectId(asset_id)

        # Verify if asset ID is correct
        asset = io.find_one({"_id": object_id}, projection={"name": True})
        if not asset:
            raise ValueError("Could not find asset with objectId "
                             "`{}`".format(asset_id))

        # Get all data — one entry per look subset, with its latest
        # version (highest name)
        for subset in cblib.list_looks(object_id):
            version = io.find_one({
                "type": "version",
                "parent": subset["_id"]
            },
                projection={"name": True, "parent": True},
                sort=[("name", -1)])

            publish_looks.append({
                "asset": asset["name"],
                "subset": subset["name"],
                "version": version
            })

    return publish_looks
def find_parent(document):
    """Find and cache the parent document of *document*."""
    parent_id = document["parent"]
    try:
        # Cached from an earlier lookup
        return _DOC_CACHE[parent_id]
    except KeyError:
        parent = _DOC_CACHE[parent_id] = io.find_one({"_id": parent_id})
        return parent
def find_last_version(self, subset_name, asset_doc):
    """Return the highest version number of the subset, or None."""
    subset_doc = io.find_one({
        "type": "subset",
        "name": subset_name,
        "parent": asset_doc["_id"]
    })
    if subset_doc is None:
        self.log.debug("Subset entity does not exist yet.")
        return None

    # Highest name first => latest version
    version_doc = io.find_one(
        {
            "type": "version",
            "parent": subset_doc["_id"]
        },
        sort=[("name", -1)]
    )
    if not version_doc:
        return None
    return int(version_doc["name"])
def __init__(self, project):
    """Prepare syncing state between the current and a target project.

    Args:
        project (str): name of the target project.
    """
    # Target project name
    self.project = project
    # Document of the currently active project
    self.this_project = io.find_one({"type": "project"})
    # Target project document — presumably resolved on connect;
    # TODO(review): confirm where this gets set
    self.that_project = None

    # Lazily-established MongoDB connection state
    self._mongo_client = None
    self._database = None
    self._collection = None
    self._connected = False
def asset_by_id(id):
    """Return the asset name for *id*, caching hits (None on miss)."""
    # NOTE: parameter name shadows the builtin `id` but is kept for
    # backward compatibility with keyword callers.
    if id in _cached_asset:
        return _cached_asset[id]

    asset = io.find_one({"_id": io.ObjectId(id)},
                        projection={"name": True})
    if not asset:
        return None

    _cached_asset[id] = asset["name"]
    return _cached_asset[id]
def process(self, context):
    """Collect the project's short code onto the publish context."""
    project_doc = io.find_one({"type": "project"},
                              projection={"data.code": True})
    if not project_doc:
        raise RuntimeError("Can't find current project in database.")

    project_code = project_doc["data"].get("code", None)
    self.log.info("Collected project code: %s" % project_code)
    context.data["code"] = project_code
def update_dependency(container):
    """Update subset data and references

    This is for updating dependencies and relink them to assets in
    current project for the loaded subset that was originally moved
    from other project.

    You need to manually update the representation id value in
    container before using this function.

    Raises:
        Exception: when the representation id on the container does
            not exist in the database.
        IOError: when the resolved entry file does not exist on disk.
    """
    representation_id = cmds.getAttr(container + ".representation")
    representation_id = io.ObjectId(representation_id)
    representation = io.find_one({"_id": representation_id})
    if representation is None:
        raise Exception("Representation not found.")

    version, subset, asset, project = io.parenthood(representation)

    # Imprint the resolved parent ids back onto the container node
    cmds.setAttr(container + ".assetId",
                 str(asset["_id"]), type="string")
    cmds.setAttr(container + ".subsetId",
                 str(subset["_id"]), type="string")
    cmds.setAttr(container + ".versionId",
                 str(version["_id"]), type="string")

    # Update Reference path
    reference_node = next(
        iter(cmds.ls(cmds.sets(container, query=True), type="reference")),
        None)
    if reference_node is None:
        # No reference to update
        return

    package_path = get_representation_path_(representation,
                                            (version, subset,
                                             asset, project))

    # Map representation name to the Maya file type used on reload
    file_type = representation["name"]
    if file_type == "FBXCache":
        file_type = "FBX"
    elif file_type in ("GPUCache", "LookDev"):
        file_type = "MayaAscii"

    file_name = representation["data"]["entryFileName"]
    entry_path = os.path.join(package_path, file_name)

    if not os.path.isfile(entry_path):
        raise IOError("File Not Found: {!r}".format(entry_path))

    entry_path = env_embedded_path(entry_path)

    # Relink the existing reference to the new entry file
    cmds.file(entry_path,
              loadReference=reference_node,
              type=file_type,
              defaultExtensions=False)
def set_context(project, asset, task, app):
    '''Sets context for pyblish (must be done before pyblish is launched)

    Mutates both ``os.environ`` and ``io.Session`` with the avalon
    context keys, temporarily installing the database connection to
    resolve hierarchy and project code.

    :param project: Name of `Project` where instance should be published
    :type project: str
    :param asset: Name of `Asset` where instance should be published
    :type asset: str
    :param task: Name of the task; may be falsy, stored as ''
    :type task: str
    :param app: Name of the host application
    :type app: str
    '''
    os.environ["AVALON_PROJECT"] = project
    io.Session["AVALON_PROJECT"] = project

    os.environ["AVALON_ASSET"] = asset
    io.Session["AVALON_ASSET"] = asset

    if not task:
        task = ''
    os.environ["AVALON_TASK"] = task
    io.Session["AVALON_TASK"] = task

    # Connect to the database to resolve hierarchy and project code
    io.install()

    av_project = io.find_one({'type': 'project'})
    av_asset = io.find_one({
        "type": 'asset',
        "name": asset
    })

    # Hierarchy is the path of parent names joined by the os separator
    parents = av_asset['data']['parents']
    hierarchy = ''
    if parents and len(parents) > 0:
        hierarchy = os.path.sep.join(parents)

    os.environ["AVALON_HIERARCHY"] = hierarchy
    io.Session["AVALON_HIERARCHY"] = hierarchy

    os.environ["AVALON_PROJECTCODE"] = av_project['data'].get('code', '')
    io.Session["AVALON_PROJECTCODE"] = av_project['data'].get('code', '')

    io.Session["current_dir"] = os.path.normpath(os.getcwd())

    os.environ["AVALON_APP"] = app
    io.Session["AVALON_APP"] = app

    io.uninstall()
def from_workfile(self, additional_jobs=None): """Generate jobs from workfile (DCC App agnostic) Args: additional_jobs (list, optional): A list of callbacks """ # Add workfile session = api.Session host = api.registered_host() workfile = host.current_file() if workfile is None: # Must be saved since we are parsing workfile here raise Exception("Could not obtain workfile path.") # Compute remote work dir project = io.find_one({"type": "project"}) template = project["config"]["template"]["work"] remote_path = template.format( **{ "root": self.remote_root, "project": session["AVALON_PROJECT"], "silo": session["AVALON_SILO"], "asset": session["AVALON_ASSET"], "task": session["AVALON_TASK"], "app": session["AVALON_APP"], "user": self.remote_user, }) workfile_name = os.path.basename(workfile) local_user = session.get("AVALON_USER", getpass.getuser()) # Prevent workfile overwrite when the remote username is not the # same as local username. This happens when multiple local users # using same remote account to access remote machine. same_user = local_user == self.remote_user if not same_user and "{user}" in template: # Prefix local username into file name workfile_name = local_user + "_" + workfile_name remote_path += "/scenes/" # AVALON_SCENEDIR remote_path += workfile_name workfile = os.path.normpath(workfile) remote_path = os.path.normpath(remote_path) # Add workfile job self.add_job(files=[(workfile, remote_path)], type="Workfile", description="%s - %s" % (session["AVALON_ASSET"], os.path.basename(workfile))) # Additional jobs for job in additional_jobs or []: job()
def process(self, context):
    """Collect handle settings from the current asset document."""
    asset_data = io.find_one({
        "type": "asset",
        "name": api.Session["AVALON_ASSET"]
    })
    self.log.info("asset_data: {}".format(asset_data))

    data = asset_data["data"]
    context.data['handles'] = int(data.get("handles", 0))
    context.data["handleStart"] = int(data.get("handleStart", 0))
    context.data["handleEnd"] = int(data.get("handleEnd", 0))
def set_start_end_frames():
    """Apply the current asset's frame range to the Blender scene."""
    from avalon import io

    asset_doc = io.find_one({
        "type": "asset",
        "name": io.Session["AVALON_ASSET"]
    })

    scene = bpy.context.scene
    scene.frame_start = asset_doc["data"]["frameStart"]
    scene.frame_end = asset_doc["data"]["frameEnd"]
def get_asset(asset_name=None):
    """ Returning asset document from database """
    # Fall back to the session's current asset when no name was given
    name = asset_name or avalon.api.Session["AVALON_ASSET"]

    document = io.find_one({"name": name, "type": "asset"})
    if not document:
        raise TypeError("Entity \"{}\" was not found in DB".format(name))

    return document
def get_relationship(look):
    """Return the path to the relationship link file of a look."""
    representation = io.find_one(
        {"_id": io.ObjectId(look["representation"])})

    parents = io.parenthood(representation)
    package_path = get_representation_path_(representation, parents)

    return os.path.join(package_path,
                        representation["data"]["linkFname"])
def update(self, container, representation):
    """Update the Loader's path

    Nuke automatically tries to reset some variables when changing
    the loader's path to a new file. These automatic changes are to
    its inputs, so the imprinted data is refreshed alongside the path.

    Args:
        container (dict): container data holding the node's objectName.
        representation (dict): representation document to switch to.
    """
    from avalon.nuke import (update_container)

    node = nuke.toNode(container['objectName'])

    root = api.get_representation_path(representation).replace("\\", "/")

    # Get start frame from version data
    version = io.find_one({
        "type": "version",
        "_id": representation["parent"]
    })

    # get all versions in list
    versions = io.find({
        "type": "version",
        "parent": version["parent"]
    }).distinct('name')

    max_version = max(versions)

    updated_dict = {}
    updated_dict.update({
        "representation": str(representation["_id"]),
        "frameEnd": version["data"].get("frameEnd"),
        "version": version.get("name"),
        "colorspace": version["data"].get("colorspace"),
        "source": version["data"].get("source"),
        "handles": version["data"].get("handles"),
        "fps": version["data"].get("fps"),
        "author": version["data"].get("author"),
        "outputDir": version["data"].get("outputDir"),
    })

    # Update the imprinted representation
    update_container(node, updated_dict)

    node["file"].setValue(root)

    # change color of node: orange warning tint when not on the
    # latest version, standard tint otherwise
    if version.get("name") != max_version:
        node["tile_color"].setValue(int("0xd84f20ff", 16))
    else:
        node["tile_color"].setValue(int("0xff0ff0ff", 16))

    # NOTE: fixed "udated" typo in the log message
    self.log.info("updated to version: {}".format(version.get("name")))
def create_project(name):
    """Insert a new project document and return its inserted id."""
    existing = io.find_one({"type": "project", "name": name})
    if existing:
        raise RuntimeError("%s already exists" % name)

    document = {
        "schema": "avalon-core:project-2.0",
        "type": "project",
        "name": name,
        "data": dict(),
        "config": DEFAULTS["config"],
        "parent": None,
    }
    return io.insert_one(document).inserted_id
def _save_config_1_0(project_name, data):
    """Write apps/tasks/template config onto the project document."""
    document = io.find_one({"type": "project"})
    config = document["config"]

    # Lists are replaced wholesale; the template dict is merged
    for key in ("apps", "tasks"):
        config[key] = data.get(key, [])
    config["template"].update(data.get("template", {}))

    schema.validate(document)
    io.save(document)