def update(self, container, representation):
    """Update every AOV Read node in *container* to *representation*.

    Resolves the representation's on-disk sequences (falling back to a
    staged location when the package path is not a directory), then
    repoints each existing Read node and refreshes the container record.
    """
    import os

    parents = avalon.io.parenthood(representation)
    version, subset, asset, project = parents
    self.package_path = get_representation_path_(representation, parents)

    sequences = representation["data"]["sequence"]
    start = version["data"]["startFrame"]
    end = version["data"]["endFrame"]

    # Prefer the real package dir; otherwise stage a fallback location.
    if os.path.isdir(self.package_path):
        sequence_root = self.package_path
    else:
        sequence_root = self._fallback_stage(representation, version)
    self.resolve_path(sequences, sequence_root)

    # Map AOV name -> existing Read node inside this container.
    read_nodes = {}
    for member in container["_members"]:
        if member.Class() == "Read":
            knob_data = lib.get_avalon_knob_data(member)
            read_nodes[knob_data["aov"]] = member

    with lib.sync_copies(list(read_nodes.values())):
        for aov_name, aov_data in sequences.items():
            read = read_nodes.get(aov_name)
            if not read:
                # (TODO) Create Read node for new or removed AOV.
                continue
            self.set_path(read, aov_name=aov_name, path=aov_data["_resolved"])
            # self.set_format(read, aov_data["resolution"])
            self.set_range(read, start=start, end=end)

    node = container["_node"]
    with lib.sync_copies([node], force=True):
        asset_name = asset["data"].get("shortName", asset["name"])
        families = subset["data"]["families"]  # avalon-core:subset-3.0
        family_name = families[0].split(".")[-1]
        update = {
            "name": subset["name"],
            "representation": str(representation["_id"]),
            "namespace": "%s_%s" % (asset_name, family_name)
        }
        pipeline.update_container(node, update)
def update(self, container, representation):
    """Update every AOV Read node in *container* to *representation*.

    Repoints each existing Read node to the new representation's
    per-AOV sequence data, then refreshes the container record.
    """
    parents = avalon.io.parenthood(representation)
    self.package_path = get_representation_path_(representation, parents)

    # Map AOV name -> existing Read node inside this container.
    read_nodes = {}
    for member in container["_members"]:
        if member.Class() == "Read":
            knob_data = lib.get_avalon_knob_data(member)
            read_nodes[knob_data["aov"]] = member

    with lib.sync_copies(list(read_nodes.values())):
        for aov_name, aov_data in representation["data"]["sequence"].items():
            read = read_nodes.get(aov_name)
            if not read:
                continue
            self.set_path(read, aov_name=aov_name,
                          file_name=aov_data["fname"])
            self.set_format(read, aov_data["resolution"])
            self.set_range(read,
                           start=aov_data["seqStart"],
                           end=aov_data["seqEnd"])

    node = container["_node"]
    with lib.sync_copies([node], force=True):
        version, subset, asset, project = parents
        asset_name = asset["data"].get("shortName", asset["name"])
        families = subset["data"]["families"]  # avalon-core:subset-3.0
        family_name = families[0].split(".")[-1]
        update = {
            "name": subset["name"],
            "representation": str(representation["_id"]),
            "namespace": "%s_%s" % (asset_name, family_name)
        }
        pipeline.update_container(node, update)
def process(self, context):
    """Collect enabled avalon instance nodes into pyblish instances.

    Skips disabled nodes and nodes whose ``avalon:active`` switch is
    off; creates one instance per remaining node, sorted by full name
    for deterministic ordering.
    """
    from avalon.nuke import lib

    collected = []
    for node in lib.lsattr("avalon:id", value="pyblish.avalon.instance"):
        if node["disable"].value():
            continue
        try:
            if not node["avalon:active"].value():
                continue
        except NameError:
            # node has no active switch
            pass
        collected.append(node)

    for node in sorted(collected, key=lambda n: n.fullName()):
        node_name = node.fullName()

        data = lib.get_avalon_knob_data(node)
        data["objectName"] = node_name
        # For dependency tracking
        data["dependencies"] = dict()
        data["futureDependencies"] = dict()

        # Create the instance
        self.log.info("Creating instance for {}".format(node_name))
        instance = context.create_instance(data["subset"])
        instance[:] = [node]
        instance.data.update(data)
        instance.data["label"] = instance.name

    return context
def update(self, container, representation):
    """Update the Loader's path

    Nuke automatically tries to reset some variables when changing
    the loader's path to a new file. These automatic changes are to
    its inputs:
    """
    # Get version document for this representation
    version = io.find_one({
        "type": "version",
        "_id": representation["parent"]
    })

    # get corresponding group node in the current script
    GN = nuke.toNode(container['objectName'])

    # fix: was named `file`, shadowing the builtin
    file_path = api.get_representation_path(representation).replace("\\", "/")
    name = container['name']
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    namespace = container['namespace']
    colorspace = version_data.get("colorspace", None)
    object_name = "{}_{}".format(name, namespace)

    # keys copied verbatim from version data into the container imprint
    # NOTE(review): a missing key here raises KeyError — presumably all
    # versions carry these; confirm against the publish schema.
    add_keys = [
        "frameStart", "frameEnd", "handleStart", "handleEnd",
        "source", "author", "fps"
    ]

    data_imprint = {
        "representation": str(representation["_id"]),
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "colorspaceInput": colorspace,
        "objectName": object_name
    }
    for k in add_keys:
        data_imprint[k] = version_data[k]

    # adding nodes to node graph
    # just in case we are in group lets jump out of it
    nuke.endGroup()

    with anlib.maintained_selection():
        # preserve placement and avalon knob data across the re-paste
        xpos = GN.xpos()
        ypos = GN.ypos()
        avalon_data = anlib.get_avalon_knob_data(GN)
        nuke.delete(GN)
        # add group from nk
        nuke.nodePaste(file_path)
        GN = nuke.selectedNode()
        anlib.set_avalon_knob_data(GN, avalon_data)
        GN.setXYpos(xpos, ypos)
        GN["name"].setValue(object_name)

    # get all versions in list
    versions = io.find({
        "type": "version",
        "parent": version["parent"]
    }).distinct('name')
    max_version = max(versions)

    # change color of node: orange-ish when outdated, default otherwise
    if version.get("name") != max_version:
        GN["tile_color"].setValue(int("0xd88467ff", 16))
    else:
        GN["tile_color"].setValue(int(self.node_color, 16))

    self.log.info("updated to version: {}".format(version.get("name")))

    return update_container(GN, data_imprint)
def process(self, context):
    """Collect publishable instances from nodes carrying avalon knob data.

    Walks every node in the script, skips viewers/dots and disabled
    nodes, derives family/families from the avalon knob data and builds
    one pyblish instance per node. Group write nodes get render-target
    sub-families and their inner nodes appended.
    """
    asset_data = io.find_one({
        "type": "asset",
        "name": api.Session["AVALON_ASSET"]
    })
    self.log.debug("asset_data: {}".format(asset_data["data"]))

    instances = []
    root = nuke.root()
    self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
    for node in nuke.allNodes():
        if node.Class() in ["Viewer", "Dot"]:
            continue

        try:
            if node["disable"].value():
                continue
        except Exception as E:
            self.log.warning(E)

        # get data from avalon knob
        avalon_knob_data = anlib.get_avalon_knob_data(
            node, ["avalon:", "ak:"])
        self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))
        if not avalon_knob_data:
            continue
        if avalon_knob_data["id"] != "pyblish.avalon.instance":
            continue

        # establish families
        family = avalon_knob_data["family"]
        families_ak = avalon_knob_data.get("families", [])
        families = list()
        if families_ak:
            families.append(families_ak.lower())
        families.append(family)

        # except disabled nodes but exclude backdrops in test
        if ("nukenodes" not in family) and (node["disable"].value()):
            continue

        subset = avalon_knob_data.get("subset", None) or node["name"].value()

        # Create instance
        instance = context.create_instance(subset)
        instance.append(node)

        # get review knob value
        review = False
        if "review" in node.knobs():
            review = node["review"].value()
            # NOTE(review): reconstructed from a collapsed source — the
            # original may have guarded these appends on `review` being
            # truthy; confirm against upstream history.
            families.append("review")
            families.append("ftrack")

        # Add all nodes in group instances.
        if node.Class() == "Group":
            # check if it is write node in family
            if "write" in families:
                target = node["render"].value()
                if target == "Use existing frames":
                    # Local rendering
                    self.log.info("flagged for no render")
                    families.append("render")
                elif target == "Local":
                    # Local rendering
                    self.log.info("flagged for local render")
                    families.append("{}.local".format("render"))
                elif target == "On farm":
                    # Farm rendering
                    self.log.info("flagged for farm render")
                    instance.data["transfer"] = False
                    families.append("{}.farm".format("render"))
                if "render" in families:
                    families.remove("render")
                family = "write"

            node.begin()
            for i in nuke.allNodes():
                instance.append(i)
            node.end()

        self.log.debug("__ families: `{}`".format(families))

        # Get format (renamed from `format` to avoid shadowing builtin)
        root_format = root['format'].value()
        resolution_width = root_format.width()
        resolution_height = root_format.height()
        pixel_aspect = root_format.pixelAspect()

        # get publish knob value
        if "publish" not in node.knobs():
            anlib.add_publish_knob(node)

        # sync workfile version for everything except prerenders
        if not next((f for f in families if "prerender" in f), None) \
                and self.sync_workfile_version:
            # get version to instance for integration
            instance.data['version'] = instance.context.data['version']

        # fix: original dict literal listed "subset" twice
        instance.data.update({
            "subset": subset,
            "asset": avalon_knob_data["asset"],
            "label": node.name(),
            "name": node.name(),
            "family": family,
            "families": families,
            "avalonKnob": avalon_knob_data,
            "step": 1,
            "publish": node.knob('publish').value(),
            "fps": nuke.root()['fps'].value(),
            "resolutionWidth": resolution_width,
            "resolutionHeight": resolution_height,
            "pixelAspect": pixel_aspect,
            "review": review
        })

        self.log.info("collected instance: {}".format(instance.data))
        instances.append(instance)

    # create instances in context data if not are created yet
    if not context.data.get("instances"):
        context.data["instances"] = list()
    context.data["instances"].extend(instances)
    self.log.debug("context: {}".format(context))
def process(self, context):
    """Collect the current Nuke workfile as a publishable instance.

    Builds a single ``workfile`` instance from the root node, fills
    shared script data (frame range, format, version) into the context,
    and attaches an ``nk`` representation of the current script file.
    """
    root = nuke.root()
    current_file = os.path.normpath(nuke.root().name())
    knob_data = anlib.get_avalon_knob_data(root)
    anlib.add_publish_knob(root)

    family = "workfile"
    # NOTE(review): AVALON_TASK unset would make task None and
    # `task.capitalize()` raise AttributeError — presumably the host
    # guarantees the env var; confirm.
    task = os.getenv("AVALON_TASK", None)

    # creating instances per write node
    staging_dir = os.path.dirname(current_file)
    base_name = os.path.basename(current_file)
    subset = family + task.capitalize()

    # Get frame range
    first_frame = int(root["first_frame"].getValue())
    last_frame = int(root["last_frame"].getValue())
    handle_start = int(knob_data.get("handleStart", 0))
    handle_end = int(knob_data.get("handleEnd", 0))

    # Get format (renamed from `format` to avoid shadowing builtin)
    root_format = root['format'].value()
    resolution_width = root_format.width()
    resolution_height = root_format.height()
    pixel_aspect = root_format.pixelAspect()

    # Create instance
    instance = context.create_instance(subset)
    instance.add(root)

    script_data = {
        "asset": os.getenv("AVALON_ASSET", None),
        "frameStart": first_frame + handle_start,
        "frameEnd": last_frame - handle_end,
        "resolutionWidth": resolution_width,
        "resolutionHeight": resolution_height,
        "pixelAspect": pixel_aspect,

        # backward compatibility
        "handles": handle_start,

        "handleStart": handle_start,
        "handleEnd": handle_end,
        "step": 1,
        "fps": root['fps'].value(),
        "currentFile": current_file,
        # NOTE(review): int(None) raises if no version is parsed from
        # the path — verify get_version_from_path never returns None.
        "version": int(pype.get_version_from_path(current_file)),
        "host": pyblish.api.current_host(),
        "hostVersion": nuke.NUKE_VERSION_STRING
    }
    context.data.update(script_data)

    # creating instance data
    instance.data.update({
        "subset": subset,
        "label": base_name,
        "name": base_name,
        "publish": root.knob('publish').value(),
        "family": family,
        "families": [family],
        "representations": list()
    })

    # adding basic script data
    instance.data.update(script_data)

    # creating representation
    representation = {
        'name': 'nk',
        'ext': 'nk',
        'files': base_name,
        "stagingDir": staging_dir,
    }
    instance.data["representations"].append(representation)

    self.log.info('Publishing script version')

    # create instances in context data if not are created yet
    if not context.data.get("instances"):
        context.data["instances"] = list()
    context.data["instances"].append(instance)