def process(self):
    """Create a publish instance on a Write node.

    Reuses a selected Write node when one exists (and selection is
    requested via the "useSelection" option), otherwise creates a new
    one, then imprints avalon data plus Deadline submission knobs.

    Returns:
        nuke.Node: the Write node carrying the imprinted instance data.
    """
    from collections import OrderedDict
    from avalon.nuke import lib
    from reveries import lib as reveries_lib
    import nuke

    with lib.maintained_selection():
        if not (self.options or {}).get("useSelection"):
            lib.reset_selection()

        # Prefer an already-selected Write node over creating a new one
        selected_write = None
        for candidate in nuke.selectedNodes():
            if candidate.Class() == "Write":
                selected_write = candidate
                break

        instance = selected_write or nuke.createNode("Write")

        # Deadline render-farm submission settings (order preserved for UI)
        deadline_knobs = OrderedDict([
            (("divid", ""), lib.Knobby("Text_Knob", "")),
            ("deadlineFramesPerTask", 1),
            ("deadlineSuspendJob", False),
            ("deadlinePriority", 80),
            ("deadlinePool", reveries_lib.get_deadline_pools()),
        ])
        self.data.update(deadline_knobs)

        lib.imprint(instance, self.data, tab="avalon")

    return instance
def process(self, context, plugin):
    """Select the nodes reported invalid by a failed validation plug-in.

    Args:
        context (pyblish.api.Context): publish context with results.
        plugin (pyblish.api.Plugin): plug-in whose validation errored.

    Raises:
        ImportError: when not running inside Nuke.
    """
    try:
        # Host check only; `nuke` itself is used by the select helpers
        import nuke
    except ImportError:
        raise ImportError("Current host is not Nuke")

    errored_instances = get_errored_instances_from_context(context)

    # Apply pyblish.logic to get the instances for the plug-in
    instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

    # Get the invalid nodes for the plug-ins
    self.log.info("Finding invalid nodes..")
    invalid = list()
    for instance in instances:
        invalid_nodes = plugin.get_invalid(instance)
        if invalid_nodes:
            if isinstance(invalid_nodes, (list, tuple)):
                # Collect ALL invalid nodes (the previous code only kept
                # the first one, so users saw an incomplete selection)
                invalid.extend(invalid_nodes)
            else:
                self.log.warning("Plug-in returned to be invalid, "
                                 "but has no selectable nodes.")

    # Ensure unique (process each node only once)
    invalid = list(set(invalid))

    if invalid:
        self.log.info("Selecting invalid nodes: {}".format(invalid))
        reset_selection()
        select_nodes(invalid)
    else:
        self.log.info("No invalid nodes found.")
def extract_nkscript(self, packager):
    """Copy the instance's node tree into a .nk script in the package.

    For a Write node the image file path is temporarily swapped to the
    published sequence path so the exported script points at published
    media, and restored afterwards.

    Args:
        packager: publish packager providing the package dir and naming.
    """
    node = self.member[0]

    package_path = packager.create_package()

    # Non-commercial Nuke can only save .nknc scripts
    ext = "nknc" if nuke.env["nc"] else "nk"
    fname = packager.file_name(extension=ext)
    fpath = os.path.join(package_path, fname)

    with lib.maintained_selection():
        lib.reset_selection()
        for n in walk_tree(node):
            n["selected"].setValue(True)

        if node.Class() == "Write":
            # Swap image file path to published path before copy
            output = node["file"].value()
            node["file"].setValue(self.data["publishedSeqPatternPath"])
            try:
                nuke.nodeCopy(fpath)
            finally:
                # Restore the original path even if the copy fails, so
                # the artist's scene is never left pointing at the
                # published path
                node["file"].setValue(output)
        else:
            nuke.nodeCopy(fpath)

    packager.add_data({
        "outputNode": node.fullName(),
        "scriptName": fname,
    })
def process(self, instance):
    """Extract the instance's node tree as a .nk script representation.

    Stages the script to a temp dir and records representation data on
    the instance. A Write node's file path is temporarily swapped to the
    published sequence path during the copy, then restored.

    Args:
        instance (pyblish.api.Instance): instance whose first member is
            the output node.
    """
    node = instance[0]

    # Non-commercial Nuke can only save .nknc scripts
    ext = "nknc" if nuke.env["nc"] else "nk"
    staging_dir = utils.stage_dir()
    filename = "%s.%s" % (instance.data["subset"], ext)
    outpath = "%s/%s" % (staging_dir, filename)

    instance.data["repr.nkscript._stage"] = staging_dir
    instance.data["repr.nkscript._files"] = [filename]
    instance.data["repr.nkscript.scriptName"] = filename
    instance.data["repr.nkscript.outputNode"] = node.fullName()

    with lib.maintained_selection():
        lib.reset_selection()
        for n in walk_tree(node):
            n["selected"].setValue(True)

        if node.Class() == "Write":
            # Swap image file path to published path before copy
            output = node["file"].value()
            node["file"].setValue(instance.data["publishedSeqPatternPath"])
            try:
                nuke.nodeCopy(outpath)
            finally:
                # Restore the original path even if the copy fails, so
                # the artist's scene is never left pointing at the
                # published path
                node["file"].setValue(output)
        else:
            nuke.nodeCopy(outpath)
def process(self, instance):
    """Extract a group node as a 'gizmo' .nk representation.

    Duplicates the group via clipboard copy/paste, bakes contained
    gizmos into plain groups, strips avalon metadata knobs from the
    copy, temporarily swaps names so the copy exports under the
    original name, writes the .nk, then deletes the copy and restores
    the original name.
    """
    tmp_nodes = list()
    orig_grpn = instance[0]

    # Define extract output file path
    stagingdir = self.staging_dir(instance)
    filename = "{0}.nk".format(instance.name)
    path = os.path.join(stagingdir, filename)

    # maintain selection
    with anlib.maintained_selection():
        orig_grpn_name = orig_grpn.name()
        tmp_grpn_name = orig_grpn_name + "_tmp"
        # select original group node
        anlib.select_nodes([orig_grpn])
        # copy to clipboard
        nuke.nodeCopy("%clipboard%")
        # reset selection to none
        anlib.reset_selection()
        # paste clipboard
        nuke.nodePaste("%clipboard%")
        # assign pasted node (paste leaves it as the selected node)
        copy_grpn = nuke.selectedNode()
        # offset the copy so it does not overlap the original in the DAG
        copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos())
        # convert gizmos to groups
        pnutils.bake_gizmos_recursively(copy_grpn)
        # remove avalonknobs
        knobs = copy_grpn.knobs()
        avalon_knobs = [
            k for k in knobs.keys()
            for ak in ["avalon:", "ak:"]
            if ak in k
        ]
        # NOTE(review): "publish" is removed unconditionally -- this
        # raises KeyError if the copy has no "publish" knob; confirm
        # the creator always adds it
        avalon_knobs.append("publish")
        for ak in avalon_knobs:
            copy_grpn.removeKnob(knobs[ak])
        # add to temporary nodes
        tmp_nodes.append(copy_grpn)
        # swap names so the exported copy carries the original name
        orig_grpn.setName(tmp_grpn_name)
        copy_grpn.setName(orig_grpn_name)
        # create tmp nk file
        # save file to the path
        nuke.nodeCopy(path)
        # Clean up
        for tn in tmp_nodes:
            nuke.delete(tn)
        # rename back to original
        orig_grpn.setName(orig_grpn_name)

    if "representations" not in instance.data:
        instance.data["representations"] = []

    # create representation
    representation = {
        'name': 'gizmo',
        'ext': 'nk',
        'files': filename,
        "stagingDir": stagingdir
    }
    instance.data["representations"].append(representation)

    self.log.info("Extracted instance '{}' to: {}".format(
        instance.name, path))
    self.log.info("Data {}".format(instance.data))
def process(self, instance):
    """Extract backdrop contents as a .nk representation.

    Temporarily replaces the backdrop's external connections with
    Input/Output placeholder nodes, copies the node set to a .nk file,
    deletes the placeholders, and rewires the original connections.
    """
    tmp_nodes = list()
    # members after the backdrop node itself
    nodes = instance[1:]

    # Define extract output file path
    stagingdir = self.staging_dir(instance)
    filename = "{0}.nk".format(instance.name)
    path = os.path.join(stagingdir, filename)

    # maintain selection
    with anlib.maintained_selection():
        # all connections outside of backdrop
        connections_in = instance.data["nodeConnectionsIn"]
        connections_out = instance.data["nodeConnectionsOut"]
        self.log.debug("_ connections_in: `{}`".format(connections_in))
        self.log.debug("_ connections_out: `{}`".format(connections_out))

        # create input nodes and name them as passing node (*_INP)
        for n, inputs in connections_in.items():
            for i, input in inputs:
                inpn = nuke.createNode("Input")
                inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
                n.setInput(i, inpn)
                # place the placeholder where the external input was
                inpn.setXYpos(input.xpos(), input.ypos())
                nodes.append(inpn)
                tmp_nodes.append(inpn)
                anlib.reset_selection()

        # connect output node
        for n, output in connections_out.items():
            opn = nuke.createNode("Output")
            self.log.info(n.name())
            self.log.info(output.name())
            # find which input slot of the external node was fed by `n`
            # and temporarily feed it from the Output placeholder
            output.setInput(
                next((i for i, d in enumerate(output.dependencies())
                      if d.name() in n.name()), 0), opn)
            opn.setInput(0, n)
            opn.autoplace()
            nodes.append(opn)
            tmp_nodes.append(opn)
            anlib.reset_selection()

        # select nodes to copy
        anlib.reset_selection()
        anlib.select_nodes(nodes)
        # create tmp nk file
        # save file to the path
        nuke.nodeCopy(path)

        # Clean up
        for tn in tmp_nodes:
            nuke.delete(tn)

        # restore original connections
        # reconnect input node
        for n, inputs in connections_in.items():
            for i, input in inputs:
                n.setInput(i, input)
        # reconnect output node
        for n, output in connections_out.items():
            output.setInput(
                next((i for i, d in enumerate(output.dependencies())
                      if d.name() in n.name()), 0), n)

    if "representations" not in instance.data:
        instance.data["representations"] = []

    # create representation
    representation = {
        'name': 'nk',
        'ext': 'nk',
        'files': filename,
        "stagingDir": stagingdir
    }
    instance.data["representations"].append(representation)

    self.log.info("Extracted instance '{}' to: {}".format(
        instance.name, path))
    self.log.info("Data {}".format(instance.data))
def load(self, context, name, namespace, data):
    """ Loading function to import .nk file into script and wrap it on backdrop

    Arguments:
        context (dict): context of version
        name (str): name of the version
        namespace (str): asset name
        data (dict): compulsory attribute > not used

    Returns:
        nuke node: containerised nuke node object
    """

    # get main variables
    version = context['version']
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    namespace = namespace or context['asset']['name']
    colorspace = version_data.get("colorspace", None)
    object_name = "{}_{}".format(name, namespace)

    # prepare data for imprinting
    # add additional metadata from the version to imprint to Avalon knob
    add_keys = [
        "frameStart", "frameEnd", "handleStart", "handleEnd",
        "source", "author", "fps"
    ]

    data_imprint = {
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "colorspaceInput": colorspace,
        "objectName": object_name
    }

    for k in add_keys:
        # NOTE(review): raises KeyError if the version data is missing
        # any of these keys -- confirm upstream always writes them
        data_imprint.update({k: version_data[k]})

    # getting file path
    file = self.fname.replace("\\", "/")

    # adding nodes to node graph
    # just in case we are in group lets jump out of it
    nuke.endGroup()

    # Get mouse position (a throwaway NoOp is created at the cursor,
    # its position sampled, then deleted)
    n = nuke.createNode("NoOp")
    xcursor, ycursor = (n.xpos(), n.ypos())
    anlib.reset_selection()
    nuke.delete(n)

    # padding around the resulting backdrop, in DAG units
    bdn_frame = 50

    with anlib.maintained_selection():
        # add group from nk
        nuke.nodePaste(file)

        # get all pasted nodes
        new_nodes = list()
        nodes = nuke.selectedNodes()

        # get pointer position in DAG
        xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(
            nodes, direction="right", offset=200 + bdn_frame)

        # reset position to all nodes and replace inputs and output
        for n in nodes:
            anlib.reset_selection()
            # reposition relative to where the paste happened
            xpos = (n.xpos() - xcursor) + xpointer
            ypos = (n.ypos() - ycursor) + ypointer
            n.setXYpos(xpos, ypos)

            # replace Input nodes for dots
            # NOTE(review): `n.Class() in "Input"` is a substring test
            # (true when the class name is contained in "Input"), not
            # equality -- presumably intended as == "Input"; confirm
            if n.Class() in "Input":
                dot = nuke.createNode("Dot")
                new_name = n.name().replace("INP", "DOT")
                dot.setName(new_name)
                dot["label"].setValue(new_name)
                dot.setXYpos(xpos, ypos)
                new_nodes.append(dot)

                # rewire: every node that depended on the Input now
                # takes the Dot on the same input slot
                dep = n.dependent()
                for d in dep:
                    index = next(
                        (i for i, dpcy in enumerate(d.dependencies())
                         if n is dpcy), 0)
                    d.setInput(index, dot)

                # remove Input node
                anlib.reset_selection()
                nuke.delete(n)
                continue

            # replace Input nodes for dots
            elif n.Class() in "Output":
                dot = nuke.createNode("Dot")
                new_name = n.name() + "_DOT"
                dot.setName(new_name)
                dot["label"].setValue(new_name)
                dot.setXYpos(xpos, ypos)
                new_nodes.append(dot)

                # rewire: feed the Dot from the Output's upstream node
                dep = next((d for d in n.dependencies()), None)
                if dep:
                    dot.setInput(0, dep)

                # remove Input node
                anlib.reset_selection()
                nuke.delete(n)
                continue
            else:
                new_nodes.append(n)

        # reselect nodes with new Dot instead of Inputs and Output
        anlib.reset_selection()
        anlib.select_nodes(new_nodes)
        # place on backdrop
        bdn = nukescripts.autoBackdrop()

        # add frame offset
        xpos = bdn.xpos() - bdn_frame
        ypos = bdn.ypos() - bdn_frame
        bdwidth = bdn["bdwidth"].value() + (bdn_frame * 2)
        bdheight = bdn["bdheight"].value() + (bdn_frame * 2)

        bdn["xpos"].setValue(xpos)
        bdn["ypos"].setValue(ypos)
        bdn["bdwidth"].setValue(bdwidth)
        bdn["bdheight"].setValue(bdheight)

        bdn["name"].setValue(object_name)
        bdn["label"].setValue(
            "Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!"
            .format(object_name))
        bdn["note_font_size"].setValue(20)

        return containerise(node=bdn,
                            name=name,
                            namespace=namespace,
                            context=context,
                            loader=self.__class__.__name__,
                            data=data_imprint)
def build_sequences(cls, sequences, root_path, group_name, stamp_name, start, end): cls.resolve_path(sequences, root_path) # Filter out multi-channle sequence multiaovs = OrderedDict() singleaovs = OrderedDict() for aov_name in sorted(sequences, key=lambda k: k.lower()): data = sequences[aov_name] if cls.is_singleaov(data["_resolved"], start): singleaovs[aov_name] = data else: multiaovs[aov_name] = data multiaov_reads = list() singleaov_reads = OrderedDict() lib.reset_selection() for aov_name, data in multiaovs.items(): read = nuke.Node("Read") read["selected"].setValue(False) read.autoplace() path = data["_resolved"] cls.set_path(read, aov_name=aov_name, path=path) # cls.set_format(read, data["resolution"]) cls.set_range(read, start=start, end=end) # Mark aov name lib.set_avalon_knob_data(read, {("aov", "AOV"): aov_name}) multiaov_reads.append(read) nodes = multiaov_reads[:] if singleaovs: if "beauty" in singleaovs: # Default channel (RGBA) for exr_merge beauty_name = "beauty" else: # Ask artist if want to assign a beauty if not found beauty_name = cls.pick_beauty(singleaovs, group_name) with command.viewer_update_and_undo_stop(): group = nuke.createNode("Group") group.autoplace() with nuke_lib.group_scope(group): for aov_name, data in singleaovs.items(): read = nuke.Node("Read") read["selected"].setValue(False) read.autoplace() path = data["_resolved"] cls.set_path(read, aov_name=aov_name, path=path) # cls.set_format(read, data["resolution"]) cls.set_range(read, start=start, end=end) # Mark aov name knob = ("aov", "AOV") lib.set_avalon_knob_data(read, {knob: aov_name}) singleaov_reads[aov_name] = read if beauty_name: beauty = singleaov_reads.pop(beauty_name) else: beauty = singleaov_reads.popitem()[1] nuke_lib.exr_merge(beauty, singleaov_reads.values()) output = nuke.createNode("Output") output.autoplace() stamp = nuke.createNode("PostageStamp") stamp.setName(stamp_name) group.setName(group_name) nodes += [stamp, group] + group.nodes() return nodes