def process(self, context, plugin):
    """Zoom the node graph to the backdrop nodes of failed instances.

    Args:
        context (pyblish.api.Context): publish context holding "results"
        plugin (pyblish.api.Plugin): plug-in whose failed instances to frame
    """
    # Get the errored instances
    failed = []
    for result in context.data["results"]:
        if (result["error"] is not None
                and result["instance"] is not None
                and result["instance"] not in failed):
            failed.append(result["instance"])

    # Apply pyblish.logic to get the instances for the plug-in
    instances = pyblish.api.instances_by_plugin(failed, plugin)

    all_xC = list()
    all_yC = list()

    # maintain selection
    with anlib.maintained_selection():
        # collect all failed nodes xpos and ypos (screen-space centers)
        for instance in instances:
            bdn = instance[0]
            xC = bdn.xpos() + bdn.screenWidth() / 2
            yC = bdn.ypos() + bdn.screenHeight() / 2
            all_xC.append(xC)
            all_yC.append(yC)

    self.log.info("all_xC: `{}`".format(all_xC))
    self.log.info("all_yC: `{}`".format(all_yC))

    # Nothing to frame -- min() on an empty list would raise ValueError.
    if not all_xC:
        return

    # zoom to nodes in node graph
    nuke.zoom(2, [min(all_xC), min(all_yC)])
def process(self):
    """Create (or reuse) a Write node and imprint avalon/deadline data.

    Returns:
        nuke.Node: the Write node that was imprinted.
    """
    from collections import OrderedDict
    from avalon.nuke import lib
    from reveries import lib as reveries_lib
    import nuke

    with lib.maintained_selection():
        if not (self.options or {}).get("useSelection"):
            lib.reset_selection()

        # Reuse a selected Write node when one exists, otherwise create one.
        write_node = None
        for selected in nuke.selectedNodes():
            if selected.Class() == "Write":
                write_node = selected
                break
        node = write_node or nuke.createNode("Write")

        # Deadline submission knobs, preceded by a divider knob.
        extra_knobs = OrderedDict([
            (("divid", ""), lib.Knobby("Text_Knob", "")),
            ("deadlineFramesPerTask", 1),
            ("deadlineSuspendJob", False),
            ("deadlinePriority", 80),
            ("deadlinePool", reveries_lib.get_deadline_pools()),
        ])
        self.data.update(extra_knobs)

        lib.imprint(node, self.data, tab="avalon")

        return node
def bake_gizmos_recursively(in_group=None):
    """Convert every gizmo inside a group to a real Group node, recursively.

    Arguments:
        in_group (nuke.Node, optional): group node to process; defaults to
            the script root so the whole script is baked.
    """
    # Evaluate the default lazily: `in_group=nuke.Root()` in the signature
    # would be evaluated once at import time, not on each call.
    if in_group is None:
        in_group = nuke.Root()
    # preserve selection after all is done
    with anlib.maintained_selection():
        # jump to the group
        with in_group:
            for node in nuke.allNodes():
                if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
                    with node:
                        outputs = get_node_outputs(node)
                        group = node.makeGroup()
                        # Reconnect inputs and outputs if any
                        if outputs:
                            for n, pipes in outputs.items():
                                for i in pipes:
                                    n.setInput(i, group)
                        for i in range(node.inputs()):
                            group.setInput(i, node.input(i))
                        # Transfer position and name, then swap the baked
                        # group in place of the deleted gizmo.
                        group.setXYpos(node.xpos(), node.ypos())
                        name = node.name()
                        nuke.delete(node)
                        group.setName(name)
                        node = group

                # Recurse into any group (including freshly baked ones).
                if node.Class() == "Group":
                    bake_gizmos_recursively(node)
def extract_nkscript(self, packager):
    """Copy the member node tree into a packaged .nk (.nknc on NC) script."""
    root_node = self.member[0]

    package_path = packager.create_package()
    # Non-commercial Nuke can only write .nknc scripts.
    script_ext = "nknc" if nuke.env["nc"] else "nk"
    script_name = packager.file_name(extension=script_ext)
    script_path = os.path.join(package_path, script_name)

    with lib.maintained_selection():
        # Select the whole upstream tree so nodeCopy captures all of it.
        lib.reset_selection()
        for tree_node in walk_tree(root_node):
            tree_node["selected"].setValue(True)

        if root_node.Class() != "Write":
            nuke.nodeCopy(script_path)
        else:
            # Swap the image file path to the published path before the
            # copy, then restore the original value afterwards.
            original_output = root_node["file"].value()
            root_node["file"].setValue(self.data["publishedSeqPatternPath"])
            nuke.nodeCopy(script_path)
            root_node["file"].setValue(original_output)

    packager.add_data({
        "outputNode": root_node.fullName(),
        "scriptName": script_name,
    })
def process(self, instance):
    """Extract the instance's node tree as a staged .nk/.nknc script."""
    root_node = instance[0]
    # Non-commercial Nuke can only write .nknc scripts.
    script_ext = "nknc" if nuke.env["nc"] else "nk"
    staging_dir = utils.stage_dir()
    script_name = "{}.{}".format(instance.data["subset"], script_ext)
    script_path = "{}/{}".format(staging_dir, script_name)

    instance.data["repr.nkscript._stage"] = staging_dir
    instance.data["repr.nkscript._files"] = [script_name]
    instance.data["repr.nkscript.scriptName"] = script_name
    instance.data["repr.nkscript.outputNode"] = root_node.fullName()

    with lib.maintained_selection():
        # Select the whole upstream tree so nodeCopy captures all of it.
        lib.reset_selection()
        for tree_node in walk_tree(root_node):
            tree_node["selected"].setValue(True)

        if root_node.Class() != "Write":
            nuke.nodeCopy(script_path)
        else:
            # Swap the image file path to the published path before the
            # copy, then restore the original value afterwards.
            original_output = root_node["file"].value()
            root_node["file"].setValue(
                instance.data["publishedSeqPatternPath"])
            nuke.nodeCopy(script_path)
            root_node["file"].setValue(original_output)
def process(self, instance):
    """Render a thumbnail for the instance, keeping the user's selection."""
    with anlib.maintained_selection():
        families = instance.data["families"]
        self.log.debug("instance: {}".format(instance))
        self.log.debug("instance.data[families]: {}".format(families))
        self.render_thumbnail(instance)
def process(self):
    """Create a gizmo container group from the selection (or from scratch).

    With "useSelection": a single selected Group is imprinted directly;
    two or more nodes are collapsed into a new group. Without it, an
    empty Group is created with a guide StickyNote inside.

    Returns:
        nuke.Node: the imprinted group node, or None on invalid selection.
    """
    if (self.options or {}).get("useSelection"):
        nodes = self.nodes
        self.log.info(len(nodes))
        if len(nodes) == 1:
            anlib.select_nodes(nodes)
            node = nodes[-1]
            # check if Group node (was a substring test `in "Group"`;
            # equality is the intended comparison)
            if node.Class() == "Group":
                node["name"].setValue("{}_GZM".format(self.name))
                node["tile_color"].setValue(int(self.node_color, 16))
                return anlib.set_avalon_knob_data(node, self.data)
            else:
                msg = ("Please select a group node "
                       "you wish to publish as the gizmo")
                self.log.error(msg)
                nuke.message(msg)
        if len(nodes) >= 2:
            anlib.select_nodes(nodes)
            nuke.makeGroup()
            gizmo_node = nuke.selectedNode()
            gizmo_node["name"].setValue("{}_GZM".format(self.name))
            gizmo_node["tile_color"].setValue(int(self.node_color, 16))
            # add sticky node with guide
            with gizmo_node:
                sticky = nuke.createNode("StickyNote")
                sticky["label"].setValue(
                    "Add following:\n- set Input"
                    " nodes\n- set one Output1\n"
                    "- create User knobs on the group")
            # add avalon knobs
            return anlib.set_avalon_knob_data(gizmo_node, self.data)
        else:
            msg = ("Please select nodes you "
                   "wish to add to the gizmo")
            self.log.error(msg)
            nuke.message(msg)
            return
    else:
        with anlib.maintained_selection():
            gizmo_node = nuke.createNode("Group")
            gizmo_node["name"].setValue("{}_GZM".format(self.name))
            gizmo_node["tile_color"].setValue(int(self.node_color, 16))
            # add sticky node with guide
            with gizmo_node:
                sticky = nuke.createNode("StickyNote")
                sticky["label"].setValue(
                    "Add following:\n- add Input"
                    " nodes\n- add one Output1\n"
                    "- create User knobs on the group")
            # add avalon knobs
            return anlib.set_avalon_knob_data(gizmo_node, self.data)
def load(self, context, name, namespace, data):
    """Load a camera from a published file into the node graph.

    Arguments:
        context (dict): context of version
        name (str): name of the version
        namespace (str): asset name; falls back to the context asset name
        data (dict): compulsory loader attribute (unused here)

    Returns:
        nuke node: containerised Camera2 node
    """
    # get main variables
    version = context['version']
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    # fall back to the script's fps when the version carries none
    fps = version_data.get("fps") or nuke.root()["fps"].getValue()
    namespace = namespace or context['asset']['name']
    object_name = "{}_{}".format(name, namespace)

    # prepare data for imprinting
    # add additional metadata from the version to imprint to Avalon knob
    # NOTE: direct indexing below raises KeyError if a key is missing,
    # so the version document is expected to provide all of these.
    add_keys = ["source", "author", "fps"]

    data_imprint = {
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "objectName": object_name
    }

    for k in add_keys:
        data_imprint.update({k: version_data[k]})

    # getting file path (forward slashes for nuke)
    file = self.fname.replace("\\", "/")

    with anlib.maintained_selection():
        camera_node = nuke.createNode(
            "Camera2",
            "name {} file {} read_from_file True".format(
                object_name, file),
            inpanel=False)
        camera_node.forceValidate()
        camera_node["frame_rate"].setValue(float(fps))

        # workaround because nuke's bug is not adding
        # animation keys properly: round-trip the node through the
        # clipboard and re-fetch it by name, restoring its position
        xpos = camera_node.xpos()
        ypos = camera_node.ypos()
        nuke.nodeCopy("%clipboard%")
        nuke.delete(camera_node)
        nuke.nodePaste("%clipboard%")
        camera_node = nuke.toNode(object_name)
        camera_node.setXYpos(xpos, ypos)

    # color node by correct color by actual version
    self.node_version_color(version, camera_node)

    return containerise(node=camera_node,
                        name=name,
                        namespace=namespace,
                        context=context,
                        loader=self.__class__.__name__,
                        data=data_imprint)
def process(self, instance):
    """Render the slate for the instance while preserving node selection."""
    # Default the attribute when the plug-in settings did not provide it.
    # (The original `if hasattr: self.x = self.x` branch was a no-op.)
    if not hasattr(self, "viewer_lut_raw"):
        self.viewer_lut_raw = False

    with anlib.maintained_selection():
        self.log.debug("instance: {}".format(instance))
        self.log.debug("instance.data[families]: {}".format(
            instance.data["families"]))

        self.render_slate(instance)
def process(self, context, plugin):
    """Open the DAG view on each instance that failed the given plug-in."""
    # Collect unique instances that raised an error.
    failed = []
    for result in context.data["results"]:
        has_error = result["error"] is not None
        has_instance = result["instance"] is not None
        if has_error and has_instance and result["instance"] not in failed:
            failed.append(result["instance"])

    # Narrow down to instances compatible with the plug-in.
    instances = pyblish.api.instances_by_plugin(failed, plugin)

    # Keep the user's selection intact while jumping around the graph.
    with anlib.maintained_selection():
        for instance in instances:
            nuke.showDag(instance[0])
def process(self, instance):
    """Bake a review mov for the instance and register its representations."""
    families = instance.data["families"]

    self.log.info("Creating staging dir...")
    instance.data.setdefault("representations", [])

    staging_dir = os.path.normpath(
        os.path.dirname(instance.data['path']))
    instance.data["stagingDir"] = staging_dir
    self.log.info(
        "StagingDir `{0}`...".format(instance.data["stagingDir"]))

    with anlib.maintained_selection():
        exporter = pnlib.ExporterReviewMov(self, instance)
        on_farm = "render.farm" in families

        if not on_farm:
            data = exporter.generate_mov()
        else:
            # Farm renders bake later; strip local-review families now.
            instance.data["families"].remove("review")
            instance.data["families"].remove("ftrack")
            data = exporter.generate_mov(farm=True)
            self.log.debug("_ data: {}".format(data))
            for key in ("bakeRenderPath",
                        "bakeScriptPath",
                        "bakeWriteNodeName"):
                instance.data[key] = data.get(key)

        # Attach the generated representations to the instance.
        instance.data["representations"] += data["representations"]

    self.log.debug(
        "_ representations: {}".format(instance.data["representations"]))
def process(self, instance):
    """Generate a baking LUT for the instance and attach it as data."""
    families = instance.data["families"]
    self.log.info("Creating staging dir...")

    if "representations" not in instance.data:
        instance.data["representations"] = []
        # Derive the staging dir from the render output path.
        render_path = instance.data['path']
        instance.data["stagingDir"] = os.path.normpath(
            os.path.dirname(render_path))
    else:
        # Reuse the first representation's staging dir, tagging it for review.
        first_repre = instance.data["representations"][0]
        instance.data["stagingDir"] = first_repre["stagingDir"].replace(
            "\\", "/")
        first_repre["tags"] = ["review"]

    self.log.info(
        "StagingDir `{0}`...".format(instance.data["stagingDir"]))

    with anlib.maintained_selection():
        exporter = pnlib.ExporterReviewLut(self, instance)
        lut_data = exporter.generate_lut()

    # Record the LUT path and its representations on the instance.
    instance.data["lutPath"] = os.path.join(
        exporter.stagingDir, exporter.file).replace("\\", "/")
    instance.data["representations"] += lut_data["representations"]

    if "render.farm" in families:
        # Farm renders bake later; strip local-review families now.
        instance.data["families"].remove("review")
        instance.data["families"].remove("ftrack")

    self.log.debug(
        "_ lutPath: {}".format(instance.data["lutPath"]))
    self.log.debug(
        "_ representations: {}".format(instance.data["representations"]))
def load(self, context, name, namespace, data):
    """Loading function to get Gizmo into node graph.

    Arguments:
        context (dict): context of version
        name (str): name of the version
        namespace (str): asset name
        data (dict): compulsory attribute > not used

    Returns:
        nuke node: containerised nuke node object
    """
    version = context['version']
    version_data = version.get("data", {})
    namespace = namespace or context['asset']['name']
    object_name = "{}_{}".format(name, namespace)

    # Metadata imprinted onto the container's Avalon knob; extra keys are
    # read with direct indexing and so must exist on the version document.
    data_imprint = {
        "frameStart": version_data.get("frameStart", None),
        "frameEnd": version_data.get("frameEnd", None),
        "version": version.get("name", None),
        "colorspaceInput": version_data.get("colorspace", None),
        "objectName": object_name,
    }
    for key in ("frameStart", "frameEnd",
                "handleStart", "handleEnd",
                "source", "author", "fps"):
        data_imprint[key] = version_data[key]

    # forward slashes for nuke
    file = self.fname.replace("\\", "/")

    # just in case we are in a group, jump out of it before pasting
    nuke.endGroup()

    with anlib.maintained_selection():
        # paste the gizmo's .nk content and rename the resulting group
        nuke.nodePaste(file)
        GN = nuke.selectedNode()
        GN["name"].setValue(object_name)

    return containerise(node=GN,
                        name=name,
                        namespace=namespace,
                        context=context,
                        loader=self.__class__.__name__,
                        data=data_imprint)
def process(self, instance):
    """Extract a backdrop's contents to a .nk file.

    Temporary Input/Output nodes stand in for connections that cross the
    backdrop boundary; they are copied with the content, then deleted and
    the original wiring is restored.
    """
    tmp_nodes = list()
    nodes = instance[1:]

    # Define extract output file path
    stagingdir = self.staging_dir(instance)
    filename = "{0}.nk".format(instance.name)
    path = os.path.join(stagingdir, filename)

    # maintain selection
    with anlib.maintained_selection():
        # all connections outside of backdrop
        connections_in = instance.data["nodeConnectionsIn"]
        connections_out = instance.data["nodeConnectionsOut"]
        self.log.debug("_ connections_in: `{}`".format(connections_in))
        self.log.debug("_ connections_out: `{}`".format(connections_out))

        # create input nodes and name them as passing node (*_INP)
        for n, inputs in connections_in.items():
            for i, input in inputs:
                inpn = nuke.createNode("Input")
                inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
                # splice the placeholder into the connection slot
                n.setInput(i, inpn)
                inpn.setXYpos(input.xpos(), input.ypos())
                nodes.append(inpn)
                tmp_nodes.append(inpn)
                anlib.reset_selection()

        # connect output node
        for n, output in connections_out.items():
            opn = nuke.createNode("Output")
            self.log.info(n.name())
            self.log.info(output.name())
            # NOTE(review): substring match `d.name() in n.name()` locates
            # the downstream input index; presumably intended as an exact
            # name match -- confirm against caller data.
            output.setInput(
                next((i for i, d in enumerate(output.dependencies())
                      if d.name() in n.name()), 0), opn)
            opn.setInput(0, n)
            opn.autoplace()
            nodes.append(opn)
            tmp_nodes.append(opn)
            anlib.reset_selection()

        # select nodes to copy
        anlib.reset_selection()
        anlib.select_nodes(nodes)
        # create tmp nk file
        # save file to the path
        nuke.nodeCopy(path)

        # Clean up the temporary Input/Output placeholders
        for tn in tmp_nodes:
            nuke.delete(tn)

        # restore original connections
        # reconnect input node
        for n, inputs in connections_in.items():
            for i, input in inputs:
                n.setInput(i, input)
        # reconnect output node
        for n, output in connections_out.items():
            output.setInput(
                next((i for i, d in enumerate(output.dependencies())
                      if d.name() in n.name()), 0), n)

    if "representations" not in instance.data:
        instance.data["representations"] = []

    # create representation
    representation = {
        'name': 'nk',
        'ext': 'nk',
        'files': filename,
        "stagingDir": stagingdir
    }
    instance.data["representations"].append(representation)

    self.log.info("Extracted instance '{}' to: {}".format(
        instance.name, path))
    self.log.info("Data {}".format(instance.data))
def update(self, container, representation):
    """Update the Loader's path.

    Nuke automatically tries to reset some variables when changing
    the loader's path to a new file. These automatic changes are to
    its inputs. The group is therefore deleted and re-pasted from the
    new file, re-imprinting the preserved avalon knob data.

    Args:
        container (dict): container data of the existing gizmo group
        representation (dict): representation document to switch to

    Returns:
        dict: updated container data
    """
    # Get version from io
    version = io.find_one({
        "type": "version",
        "_id": representation["parent"]
    })

    # get corresponding node
    GN = nuke.toNode(container['objectName'])

    file = api.get_representation_path(representation).replace("\\", "/")
    name = container['name']
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    namespace = container['namespace']
    colorspace = version_data.get("colorspace", None)
    object_name = "{}_{}".format(name, namespace)

    # keys read with direct indexing -- expected on the version document
    add_keys = [
        "frameStart", "frameEnd", "handleStart", "handleEnd",
        "source", "author", "fps"
    ]

    data_imprint = {
        "representation": str(representation["_id"]),
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "colorspaceInput": colorspace,
        "objectName": object_name
    }

    for k in add_keys:
        data_imprint.update({k: version_data[k]})

    # adding nodes to node graph
    # just in case we are in group lets jump out of it
    nuke.endGroup()

    with anlib.maintained_selection():
        xpos = GN.xpos()
        ypos = GN.ypos()
        avalon_data = anlib.get_avalon_knob_data(GN)
        nuke.delete(GN)
        # add group from nk; the pasted node becomes the selection
        nuke.nodePaste(file)
        GN = nuke.selectedNode()
        anlib.set_avalon_knob_data(GN, avalon_data)
        GN.setXYpos(xpos, ypos)
        GN["name"].setValue(object_name)

    # get all versions in list
    versions = io.find({
        "type": "version",
        "parent": version["parent"]
    }).distinct('name')

    max_version = max(versions)

    # change color of node: highlight when an outdated version is loaded
    if version.get("name") != max_version:
        GN["tile_color"].setValue(int("0xd88467ff", 16))
    else:
        GN["tile_color"].setValue(int(self.node_color, 16))

    # fixed log message typo ("udated" -> "updated")
    self.log.info("updated to version: {}".format(version.get("name")))

    return update_container(GN, data_imprint)
def process(self, instance):
    """Extract a gizmo group to a .nk file.

    Works on a clipboard copy of the group so the original is untouched:
    the copy is baked (gizmos -> groups), stripped of avalon knobs,
    temporarily given the original's name for the export, then deleted.
    """
    tmp_nodes = list()
    orig_grpn = instance[0]

    # Define extract output file path
    stagingdir = self.staging_dir(instance)
    filename = "{0}.nk".format(instance.name)
    path = os.path.join(stagingdir, filename)

    # maintain selection
    with anlib.maintained_selection():
        orig_grpn_name = orig_grpn.name()
        tmp_grpn_name = orig_grpn_name + "_tmp"
        # select original group node
        anlib.select_nodes([orig_grpn])

        # copy to clipboard
        nuke.nodeCopy("%clipboard%")

        # reset selection to none
        anlib.reset_selection()

        # paste clipboard
        nuke.nodePaste("%clipboard%")

        # assign pasted node (paste leaves it selected)
        copy_grpn = nuke.selectedNode()
        copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos())

        # convert gizmos to groups
        pnutils.bake_gizmos_recursively(copy_grpn)

        # remove avalon knobs so the exported file is pipeline-clean
        knobs = copy_grpn.knobs()
        avalon_knobs = [
            k for k in knobs.keys()
            for ak in ["avalon:", "ak:"]
            if ak in k
        ]
        # NOTE(review): assumes a "publish" knob exists on the copy;
        # removeKnob would raise KeyError otherwise -- confirm upstream.
        avalon_knobs.append("publish")
        for ak in avalon_knobs:
            copy_grpn.removeKnob(knobs[ak])

        # add to temporary nodes
        tmp_nodes.append(copy_grpn)

        # swap names so the exported node carries the original name
        orig_grpn.setName(tmp_grpn_name)
        copy_grpn.setName(orig_grpn_name)

        # create tmp nk file
        # save file to the path
        nuke.nodeCopy(path)

        # Clean up
        for tn in tmp_nodes:
            nuke.delete(tn)

        # rename back to original
        orig_grpn.setName(orig_grpn_name)

    if "representations" not in instance.data:
        instance.data["representations"] = []

    # create representation
    representation = {
        'name': 'gizmo',
        'ext': 'nk',
        'files': filename,
        "stagingDir": stagingdir
    }
    instance.data["representations"].append(representation)

    self.log.info("Extracted instance '{}' to: {}".format(
        instance.name, path))
    self.log.info("Data {}".format(instance.data))
def update(self, container, representation):
    """Update the loaded camera to the given representation's version.

    The camera node is round-tripped through the clipboard (a workaround
    for a Nuke bug with animation keys) and its original input/output
    connections are restored afterwards.

    Args:
        container (dict): container data of the existing camera node
        representation (dict): representation document to switch to

    Returns:
        dict: updated container data
    """
    # Get version from io
    version = io.find_one({
        "type": "version",
        "_id": representation["parent"]
    })

    object_name = container['objectName']
    # get corresponding node
    camera_node = nuke.toNode(object_name)

    # get main variables
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    # fall back to the script's fps when the version carries none
    fps = version_data.get("fps") or nuke.root()["fps"].getValue()

    # prepare data for imprinting
    # add additional metadata from the version to imprint to Avalon knob
    add_keys = ["source", "author", "fps"]

    data_imprint = {
        "representation": str(representation["_id"]),
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "objectName": object_name
    }

    for k in add_keys:
        data_imprint.update({k: version_data[k]})

    # getting file path (forward slashes for nuke)
    file = api.get_representation_path(representation).replace("\\", "/")

    with anlib.maintained_selection():
        camera_node = nuke.toNode(object_name)
        camera_node['selected'].setValue(True)

        # collect input output dependencies before the clipboard round-trip
        dependencies = camera_node.dependencies()
        dependent = camera_node.dependent()

        camera_node["frame_rate"].setValue(float(fps))
        camera_node["file"].setValue(file)

        # workaround because nuke's bug is
        # not adding animation keys properly
        xpos = camera_node.xpos()
        ypos = camera_node.ypos()
        nuke.nodeCopy("%clipboard%")
        nuke.delete(camera_node)
        nuke.nodePaste("%clipboard%")
        camera_node = nuke.toNode(object_name)
        camera_node.setXYpos(xpos, ypos)

        # link to original input nodes
        # (renamed loop variable: `input` shadowed the builtin)
        for i, input_node in enumerate(dependencies):
            camera_node.setInput(i, input_node)
        # link to original output nodes
        for d in dependent:
            index = next((i for i, dpcy in enumerate(d.dependencies())
                          if camera_node is dpcy), 0)
            d.setInput(index, camera_node)

    # color node by correct color by actual version
    self.node_version_color(version, camera_node)

    # fixed log message typo ("udated" -> "updated")
    self.log.info("updated to version: {}".format(version.get("name")))

    return update_container(camera_node, data_imprint)
def load(self, context, name, namespace, data):
    """
    Loading function to import .nk file into script and wrap it on backdrop

    Arguments:
        context (dict): context of version
        name (str): name of the version
        namespace (str): asset name
        data (dict): compulsory attribute > not used

    Returns:
        nuke node: containerised nuke node object
    """
    # get main variables
    version = context['version']
    version_data = version.get("data", {})
    vname = version.get("name", None)
    first = version_data.get("frameStart", None)
    last = version_data.get("frameEnd", None)
    namespace = namespace or context['asset']['name']
    colorspace = version_data.get("colorspace", None)
    object_name = "{}_{}".format(name, namespace)

    # prepare data for imprinting
    # add additional metadata from the version to imprint to Avalon knob
    # (direct indexing below -- the version document must provide these)
    add_keys = [
        "frameStart", "frameEnd", "handleStart", "handleEnd",
        "source", "author", "fps"
    ]

    data_imprint = {
        "frameStart": first,
        "frameEnd": last,
        "version": vname,
        "colorspaceInput": colorspace,
        "objectName": object_name
    }

    for k in add_keys:
        data_imprint.update({k: version_data[k]})

    # getting file path (forward slashes for nuke)
    file = self.fname.replace("\\", "/")

    # adding nodes to node graph
    # just in case we are in group lets jump out of it
    nuke.endGroup()

    # Get mouse position: a throwaway NoOp is created at the cursor,
    # its position recorded, then it is deleted again.
    n = nuke.createNode("NoOp")
    xcursor, ycursor = (n.xpos(), n.ypos())
    anlib.reset_selection()
    nuke.delete(n)

    # margin (px) added around the backdrop frame
    bdn_frame = 50

    with anlib.maintained_selection():
        # add group from nk
        nuke.nodePaste(file)

        # get all pasted nodes
        new_nodes = list()
        nodes = nuke.selectedNodes()

        # get pointer position in DAG
        xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(
            nodes, direction="right", offset=200 + bdn_frame)

        # reset position to all nodes and replace inputs and output
        for n in nodes:
            anlib.reset_selection()
            xpos = (n.xpos() - xcursor) + xpointer
            ypos = (n.ypos() - ycursor) + ypointer
            n.setXYpos(xpos, ypos)

            # replace Input nodes for dots
            # NOTE(review): `n.Class() in "Input"` is a substring test;
            # presumably meant as equality -- confirm intent.
            if n.Class() in "Input":
                dot = nuke.createNode("Dot")
                new_name = n.name().replace("INP", "DOT")
                dot.setName(new_name)
                dot["label"].setValue(new_name)
                dot.setXYpos(xpos, ypos)
                new_nodes.append(dot)

                # rewire downstream nodes onto the dot
                dep = n.dependent()
                for d in dep:
                    index = next(
                        (i for i, dpcy in enumerate(d.dependencies())
                         if n is dpcy), 0)
                    d.setInput(index, dot)

                # remove Input node
                anlib.reset_selection()
                nuke.delete(n)
                continue

            # replace Output node for a dot (same substring caveat)
            elif n.Class() in "Output":
                dot = nuke.createNode("Dot")
                new_name = n.name() + "_DOT"
                dot.setName(new_name)
                dot["label"].setValue(new_name)
                dot.setXYpos(xpos, ypos)
                new_nodes.append(dot)

                # rewire the upstream node into the dot
                dep = next((d for d in n.dependencies()), None)
                if dep:
                    dot.setInput(0, dep)

                # remove Input node
                anlib.reset_selection()
                nuke.delete(n)
                continue
            else:
                new_nodes.append(n)

        # reselect nodes with new Dot instead of Inputs and Output
        anlib.reset_selection()
        anlib.select_nodes(new_nodes)
        # place on backdrop
        bdn = nukescripts.autoBackdrop()

        # add frame offset so the backdrop frames its content with a margin
        xpos = bdn.xpos() - bdn_frame
        ypos = bdn.ypos() - bdn_frame
        bdwidth = bdn["bdwidth"].value() + (bdn_frame * 2)
        bdheight = bdn["bdheight"].value() + (bdn_frame * 2)

        bdn["xpos"].setValue(xpos)
        bdn["ypos"].setValue(ypos)
        bdn["bdwidth"].setValue(bdwidth)
        bdn["bdheight"].setValue(bdheight)

        bdn["name"].setValue(object_name)
        bdn["label"].setValue(
            "Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!"
            .format(object_name))
        bdn["note_font_size"].setValue(20)

    return containerise(node=bdn,
                        name=name,
                        namespace=namespace,
                        context=context,
                        loader=self.__class__.__name__,
                        data=data_imprint)
def process(self, instance):
    """Bake and export the instance's camera to a geometry file.

    Creates temporary bake/Scene/WriteGeo nodes, executes the write over
    the script's frame range, removes the temporaries, then records the
    representation and frame-range data on the instance.
    """
    handle_start = instance.context.data["handleStart"]
    handle_end = instance.context.data["handleEnd"]
    first_frame = int(nuke.root()["first_frame"].getValue())
    last_frame = int(nuke.root()["last_frame"].getValue())
    step = 1
    output_range = str(nuke.FrameRange(first_frame, last_frame, step))

    self.log.info("instance.data: `{}`".format(
        pformat(instance.data)))

    # nodes to delete once the export has run
    rm_nodes = list()
    self.log.info("Crating additional nodes")
    subset = instance.data["subset"]
    staging_dir = self.staging_dir(instance)

    # get extension form preset (first "file_type" entry in the knob list)
    extension = next((k[1] for k in self.write_geo_knobs
                      if k[0] == "file_type"), None)
    if not extension:
        raise RuntimeError(
            "Bad config for extension in presets. "
            "Talk to your supervisor or pipeline admin")

    # create file name and path
    filename = subset + ".{}".format(extension)
    file_path = os.path.join(staging_dir, filename).replace("\\", "/")

    with anlib.maintained_selection():
        # bake camera with axeses onto word coordinate XYZ
        rm_n = bakeCameraWithAxeses(
            nuke.toNode(instance.data["name"]), output_range)
        rm_nodes.append(rm_n)

        # create scene node
        rm_n = nuke.createNode("Scene")
        rm_nodes.append(rm_n)

        # create write geo node
        wg_n = nuke.createNode("WriteGeo")
        wg_n["file"].setValue(file_path)
        # apply the remaining preset knobs
        for k, v in self.write_geo_knobs:
            wg_n[k].setValue(v)
        rm_nodes.append(wg_n)

        # write out camera
        nuke.execute(
            wg_n,
            int(first_frame),
            int(last_frame)
        )
        # erase additional nodes
        for n in rm_nodes:
            nuke.delete(n)

    self.log.info(file_path)

    # create representation data
    if "representations" not in instance.data:
        instance.data["representations"] = []

    representation = {
        'name': extension,
        'ext': extension,
        'files': filename,
        "stagingDir": staging_dir,
        "frameStart": first_frame,
        "frameEnd": last_frame
    }
    instance.data["representations"].append(representation)

    # NOTE(review): frameStart/frameEnd shrink the range inward by the
    # handles while frameStartHandle/frameEndHandle keep the full range;
    # confirm this matches the pipeline's frame-range convention.
    instance.data.update({
        "path": file_path,
        "outputDir": staging_dir,
        "ext": extension,
        "handleStart": handle_start,
        "handleEnd": handle_end,
        "frameStart": first_frame + handle_start,
        "frameEnd": last_frame - handle_end,
        "frameStartHandle": first_frame,
        "frameEndHandle": last_frame,
    })

    self.log.info("Extracted instance '{0}' to: {1}".format(
        instance.name, file_path))