def repair(cls, instance):
    """Repair the file name prefix and frame padding for the instance's layer.

    Switches to the instance's render layer and writes the expected image
    prefix token (and default padding) onto the renderer's globals node.
    Renderman stores these settings on ``rmanGlobals`` instead of the
    regular render globals attributes.
    """
    renderer = instance.data['renderer']
    layer_node = instance.data['setMembers']

    with lib.renderlayer(layer_node):
        # Look up the attribute mapping for this renderer, falling back
        # to the generic defaults when the renderer has no specific entry.
        render_attrs = lib.RENDER_ATTRS.get(renderer,
                                            lib.RENDER_ATTRS['default'])

        if renderer == "renderman":
            # renderman handles stuff differently
            cmds.setAttr("rmanGlobals.imageFileFormat",
                         cls.ImagePrefixTokens[renderer],
                         type="string")
            cmds.setAttr("rmanGlobals.imageOutputDir",
                         cls.RendermanDirPrefix,
                         type="string")
        else:
            node = render_attrs["node"]

            # Repair prefix
            cmds.setAttr("{}.{}".format(node, render_attrs["prefix"]),
                         cls.ImagePrefixTokens[renderer],
                         type="string")

            # Repair padding
            cmds.setAttr("{}.{}".format(node, render_attrs["padding"]),
                         cls.DEFAULT_PADDING)
def repair(cls, instance):
    """Enable the 'ignore in batch mode' flag for Distributed Rendering.

    Applies the change while the instance's render layer is active so the
    attribute is set in the correct layer context.
    """
    layer = instance.data.get("setMembers")
    with lib.renderlayer(layer):
        cls.log.info("Enabling Distributed Rendering "
                     "ignore in batch mode..")
        cmds.setAttr(cls.ignored_attr, True)
def process(self, instance):
    """Collect the Look in the instance with the correct layer settings"""
    layer = instance.data["renderlayer"]
    with lib.renderlayer(layer):
        self.collect(instance)

        # make ftrack publishable
        instance.data["families"] = ['ftrack']
        instance.data['maketx'] = True
def process(self, instance):
    """Collect the Look in the instance with the correct layer settings"""
    with lib.renderlayer(instance.data["renderlayer"]):
        self.collect(instance)

        # make ftrack publishable
        maketx = instance.data.get('maketx', True)
        self.maketx = maketx
        instance.data['maketx'] = maketx
        self.log.info('maketx: {}'.format(maketx))
def get_invalid(cls, instance):
    """Get all invalid nodes.

    A node is invalid when one of its connected object sets is missing
    from the collected look relationships (ignoring ``_SET`` sets), or
    when the node itself is absent from the members of a collected set
    it belongs to.

    Returns:
        list: Invalid nodes, each listed once in discovery order.
    """
    cls.log.info("Validating look content for "
                 "'{}'".format(instance.name))

    relationships = instance.data["lookData"]["relationships"]
    invalid = []

    renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
    with lib.renderlayer(renderlayer):
        for node in instance:
            # get the connected objectSets of the node
            sets = lib.get_related_sets(node)
            if not sets:
                continue

            # check if any objectSets are not present in the relationships
            missing_sets = [s for s in sets if s not in relationships]
            if missing_sets:
                # Sets named with '_SET' are deliberately excluded from
                # the collected relationships; only other missing sets
                # mark the node invalid.
                if any('_SET' not in missing for missing in missing_sets):
                    # A set of this node is not coming along, this is wrong!
                    cls.log.error("Missing sets '{}' for node "
                                  "'{}'".format(missing_sets, node))
                    invalid.append(node)
                # Skip the membership check; the relationships are
                # already known to be incomplete for this node.
                continue

            # Ensure the node is in the sets that are collected
            for shaderset, data in relationships.items():
                if shaderset not in sets:
                    # no need to check for a set if the node
                    # isn't in it anyway
                    continue

                member_nodes = [
                    member['name'] for member in data['members']
                ]
                if node not in member_nodes:
                    # The node is not found in the collected set
                    # relationships
                    cls.log.error("Missing '{}' in collected set node "
                                  "'{}'".format(node, shaderset))
                    invalid.append(node)

    # Deduplicate while preserving order; a node missing from several
    # sets would otherwise be reported multiple times.
    return list(dict.fromkeys(invalid))
def repair(cls, instance):
    """Reset the render file name prefix and frame padding for the layer.

    Activates the instance's render layer and writes the expected prefix
    (from ``cls.get_prefix``) and the default padding onto the renderer's
    globals node.
    """
    renderer = instance.data['renderer']
    layer_node = instance.data['setMembers']

    with lib.renderlayer(layer_node):
        # Renderer-specific attribute mapping, with a generic fallback
        fallback = lib.RENDER_ATTRS['default']
        attrs = lib.RENDER_ATTRS.get(renderer, fallback)
        node = attrs["node"]

        # Repair prefix
        cmds.setAttr("{}.{}".format(node, attrs["prefix"]),
                     cls.get_prefix(renderer),
                     type="string")

        # Repair padding
        cmds.setAttr("{}.{}".format(node, attrs["padding"]),
                     cls.DEFAULT_PADDING)
def process(self, instance):
    """Extract the look as a Maya ASCII file plus a JSON sidecar.

    Writes two files to the staging directory:
    ``<instance>.ma`` with the shader/object sets exported (members
    emptied so they are not bundled by accident) and ``<instance>.json``
    with the look attributes and set relationships.

    Also processes the texture resources: each unique file is run through
    ``self._process_texture`` (optionally converting with maketx), the
    resulting transfers/hardlinks/source hashes are stored on
    ``instance.data`` for the integrator, and the file nodes are
    temporarily remapped to the destination paths during export.
    """
    # Define extract output file path
    dir_path = self.staging_dir(instance)
    maya_fname = "{0}.ma".format(instance.name)
    json_fname = "{0}.json".format(instance.name)

    # Make texture dump folder
    maya_path = os.path.join(dir_path, maya_fname)
    json_path = os.path.join(dir_path, json_fname)

    self.log.info("Performing extraction..")

    # Remove all members of the sets so they are not included in the
    # exported file by accident
    self.log.info("Extract sets (Maya ASCII) ...")
    lookdata = instance.data["lookData"]
    relationships = lookdata["relationships"]
    sets = relationships.keys()

    # Extract the textures to transfer, possibly convert with maketx and
    # remap the node paths to the destination path. Note that a source
    # might be included more than once amongst the resources as they could
    # be the input file to multiple nodes.
    resources = instance.data["resources"]
    do_maketx = instance.data.get("maketx", False)

    # Collect all unique files used in the resources
    files = set()
    for resource in resources:
        files.update(os.path.normpath(f) for f in resource["files"])

    # Process the resource files
    transfers = list()
    hardlinks = list()
    hashes = dict()
    for filepath in files:
        # mode is COPY or HARDLINK and decides how the file reaches the
        # destination; hash identifies the source texture content
        source, mode, hash = self._process_texture(filepath,
                                                   do_maketx,
                                                   staging=dir_path)
        destination = self.resource_destination(instance,
                                                source,
                                                do_maketx)
        if mode == COPY:
            transfers.append((source, destination))
        elif mode == HARDLINK:
            hardlinks.append((source, destination))

        # Store the hashes from hash to destination to include in the
        # database
        hashes[hash] = destination

    # Remap the resources to the destination path (change node attributes)
    destinations = dict()
    remap = OrderedDict()  # needs to be ordered, see color space values
    for resource in resources:
        source = os.path.normpath(resource["source"])
        if source not in destinations:
            # Cache destination as source resource might be included
            # multiple times
            destinations[source] = self.resource_destination(instance,
                                                             source,
                                                             do_maketx)

        # Remap file node filename to destination
        attr = resource['attribute']
        remap[attr] = destinations[source]

        # Preserve color space values (force value after filepath change)
        # This will also trigger in the same order at end of context to
        # ensure after context it's still the original value.
        color_space_attr = resource['node'] + ".colorSpace"
        remap[color_space_attr] = cmds.getAttr(color_space_attr)

    self.log.info("Finished remapping destinations ...")

    # Extract in correct render layer
    layer = instance.data.get("renderlayer", "defaultRenderLayer")
    with lib.renderlayer(layer):
        # TODO: Ensure membership edits don't become renderlayer overrides
        with lib.empty_sets(sets, force=True):
            # To avoid Maya trying to automatically remap the file
            # textures relative to the `workspace -directory` we force
            # it to a fake temporary workspace. This fixes textures
            # getting incorrectly remapped. (LKD-17, PLN-101)
            with no_workspace_dir():
                with lib.attribute_values(remap):
                    with avalon.maya.maintained_selection():
                        cmds.select(sets, noExpand=True)
                        cmds.file(maya_path,
                                  force=True,
                                  typ="mayaAscii",
                                  exportSelected=True,
                                  preserveReferences=False,
                                  channels=True,
                                  constraints=True,
                                  expressions=True,
                                  constructionHistory=True)

    # Write the JSON data
    self.log.info("Extract json..")
    data = {
        "attributes": lookdata["attributes"],
        "relationships": relationships
    }

    with open(json_path, "w") as f:
        json.dump(data, f)

    # Ensure the integrator bookkeeping lists exist before extending them
    if "files" not in instance.data:
        instance.data["files"] = list()
    if "hardlinks" not in instance.data:
        instance.data["hardlinks"] = list()
    if "transfers" not in instance.data:
        instance.data["transfers"] = list()

    instance.data["files"].append(maya_fname)
    instance.data["files"].append(json_fname)

    # Set up the resources transfers/links for the integrator
    instance.data["transfers"].extend(transfers)
    instance.data["hardlinks"].extend(hardlinks)

    # Source hash for the textures
    instance.data["sourceHashes"] = hashes

    self.log.info("Extracted instance '%s' to: %s" % (instance.name,
                                                      maya_path))
def process(self, instance):
    """Collect the Look in the instance with the correct layer settings"""
    layer = instance.data["renderlayer"]
    with lib.renderlayer(layer):
        self.collect(instance)