def results(self, value):
    """Create a new asset of the type chosen in the dialog.

    value[0] is the chosen asset type; self.name was collected earlier.
    SHOT assets get no HDA; SET assets additionally get a whole_set.json
    scaffold before the HDA is built.
    """
    asset_type = value[0]  # renamed from `type` to avoid shadowing the builtin
    name = self.name
    # Creation requires both a name and a type (either can be None if the
    # dialog was cancelled).
    if name is None or asset_type is None:
        qd.error("Asset creation failed.")
        return
    project = Project()
    body = project.create_asset(name, asset_type=asset_type)
    if body is None:
        # create_asset returns None on duplicate names.
        qd.error("Asset with name " + name + " already exists in pipeline.")
    elif self.type == AssetType.SHOT:
        # NOTE(review): this compares self.type, not the freshly chosen
        # asset_type — presumably self.type is set by the dialog; confirm.
        qd.info("Asset created successfully.", "Success")
    else:
        if self.type == AssetType.SET:
            # create whole_set.json
            setPath = os.path.join(Project().get_assets_dir(), str(name),
                                   "model", "main", "cache", "")
            exporter = JSONExporter()
            exporter.createWholeSetJSON(setPath)
        assembler = Assembler()
        assembler.create_hda(name, body=body)
        qd.info("Asset created successfully.", "Success")
def export(self, element, selection=None, startFrame=None, endFrame=None):
    """Export tagged geometry from the current scene as Alembic caches.

    Frame range defaults to the Maya playback range; shots get one frame of
    padding on each side. Returns the list of exported .abc file paths, or
    None if the export was cancelled.
    """
    project = Project()
    bodyName = element.get_parent()
    body = project.get_body(bodyName)
    abcFilePath = element.get_cache_dir()
    self.element = element
    if startFrame is None:
        startFrame = pm.playbackOptions(q=True, animationStartTime=True)
    if endFrame is None:
        endFrame = pm.playbackOptions(q=True, animationEndTime=True)
    # FIX: initialize so `files` is defined even when the body matches none
    # of the branches below (previously raised UnboundLocalError).
    files = None
    if body.is_shot():
        # Pad the range by one frame on each side for motion-blur samples.
        startFrame -= 1
        endFrame = int(endFrame)
        endFrame += 1
        endFrame = str(endFrame)
        files = self.exportReferences(abcFilePath,
                                      tag='DCC_Alembic_Export_Flag',
                                      startFrame=startFrame,
                                      endFrame=endFrame)
        if self.cameras:
            files.extend(self.export_cameras(body, startFrame, endFrame))
    elif body.is_asset():
        if body.get_type() == AssetType.SET:
            files = self.exportReferences(abcFilePath)
        else:
            files = self.exportAll(abcFilePath,
                                   tag='DCC_Alembic_Export_Flag',
                                   element=element)
    elif body.is_crowd_cycle():
        files = self.exportAll(abcFilePath,
                               tag='DCC_Alembic_Export_Flag',
                               startFrame=startFrame,
                               endFrame=endFrame,
                               element=element)
    if not files:
        # Maybe this is a bad distinction but None is if it was canceled or
        # something and empty is if it went but there weren't any alembics
        if files is None:
            return
        qd.error(
            'No alembics were exported. Make sure the top-level group is tagged.'
        )
        return
    # Make the caches group-writable for the rest of the pipeline.
    for abcFile in files:
        os.system('chmod 774 ' + abcFile)
    exported_asset_names = ""
    for file in files:
        asset_file_name = str(file).rpartition('/')[2]
        exported_asset_names += asset_file_name + '\n'
    qd.info("Alembics exported successfully: " + '\n' + exported_asset_names)
    return files
def department_results(self, value):
    """Publish the current body for the department picked in the dialog."""
    chosen_department = value[0]
    prepare_scene_file(quick_publish=self.quick_publish,
                       body=self.body,
                       department=chosen_department)
    print("value: ", value)
    print("dept: ", chosen_department)
    # Rig publishes never trigger an export.
    if chosen_department == Department.RIG:
        self.export = False
    # Resolve the element for this department and the current user, then publish.
    selected_element = self.body.get_element(chosen_department)
    user = Environment().get_user()
    comment = qd.input("Comment for publish")
    comment = "No comment." if comment is None else comment
    post_publish(selected_element,
                 user,
                 self.export,
                 published=True,
                 comment=comment)
    setPublishEnvVar(self.body.get_name(), chosen_department)
    qd.info("Asset published successfully.", "Success")
def results(self, value):
    """Create a new asset of the chosen type and build its HDA.

    value[0] is the asset type selected in the dialog; self.name was
    collected earlier. Reports success/failure via qd dialogs.
    """
    asset_type = value[0]  # renamed from `type` to avoid shadowing the builtin
    name = self.name
    # Either field can be None if the dialog was cancelled.
    if name is None or asset_type is None:
        qd.error("Asset creation failed.")
        return
    project = Project()
    body = project.create_asset(name, asset_type=asset_type)
    if body is None:
        # create_asset returns None on duplicate names.
        qd.error("Asset with name " + name + " already exists in pipeline.")
        return
    assembler = Assembler()
    assembler.create_hda(name, body=body)
    qd.info("Asset created successfully.", "Success")
def results(self, values):
    """Open the latest comp publish for the selected shot in Nuke.

    If the shot has never been published to comp, import it instead.
    """
    selection = str(values[0])
    shot = Project().get_body(selection)
    comp_element = shot.get_element(Department.COMP)
    self.publishes = comp_element.list_publishes()
    os.environ["DCC_NUKE_ASSET_NAME"] = selection
    if not self.publishes:
        # has not been imported. Import it first.
        shot_importer = importer.NukeImporter()
        shot_importer.shot_results([selection])
        return
    # get the latest publish
    username = Environment().get_current_username()
    try:
        filepath = comp_element.checkout(username)
    except Exception:
        # FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). Fall back to the path stored in the
        # last publish record (index 3 — presumably the filepath field;
        # confirm against the Element API).
        filepath = comp_element.get_last_publish()[3]
    if os.path.exists(filepath):
        qd.info("Opening file, please wait.")
        nuke.scriptOpen(filepath)
    else:
        qd.error("Couldn't find the file.")
def tag(self):
    """Ask for confirmation, then flag every selected node for Alembic export."""
    confirmed = qd.binary_option(
        "Add Alembic tag to:\n" + str(self.selected_string),
        "Yes", "No", title='Add Alembic Tag')
    if not confirmed:
        return
    for node in self.selected:
        tag_node_with_flag(node, "DCC_Alembic_Export_Flag")
    qd.info("tag successful!")
def untag(self):
    """Ask for confirmation, then remove the Alembic export flag from every selected node."""
    confirmed = qd.binary_option(
        "Remove Alembic tag from:\n" + str(self.selected_string),
        "Yes", "No", title='Remove Alembic Tag')
    if not confirmed:
        return
    for node in self.selected:
        untag_node_with_flag(node, "DCC_Alembic_Export_Flag")
    qd.info("untag successful!")
def create_body(self):
    """Prompt for an asset name and report (placeholder) creation status."""
    name = qd.input("What's the name of this asset?")
    # A cancelled dialog returns None, which counts as failure.
    if name is None:
        qd.error("Asset creation failed.")
    else:
        qd.info("Asset created successfully (but not really, yet).", "Success")
def asset_results(self, value):
    """Playblast the chosen asset over the current playback range into its render dir."""
    chosen_asset = value[0]
    project = Project()
    self.body = project.get_body(chosen_asset)
    start_frame = mc.playbackOptions(q=True, min=True)
    end_frame = mc.playbackOptions(q=True, max=True)
    # FIX: use the class attribute directly (Department.RENDER), consistent
    # with Department.COMP / Department.RIG usage elsewhere in this file —
    # previously this needlessly instantiated Department().
    playblast_element = self.body.get_element(Department.RENDER)
    playblast_dir = playblast_element.get_render_dir()
    playblast_filename = chosen_asset + "_playblast.mov"
    path = os.path.join(playblast_dir, playblast_filename)
    self.simpleBlast(start_frame, end_frame, path)
    qd.info("Playblast created at " + str(path))
def asset_results(self, value):
    """Playblast the chosen asset into the project submission directory."""
    chosen_asset = value[0]
    start_frame = mc.playbackOptions(q=True, min=True)
    end_frame = mc.playbackOptions(q=True, max=True)
    submission_location = Project().get_submission_location()
    path = os.path.join(submission_location, chosen_asset + "_playblast.mov")
    try:
        self.simple_blast(start_frame, end_frame, path)
    except Exception as e:
        qd.error("playblast failed: " + str(e))
        return
    qd.info("Playblast created at " + str(path))
def export(self, alembic=True, fbx=True, json=True, usd=True, methods=None):
    """Export the current asset in each enabled format.

    `methods` is the display list of formats attempted; entries are removed
    as formats are skipped, and the remainder is reported to the user.
    """
    # FIX: copy the list before mutating it. The original aliased self.list
    # (or the caller's list) and the .remove() calls below permanently
    # mutated it, corrupting later exports.
    methods = list(self.list) if methods is None else list(methods)
    asset_name = os.environ.get("DCC_ASSET_NAME")
    if not asset_name:
        qd.error("You must first create or clone an asset.")
        return
    self.body = Project().get_body(asset_name)
    if alembic:
        AlembicExporter().auto_export(asset_name, self.cameras)
    if self.body and self.body.is_asset():
        if json:
            # JSON export only applies to sets and shots.
            if self.body.get_type() == AssetType.SET or self.body.get_type() == AssetType.SHOT:
                json_export = JSONExporter()
                json_export.go(self.body, self.body.get_type())
            else:
                methods.remove("json")
        if fbx:
            # FBX export only applies to props and actors.
            if self.body.get_type() == AssetType.PROP or self.body.get_type() == AssetType.ACTOR:
                FbxExporter().auto_export(asset_name)
            else:
                methods.remove("fbx")
        if usd:
            print("USD isn't supported... yet :|")
            methods.remove("usd")
    if methods:
        qd.info("Successfully exported " + str(asset_name) + " as " + str(methods))
    else:
        qd.info("Nothing was exported.")
def results(self, value):
    """Create a new asset of the chosen type and immediately make its first publish.

    value[0] is the asset type from the dialog; self.name was collected earlier.
    """
    asset_type = value[0]  # renamed from `type` to avoid shadowing the builtin
    name = self.name
    # Either field can be None if the dialog was cancelled.
    if name is None or asset_type is None:
        qd.error("Asset creation failed.")
        return
    scene_file, new_file = get_scene_file()
    print("scene file, new file: ", scene_file, new_file)
    check_unsaved_changes()
    project = Project()
    body = project.create_asset(name, asset_type=asset_type)
    if body is None:
        # create_asset returns None on duplicate names.
        qd.error("Asset with name " + str(name) + " already exists in pipeline.")
        return
    prepare_scene_file(body=body)
    department = "model"
    asset_list = body.list_elements(department)  # NOTE(review): unused — confirm list_elements has no side effects before removing
    selected_element = body.get_element(department)
    user = Environment().get_user()
    print("name :", str(name))
    print("department: ", department)
    setPublishEnvVar(name, department)
    post_publish(selected_element,
                 user,
                 self.export,
                 published=True,
                 comment="First publish!")
    qd.info("Asset created successfully!", "Success")
def camera_results(self, values):
    """Render a snapshot from each checked camera into the submission directory.

    `values` maps camera name -> bool (checkbox state from the dialog).
    """
    # Idiomatic dict iteration: unpack items instead of indexing tuples.
    cameras = [camera for camera, checked in values.items() if checked]
    submission_location = Project().get_submission_location()
    try:
        files = self.quick_render(submission_location, cameras)
    except Exception as e:
        qd.error("Snapshot failed: " + str(e))
        return
    # join() instead of quadratic string concatenation.
    file_string = "".join(str(dest) + "\n" for dest in files)
    qd.info("Snapshot(s) created at:\n" + file_string)
def results(self, values):
    """Save the current Nuke script into the user workspace and publish it to comp."""
    shot_name = str(values[0])
    shot = Project().get_body(shot_name)
    comp_element = shot.get_element(Department.COMP)
    # Save a working copy named after the shot in the user's workspace.
    user_workspace = Environment().get_user_workspace()
    temp_filepath = os.path.join(user_workspace, shot_name + ".nk")
    # filepath = nuke.toNode("root").name() #grab name of file they're working on
    nuke.scriptSave(temp_filepath)
    print("filepath: ", temp_filepath)
    user = Environment().get_current_username()
    comment = qd.input("Comment for publish")
    comment = "Publish by " + str(user) + " in comp." if comment is None else comment
    comp_element.publish(user, temp_filepath, comment)
    os.environ["DCC_NUKE_ASSET_NAME"] = shot_name
    qd.info(shot_name + " published successfully.")
def results(self, value):
    """Create a new asset of the chosen type and publish its first commit to model.

    value[0] is the asset type from the dialog; self.name was collected earlier.
    """
    asset_type = value[0]  # renamed from `type` to avoid shadowing the builtin
    name = self.name
    # Either field can be None if the dialog was cancelled.
    if name is None or asset_type is None:
        qd.error("Asset creation failed.")
        return
    project = Project()
    body = project.create_asset(name, asset_type=asset_type)
    if body is None:
        # create_asset returns None on duplicate names.
        qd.error("Asset with name " + str(name) + " already exists in pipeline.")
        return
    prepare_scene_file()
    # show the gui, get the element. To list elements, get the body and get the department
    department = "model"  # hard-coding model for now since this is Maya
    asset_list = body.list_elements(department)  # NOTE(review): unused — confirm no side effects before removing
    # get the element for the model dept and the user, and using that publish
    selected_element = body.get_element("model")
    user = Environment().get_user()
    # FIXME: WE NEED TO FIGURE OUT TO WHICH DEPARTMENT(S) WE ACTUALLY NEED TO PUBLISH TO
    post_publish(selected_element, user, published=True, comment="First commit.")
    qd.info("Asset created successfully!", "Success")
def create_from_current(self):
    """Save the current Nuke script as a named template, confirming overwrites."""
    script_name = qd.input("Enter a name for this template: ")
    # `not script_name` covers both None (cancelled) and the empty string,
    # so the redundant `== u''` check is gone.
    if not script_name:
        return
    templates_dir = Environment().get_templates_dir()
    temp_filepath = os.path.join(templates_dir, script_name + ".nk")
    basename = os.path.basename(temp_filepath)
    templates_in_dir = os.listdir(templates_dir)
    print("templates: ", templates_in_dir)
    if basename in templates_in_dir:
        overwrite = qd.yes_or_no(str(script_name) + " already exists. Overwrite it?")
        if not overwrite:
            return
    # FIX: the save/notify sequence was duplicated in both branches;
    # deduplicated into a single exit path.
    nuke.scriptSave(temp_filepath)
    qd.info("Template created successfully!")
    print("filepath: ", temp_filepath)
def set_results(self, value):
    """Publish the Houdini set chosen in the dialog.

    Reconciles the set's whole_set.json with the children of the set's
    "inside" subnet: entries missing from Houdini are dropped from the JSON,
    new children get fresh per-prop JSON files, and existing children get a
    bumped version number. Each prop's a/b/c reference points are refreshed
    from the geometry before writing.

    NOTE(review): file reconstructed from a whitespace-mangled source;
    indentation of nested branches inferred — verify against VCS history.
    """
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is to avoid behavior changes.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    set_data = []
    try:
        with open(set_file) as f:
            set_data = json.load(f)
    except Exception as error:
        qd.error("No valid JSON file for " + str(set_name))
        return
    # Names currently recorded in whole_set.json.
    items_in_set = []
    for item in set_data:
        item_name = item['asset_name']
        item_version = item['version_number']
        items_in_set.append(item_name)
    # TODO: for each child, make sure that it exists in whole_set.json, or add it if it doesn't, or remove it if it does not
    # Lower-cased node names of the children actually present in Houdini.
    child_names = []
    for child in children:
        child_path = child.path()
        name = child_path.split('/')[-1].lower()
        child_names.append(name)
    # Drop JSON entries for assets no longer in the Houdini set.
    # NOTE(review): removing from set_data while iterating it can skip
    # elements — flagged, left unchanged here.
    for item in set_data:
        if str(item['asset_name']) not in child_names:
            set_data.remove(item)
    for child in children:
        print("child: ", child)
        inside = child.node("inside")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        child_path = child.path()
        name = child_path.split('/')[-1].lower()
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        # get transform parms: t is translate, r rotate and s scale (with associated x,y,z vals)
        tx, ty, tz = self.get_transform(set_transform, "tx", "ty", "tz")
        rx, ry, rz = self.get_transform(set_transform, "rx", "ry", "rz")
        sx, sy, sz = self.get_transform(set_transform, "sx", "sy", "sz")
        latest_file, latest_version = self.body.get_latest_json_version(name)
        # Version counter wraps after 9 back to 0.
        if latest_version == int(9):
            new_version = 0
        else:
            new_version = int(latest_version) + 1
        prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(latest_version) + ".json")
        if name in items_in_set:
            # Existing prop: load its JSON and bump its version in set_data.
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                qd.warning("No valid JSON file for " + str(name) +
                           ". Skipping changes made to this asset.")
                continue
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    set_item['version_number'] = new_version
                    break
        else:
            # New prop: start it at version 0 with zeroed reference points.
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            new_version = 0
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3,
        # representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        # Refresh the reference points from the current geometry (mutates
        # a/b/c in place — presumably; confirm against update_points_by_geo).
        self.update_points_by_geo(out, a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        # Reset the dressing transform and record the new version on the node.
        self.clear_transform(set_transform)
        self.update_version_number(child, new_version)
        import_node = child.node("import")
        read_from_json = import_node.node("read_from_json")
        read_from_json.parm("reload").pressButton()
    # Rewrite whole_set.json with the reconciled entries.
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")
def set_results(self, value):
    """Publish the Houdini set chosen in the dialog (dcc_geo-aware variant).

    Compares whole_set.json with the set's children via
    get_set_comparable_lists, deletes JSON for removed props, bumps versions
    for survivors (via version_prop_json), and writes fresh per-prop JSON
    with geometry-derived reference points.

    NOTE(review): file reconstructed from a whitespace-mangled source;
    indentation of nested branches inferred — verify against VCS history.
    """
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is to avoid behavior changes.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    #migrate transforms for all the children to set_dressing_transform
    self.update_set_dressing_transform(set)
    #create children list (list of houdini objects)
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    ''' The idea here is to get the set data from whole_set.json, get the set data from Houdini, and then compare the two. Here are the possible scenarios: 1. There are items in the JSON file that aren't in the Houdini Set -remove the item from the Json file -remove the item's json files 2. There are items in the Houdini Set that aren't in the JSON file -Add those to the JSON file '''
    items_to_delete = []
    set_data = []
    items_in_set = []
    items_to_delete, set_data, items_in_set = self.get_set_comparable_lists(
        children, set_file)
    self.delete_asset_json(items_to_delete, set_name)
    # TODO: To allow adding multiple copies of the same prop to a set in houdini, we'll want to add as many copies to the whole_set.json file
    # for child_name in child_names:
    #     child = inside.node(child_name) # get the child node
    #     inside = child.node("inside")
    #     modify = inside.node("modify")
    #     modify_name = modify.type().name()
    #     name = modify_name.split("_")[0].lower()
    #
    #     if name not in items_in_set:
    #         set_data.append
    print("starting to work on children\n")
    for child in children:
        #find if it is scaled (set to big scale) and initialize variables----------------------------------------------------------
        #inside
        #import_node
        #out
        #set_transform
        #current_version
        #name
        isScaled = False
        print("child: " + str(child))
        print("current set_data: " + str(set_data))
        if child.type().name() == "dcc_geo":
            inside = child.node("inside")
            import_node = child.node("import")
            # Temporarily disable Scale_Object so points are read unscaled;
            # restored at the bottom of the loop.
            if child.parm("Scale_Object").evalAsInt() == 1:
                child.parm("Scale_Object").set(0)
                isScaled = True
        else:
            # Non-dcc_geo children nest the real nodes one level deeper.
            inside = child.node("inside")
            geo = inside.node("geo")
            inside = geo.node("inside")
            import_node = geo.node("import")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        current_version = child.parm("version_number").evalAsInt()
        #need a asset number or letter (I honestly just need to name it something and have that reflected in houdini)
        #import_number = child.parm("import_number").evalAsInt
        name = child.parm("asset_name").evalAsString()
        #---------------------------------------------------------------------------------------------------------------------------
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        cache_dir = os.path.join(Project().get_assets_dir(), set_name,
                                 "model", "main", "cache")
        print("filepath: ", cache_dir)
        # version_prop_json returns the next version to write; the previous
        # one is latest_version - 1 (presumably — confirm against its impl).
        latest_version, version_string = self.body.version_prop_json(
            name, cache_dir)
        print('latest version: ', latest_version)
        new_version = latest_version
        latest_version -= 1
        prop_file = os.path.join(
            cache_dir, str(name) + "_" + str(current_version) + ".json")
        print("prop file: ", prop_file)
        #will have to change items_in_set to be checked
        if name in items_in_set:
            # Existing prop: load its JSON and bump its recorded version.
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                print("No valid JSON file for " + str(name) +
                      ". Skipping changes made to this asset.")
                continue
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    # Only advance forward, never regress past the node's version.
                    if set_item['version_number'] <= current_version:
                        print("updating ", set_item, " with version ",
                              new_version)
                        set_item['version_number'] = new_version
                    break
        else:
            # create blank prop data and add it to the set
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            print("appended set_data: " + str(set_data))
            new_version = 0
            items_in_set.append(name)
        print("current set_data: " + str(set_data))
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3, representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        self.update_points_by_geo(out, prop_data['path'], a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        prop_data['version_number'] = new_version
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        print("")
        self.clear_transform(set_transform)
        self.set_space(child, set_name, name, new_version)
        if isScaled:
            child.parm("Scale_Object").set(1)
    #reloading the new data that was written
    # NOTE(review): uses import_node from the final loop iteration; the bare
    # except catches the NameError raised when the set has no children.
    try:
        read_from_json = import_node.node("read_from_json")
        read_from_json.parm("reload").pressButton()
    except:
        print("no nodes are in the set, cannot read from JSON")
    #rewriting the whole_set json file
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")
def print_success_message(self, message):
    """Display *message* to the user in an info dialog."""
    qd.info(message)
def set_results(self, value):
    """Publish the Houdini set chosen in the dialog (camelCase-name variant).

    Like the other set_results variants: reconcile whole_set.json with the
    set's children, bump prop versions via version_prop_json, refresh each
    prop's reference points from geometry, and rewrite the JSON files.
    Child node names are matched by lower-casing only the first character.

    NOTE(review): file reconstructed from a whitespace-mangled source;
    indentation of nested branches inferred — verify against VCS history.
    """
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is to avoid behavior changes.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    set_data = []
    try:
        with open(set_file) as f:
            set_data = json.load(f)
    except Exception as error:
        qd.error("No valid JSON file for " + str(set_name))
        return
    # Names currently recorded in whole_set.json.
    items_in_set = []
    for item in set_data:
        item_name = item['asset_name']
        item_version = item['version_number']
        items_in_set.append(item_name)
    # Node names present in Houdini, first character lower-cased to match
    # the JSON naming convention.
    child_names = []
    for child in children:
        child_path = child.path()
        first_char_to_lower = lambda s: s[:1].lower() + s[1:] if s else ''
        name = child_path.split('/')[-1]
        name = first_char_to_lower(name)
        child_names.append(name)
    print("child names; ", child_names)
    # Drop JSON entries for assets no longer in the Houdini set.
    # NOTE(review): removing from set_data while iterating it can skip
    # elements — flagged, left unchanged here.
    for item in set_data:
        if str(item['asset_name']) not in child_names:
            set_data.remove(item)
    # TODO: To allow adding multiple copies of the same prop to a set in houdini, we'll want to add as many copies to the whole_set.json file
    # for child_name in child_names:
    #     child = inside.node(child_name) # get the child node
    #     inside = child.node("inside")
    #     modify = inside.node("modify")
    #     modify_name = modify.type().name()
    #     name = modify_name.split("_")[0].lower()
    #
    #     if name not in items_in_set:
    #         set_data.append
    for child in children:
        if child.type().name() == "dcc_geo":
            inside = child.node("inside")
            import_node = child.node("import")
        else:
            # Non-dcc_geo children nest the real nodes one level deeper.
            inside = child.node("inside")
            geo = inside.node("geo")
            inside = geo.node("inside")
            import_node = geo.node("import")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        current_version = child.parm("version_number").evalAsInt()
        name = child.parm("asset_name").evalAsString()
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        # get transform parms: t is translate, r rotate and s scale (with associated x,y,z vals)
        tx, ty, tz = self.get_transform(set_transform, "tx", "ty", "tz")
        rx, ry, rz = self.get_transform(set_transform, "rx", "ry", "rz")
        sx, sy, sz = self.get_transform(set_transform, "sx", "sy", "sz")
        cache_dir = os.path.join(Project().get_assets_dir(), set_name,
                                 "model", "main", "cache")
        print("filepath: ", cache_dir)
        # version_prop_json returns the next version to write; the previous
        # one is latest_version - 1 (presumably — confirm against its impl).
        latest_version, version_string = self.body.version_prop_json(
            name, cache_dir)
        print('latest version: ', latest_version)
        new_version = latest_version
        latest_version -= 1
        prop_file = os.path.join(
            cache_dir, str(name) + "_" + str(current_version) + ".json")
        print("prop file: ", prop_file)
        if name in items_in_set:
            # Existing prop: load its JSON and bump its recorded version.
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                qd.warning("No valid JSON file for " + str(name) +
                           ". Skipping changes made to this asset.")
                continue
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    # Only advance forward, never regress past the node's version.
                    if set_item['version_number'] <= current_version:
                        print("updating ", set_item, " with version ",
                              new_version)
                        set_item['version_number'] = new_version
                    break
        else:
            # New prop: start it at version 0 with zeroed reference points.
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            new_version = 0
            items_in_set.append(name)
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3,
        # representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        self.update_points_by_geo(out, a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        prop_data['version_number'] = new_version
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        self.clear_transform(set_transform)
        self.set_space(child, set_name, name, new_version)
        read_from_json = import_node.node("read_from_json")
        read_from_json.parm("reload").pressButton()
    # Rewrite whole_set.json with the reconciled entries.
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")