def post_reference(self, value):
    """Reference the selected assets into the current scene.

    value -- list of asset names chosen by the user. For each asset the
    user picks a department (model or rig); the last publish from that
    department is collected and everything is referenced in one pass.
    """
    paths_to_reference = []
    # TODO: add crowd cycle capability
    crowd = False  # qd.yes_or_no("Are you referencing a crowd cycle?")
    if crowd:
        self.reference_crowd_cycle(paths_to_reference)
    else:
        for asset in value:
            asset_body = self.project.get_body(asset)
            use_model = qd.binary_option(
                "Which department for " + str(asset) + " ?",
                "Model", "Rig", title="Select department")
            department = "model" if use_model else "rig"
            last_publish = asset_body.get_element(department).get_last_publish()
            if last_publish:
                # publish tuple: index 3 holds the published filepath
                paths_to_reference.append(last_publish[3])
            else:
                qd.warning("No publish exists for " + str(asset) + " in " + str(department) + ". Skipping.")
    print("files for reference: ", paths_to_reference)
    self.reference_asset(paths_to_reference)
def check_unsaved_changes():
    """If the Maya scene has unsaved changes, offer to publish it first.

    Parses the asset name and department out of the scene path (expects
    ".../assets/<asset>/<department>/...") and runs a quick non-GUI
    publish. If the path cannot be parsed the user must publish manually.
    """
    unsaved_changes = mc.file(q=True, modified=True)
    if not unsaved_changes:
        return
    response = qd.yes_or_no(
        "Would you like to publish the current asset before you proceed?",
        title="Unsaved changes detected",
        details="(Press No if you just created a new scene or opened Maya.)")
    if response is not True:
        return
    # instead of saving, publish.
    scene = mc.file(q=True, sceneName=True)
    dir_path = scene.split("assets/")
    try:
        asset_path = dir_path[1].split("/")
    except IndexError:
        # Scene path is stored in the user directory instead of assets. We
        # can't get the asset name, so they must publish manually.
        qd.error("Publish failed. Please publish manually before cloning the new asset.")
        return
    asset_name = asset_path[0]
    try:
        # asset_path was already split on "/", so element 1 is the department
        department = asset_path[1].split("/")[0]
        print("department " + department)
    except IndexError:
        department = None
    if department:
        print("department found")
    else:
        qd.warning("Skipping changes to " + str(asset_name))
        return
    publisher = Publisher(quick_publish=True, export=False)
    publisher.non_gui_publish(asset_name, department)
def reference_asset(self, filepath_list):
    """Create a Maya reference for each existing filepath in the list.

    filepath_list -- list of publish filepaths (or None to do nothing).
    The namespace for each reference is the asset folder name, i.e. the
    path component directly after "assets/".
    """
    if filepath_list is None:
        return
    for path in filepath_list:
        print("Path: ", path)
        if os.path.exists(path):
            print(path, " exists")
            _, part_two = path.split("assets/")
            asset = part_two.split("/")[0]
            pm.system.createReference(path, namespace=asset)
        else:
            # BUG FIX: was qd.warning(path, " doesn't exist") — two positional
            # args, inconsistent with every other single-message qd.warning call.
            qd.warning(path + " doesn't exist")
def tag(self):
    """Ask for confirmation, then add the Alembic export flag to every
    selected node. Warns and bails out when nothing is selected."""
    if not self.selected:
        qd.warning("Nothing is selected")
        return
    response = qd.binary_option(
        "Add Alembic tag to:\n" + str(self.selected_string),
        "Yes", "No", title='Add Alembic Tag')
    if not response:
        return
    for node in self.selected:
        tag_node_with_flag(node, "DCC_Alembic_Export_Flag")
    qd.info("tag successful!")
def results(self, value):
    """Handle the asset chosen in the selection GUI.

    value -- list whose first entry is the chosen asset/body name.
    Quick mode opens the latest publish directly; otherwise a second GUI
    lists all .mb publishes for the chosen element.
    """
    print("Final value: ", value[0])
    filename = value[0]
    project = Project()
    body = project.get_body(filename)
    self.body = body
    # Renamed from `type` — don't shadow the builtin.
    body_type = body.get_type()
    element = self.get_element_option(body_type, body)
    # BUG FIX: the None check now runs before the quick branch; previously
    # quick mode dereferenced a None element and raised AttributeError.
    if element is None:
        qd.warning("Nothing was cloned.")
        return
    if self.quick:
        latest = element.get_last_publish()
        if not latest:
            qd.error("There have been no publishes in this department.")
            return
        # publish tuple: index 3 holds the published filepath
        selected_scene_file = latest[3]
        self.open_scene_file(selected_scene_file)
        return
    self.publishes = element.list_publishes()
    print("publishes: ", self.publishes)
    if not self.publishes:
        qd.error(
            "There have been no publishes in this department. Maybe you meant model?"
        )
        return
    # make the list a list of strings, not tuples
    self.sanitized_publish_list = []
    for publish in self.publishes:
        path = publish[3]
        file_ext = path.split('.')[-1]
        if not file_ext == "mb":  # only Maya binary publishes are listed
            continue
        label = publish[0] + " " + publish[1] + " " + publish[2]
        self.sanitized_publish_list.append(label)
    self.item_gui = sfl.SelectFromList(l=self.sanitized_publish_list,
                                       parent=maya_main_window(),
                                       title="Select publish to clone")
    self.item_gui.submitted.connect(self.publish_selection_results)
def update_points_by_geo(self, out, a, b, c):
    """Copy three consecutive point positions from `out`'s geometry into
    the coordinate lists a, b and c (each mutated in place, indices 0-2).

    The starting point is the first point owning a primitive whose "path"
    attribute contains the geometry's own "path" attribute value; falls
    back to point 0 (with a warning) when no match is found.
    """
    geo = out.geometry()
    path = geo.attribValue("path")
    starting_point = None
    for point in geo.points():
        if any(str(path) in str(prim.attribValue("path")) for prim in point.prims()):
            starting_point = point
            break
    if not starting_point:
        qd.warning("Could not find the correct path for " + str(path) + ". Transform may be incorrect.")
        start_num = 0
    else:
        print("start point: ", starting_point)
        start_num = starting_point.number()
    all_points = geo.iterPoints()
    triple = (all_points[start_num], all_points[start_num + 1], all_points[start_num + 2])
    # a is the first point of this object in geo spreadsheet, b is second, c third.
    for dest, pt in zip((a, b, c), triple):
        pos = pt.position()
        dest[0] = pos[0]
        dest[1] = pos[1]
        dest[2] = pos[2]
def post_reference(self, value):
    """Reference the selected assets into the scene.

    value -- list of asset names. Actors prompt for model vs. rig; every
    other asset type always references from the model department. Paths
    come from each element's app filepath.
    """
    assets = value
    asset_filepaths = []
    # TODO: add crowd cycle capability
    crowd = False  # qd.yes_or_no("Are you referencing a crowd cycle?")
    if crowd:
        self.reference_crowd_cycle(asset_filepaths)
    else:
        for asset in assets:
            body = self.project.get_body(asset)
            # TODO: change the choice to depend more on each individual asset.
            # I.e. only ask model or rig for actors. Otherwise it's always
            # going to be model.
            if body.get_type() == AssetType.ACTOR:
                choice = qd.binary_option(
                    "Which department for " + str(asset) + "?",
                    "Model", "Rig", title="Select department")
            else:
                choice = True
            if choice is None:  # dialog cancelled
                qd.warning("Skipping " + str(asset))
                continue
            department = "model" if choice else "rig"
            filepath = body.get_element(department).get_app_filepath()
            if not filepath:
                qd.warning("No publish exists for " + str(asset) + " in " + str(department) + ". Skipping.")
                continue
            asset_filepaths.append(filepath)
    print("files for reference: ", asset_filepaths)
    self.reference_asset(asset_filepaths)
def export_cameras(self, shot, startFrame, endFrame):
    """Export every non-persp camera in the scene to an Alembic cache.

    shot -- shot body providing the camera element / cache directory.
    startFrame, endFrame -- frame range for the Alembic export.
    Returns the list of written .abc filepaths.
    """
    cam_list = mc.listCameras(p=True)
    if u'persp' in cam_list:
        cam_list.remove(u'persp')
    print("cam list: ", cam_list)
    print("shot", str(shot))
    cam_element = shot.get_element(Department.CAMERA, force_create=True)
    cache_dir = cam_element.get_cache_dir()
    print("cache dir: ", cache_dir)
    files = []
    for cam_name in cam_list:
        cameras = pm.ls(cam_name)
        camera = cameras[0]
        root = self.get_parent_root_string(camera)
        root_strings = [root]
        destination = os.path.join(cache_dir, str(cam_name) + ".abc")
        command = self.buildAlembicCommand(destination, startFrame, endFrame,
                                           geoList=root_strings)
        # BUG FIX: was a Python-2 print statement; every other call in this
        # file uses the print() function.
        print("Export Alembic command: ", command)
        try:
            pm.Mel.eval(command)
        except Exception:  # narrowed from a bare except
            qd.warning(
                "No alembic exported for " + str(camera) +
                ". Make sure that there is only one top-level group in the outliner."
            )
            continue
        files.append(destination)
    return files
def get_path_point_starting_num(self, geo, path):
    """Return the point number of the first point in `geo` owning a
    primitive whose "path" attribute contains `path`.

    Falls back to 0 (with a warning) when no matching point exists.
    """
    target = str(path)
    starting_point = None
    for point in geo.points():
        if any(target in str(prim.attribValue("path")) for prim in point.prims()):
            starting_point = point
            break
    if not starting_point:
        qd.warning("Could not find the correct path for " + str(path) + ". Transform may be incorrect.")
        return 0
    print("start point: ", starting_point)
    return starting_point.number()
def publish_src_node_to_department(self, src, node, department, user, comment):
    """Save an HDA node's definition and lock it to that definition.

    src -- filepath of the HDA library; nothing happens unless it exists.
    node -- the HDA node to publish.
    department, user, comment -- publish metadata; currently unused here
    but kept for interface compatibility with callers.
    """
    if not os.path.exists(src):
        return
    try:
        # Save node definition--this is the same as the Save Node Type menu
        # option. We get the definition of the selected hda and call
        # updateFromNode on it, passing in the selected hda (not the other
        # way around).
        node.type().definition().updateFromNode(node)
    # BUG FIX: `except hou.OperationFailed, e:` is Python-2-only syntax;
    # `as e` works on both Python 2.6+ and 3.
    except hou.OperationFailed as e:
        qd.error('There was a problem publishing the HDA to the pipeline.\n')
        print(str(e))
        return
    try:
        node.matchCurrentDefinition()  # this function locks the node for editing.
    except hou.OperationFailed as e:
        qd.warning(
            'There was a problem while trying to match the current definition. It\'s not a critical problem. Look at it and see if you can resolve the problem. Publish was successful.'
        )
        print(str(e))
def reference_asset(self, filepath_list):
    """Reference each existing filepath, asking the user how many copies
    of each asset to bring in.

    filepath_list -- list of publish filepaths (or None to do nothing).
    The namespace for each copy is the path component after "assets/".
    """
    if filepath_list is None:
        return
    for path in filepath_list:
        print("Path: ", path)
        if os.path.exists(path):
            print(path, " exists")
            _, part_two = path.split("assets/")
            asset = part_two.split("/")[0]
            while True:
                try:
                    num_references = int(
                        qd.input("How many copies of " + str(asset) +
                                 " do you want to reference?"))
                # Narrowed from a bare except: int() on a non-numeric or
                # None answer raises ValueError/TypeError.
                except (ValueError, TypeError):
                    qd.warning("Invalid input for " + str(asset) + ". Try again.")
                    continue
                break
            for i in range(num_references):
                pm.system.createReference(path, namespace=asset)
        else:
            # BUG FIX: was qd.warning(path, " doesn't exist") — two positional
            # args, inconsistent with every other single-message warning call.
            qd.warning(path + " doesn't exist")
def scene_prep():
    """Best-effort scene cleanup before publishing: clear history, freeze
    transforms, delete image planes and group the top level. Each step is
    independent; a failing step only emits a warning."""
    def _attempt(step, failure_message):
        # Run one cleanup step, downgrading any failure to a warning.
        try:
            step()
        except:
            qd.warning(failure_message)

    _attempt(lambda: clear_construction_history(),
             "Clear construction history failed. There may be something unusual in the history that's causing this.")
    _attempt(lambda: freeze_transformations(),
             "Freeze transform failed. There may be 1+ keyframed values in object. Remove all keyframed values and expressions from object.")
    _attempt(lambda: delete_image_planes(),
             "Delete image planes failed.")
    _attempt(lambda: group_top_level(),
             "Group top level failed.")
def scene_prep(quick_publish, body=None, department=None):
    # Prepare the Maya scene for publishing: optionally check for unsaved
    # changes, delete stray cameras (for non-shot bodies), and run the
    # history/transform cleanup unless the body/department exempts it.
    # NOTE(review): `body` defaults to None but is dereferenced
    # unconditionally below (body.is_shot(), body.get_type()) — looks like
    # callers always pass a body; confirm before relying on the default.
    if quick_publish:
        print("skipping check for unsaved changes")
    else:
        check_unsaved_changes()
        # save_scene_file()
    # Rigs, shots and sets keep their history/transforms intact.
    freeze_and_clear = True
    if department == Department.RIG:
        freeze_and_clear = False
    if body.is_shot() or body.get_type() == AssetType.SET:
        freeze_and_clear = False
    if not body.get_type() == AssetType.SHOT:
        # delete cameras — they cause problems if left inside an asset.
        cam_list = pm.ls(ca=True)
        print("deleting cameras:", cam_list)
        for cam in cam_list:
            # Default viewport cameras are always kept.
            if str(cam) == "perspShape" or str(cam) == "topShape" or str(
                    cam) == "frontShape" or str(cam) == "sideShape":
                continue
            cam_response = qd.yes_or_no(
                "Camera " + str(cam) +
                " found in scene. Cameras will cause problems if left in the asset. \n\nProceed to delete this camera?"
            )
            if cam_response:
                # Climb to the topmost ancestor whose name contains
                # "camera" so the whole camera group is deleted, not just
                # the shape.
                parents = cam.listRelatives(p=True)
                while parents:
                    if "camera" in str(parents[0]):
                        cam = parents[0]
                    else:
                        break
                    parents = cam.listRelatives(p=True)
                print("parents: ", parents)
                pm.delete(cam)
    if freeze_and_clear:
        print("clearing construction history")
        try:
            clear_construction_history()
        except:
            qd.warning(
                "Clear construction history failed. There may be something unusual in the history that's causing this."
            )
        try:
            freeze_transformations()
        except:
            qd.warning(
                "Freeze transform failed. There may be 1+ keyframed values in object. Remove all keyframed values and expressions from object."
            )
    # NOTE(review): original indentation was lost; image-plane deletion and
    # top-level grouping are reconstructed as unconditional steps (matching
    # the simpler scene_prep() variant in this file) — confirm.
    try:
        delete_image_planes()
    except:
        qd.warning("Delete image planes failed.")
    try:
        group_top_level()
    except:
        qd.warning("Group top level failed.")
def set_results(self, value):
    # Publish a Houdini set: reconcile the set's children against
    # whole_set.json, rewrite each child's per-prop JSON (version + the
    # a/b/c reference-point coordinates), clear the set-dressing
    # transforms, and rewrite whole_set.json.
    # value -- list whose first entry is the set name.
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is for byte-compatibility.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    #migrate transforms for all the children to set_dressing_transform
    self.update_set_dressing_transform(set)
    #create children list (list of houdini objects)
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    ''' The idea here is to get the set data from whole_set.json, get the set data from Houdini, and then compare the two. Here are the possible scenarios: 1. There are items in the JSON file that aren't in the Houdini Set -remove the item from the Json file -remove the item's json files 2. There are items in the Houdini Set that aren't in the JSON file -Add those to the JSON file '''
    items_to_delete = []
    set_data = []
    items_in_set = []
    items_to_delete, set_data, items_in_set = self.get_set_comparable_lists(
        children, set_file)
    self.delete_asset_json(items_to_delete, set_name)
    # TODO: To allow adding multiple copies of the same prop to a set in houdini, we'll want to add as many copies to the whole_set.json file
    # for child_name in child_names:
    #     child = inside.node(child_name) # get the child node
    #     inside = child.node("inside")
    #     modify = inside.node("modify")
    #     modify_name = modify.type().name()
    #     name = modify_name.split("_")[0].lower()
    #
    #     if name not in items_in_set:
    #         set_data.append
    print("starting to work on children\n")
    for child in children:
        #find if it is scaled (set to big scale) and initialize variables----------------------------------------------------------
        #inside
        #import_node
        #out
        #set_transform
        #current_version
        #name
        isScaled = False
        print("child: " + str(child))
        print("current set_data: " + str(set_data))
        if child.type().name() == "dcc_geo":
            inside = child.node("inside")
            import_node = child.node("import")
            # Temporarily disable big-scale so the exported coordinates are
            # in unscaled space; restored at the end of this iteration.
            if child.parm("Scale_Object").evalAsInt() == 1:
                child.parm("Scale_Object").set(0)
                isScaled = True
        else:
            # Non-dcc_geo children nest the geo one level deeper.
            inside = child.node("inside")
            geo = inside.node("geo")
            inside = geo.node("inside")
            import_node = geo.node("import")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        current_version = child.parm("version_number").evalAsInt()
        #need a asset number or letter (I honestly just need to name it something and have that reflected in houdini)
        #import_number = child.parm("import_number").evalAsInt
        name = child.parm("asset_name").evalAsString()
        #---------------------------------------------------------------------------------------------------------------------------
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        cache_dir = os.path.join(Project().get_assets_dir(), set_name,
                                 "model", "main", "cache")
        print("filepath: ", cache_dir)
        # Next free JSON version for this prop in the set's cache dir.
        latest_version, version_string = self.body.version_prop_json(
            name, cache_dir)
        print('latest version: ', latest_version)
        new_version = latest_version
        latest_version -= 1
        prop_file = os.path.join(
            cache_dir, str(name) + "_" + str(current_version) + ".json")
        print("prop file: ", prop_file)
        #will have to change items_in_set to be checked
        if name in items_in_set:
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                print("No valid JSON file for " + str(name) + ". Skipping changes made to this asset.")
                continue
            # Bump the version recorded in whole_set.json for this prop.
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    if set_item['version_number'] <= current_version:
                        print("updating ", set_item, " with version ",
                              new_version)
                        set_item['version_number'] = new_version
                    break
        else:
            # create blank prop data and add it to the set
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            print("appended set_data: " + str(set_data))
            new_version = 0
            items_in_set.append(name)
        print("current set_data: " + str(set_data))
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3, representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        self.update_points_by_geo(out, prop_data['path'], a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        prop_data['version_number'] = new_version
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        print("")
        self.clear_transform(set_transform)
        self.set_space(child, set_name, name, new_version)
        if isScaled:
            child.parm("Scale_Object").set(1)
    #reloading the new data that was written
    # NOTE(review): `import_node` here is whatever the last loop iteration
    # left behind; an empty set leaves it undefined, which the except below
    # absorbs (its message says as much).
    try:
        read_from_json = import_node.node("read_from_json")
        read_from_json.parm("reload").pressButton()
    except:
        print("no nodes are in the set, cannot read from JSON")
    #rewriting the whole_set json file
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")
def exportReferences(self, destination, tag=None, startFrame=1, endFrame=1):
    """Export every loaded top-level reference to an Alembic file.

    destination -- directory for the .abc files.
    tag -- optional flag name; when given, only refs whose root (or a
    child) carries the flag are exported.
    startFrame, endFrame -- frame range for the export.
    Returns the list of written .abc filepaths (None if nothing loaded).
    """
    selection = get_loaded_references()
    if selection is None:
        return
    abcFiles = []
    print("destination: ", destination)
    for ref in selection:
        try:
            rootNode = get_root_node_from_reference(ref)
        except Exception:  # narrowed from a bare except
            qd.warning("Could not find " + str(ref) + " in scene. Skipping.")
            continue
        print("root node: ", rootNode)
        if tag:
            if node_is_tagged_with_flag(rootNode, tag):
                print("root node is tagged: " + str(rootNode))
            elif children_tagged_with_flag(rootNode, tag):
                rootNode = get_first_child_with_flag(rootNode, tag)
                print("child node is tagged for: " + str(rootNode))
            else:
                print("ref is not tagged: " + str(ref))
                continue
        name = str(ref.associatedNamespace(baseName=True))
        parent = ref.parentReference()
        print("ref: ", ref)
        if parent:
            # Nested references are covered by their top-level parent.
            continue
        # This is either an animated prop, a char, or a set. Export an
        # alembic for each accordingly, with the correct file name.
        refAbcFilePath = os.path.join(destination, name + ".abc")
        print("ref abc filepath: ", refAbcFilePath)
        root = self.get_parent_root_string(rootNode)
        root_strings = [root]
        command = self.buildAlembicCommand(refAbcFilePath, startFrame,
                                           endFrame, geoList=root_strings)
        # BUG FIX: was a Python-2 print statement; the rest of the file uses
        # the print() function.
        print("Export Alembic command: ", command)
        try:
            pm.Mel.eval(command)
        except Exception:
            qd.warning(
                "No alembic exported for " + str(rootNode) +
                ". Make sure that there is only one top-level group in the outliner."
            )
            continue
        abcFiles.append(refAbcFilePath)
    return abcFiles
def publish_hda(self, value):
    """Publish the selected HDA's inner department nodes (modify,
    material, hair, cloth) to the pipeline.

    value -- user-supplied comment (may be empty).
    Returns a summary string of the departments published.
    """
    project = Project()
    environment = Environment()
    user = environment.get_user()
    comment = value
    if not comment:
        # BUG FIX: the original built this default message from `user` and
        # `department` before either local was assigned, raising
        # UnboundLocalError whenever the comment was empty.
        comment = "publish by " + str(user.get_username())
    selectedHDA = self.selectedHDA
    if selectedHDA is None:
        print("No HDA selected!")
        return  # BUG FIX: original fell through and dereferenced None
    src = self.src
    body = self.body
    if body is None:
        qd.error("Asset not found in pipe.")
        return
    asset_type = body.get_type()
    inside = selectedHDA.node("inside")
    if inside is None:
        print("No inside node found!")
        return  # BUG FIX: original fell through and dereferenced None
    modify = inside.node("modify")
    material = inside.node("material")
    hair = inside.node("hair")
    cloth = inside.node("cloth")
    if asset_type == AssetType.ACTOR:
        # Actors nest modify/material one level deeper, under geo/inside.
        geo = inside.node("geo")
        geo_inside = geo.node("inside")
        modify = geo_inside.node("modify")
        material = geo_inside.node("material")
    departments_to_publish = []
    if modify is not None:
        print("Found modify")
        departments_to_publish.append("modify")
    if material is not None:
        print("Found material")
        departments_to_publish.append("material")
    if hair is not None:
        departments_to_publish.append("hair")
    if cloth is not None:
        departments_to_publish.append("cloth")
    for department in departments_to_publish:
        inside = self.get_inside_node(asset_type, department, self.selectedHDA)
        node = inside.node(department)
        src = node.type().definition().libraryFilePath()
        try:
            self.publish_src_node_to_department(src, node, department, user,
                                                comment)
        except Exception as e:
            print(str(e))
            qd.warning("Something went wrong, but it's probably okay.")
    success_message = "Success! Published to " + str(departments_to_publish)
    self.print_success_message(success_message)
    return "published to " + str(departments_to_publish)
class Publisher:
    """Publishes Houdini HDAs (content nodes, assets and tools) to the
    pipeline."""

    def __init__(self):
        self.dcc_geo_departments = [Department.MODIFY, Department.MATERIAL]
        self.item_gui = None
        self.node_name = None

    def publish_content_hda(self, node):
        """Ask for a comment, then publish `node` via the GUI callback."""
        self.node = node
        self.comment = qd.HoudiniInput(parent=houdini_main_window(),
                                       title="Any comments?")
        self.comment.submitted.connect(self.publish_content_hda_comment)

    def publish_content_hda_comment(self, value):
        """GUI callback: publish self.node with the submitted comment.

        The node's type name is expected to be "<asset>_<department>".
        """
        node = self.node
        node_name = node.type().name()
        index = node_name.rfind('_')
        asset_name = node_name[:index]
        department = node_name[index + 1:]
        self.body = Project().get_body(asset_name)
        src = node.type().definition().libraryFilePath()
        user = Environment().get_user()
        comment = value
        if not comment:
            # BUG FIX: the original built this default message from `user`
            # and `department` before either local was assigned, raising
            # UnboundLocalError whenever the comment was empty. The default
            # is now computed after both are known.
            comment = "published by " + str(
                user.get_username()) + " in department " + str(department)
        self.publish_src_node_to_department(src, node, department, user,
                                            comment)
        success_message = "Success! Published " + asset_name + " to " + str(
            department)
        self.print_success_message(success_message)

    def publish_asset(self, node=None, name=None, inner=False):
        """Publish an asset HDA; with inner=True, walk up from an inside
        node to the enclosing asset node first."""
        self.departments = [
            Department.MODIFY, Department.MATERIAL, Department.HAIR,
            Department.CLOTH
        ]
        if node:
            if inner:
                # TODO: clean this up
                if node.type().name() == 'byu_inside' or node.type().name(
                ) == 'byu_objectinside':
                    node = node.parent()
                if node.parent().parent().type().name() == 'dcc_character':
                    node = node.parent().parent()
            self.node_name = node.parm("asset_name").eval()
            print("node: ", node)
            print("name: ", self.node_name)
            self.publish(selectedHDA=node)

    def publish_tool(self, node=None):
        """Publish a tool HDA (the selected node when none is given)."""
        if node is None:
            node = get_selected_node()
            if node is None:
                return
        node_path = node.path()
        name = node_path.split('/')[-1]
        tool_name = name
        tools = Project().list_hdas()
        if tool_name not in tools:
            qd.error("Tool not found in project. Try creating HDA instead.")
            # BUG FIX: the original fell through after this error and
            # published the unknown tool anyway.
            return
        try:
            node.type().definition().updateFromNode(node)
        # BUG FIX: `except hou.OperationFailed, e:` is Python-2-only syntax.
        except hou.OperationFailed as e:
            qd.error(
                'There was a problem publishing the HDA to the pipeline.\n',
                details=str(e))
            return
        try:
            node.matchCurrentDefinition()
        except hou.OperationFailed:
            qd.warning("Problem matching description.")
def set_results(self, value):
    # Publish a Houdini set: load whole_set.json, drop entries for props no
    # longer in the set, rewrite each remaining child's per-prop JSON
    # (version + a/b/c reference-point coordinates), clear the set-dressing
    # transforms, and rewrite whole_set.json.
    # value -- list whose first entry is the set name.
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is for byte-compatibility.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    set_data = []
    try:
        with open(set_file) as f:
            set_data = json.load(f)
    except Exception as error:
        qd.error("No valid JSON file for " + str(set_name))
        return
    # Names currently recorded in whole_set.json.
    items_in_set = []
    for item in set_data:
        item_name = item['asset_name']
        item_version = item['version_number']
        items_in_set.append(item_name)
    # Names of the actual Houdini children (first letter lower-cased).
    child_names = []
    for child in children:
        child_path = child.path()
        first_char_to_lower = lambda s: s[:1].lower() + s[1:] if s else ''
        name = child_path.split('/')[-1]
        name = first_char_to_lower(name)
        child_names.append(name)
    print("child names; ", child_names)
    # Drop JSON entries whose prop is no longer in the Houdini set.
    # NOTE(review): removing from set_data while iterating it can skip
    # elements when consecutive entries need removal — confirm.
    for item in set_data:
        if str(item['asset_name']) not in child_names:
            set_data.remove(item)
    # TODO: To allow adding multiple copies of the same prop to a set in houdini, we'll want to add as many copies to the whole_set.json file
    # for child_name in child_names:
    #     child = inside.node(child_name) # get the child node
    #     inside = child.node("inside")
    #     modify = inside.node("modify")
    #     modify_name = modify.type().name()
    #     name = modify_name.split("_")[0].lower()
    #
    #     if name not in items_in_set:
    #         set_data.append
    for child in children:
        if child.type().name() == "dcc_geo":
            inside = child.node("inside")
            import_node = child.node("import")
        else:
            # Non-dcc_geo children nest the geo one level deeper.
            inside = child.node("inside")
            geo = inside.node("geo")
            inside = geo.node("inside")
            import_node = geo.node("import")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        current_version = child.parm("version_number").evalAsInt()
        name = child.parm("asset_name").evalAsString()
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        # get transform parms: t is translate, r rotate and s scale (with associated x,y,z vals)
        tx, ty, tz = self.get_transform(set_transform, "tx", "ty", "tz")
        rx, ry, rz = self.get_transform(set_transform, "rx", "ry", "rz")
        sx, sy, sz = self.get_transform(set_transform, "sx", "sy", "sz")
        cache_dir = os.path.join(Project().get_assets_dir(), set_name,
                                 "model", "main", "cache")
        print("filepath: ", cache_dir)
        # Next free JSON version for this prop in the set's cache dir.
        latest_version, version_string = self.body.version_prop_json(
            name, cache_dir)
        print('latest version: ', latest_version)
        new_version = latest_version
        latest_version -= 1
        prop_file = os.path.join(
            cache_dir, str(name) + "_" + str(current_version) + ".json")
        print("prop file: ", prop_file)
        if name in items_in_set:
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                qd.warning("No valid JSON file for " + str(name) +
                           ". Skipping changes made to this asset.")
                continue
            # Bump the version recorded in whole_set.json for this prop.
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    if set_item['version_number'] <= current_version:
                        print("updating ", set_item, " with version ",
                              new_version)
                        set_item['version_number'] = new_version
                    break
        else:
            # Prop is new to the set: create blank prop data at version 0.
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            new_version = 0
            items_in_set.append(name)
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3, representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        self.update_points_by_geo(out, a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        prop_data['version_number'] = new_version
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        self.clear_transform(set_transform)
        self.set_space(child, set_name, name, new_version)
    # NOTE(review): reconstructed as after the loop (matching the newer
    # variant of this function, which wraps it in try/except for the
    # empty-set case) — confirm; `import_node` is the last child's.
    read_from_json = import_node.node("read_from_json")
    read_from_json.parm("reload").pressButton()
    # Rewrite the whole_set json file.
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")
# Fragment of an HDA-publish routine (enclosing def is outside this view).
# NOTE(review): Python 2 syntax throughout (`except X, e:`, octal 0660) —
# kept byte-identical; this chunk predates the py3-style code elsewhere.
# Publish flow: save the node definition, lock the node, publish the file
# into the element, then mirror it into the shared HDA directory.
element = body.get_element(department, Element.DEFAULT_NAME, force_create=True)
try:
    # Same as the Save Node Type menu option: write hda's current state
    # into its library definition.
    hda.type().definition().updateFromNode(hda)
except hou.OperationFailed, e:
    qd.error('There was a problem publishing the HDA to the pipeline.\n',
             details=str(e))
    return
try:
    # Locks the node to its (just-saved) definition.
    hda.matchCurrentDefinition()
except hou.OperationFailed, e:
    qd.warning('There was a problem while trying to match the current definition.',
               details=str(e))
dst = element.publish(user, hda_src, comment)
#Ensure file has correct permissions
try:
    os.chmod(dst, 0660)  # rw for owner and group
except:
    pass
# TODO: UGLY HOTFIX FOR OLD ASSEMBLY ASSETS for v1 backwards compatability
# Assembly/HDA departments embed the department in the saved filename.
saveFile = hdaName + "_" + Element.DEFAULT_NAME + ".hdanc" if department not in [
    Department.ASSEMBLY, Department.HDA
] else asset_name + "_" + department + "_" + Element.DEFAULT_NAME + ".hdanc"
dst = os.path.join(environment.get_hda_dir(), saveFile)
def set_results(self, value):
    # Oldest variant of the set-publish routine: reconcile whole_set.json
    # with the set's children, rewrite each child's per-prop JSON (version
    # cycles 0-9 + a/b/c reference-point coordinates), clear the
    # set-dressing transforms, and rewrite whole_set.json.
    # value -- list whose first entry is the set name.
    set_name = value[0]
    project = Project()
    self.body = project.get_body(set_name)
    obj = hou.node("/obj")
    # NOTE: `set` shadows the builtin; kept as-is for byte-compatibility.
    set = obj.node(set_name)
    if set is None:
        qd.error(
            "No set found with that name. Please check naming and try again."
        )
        return
    print("set: ", set)
    inside = set.node("inside")
    children = inside.children()
    set_file = os.path.join(Project().get_assets_dir(), set_name, "model",
                            "main", "cache", "whole_set.json")
    set_data = []
    try:
        with open(set_file) as f:
            set_data = json.load(f)
    except Exception as error:
        qd.error("No valid JSON file for " + str(set_name))
        return
    # Names currently recorded in whole_set.json.
    items_in_set = []
    for item in set_data:
        item_name = item['asset_name']
        item_version = item['version_number']
        items_in_set.append(item_name)
    # TODO: for each child, make sure that it exists in whole_set.json, or add it if it doesn't, or remove it if it does not
    child_names = []
    for child in children:
        child_path = child.path()
        name = child_path.split('/')[-1].lower()
        child_names.append(name)
    # Drop JSON entries whose prop is no longer in the Houdini set.
    # NOTE(review): removing from set_data while iterating it can skip
    # elements when consecutive entries need removal — confirm.
    for item in set_data:
        if str(item['asset_name']) not in child_names:
            set_data.remove(item)
    for child in children:
        print("child: ", child)
        inside = child.node("inside")
        out = inside.node("OUT")
        set_transform = inside.node("set_dressing_transform")
        child_path = child.path()
        name = child_path.split('/')[-1].lower()
        child_body = project.get_body(name)
        if child_body is None:
            qd.warning(
                str(name) +
                " not found in pipe. Please check that node is named correctly."
            )
            continue
        # get transform parms: t is translate, r rotate and s scale (with associated x,y,z vals)
        tx, ty, tz = self.get_transform(set_transform, "tx", "ty", "tz")
        rx, ry, rz = self.get_transform(set_transform, "rx", "ry", "rz")
        sx, sy, sz = self.get_transform(set_transform, "sx", "sy", "sz")
        latest_file, latest_version = self.body.get_latest_json_version(name)
        # Versions cycle through 0-9.
        if latest_version == int(9):
            new_version = 0
        else:
            new_version = int(latest_version) + 1
        prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(latest_version) + ".json")
        if name in items_in_set:
            print("set contains asset: " + str(name))
            try:
                with open(prop_file) as f:
                    prop_data = json.load(f)
            except Exception as error:
                qd.warning("No valid JSON file for " + str(name) +
                           ". Skipping changes made to this asset.")
                continue
            # Bump the version recorded in whole_set.json for this prop.
            for set_item in set_data:
                if str(set_item['asset_name']) == str(name):
                    set_item['version_number'] = new_version
                    break
        else:
            # Prop is new to the set: create blank prop data at version 0.
            print(str(name) + " not found in set file.")
            path = self.get_prim_path(out)
            prop_data = {
                "asset_name": name,
                "version_number": 0,
                "path": str(path),
                "a": [0, 0, 0],
                "b": [0, 0, 0],
                "c": [0, 0, 0]
            }
            set_data.append({"asset_name": str(name), "version_number": 0})
            new_version = 0
        new_prop_file = os.path.join(
            Project().get_assets_dir(), set_name, "model", "main", "cache",
            str(name) + "_" + str(new_version) + ".json")
        # get a b and c from prop_data file. Each is an array of size 3, representing x,y,z coords
        a = prop_data['a']
        b = prop_data['b']
        c = prop_data['c']
        self.update_points_by_geo(out, a, b, c)
        # put the updated coords back into prop_data
        prop_data['a'] = a
        prop_data['b'] = b
        prop_data['c'] = c
        # TODO: add a commit and a publish for this set
        print("prop data (updated): ", prop_data)
        updated_prop_data = json.dumps(prop_data)
        outfile = open(new_prop_file, "w")
        outfile.write(updated_prop_data)
        outfile.close()
        print("prop file updated for " + str(name))
        self.clear_transform(set_transform)
        self.update_version_number(child, new_version)
        # Reload this child's import from the freshly written JSON.
        import_node = child.node("import")
        read_from_json = import_node.node("read_from_json")
        read_from_json.parm("reload").pressButton()
    # Rewrite the whole_set json file.
    outfile = open(set_file, "w")
    print("set data: ", set_data)
    updated_set_data = json.dumps(set_data)
    outfile.write(updated_set_data)
    outfile.close()
    qd.info("Set " + str(set_name) + " published successfully!")