def run_jsn_thread(f, ii, config, jsn_tasks):
    """Compile a single jsn source f[0] to output f[1], skipping when up to date.

    ii is appended verbatim to the command line (extra import-dir args,
    presumably — confirm against caller). Dependencies tracked are the source
    file, the jsn tool itself and every transitively imported jsn file.
    """
    jsn_tool = config["tools"]["jsn"]
    cmd = python_tool_to_platform(jsn_tool) + " -i " + f[0] + " -o " + f[1] + ii
    # dependency inputs: source, the tool, and all jsn imports of the source
    dep_inputs = [f[0], jsn_tool]
    dep_inputs.extend(jsn.get_import_file_list(f[0], jsn_tasks["import_dirs"]))
    dep = dependencies.create_dependency_info(dep_inputs, [f[1]], cmd)
    if dependencies.check_up_to_date_single(f[1], dep):
        return
    subprocess.call(cmd, shell=True)
    dependencies.write_to_file_single(dep, util.change_ext(f[1], ".dep"))
def run_textures(config):
    """Build every texture task in config["textures"]: pre-compiled formats are
    copied through unchanged; source images (and .txt container files) are
    converted to .dds with the texturec tool, guarded by a dependency check."""
    print(
        "--------------------------------------------------------------------------------"
    )
    print(
        "textures -----------------------------------------------------------------------"
    )
    print(
        "--------------------------------------------------------------------------------"
    )
    tool_cmd = tool_to_platform(config["tools"]["texturec"])
    for task in config["textures"]:
        # f is a (source, destination) pair — presumably resolved per task; confirm
        # against get_task_files_containers.
        files = get_task_files_containers(task)
        for f in files:
            copy_fmt = [".dds", ".pmv"]        # already compiled: copy as-is
            conv_fmt = [".png", ".jpg", ".tga", ".bmp", ".txt"]
            cont_fmt = [".txt"]                # container describing multiple images
            fext = os.path.splitext(f[0])[1]
            if fext in copy_fmt:
                util.copy_file_create_dir_if_newer(f[0], f[1])
            if fext in conv_fmt:
                export = export_config_for_file(f[0])
                dep_inputs = [f[0]]
                # .txt is in both conv_fmt and cont_fmt on purpose: the container
                # branch replaces the per-file export config and widens the inputs.
                if fext in cont_fmt:
                    export = export_config_for_directory(f[0], "osx")
                    dep_inputs = get_container_dep_inputs(f[0], dep_inputs)
                dst = util.change_ext(f[1], ".dds")
                if not dependencies.check_up_to_date_single(dst):
                    if "format" not in export.keys():
                        export["format"] = "RGBA8"  # default output format
                    dep_outputs = [dst]
                    dep_info = dependencies.create_dependency_info(
                        dep_inputs, dep_outputs)
                    dep = dict()
                    dep["files"] = list()
                    dep["files"].append(dep_info)
                    util.create_dir(dst)
                    cmd = tool_cmd + " "
                    cmd += "-f " + f[0] + " "
                    cmd += "-t " + export["format"] + " "
                    if "cubemap" in export.keys() and export["cubemap"]:
                        cmd += " --cubearray "
                    cmd += "-o " + dst
                    print("texturec " + f[0])
                    subprocess.call(cmd, shell=True)
                    # dependency metadata written beside the output as .json
                    dependencies.write_to_file_single(
                        dep, util.change_ext(dst, ".json"))
def process_single_file(f):
    """Copy or nvcompress one texture file into the platform data directory.

    Reads enclosing-scope state: root, source, current_directory,
    platform_data_dir, dest_dir, dependency_info and nvcompress.
    """
    full_src = os.path.join(root, f)
    supported, full_dst = get_output_name(source, full_src)
    if not supported:
        return
    export_info = dependencies.get_export_config(os.path.join(root, f))
    # data-relative output name, used as the dependency lookup key
    rel_name = full_dst.replace(current_directory, "").replace(
        platform_data_dir, "")
    entry = dependencies.create_dependency_info(
        [os.path.join(os.getcwd(), full_src)], [rel_name])
    dependency_info[dest_dir]["files"].append(entry)
    if dependencies.check_up_to_date(dependency_info[dest_dir], rel_name):
        print(rel_name + " already up to date")
        return
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    if f.endswith((".dds", ".pmv")):
        # already in a runtime-ready format: straight copy
        print("copying " + f)
        shutil.copy(full_src, full_dst)
    else:
        opts = options_from_export(export_info, full_src)
        print("compress and generate mips " + full_src)
        cmdline = " ".join([nvcompress, opts, "-silent", full_src, full_dst])
        print(cmdline)
        subprocess.call(cmdline, shell=True)
def process_collection(container):
    """Assemble the cubemap faces listed in a container's export config into a
    single cubemap file via nvassemble, guarded by a dependency check.

    Reads enclosing-scope state: source, current_directory, platform_data_dir,
    dependency_info and nvassemble.
    """
    supported, cube_out = get_output_name(source, container)
    if not supported:
        return
    export_info = dependencies.get_export_config(container)
    if "cubemap_faces" not in export_info.keys():
        print("missing cubemap_faces array in export")
        return
    faces = [os.path.join(container, face)
             for face in export_info["cubemap_faces"]]
    # data-relative output name, used as the dependency lookup key
    rel_name = cube_out.replace(current_directory, "").replace(
        platform_data_dir, "")
    out_dir = os.path.dirname(cube_out)
    entry = dependencies.create_dependency_info(faces, [rel_name])
    dependency_info[out_dir]["files"].append(entry)
    if dependencies.check_up_to_date(dependency_info[out_dir], rel_name):
        print(rel_name + " already up to date")
        return
    print("assembling " + cube_out)
    cmdline = nvassemble + "".join(" " + face for face in faces)
    cmdline += " -o " + cube_out
    subprocess.call(cmdline, shell=True)
# NOTE(review): fragment of a larger per-file model-build routine; `file` and
# `mesh_opt` come from the enclosing scope, and the ".dae" branch is truncated
# at the end of this view.
f = file
root = os.path.dirname(f)
[fnoext, fext] = os.path.splitext(file)
out_dir = helpers.build_dir
current_filename = os.path.basename(file)
# publish current file/output state through the helpers module
helpers.current_filename = current_filename
helpers.build_dir = out_dir
helpers.output_file = helpers.pmm_file()
base_out_file = os.path.join(out_dir, os.path.basename(fnoext))
depends_dest = base_out_file
util.create_dir(out_dir)
# build different model formats
if file.endswith(".obj"):
    dependency_inputs = get_dep_inputs([os.path.join(os.getcwd(), f)])
    dependency_outputs = [depends_dest + ".pmm"]
    dep = dependencies.create_dependency_info(dependency_inputs,
                                              dependency_outputs)
    if not dependencies.check_up_to_date_single(depends_dest + ".pmm", dep):
        parse_obj.write_geometry(os.path.basename(file), root)
        helpers.output_file.write(base_out_file + ".pmm")
        # optional mesh optimiser pass over the generated .pmm
        if len(mesh_opt) > 0:
            cmd = " -i " + base_out_file + ".pmm"
            p = subprocess.Popen(mesh_opt + cmd, shell=True)
            p.wait()
        dependencies.write_to_file_single(dep, depends_dest + ".dep")
elif file.endswith(".dae"):
    # collada path: accumulators filled further on (continues past this view)
    joint_list = []
    transform_list = []
    parent_list = []
    geometries = []
    type_list = []
# NOTE(review): loop-body fragment (the `continue` implies an enclosing loop
# over `file`); `deps`, `file`, `f`, `fnoext`, `out_dir` and `root` come from
# the surrounding scope, and the ".dae" branch is truncated at the end of
# this view.
base_out_file = os.path.join(out_dir, os.path.basename(fnoext))
depends_dest = base_out_file
deps["dir"] = out_dir
util.create_dir(out_dir)
if file.endswith(".obj"):
    dependency_inputs = [os.path.join(os.getcwd(), f)]
    dependency_outputs = [depends_dest + ".pmm"]
    # add dependency to the scripts for obj
    main_file = os.path.realpath(__file__)
    dependency_inputs.append(os.path.realpath(__file__))
    dependency_inputs.append(
        main_file.replace("build_models.py",
                          os.path.join("models", "parse_obj.py")))
    file_info = dependencies.create_dependency_info(
        dependency_inputs, dependency_outputs)
    deps["files"].append(file_info)
    if dependencies.check_up_to_date_single(depends_dest + ".pmm"):
        continue
    parse_obj.write_geometry(os.path.basename(file), root)
    helpers.output_file.write(base_out_file + ".pmm")
elif file.endswith(".dae"):
    # collada path: accumulators filled further on (continues past this view)
    joint_list = []
    transform_list = []
    parent_list = []
    geometries = []
    type_list = []
    geom_attach_data_list = []
    material_attach_data_list = []
    material_symbol_list = []
    node_name_list = []