def run_textures(config):
    print("--------------------------------------------------------------------------------")
    print("textures -----------------------------------------------------------------------")
    print("--------------------------------------------------------------------------------")
    tool_cmd = tool_to_platform(config["tools"]["texturec"])
    for task in config["textures"]:
        files = get_task_files_containers(task)
        for f in files:
            copy_fmt = [".dds", ".pmv"]
            conv_fmt = [".png", ".jpg", ".tga", ".bmp", ".txt"]
            cont_fmt = [".txt"]
            fext = os.path.splitext(f[0])[1]
            # already compiled formats are copied straight to the output directory
            if fext in copy_fmt:
                util.copy_file_create_dir_if_newer(f[0], f[1])
            # source image formats are converted to .dds with texturec
            if fext in conv_fmt:
                export = export_config_for_file(f[0])
                dep_inputs = [f[0]]
                # container (.txt) files also pull in the files they reference as dependencies
                if fext in cont_fmt:
                    export = export_config_for_directory(f[0], "osx")
                    dep_inputs = get_container_dep_inputs(f[0], dep_inputs)
                dst = util.change_ext(f[1], ".dds")
                if "format" not in export.keys():
                    export["format"] = "RGBA8"
                # build the dependency info up front so it can drive the up-to-date check
                # and be written back out after a successful conversion
                dep = dependencies.create_dependency_info(dep_inputs, [dst])
                if not dependencies.check_up_to_date_single(dst, dep):
                    util.create_dir(dst)
                    cmd = tool_cmd + " "
                    cmd += "-f " + f[0] + " "
                    cmd += "-t " + export["format"] + " "
                    if "cubemap" in export.keys() and export["cubemap"]:
                        cmd += " --cubearray "
                    cmd += "-o " + dst
                    print("texturec " + f[0])
                    subprocess.call(cmd, shell=True)
                    dependencies.write_to_file_single(dep, util.change_ext(dst, ".json"))
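For reference, run_textures only touches a handful of keys on `config`: the texturec tool path and a list of texture tasks whose files resolve to (source, destination) pairs. The exact layout of a task entry comes from get_task_files_containers, so the fragment below is an assumed shape for illustration, not the build system's actual schema:

```python
# Hypothetical config fragment; only the keys read by run_textures are shown,
# and the task entry layout ("files" with src/dst pairs) is assumed.
config = {
    "tools": {
        "texturec": "tools/bin/texturec"  # resolved per platform by tool_to_platform
    },
    "textures": [
        {
            "files": [
                ["assets/textures", "data/textures"]  # assumed (source, destination) pair
            ]
        }
    ]
}
```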
def run_jsn_thread(f, ii, config, jsn_tasks):
    cmd = python_tool_to_platform(config["tools"]["jsn"])
    cmd += " -i " + f[0] + " -o " + f[1] + ii
    # re-run if the source file, the jsn tool itself, or any imported file changed
    imports = jsn.get_import_file_list(f[0], jsn_tasks["import_dirs"])
    inputs = [f[0], config["tools"]["jsn"]]
    for im in imports:
        inputs.append(im)
    dep = dependencies.create_dependency_info(inputs, [f[1]], cmd)
    if not dependencies.check_up_to_date_single(f[1], dep):
        subprocess.call(cmd, shell=True)
        dependencies.write_to_file_single(dep, util.change_ext(f[1], ".dep"))
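run_jsn_thread is written to be dispatched once per file, so a driver can fan the work out across a pool. The sketch below is a minimal assumed driver: ThreadPoolExecutor usage, the get_task_files helper, the shape of jsn_tasks["files"], and the empty ii argument (extra flags appended to the jsn command line) are all assumptions, not part of the build code above.

```python
from concurrent.futures import ThreadPoolExecutor

def run_jsn(config, jsn_tasks):
    ii = ""  # extra arguments appended to the jsn command line (assumed empty here)
    with ThreadPoolExecutor(max_workers=8) as pool:
        for task in jsn_tasks["files"]:
            # get_task_files is a hypothetical helper yielding (source, destination) pairs
            for f in get_task_files(task):
                pool.submit(run_jsn_thread, f, ii, config, jsn_tasks)
```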
def write_to_file_single(deps, file):
    file = util.change_ext(file, ".dep")
    with open(file, 'wb+') as output_d:
        output_d.write(bytes(json.dumps(deps, indent=4), 'UTF-8'))
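The deps dict is written verbatim, so whatever structure the callers build is exactly what check_up_to_date_single reads back. Based on the fields read in check_up_to_date_single below, a .dep file for a single texture might look like the following; the paths, timestamp field and cmdline are illustrative only:

```python
# Illustrative only: the "cmdline"/"files" layout matches what
# check_up_to_date_single expects to read back; the values are made up.
example_deps = {
    "cmdline": "texturec -f assets/textures/env.png -t RGBA8 -o data/textures/env.dds",
    "files": {
        "data/textures/env.dds": [
            {"name": "assets/textures/env.png", "timestamp": 1554000000.0}
        ]
    }
}
# write_to_file_single(example_deps, "data/textures/env.dds")  # written as data/textures/env.dep
```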
def check_up_to_date_single(dest_file, deps):
    dep_filename = util.change_ext(dest_file, ".dep")
    if not os.path.exists(dep_filename):
        print("new file: " + os.path.basename(dest_file), flush=True)
        return False
    if not os.path.exists(dest_file):
        print("new file: " + os.path.basename(dest_file), flush=True)
        return False
    # take a timestamp from the output: for a directory, from its listed contents
    if os.path.isdir(dest_file):
        files = os.listdir(dest_file)
        for f in files:
            j = os.path.join(dest_file, f)
            dep_ts = os.path.getmtime(j)
    else:
        dep_ts = os.path.getmtime(dest_file)
    with open(dep_filename) as file:
        d_str = file.read()
    d_json = json.loads(d_str)
    # check for changes to cmdline
    if "cmdline" in deps:
        if "cmdline" not in d_json.keys() or deps["cmdline"] != d_json["cmdline"]:
            print(dest_file + " cmdline changed", flush=True)
            return False
    # check multi cmdlines
    if "cmdlines" in deps:
        if "cmdlines" not in d_json.keys():
            return False
        if deps["cmdlines"] != d_json["cmdlines"]:
            return False
    # check for missing outputs
    for output in d_json["files"]:
        if not os.path.exists(output):
            print(os.path.basename(output) + ": is missing, out-of-date", flush=True)
            return False
    # check for inputs added since the .dep file was written
    dep_files = []
    for output in d_json["files"]:
        for i in d_json["files"][output]:
            dep_files.append(i["name"])
    for output in deps["files"]:
        for i in deps["files"][output]:
            if i["name"] not in dep_files:
                print(os.path.basename(dest_file) + ": has new inputs", flush=True)
                return False
    # check timestamps of existing inputs against each recorded output
    for d in d_json["files"]:
        dest_file = sanitize_filename(d)
        for input_file in d_json["files"][d]:
            # output file does not exist yet
            if not os.path.exists(dest_file):
                print("new file: " + os.path.basename(dest_file), flush=True)
                return False
            # an input is newer than the output, so the output is out of date
            if os.path.getmtime(input_file["name"]) > dep_ts:
                print(os.path.basename(dest_file) + ": is out-of-date.", flush=True)
                return False
    print(os.path.basename(dest_file) + ": up-to-date", flush=True)
    return True
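Putting the pieces together, a caller builds the dependency info, asks check_up_to_date_single whether the output needs rebuilding, and only writes the .dep file after the tool has run, which is the same guard pattern used by the task runners above. This is a sketch: the paths are illustrative and create_dependency_info is assumed to return the "cmdline"/"files" structure shown earlier.

```python
# Hedged round-trip sketch of the rebuild guard; paths and command are illustrative.
src = "assets/textures/env.png"
dst = "data/textures/env.dds"
cmd = "texturec -f " + src + " -t RGBA8 -o " + dst
deps = dependencies.create_dependency_info([src], [dst], cmd)
if not dependencies.check_up_to_date_single(dst, deps):
    subprocess.call(cmd, shell=True)
    dependencies.write_to_file_single(deps, dst)  # stored as data/textures/env.dep
```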