def lock_task(name, silent=True):
    """Acquire a file-based lock for the named task.

    Creates ``toolchain/build/lock/<name>.lock`` and keeps an open handle to
    it in the module-level ``locked_tasks`` dict until ``unlock_task`` (not
    shown here) releases it. If another process holds the lock (the file
    cannot be removed), busy-waits in 0.5s steps until it is released.

    :param name: task identifier used for the lock file name
    :param silent: when False, progress dots are printed while waiting
    """
    path = get_make_config().get_path(f"toolchain/build/lock/{name}.lock")
    ensure_file_dir(path)
    await_message = False
    if os.path.exists(path):
        while True:
            try:
                # removal succeeds once the other holder's handle is closed
                if os.path.exists(path):
                    os.remove(path)
                break
            except IOError:
                if not await_message:
                    await_message = True
                    if not silent:
                        sys.stdout.write(
                            f"task {name} is locked by another process, waiting for it to unlock."
                        )
                    # the lock is held by THIS process: waiting would never end
                    if name in locked_tasks:
                        error("ERROR: dead lock detected", code=-2)
                if not silent:
                    sys.stdout.write(".")
                    sys.stdout.flush()
                time.sleep(0.5)
    if await_message:
        if not silent:
            print("")
    # a single open in "w" mode creates/truncates the file and keeps the
    # handle alive, which is what marks the lock as held by this process
    locked_tasks[name] = open(path, "w")
def build_script(source, target):
    """Build a single script.

    A plain file is copied straight to *target*; a directory containing an
    ``.includes`` file is delegated to ``build_includes_dir``. Returns 0 on a
    plain copy, the result of ``build_includes_dir`` for an includes dir, and
    (implicitly) None for a directory without an ``.includes`` file.
    """
    if os.path.isfile(source):
        ensure_file_dir(target)
        copy_file(source, target)
        return 0
    if os.path.isfile(os.path.join(source, ".includes")):
        return build_includes_dir(source, target)
def write_build_config(self):
    """Serialize ``self.build_config`` to ``build.config`` in ``self.directory``.

    Does nothing when no build config is loaded. A directory occupying the
    target path is cleared and removed first so the JSON file can be written.
    """
    if self.build_config is None:
        return
    config_path = os.path.join(self.directory, "build.config")
    # a stray directory would shadow the config file — clear it out of the way
    if os.path.isdir(config_path):
        clear_directory(config_path)
        os.remove(config_path)
    ensure_file_dir(config_path)
    with open(config_path, "w", encoding="utf-8") as out:
        out.write(json.dumps(self.build_config, indent=" " * 4))
def task_build_package():
    """Zip the ``output`` directory into ``mod.icmod`` at the toolchain root.

    Archives into a temporary ``toolchain/build/mod.zip`` first, then renames
    it into place. Returns 0 on success.
    """
    import shutil
    cfg = get_make_config()
    output_dir = cfg.get_path("output")
    output_file = cfg.get_path("mod.icmod")
    tmp_zip = cfg.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(tmp_zip)
    # drop stale artifacts from a previous run
    for stale in (output_file, tmp_zip):
        if os.path.isfile(stale):
            os.remove(stale)
    # make_archive appends ".zip" itself, so strip it from the base name
    shutil.make_archive(tmp_zip[:-4], 'zip', output_dir)
    os.rename(tmp_zip, output_file)
    return 0
def task_build_package():
    """Zip the project's ``output`` directory into ``<currentProject>.icmod``.

    The archive name comes from the ``currentProject`` config value
    (default ``"mod"``). Builds into a temp zip, then renames into place.
    Returns 0 on success.
    """
    import shutil
    config = get_make_config()
    output_dir = config.get_project_path("output")
    project_name = config.get_value("currentProject", "mod")
    output_file = config.get_project_path(project_name + ".icmod")
    tmp_zip = config.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(tmp_zip)
    # remove leftovers from earlier builds
    for stale in (output_file, tmp_zip):
        if os.path.isfile(stale):
            os.remove(stale)
    # strip ".zip": make_archive adds the extension on its own
    shutil.make_archive(tmp_zip[:-4], 'zip', output_dir)
    os.rename(tmp_zip, output_file)
    return 0
def task_build_package():
    """Zip the debug output of the current mod into ``output/release/<mod>.icmod``.

    Archives ``output/debug/<modDir>`` into a temp zip under
    ``toolchain/build`` and renames it into the release directory.
    Returns 0 on success.
    """
    import shutil
    config = get_make_config()
    mod_dir = config.get_mod_dir()
    output_dir = config.get_path(os.path.join("output/debug", mod_dir))
    ensure_directory(config.get_path("output/release"))
    output_file = config.get_path(f"output/release/{mod_dir}.icmod")
    tmp_zip = config.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(tmp_zip)
    # clear artifacts of any previous packaging run
    for stale in (output_file, tmp_zip):
        if os.path.isfile(stale):
            os.remove(stale)
    # make_archive appends ".zip" — pass the base name without it
    shutil.make_archive(tmp_zip[:-4], 'zip', output_dir)
    os.rename(tmp_zip, output_file)
    return 0
def task_build_additional():
    """Copy the ``additional`` resources listed in the make config into ``output``.

    Each entry needs a ``source`` glob and a ``targetDir``; entries missing
    either key are skipped. Returns 0 on success, 1 if any resolved source
    path does not exist (remaining paths of that entry are skipped, but
    other entries are still processed).
    """
    # hoisted: the config object is invariant across both loops
    config = get_make_config()
    overall_result = 0
    for additional_dir in config.get_value("additional", fallback=[]):
        if "source" in additional_dir and "targetDir" in additional_dir:
            for additional_path in config.get_paths(additional_dir["source"]):
                if not os.path.exists(additional_path):
                    print("non existing additional path: " + additional_path)
                    overall_result = 1
                    break  # abandon the rest of this entry's paths
                target = config.get_path(
                    os.path.join("output",
                                 additional_dir["targetDir"],
                                 os.path.basename(additional_path)))
                if os.path.isdir(additional_path):
                    copy_directory(additional_path, target)
                else:
                    ensure_file_dir(target)
                    copy_file(additional_path, target)
    return overall_result
def task_build_package():
    """Zip ``output`` into ``<modFolder>.icmod`` with the mod folder as archive root.

    The output directory is temporarily moved under ``toolchain/build`` so the
    archive contains a single top-level ``<modFolder>`` entry, then moved back.
    Returns 0 on success.
    """
    import shutil
    config = get_make_config()
    output_dir = config.get_path("output")
    mod_folder = config.get_value("make.modFolder")
    output_file = config.get_path(mod_folder + ".icmod")
    output_root_tmp = config.get_path("toolchain/build")
    output_dir_tmp = output_root_tmp + "/" + mod_folder
    output_file_tmp = output_root_tmp + "/mod.zip"
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)
    if os.path.isfile(output_file):
        os.remove(output_file)
    if os.path.isfile(output_file_tmp):
        os.remove(output_file_tmp)
    # relocate the real output dir so make_archive can root the zip at it
    shutil.move(output_dir, output_dir_tmp)
    try:
        # make_archive appends ".zip" to the base name itself
        shutil.make_archive(output_file_tmp[:-4], 'zip', output_root_tmp, mod_folder)
    finally:
        # always restore the output directory, even if archiving fails —
        # otherwise a failed run would leave the project without its output
        shutil.move(output_dir_tmp, output_dir)
    os.rename(output_file_tmp, output_file)
    return 0
def build_native_dir(directory, output_dir, cache_dir, abis, std_includes_path, rules: BaseConfig):
    """Compile one native-code directory into per-ABI shared libraries.

    Pipeline per ABI: resolve the NDK compiler, preprocess every .c/.cpp
    file, recompile only sources whose preprocessed form changed since the
    last build (cached under *cache_dir*), then link all objects into
    ``lib<name>.so`` under ``output_dir/so/<abi>/``.

    :param directory: source directory containing a ``manifest`` file
    :param output_dir: destination for includes, manifest and built .so files
    :param cache_dir: cache root for preprocessed sources and object files
    :param abis: list of Android ABI names to build for
    :param std_includes_path: directory whose subdirectories are added as -I paths
    :param rules: per-directory build rules (keepSources, keepIncludes, link)
    :return: CODE_OK on success, otherwise the first failing error/exit code
    """
    # resolve all compilers up front so a missing one fails before any work
    executables = {}
    for abi in abis:
        executable = prepare_compiler_executable(abi)
        if executable is None:
            print("failed to acquire GCC executable from NDK for abi " + abi)
            return CODE_FAILED_NO_GCC
        executables[abi] = executable
    try:
        manifest = get_manifest(directory)
        targets = {}
        soname = "lib" + manifest["shared"]["name"] + ".so"
        for abi in abis:
            targets[abi] = os.path.join(output_dir, "so/" + abi + "/" + soname)
    except Exception as err:
        print("failed to read manifest for directory " + directory + " error: " + str(err))
        return CODE_FAILED_INVALID_MANIFEST
    keep_sources = rules.get_value("keepSources", fallback=False)
    if keep_sources:
        # copy everything and clear build files
        copy_directory(directory, output_dir, clear_dst=True)
        clear_directory(os.path.join(output_dir, "so"))
        os.remove(os.path.join(output_dir, soname))
    else:
        clear_directory(output_dir)
        # copy manifest
        copy_file(os.path.join(directory, "manifest"),
                  os.path.join(output_dir, "manifest"))
    # copy includes
    keep_includes = rules.get_value("keepIncludes", fallback=True)
    for include_path in manifest["shared"]["include"]:
        src_include_path = os.path.join(directory, include_path)
        output_include_path = os.path.join(output_dir, include_path)
        if keep_includes:
            copy_directory(src_include_path, output_include_path, clear_dst=True)
        else:
            clear_directory(output_include_path)
    # every subdirectory of std_includes_path becomes a -I search path
    std_includes = []
    for std_includes_dir in os.listdir(std_includes_path):
        std_includes.append(os.path.abspath(os.path.join(std_includes_path, std_includes_dir)))
    # compile for every abi
    overall_result = CODE_OK
    for abi in abis:
        printed_compilation_title = f"compiling {os.path.basename(directory)} for {abi}"
        print("\n")
        print(f"{'=' * (48 - len(printed_compilation_title) // 2)} {printed_compilation_title} {'=' * (48 - (1 + len(printed_compilation_title)) // 2)}")
        executable = executables[abi]
        gcc = [executable, "-std=c++11"]
        includes = []
        for std_includes_dir in std_includes:
            includes.append(f'-I{std_includes_dir}')
        # base link set: fake-so stub dir plus standard Android libs
        dependencies = [f'-L{get_fake_so_dir(abi)}', "-landroid", "-lm", "-llog"]
        # "horizon" is always linked, in addition to rule- and config-driven links
        for link in rules.get_value("link", fallback=[]) + make_config.get_value("make.linkNative", fallback=[]) + ["horizon"]:
            add_fake_so(executable, abi, link)
            dependencies.append(f'-l{link}')
        if "depends" in manifest:
            search_dir = os.path.abspath(os.path.join(directory, ".."))  # always search for dependencies in current dir
            for dependency in manifest["depends"]:
                if dependency is not None:
                    add_fake_so(executable, abi, dependency)
                    dependencies.append("-l" + dependency)
                    dependency_dir = search_directory(search_dir, dependency)
                    if dependency_dir is not None:
                        try:
                            # pull in the dependency's exported include dirs
                            for include_dir in get_manifest(dependency_dir)["shared"]["include"]:
                                includes.append("-I" + os.path.join(dependency_dir, include_dir))
                        except KeyError:
                            # dependency manifest has no shared includes — fine
                            pass
                    else:
                        print(f"ERROR: dependency directory {dependency} is not found, it will be skipped")
        # prepare directories
        source_files = get_all_files(directory, extensions=(".cpp", ".c"))
        preprocessed_dir = os.path.abspath(os.path.join(cache_dir, "preprocessed", abi))
        ensure_directory(preprocessed_dir)
        object_dir = os.path.abspath(os.path.join(cache_dir, "object", abi))
        ensure_directory(object_dir)
        # pre-process and compile changes
        import filecmp
        object_files = []
        recompiled_count = 0
        for file in source_files:
            relative_file = relative_path(directory, file)
            sys.stdout.write("preprocessing " + relative_file + " " * 64 + "\r")
            object_file = os.path.join(object_dir, relative_file) + ".o"
            preprocessed_file = os.path.join(preprocessed_dir, relative_file)
            tmp_preprocessed_file = preprocessed_file + ".tmp"
            ensure_file_dir(preprocessed_file)
            ensure_file_dir(object_file)
            object_files.append(object_file)
            # preprocess to a temp file; comparing it with the cached copy
            # tells us whether this source actually needs recompiling
            result = subprocess.call(gcc + ["-E", file, "-o", tmp_preprocessed_file] + includes)
            if result == CODE_OK:
                if not os.path.isfile(preprocessed_file) or not os.path.isfile(object_file) or \
                        not filecmp.cmp(preprocessed_file, tmp_preprocessed_file):
                    # changed (or never built): refresh cache and recompile
                    if os.path.isfile(preprocessed_file):
                        os.remove(preprocessed_file)
                    os.rename(tmp_preprocessed_file, preprocessed_file)
                    if os.path.isfile(object_file):
                        os.remove(object_file)
                    sys.stdout.write("compiling " + relative_file + " " * 64 + "\n")
                    result = max(result, subprocess.call(gcc + ["-c", preprocessed_file, "-shared", "-o", object_file]))
                    if result != CODE_OK:
                        # drop the bad object so the next run recompiles it
                        if os.path.isfile(object_file):
                            os.remove(object_file)
                        overall_result = result
                    else:
                        recompiled_count += 1
            else:
                # preprocessing failed — invalidate any stale object file
                if os.path.isfile(object_file):
                    os.remove(object_file)
                overall_result = result
        print(" " * 128)
        if overall_result != CODE_OK:
            print("failed to compile", overall_result)
            return overall_result
        else:
            print(f"recompiled {recompiled_count}/{len(object_files)} files with result {overall_result}")
        ensure_file_dir(targets[abi])
        # assemble the link command: compiler, objects, soname, output, paths
        command = []
        command += gcc
        command += object_files
        command.append("-shared")
        command.append("-Wl,-soname=" + soname)
        command.append("-o")
        command.append(targets[abi])
        command += includes
        command += dependencies
        print("linking object files...")
        result = subprocess.call(command)
        if result == CODE_OK:
            print("build successful")
        else:
            print("linker failed with result code", result)
            overall_result = result
            # NOTE(review): returns on first linker failure, skipping any
            # remaining ABIs — matches the statement order in the source
            return overall_result
    return overall_result