def get_asset_directories(**kw):
    """Return the combined list of "assets.main" and "assets.modified" paths.

    Both sets are resolved with error_sensitive=True; if either resolution
    fails (returns None), the whole lookup fails and None is returned.
    """
    primary = get_path_set(make_config.get_value("assets.main", []), error_sensitive=True)
    if primary is None:
        return None
    overrides = get_path_set(make_config.get_value("assets.modified", []), error_sensitive=True)
    if overrides is None:
        return None
    return primary + overrides
def task_build_info():
    """Write mod.info (and mod_icon.png, if configured) into the debug output dir.

    The info dict comes from "global.info" in the make config; the "icon" key
    is stripped from the serialized JSON because the icon is shipped as a
    separate mod_icon.png file instead. Returns 0.
    """
    import json
    config = get_make_config()
    out_dir = os.path.join("output/debug", config.get_mod_dir())
    with open(config.get_path(os.path.join(out_dir, "mod.info")), "w") as info_file:
        # Bug fix: fallback name previously read "No was provided".
        info = dict(config.get_value("global.info", fallback={"name": "No name was provided"}))
        if "icon" in info:
            # icon is emitted as mod_icon.png below, not embedded in mod.info
            del info["icon"]
        info_file.write(json.dumps(info, indent=" " * 4))
    icon_path = config.get_value("global.info.icon")
    if icon_path is not None:
        copy_file(config.get_path(icon_path, True),
                  config.get_path(os.path.join(out_dir, "mod_icon.png")))
    return 0
def get_push_pack_directory():
    """Build the device push directory from "pushTo" and "currentProject".

    Returns None when either config value is missing. If the resulting path
    does not live under a horizon packs directory, the user is asked to
    confirm interactively before it is returned (pushing elsewhere would
    overwrite that directory's contents).
    """
    push_to = make_config.get_value("pushTo")
    current_project = make_config.get_value("currentProject")
    # Bug fix: the original concatenated first and checked for None afterwards,
    # so a missing config value raised TypeError instead of returning None.
    if push_to is None or current_project is None:
        return None
    directory = push_to + "/innercore/mods/" + current_project
    if "games/horizon/packs" not in directory:
        ans = input(
            f"push directory {directory} looks suspicious, it does not belong to horizon packs directory, push will corrupt all contents, allow it only if you know what are you doing (type Y or yes to proceed): "
        )
        if ans.lower() in ["yes", "y"]:
            return directory
        else:
            print("interpreted as NO, aborting push")
            return None
    return directory
def build_all_resources():
    """Copy every "resources" entry from the make config into its build target.

    All four resource-related build targets are cleaned first; each valid
    entry is then copied into a freshly created target directory. Invalid or
    missing entries are reported to stderr and skipped. Returns 0 on full
    success, 1 if anything was skipped.
    """
    for stale_target in ("resource_directory", "gui",
                         "minecraft_resource_pack", "minecraft_behavior_pack"):
        mod_structure.cleanup_build_target(stale_target)
    overall_result = 0
    known_types = ("resource_directory", "gui",
                   "minecraft_resource_pack", "minecraft_behavior_pack")
    for entry in make_config.get_value("resources", fallback=[]):
        if "path" not in entry or "type" not in entry:
            print("skipped invalid source json", entry, file=sys.stderr)
            overall_result = 1
            continue
        for source_path in make_config.get_paths(entry["path"]):
            if not exists(source_path):
                print("skipped non-existing resource path", entry["path"], file=sys.stderr)
                overall_result = 1
                continue
            entry_type = entry["type"]
            if entry_type not in known_types:
                print("skipped invalid resource with type", entry_type, file=sys.stderr)
                overall_result = 1
                continue
            # "{}" suffix lets mod_structure number duplicate target names
            target_name = (entry["target"] if "target" in entry else basename(source_path)) + "{}"
            if entry_type in ("resource_directory", "gui"):
                declared = "resource" if entry_type == "resource_directory" else "gui"
                target = mod_structure.new_build_target(
                    entry_type, target_name, declare={"type": declared})
            else:
                target = mod_structure.new_build_target(
                    entry_type, target_name, exclude=True,
                    declare_default={
                        "resourcePacksDir": mod_structure.get_target_directories("minecraft_resource_pack")[0],
                        "behaviorPacksDir": mod_structure.get_target_directories("minecraft_behavior_pack")[0]
                    })
            clear_directory(target)
            copy_directory(source_path, target)
    mod_structure.update_build_config_list("resources")
    return overall_result
def build_all_scripts(): overall_result = 0 # FIXME: декларации создаются после компиляции мода, следовательно не указываются в tsconfig.json у мода # clear_directory(make_config.get_path("toolchain/build/typescript-headers")) mod_structure.cleanup_build_target("script_source") mod_structure.cleanup_build_target("script_library") for item in make_config.get_value("sources", fallback=[]): _source = item["source"] _target = item["target"] if "target" in item else None _type = item["type"] _language = item["language"] if _type not in ("main", "launcher", "library", "preloader"): print_err(f"skipped invalid source with type {_type}") overall_result = 1 continue for source_path in make_config.get_paths(_source): if not exists(source_path): print_err(f"skipped non-existing source path {_source}") overall_result = 1 continue target_type = "script_library" if _type == "library" else "script_source" target_path = _target if _target is not None else f"{splitext(basename(source_path))[0]}.js" # translate make.json source type to build.config source type declare = { "sourceType": { "main": "mod", "launcher": "launcher", "preloader": "preloader", "library": "library" }[_type] } if "api" in item: declare["api"] = item["api"] try: dot_index = target_path.rindex(".") target_path = target_path[:dot_index] + \ "{}" + target_path[dot_index:] except ValueError: target_path += "{}" print_info( f"building {_language} {_type} from {_source} {'to ' + _target if _target is not None else '' }" ) tsconfig_path = build_script( source_path, mod_structure.new_build_target(target_type, target_path, source_type=_type, declare=declare)) mod_structure.update_build_config_list("compile") return overall_result
def setup_default_config(self):
    """Ensure build_config contains a well-formed "defaultConfig" section and persist it."""
    self.read_or_create_build_config()
    # replace a missing or non-dict section with a fresh dict
    current = self.build_config.get("defaultConfig") if isinstance(self.build_config, dict) else None
    if not isinstance(self.build_config.get("defaultConfig"), dict):
        self.build_config["defaultConfig"] = {}
    section = self.build_config["defaultConfig"]
    section["readme"] = "this build config is generated automatically by mod development toolchain"
    section["api"] = make_config.get_value("global.api", fallback="CoreEngine")
    section["buildType"] = "develop"
    self.write_build_config()
def task_exclude_directories():
    """Remove every path listed in "make.excludeFromRelease" from the output tree.

    Directories are emptied via clear_directory, plain files are unlinked.
    Returns 0.
    """
    config = get_make_config()
    for excluded in config.get_value("make.excludeFromRelease", []):
        for resolved in config.get_paths(os.path.join("output", excluded)):
            if os.path.isdir(resolved):
                clear_directory(resolved)
            elif os.path.isfile(resolved):
                os.remove(resolved)
    return 0
def get_ndk_path():
    """Locate the Android NDK: explicit "make.ndkPath" config wins, then
    $HOME (linux), then %LOCALAPPDATA% (windows)."""
    configured = make_config.get_value("make.ndkPath")
    if configured is not None:
        return configured
    try:
        # linux: search under the user's home directory
        return search_ndk_path(environ['HOME'])
    except KeyError:
        # no HOME set — assume windows
        pass
    return search_ndk_path(getenv("LOCALAPPDATA"))
def build_all_scripts():
    """Build every "sources" entry from the make config into script build targets.

    Invalid entries and missing paths are reported to stderr and skipped.
    Returns 0 when everything succeeded, otherwise the first non-zero
    failure indicator encountered (1 for skipped entries, or a non-zero
    build_script result).
    """
    overall_result = 0
    mod_structure.cleanup_build_target("script_source")
    mod_structure.cleanup_build_target("script_library")
    for source in make_config.get_value("sources", fallback=[]):
        if "source" not in source or "type" not in source:
            print("skipped invalid source json", source, file=sys.stderr)
            overall_result = 1
            continue
        for source_path in make_config.get_paths(source["source"]):
            if not os.path.exists(source_path):
                print("skipped non-existing source path", source["source"], file=sys.stderr)
                overall_result = 1
                continue
            source_type = source["type"]
            if source_type not in ("main", "launcher", "library", "preloader"):
                print("skipped invalid source with type", source_type, file=sys.stderr)
                overall_result = 1
                continue
            target_type = "script_library" if source_type == "library" else "script_source"
            source_name = source["target"] if "target" in source else os.path.basename(source_path)
            # insert the "{}" numbering placeholder before the extension,
            # or append it when there is no extension at all
            try:
                dot_index = source_name.rindex(".")
                source_name = source_name[:dot_index] + "{}" + source_name[dot_index:]
            except ValueError:
                source_name += "{}"
            # translate make.json source type to build.config source type
            declare = {
                "sourceType": {
                    "main": "mod",
                    "launcher": "launcher",
                    "preloader": "preloader",
                    "library": "library"
                }[source_type]
            }
            if "api" in source:
                declare["api"] = source["api"]
            # Bug fix: previously the result was assigned directly to
            # overall_result, so a later successful build erased an earlier
            # recorded failure. Only record non-zero results.
            result = build_script(
                source_path,
                mod_structure.new_build_target(target_type, source_name,
                                               source_type=source_type, declare=declare))
            if result != 0:
                overall_result = result
    mod_structure.update_build_config_list("compile")
    return overall_result
def compile_all_using_make_config():
    """Compile all java-typed "compile" entries from the make config via gradle.

    Collects every valid java source directory, then delegates the actual
    build to build_java_directories. Returns 0 on success, -1 when any
    directory entry was invalid, or the java build's non-zero result.
    """
    import time
    start_time = time.time()
    overall_result = 0
    cache_dir = make_config.get_path("toolchain/build/gradle")
    ensure_directory(cache_dir)
    directories = []
    # NOTE(review): directory_names is never populated, so the cleanup loop
    # on failure below does nothing — confirm what names were meant to go here.
    directory_names = []
    for directory in make_config.get_filtered_list("compile", prop="type", values=("java", )):
        if "source" not in directory:
            print("skipped invalid java directory json", directory, file=sys.stderr)
            overall_result = -1
            continue
        for path in make_config.get_paths(directory["source"]):
            if not os.path.isdir(path):
                print("skipped non-existing java directory path", directory["source"], file=sys.stderr)
                overall_result = -1
                continue
            directories.append(path)
    # any invalid entry aborts the whole java build
    if overall_result != 0:
        print("failed to get java directories", file=sys.stderr)
        return overall_result
    if len(directories) > 0:
        classpath_directories = [make_config.get_path("toolchain/classpath")
                                 ] + make_config.get_value("make.gradle.classpath", [])
        overall_result = build_java_directories(
            directories, cache_dir,
            get_classpath_from_directories(classpath_directories))
        if overall_result != 0:
            print(f"failed, clearing compiled directories {directories} ...")
            for directory_name in directory_names:
                clear_directory(
                    make_config.get_path("output/" + directory_name))
    cleanup_gradle_scripts(directories)
    mod_structure.update_build_config_list("javaDirs")
    # elapsed time truncated to two decimal places
    print(
        f"completed java build in {int((time.time() - start_time) * 100) / 100}s with result {overall_result} - {'OK' if overall_result == 0 else 'ERROR'}"
    )
    return overall_result
def task_build_package():
    """Zip the project's output directory into <currentProject>.icmod.

    The archive is written to a temporary location first and renamed into
    place afterwards, so a half-written zip never lands at the final path.
    Returns 0.
    """
    import shutil
    config = get_make_config()
    output_dir = config.get_project_path("output")
    output_file = config.get_project_path(
        config.get_value("currentProject", "mod") + ".icmod")
    output_file_tmp = config.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)
    # drop stale artifacts from previous runs
    for stale in (output_file, output_file_tmp):
        if os.path.isfile(stale):
            os.remove(stale)
    # make_archive appends ".zip" itself, hence the [:-4]
    shutil.make_archive(output_file_tmp[:-4], 'zip', output_dir)
    os.rename(output_file_tmp, output_file)
    return 0
def assemble_additional_directories():
    """Copy each "additional" source set into its "pushTo" location under output.

    Stops and returns -1 at the first malformed entry or unresolvable source
    set; returns 0 otherwise.
    """
    result = 0
    output_dir = make_config.get_path("output")
    for additional_dir in make_config.get_value("additional", []):
        if "sources" not in additional_dir or "pushTo" not in additional_dir:
            print("invalid formatted additional directory json", additional_dir)
            result = -1
            break
        destination = os.path.join(output_dir, additional_dir["pushTo"])
        clear_directory(destination)
        resolved_sources = get_path_set(additional_dir["sources"], error_sensitive=True)
        if resolved_sources is None:
            print("some additional directories are invalid")
            result = -1
            break
        for source_dir in resolved_sources:
            copy_directory(source_dir, destination)
    return result
def task_build_package():
    """Pack the output directory into <modFolder>.icmod.

    The output tree is temporarily moved under toolchain/build so the archive
    contains <modFolder> as its root entry, then moved back after zipping.
    Returns 0.
    """
    import shutil
    config = get_make_config()
    output_dir = config.get_path("output")
    mod_folder = config.get_value("make.modFolder")
    output_file = config.get_path(mod_folder + ".icmod")
    output_root_tmp = config.get_path("toolchain/build")
    output_dir_tmp = output_root_tmp + "/" + mod_folder
    output_file_tmp = output_root_tmp + "/mod.zip"
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)
    # drop stale artifacts from previous runs
    for stale in (output_file, output_file_tmp):
        if os.path.isfile(stale):
            os.remove(stale)
    # move output under the temp root so the zip gets mod_folder as its root
    shutil.move(output_dir, output_dir_tmp)
    shutil.make_archive(output_file_tmp[:-4], 'zip', output_root_tmp, mod_folder)
    os.rename(output_file_tmp, output_file)
    shutil.move(output_dir_tmp, output_dir)
    return 0
def task_build_additional():
    """Copy each "additional" entry into the debug output under its targetDir.

    Entries without both "source" and "targetDir" are silently ignored; a
    missing source path is reported and aborts that entry's copy loop.
    Returns 0 on success, 1 if any path was missing.
    """
    overall_result = 0
    config = get_make_config()
    for additional_dir in config.get_value("additional", fallback=[]):
        if "source" not in additional_dir or "targetDir" not in additional_dir:
            continue
        for additional_path in config.get_paths(additional_dir["source"]):
            if not os.path.exists(additional_path):
                print("non existing additional path: " + additional_path)
                overall_result = 1
                break
            destination = config.get_path(os.path.join(
                "output", "debug", config.get_mod_dir(),
                additional_dir["targetDir"],
                os.path.basename(additional_path)
            ))
            if os.path.isdir(additional_path):
                copy_directory(additional_path, destination)
            else:
                ensure_file_dir(destination)
                copy_file(additional_path, destination)
    return overall_result
def build_all_scripts():
    """Build every "sources" entry, libraries first, into script build targets.

    Single files are copied verbatim; directories are compiled via
    build_source. Returns 0 on success, otherwise an accumulated non-zero
    failure count/indicator.
    """
    mod_structure.cleanup_build_target("script_source")
    mod_structure.cleanup_build_target("script_library")
    overall_result = 0
    # stable sort: libraries (key False) come before everything else,
    # original relative order preserved within each group
    ordered_sources = sorted(make_config.get_value("sources", fallback=[]),
                             key=lambda entry: entry["type"] != "library")
    for entry in ordered_sources:
        _source = entry["source"]
        _target = entry.get("target")
        _type = entry["type"]
        _language = entry["language"]
        _includes = entry.get("includes", ".includes")
        if _type not in ("main", "launcher", "library", "preloader"):
            print(f"skipped invalid source with type {_type}")
            overall_result = 1
            continue
        for source_path in make_config.get_paths(_source):
            if not exists(source_path):
                print(f"skipped non-existing source path {_source}")
                overall_result = 1
                continue
            target_type = "script_library" if _type == "library" else "script_source"
            target_path = _target if _target is not None else f"{splitext(basename(source_path))[0]}.js"
            # translate make.json source type to build.config source type
            declare = {
                "sourceType": {
                    "main": "mod",
                    "launcher": "launcher",
                    "preloader": "preloader",
                    "library": "library"
                }[_type]
            }
            if "api" in entry:
                declare["api"] = entry["api"]
            # insert the "{}" numbering placeholder before the extension,
            # or append it when there is no extension at all
            try:
                dot_index = target_path.rindex(".")
                target_path = target_path[:dot_index] + "{}" + target_path[dot_index:]
            except ValueError:
                target_path += "{}"
            destination_path = mod_structure.new_build_target(
                target_type, target_path, source_type=_type, declare=declare)
            mod_structure.update_build_config_list("compile")
            if isfile(source_path):
                copy_file(source_path, destination_path)
            else:
                overall_result += build_source(source_path, destination_path, _includes)
    return overall_result
def build_native_dir(directory, output_dir, cache_dir, abis, std_includes_path, rules: BaseConfig):
    """Compile one native source directory into a shared library per ABI.

    Acquires a compiler per ABI from the NDK, reads the directory's manifest
    for the library name/includes/dependencies, preprocesses and recompiles
    only changed sources (compared via preprocessed-output diff), then links
    one .so per ABI into output_dir. Returns CODE_OK or the first failing
    code (no-GCC, invalid manifest, compiler, or linker result).
    """
    executables = {}
    for abi in abis:
        executable = prepare_compiler_executable(abi)
        if executable is None:
            print("failed to acquire GCC executable from NDK for abi " + abi)
            return CODE_FAILED_NO_GCC
        executables[abi] = executable
    try:
        manifest = get_manifest(directory)
        targets = {}
        soname = "lib" + manifest["shared"]["name"] + ".so"
        for abi in abis:
            targets[abi] = os.path.join(output_dir, "so/" + abi + "/" + soname)
    except Exception as err:
        print("failed to read manifest for directory " + directory + " error: " + str(err))
        return CODE_FAILED_INVALID_MANIFEST
    keep_sources = rules.get_value("keepSources", fallback=False)
    if keep_sources:
        # copy everything and clear build files
        copy_directory(directory, output_dir, clear_dst=True)
        clear_directory(os.path.join(output_dir, "so"))
        os.remove(os.path.join(output_dir, soname))
    else:
        clear_directory(output_dir)
        # copy manifest
        copy_file(os.path.join(directory, "manifest"),
                  os.path.join(output_dir, "manifest"))
        # copy includes
        keep_includes = rules.get_value("keepIncludes", fallback=True)
        for include_path in manifest["shared"]["include"]:
            src_include_path = os.path.join(directory, include_path)
            output_include_path = os.path.join(output_dir, include_path)
            if keep_includes:
                copy_directory(src_include_path, output_include_path, clear_dst=True)
            else:
                clear_directory(output_include_path)
    # collect the toolchain-wide standard include directories
    std_includes = []
    for std_includes_dir in os.listdir(std_includes_path):
        std_includes.append(os.path.abspath(os.path.join(std_includes_path, std_includes_dir)))
    # compile for every abi
    overall_result = CODE_OK
    for abi in abis:
        printed_compilation_title = f"compiling {os.path.basename(directory)} for {abi}"
        print("\n")
        print(f"{'=' * (48 - len(printed_compilation_title) // 2)} {printed_compilation_title} {'=' * (48 - (1 + len(printed_compilation_title)) // 2)}")
        executable = executables[abi]
        gcc = [executable, "-std=c++11"]
        includes = []
        for std_includes_dir in std_includes:
            includes.append(f'-I{std_includes_dir}')
        dependencies = [f'-L{get_fake_so_dir(abi)}', "-landroid", "-lm", "-llog"]
        # every linked library gets a stub .so so the linker can resolve it
        for link in rules.get_value("link", fallback=[]) + make_config.get_value("make.linkNative", fallback=[]) + ["horizon"]:
            add_fake_so(executable, abi, link)
            dependencies.append(f'-l{link}')
        if "depends" in manifest:
            search_dir = os.path.abspath(os.path.join(directory, ".."))  # always search for dependencies in current dir
            for dependency in manifest["depends"]:
                if dependency is not None:
                    add_fake_so(executable, abi, dependency)
                    dependencies.append("-l" + dependency)
                    dependency_dir = search_directory(search_dir, dependency)
                    if dependency_dir is not None:
                        try:
                            for include_dir in get_manifest(dependency_dir)["shared"]["include"]:
                                includes.append("-I" + os.path.join(dependency_dir, include_dir))
                        except KeyError:
                            # dependency manifest has no shared includes — fine
                            pass
                    else:
                        print(f"ERROR: dependency directory {dependency} is not found, it will be skipped")
        # prepare directories
        source_files = get_all_files(directory, extensions=(".cpp", ".c"))
        preprocessed_dir = os.path.abspath(os.path.join(cache_dir, "preprocessed", abi))
        ensure_directory(preprocessed_dir)
        object_dir = os.path.abspath(os.path.join(cache_dir, "object", abi))
        ensure_directory(object_dir)
        # pre-process and compile changes
        import filecmp
        object_files = []
        recompiled_count = 0
        for file in source_files:
            relative_file = relative_path(directory, file)
            sys.stdout.write("preprocessing " + relative_file + " " * 64 + "\r")
            object_file = os.path.join(object_dir, relative_file) + ".o"
            preprocessed_file = os.path.join(preprocessed_dir, relative_file)
            tmp_preprocessed_file = preprocessed_file + ".tmp"
            ensure_file_dir(preprocessed_file)
            ensure_file_dir(object_file)
            object_files.append(object_file)
            result = subprocess.call(gcc + ["-E", file, "-o", tmp_preprocessed_file] + includes)
            if result == CODE_OK:
                # recompile only when the preprocessed output changed (or
                # either cached artifact is missing)
                if not os.path.isfile(preprocessed_file) or not os.path.isfile(object_file) or \
                        not filecmp.cmp(preprocessed_file, tmp_preprocessed_file):
                    if os.path.isfile(preprocessed_file):
                        os.remove(preprocessed_file)
                    os.rename(tmp_preprocessed_file, preprocessed_file)
                    if os.path.isfile(object_file):
                        os.remove(object_file)
                    sys.stdout.write("compiling " + relative_file + " " * 64 + "\n")
                    result = max(result, subprocess.call(gcc + ["-c", preprocessed_file, "-shared", "-o", object_file]))
                    if result != CODE_OK:
                        if os.path.isfile(object_file):
                            os.remove(object_file)
                        overall_result = result
                    else:
                        recompiled_count += 1
            else:
                # preprocessing failed — drop any stale object file
                if os.path.isfile(object_file):
                    os.remove(object_file)
                overall_result = result
        print(" " * 128)
        if overall_result != CODE_OK:
            print("failed to compile", overall_result)
            return overall_result
        else:
            print(f"recompiled {recompiled_count}/{len(object_files)} files with result {overall_result}")
        ensure_file_dir(targets[abi])
        # link all object files into the per-abi shared library
        command = []
        command += gcc
        command += object_files
        command.append("-shared")
        command.append("-Wl,-soname=" + soname)
        command.append("-o")
        command.append(targets[abi])
        command += includes
        command += dependencies
        print("linking object files...")
        result = subprocess.call(command)
        if result == CODE_OK:
            print("build successful")
        else:
            print("linker failed with result code", result)
            overall_result = result
            return overall_result
    return overall_result
def setup_gradle_project(cache_dir, directories, classpath):
    """Generate settings.gradle plus a per-directory build.gradle for java builds.

    Each directory becomes a gradle subproject; its manifest supplies
    source/library dirs, which are optionally copied into the build target,
    and the generated build.gradle wires up shadow-jar compilation plus a
    "dex" task that runs dx.jar over the assembled jar.
    """
    file = open(os.path.join(cache_dir, "settings.gradle"), "w", encoding="utf-8")
    # one include + projectDir line per subproject; backslashes escaped for gradle
    file.writelines([
        "include ':%s'\nproject(':%s').projectDir = file('%s')\n" %
        (os.path.basename(item), os.path.basename(item), item.replace("\\", "\\\\"))
        for item in directories
    ])
    file.close()
    for directory in directories:
        target_dir = mod_structure.new_build_target(
            "java", os.path.basename(directory))
        clear_directory(target_dir)
        ensure_directory(target_dir)
        copy_file(os.path.join(directory, "manifest"),
                  os.path.join(target_dir, "manifest"))
        with open(os.path.join(directory, "manifest"), "r", encoding="utf-8") as file:
            manifest = json.load(file)
        source_dirs = manifest["source-dirs"]
        library_dirs = manifest["library-dirs"]
        build_dir = os.path.join(cache_dir, os.path.basename(target_dir), "classes")
        dex_dir = target_dir
        ensure_directory(build_dir)
        ensure_directory(dex_dir)
        # optionally ship jar libraries / java sources alongside the dex output
        if make_config.get_value("make.gradle.keepLibraries", True):
            for library_dir in library_dirs:
                src_dir = os.path.join(directory, library_dir)
                if os.path.isdir(src_dir):
                    copy_directory(src_dir, os.path.join(dex_dir, library_dir), clear_dst=True)
        if make_config.get_value("make.gradle.keepSources", False):
            for source_dir in source_dirs:
                src_dir = os.path.join(directory, source_dir)
                if os.path.isdir(src_dir):
                    copy_directory(src_dir, os.path.join(dex_dir, source_dir), clear_dst=True)
        # emit the subproject's build.gradle; all filesystem paths are
        # backslash-escaped so the generated groovy parses on windows
        with open(os.path.join(directory, "build.gradle"), "w", encoding="utf-8") as build_file:
            build_file.write("""
plugins {
    id 'com.github.johnrengelman.shadow' version '5.2.0'
    id "java"
}

dependencies {
    """ + ("""compile fileTree('""" + "', '".
            join([path.replace("\\", "\\\\") for path in library_dirs]) +
            """') { include '*.jar' }""" if len(library_dirs) > 0 else "") + """
}

sourceSets {
    main {
        java {
            srcDirs = ['""" + "', '".join(
                [path.replace("\\", "\\\\") for path in source_dirs]) + """']
            buildDir = \"""" + os.path.join(
                build_dir, "${project.name}").replace("\\", "\\\\") + """\"
        }
        resources {
            srcDirs = []
        }
        compileClasspath += files('""" + "', '".join([
                path.replace("\\", "\\\\") for path in classpath
            ]) + """')
    }
}

tasks.register("dex") {
    javaexec {
        main = "-jar";
        args = [
            \"""" + make_config.get_path("toolchain/bin/dx.jar").
            replace("\\", "\\\\") + """\",
            "--dex", "--multi-dex",
            "--output=\\\"""" + os.path.join(dex_dir, ".").replace("\\", "\\\\") + """\\\"",
            \"""" + os.path.join(build_dir, "${project.name}", "libs",
                                 "${project.name}-all.jar").replace("\\", "\\\\") + """\"
        ]
    }
}
""")