def create_tsconfig(self, temp_path):
    headers = []
    headers.extend(glob.glob(
        make_config.get_path("toolchain/jslibs/**/*.d.ts"), recursive=True))
    headers.extend(glob.glob(
        make_config.get_path("toolchain/build/typescript-headers/**/*.d.ts"), recursive=True))

    # drop the declaration generated for the file that is being compiled itself
    currentName = splitext(basename(temp_path))[0]
    headers = [header for header in headers
               if not header.endswith(f"{currentName}.d.ts")]

    template = {
        "compilerOptions": {
            "target": "ES5",
            "lib": ["ESNext"],
            "outFile": temp_path,
            "experimentalDecorators": True,
            "downlevelIteration": True,
            "allowJs": True
        },
        "exclude": ["**/node_modules/*", "dom", "webpack"] + self.exclude,
        "include": self.include,
        "files": headers
    }

    for key, value in self.params.items():
        template["compilerOptions"][key] = value

    with open(self.get_tsconfig(), "w") as tsconfig:
        json.dump(template, tsconfig, indent="\t")
def build_source(source_path, target_path):
    tsconfig_path = join(source_path, "tsconfig.json")

    if isfile(join(source_path, ".includes")):
        params = read_params_from_includes(source_path)
        files = read_files_from_includes(source_path)
    elif not isfile(tsconfig_path):
        params = COMPILER_OPTIONS.copy()
        files = [file for file in glob.glob(f"{source_path}/**/*", recursive=True)]
    else:
        # no .includes, but tsconfig.json exists: build it as-is
        result = build_tsconfig(tsconfig_path)
        if result != 0:
            return 1
        with open(tsconfig_path) as tsconfig:
            config = json.load(tsconfig)
        library_path = normpath(join(source_path, config["compilerOptions"]["outFile"]))
        copy_file(library_path, target_path)
        declaration_path = f"{splitext(library_path)[0]}.d.ts"
        if isfile(declaration_path):
            copy_file(declaration_path, join(
                make_config.get_path("toolchain/build/typescript-headers"),
                basename(declaration_path)))
        return 0

    # decode .includes params into tsc compiler options
    params["checkJs"] = not params.pop("nocheck")
    params["declaration"] = params.pop("declarations")
    params["experimentalDecorators"] = params.pop("decorators")

    # there are two directories with *.d.ts files: toolchain/jslibs (default headers)
    # and toolchain/build/typescript-headers (additional libraries)
    headers = glob.glob(relpath(make_config.get_path(
        "toolchain/**/*.d.ts"), source_path), recursive=True)

    template = {
        "compilerOptions": {
            "target": "ES5",
            "lib": ["ESNext"],
            "allowJs": True,
            "downlevelIteration": True,
            "outFile": target_path
        },
        "exclude": [
            "**/node_modules/*",
            "dom"
        ],
        "include": files,
        "files": headers
    }

    for key, value in params.items():
        template["compilerOptions"][key] = value

    with open(tsconfig_path, "w") as tsconfig:
        json.dump(template, tsconfig, indent="\t")
    return build_tsconfig(tsconfig_path)
def compile_all_using_make_config(abis):
    import time
    start_time = time.time()

    std_includes = make_config.get_path("toolchain/stdincludes")
    cache_dir = make_config.get_path("toolchain/build/gcc")
    ensure_directory(cache_dir)
    mod_structure.cleanup_build_target("native")

    overall_result = CODE_OK
    for native_dir in make_config.get_filtered_list("compile", prop="type", values=("native",)):
        if "source" not in native_dir:
            print("skipped invalid native directory json", native_dir, file=sys.stderr)
            overall_result = CODE_INVALID_JSON
            continue
        for native_dir_path in make_config.get_paths(native_dir["source"]):
            if os.path.isdir(native_dir_path):
                directory_name = os.path.basename(native_dir_path)
                result = build_native_dir(
                    native_dir_path,
                    mod_structure.new_build_target("native", directory_name + "{}"),
                    os.path.join(cache_dir, directory_name),
                    abis,
                    std_includes,
                    BaseConfig(native_dir["rules"] if "rules" in native_dir else {})
                )
                if result != CODE_OK:
                    overall_result = result
            else:
                print("skipped non-existing native directory path", native_dir["source"], file=sys.stderr)
                overall_result = CODE_INVALID_PATH

    mod_structure.update_build_config_list("nativeDirs")
    print(f"completed native build in {int((time.time() - start_time) * 100) / 100}s "
          f"with result {overall_result} - {'OK' if overall_result == CODE_OK else 'ERROR'}")
    return overall_result
def task_cleanup():
    config = get_make_config()
    clear_directory(config.get_path("toolchain/build/gcc"))
    clear_directory(config.get_path("toolchain/build/gradle"))
    clear_directory(config.get_path("toolchain/build/project"))

    # not working
    # import java.java_build
    # java.java_build.cleanup_gradle_scripts()
    return 0
def compile_all_using_make_config():
    import time
    start_time = time.time()

    overall_result = 0
    cache_dir = make_config.get_path("toolchain/build/gradle")
    ensure_directory(cache_dir)

    directories = []
    directory_names = []
    for directory in make_config.get_filtered_list("compile", prop="type", values=("java",)):
        if "source" not in directory:
            print("skipped invalid java directory json", directory, file=sys.stderr)
            overall_result = -1
            continue
        for path in make_config.get_paths(directory["source"]):
            if not os.path.isdir(path):
                print("skipped non-existing java directory path", directory["source"], file=sys.stderr)
                overall_result = -1
                continue
            directories.append(path)
            # track directory names so failed builds can be cleared from output below
            directory_names.append(os.path.basename(path))

    if overall_result != 0:
        print("failed to get java directories", file=sys.stderr)
        return overall_result

    if len(directories) > 0:
        classpath_directories = [make_config.get_path("toolchain/classpath")] \
            + make_config.get_value("make.gradle.classpath", [])
        overall_result = build_java_directories(
            directories, cache_dir, get_classpath_from_directories(classpath_directories))
        if overall_result != 0:
            print(f"failed, clearing compiled directories {directories} ...")
            for directory_name in directory_names:
                clear_directory(make_config.get_path("output/" + directory_name))

    cleanup_gradle_scripts(directories)
    mod_structure.update_build_config_list("javaDirs")
    print(f"completed java build in {int((time.time() - start_time) * 100) / 100}s "
          f"with result {overall_result} - {'OK' if overall_result == 0 else 'ERROR'}")
    return overall_result
def task_build_info():
    import json
    config = get_make_config()
    out_dir = os.path.join("output/debug", config.get_mod_dir())

    with open(config.get_path(os.path.join(out_dir, "mod.info")), "w") as info_file:
        info = dict(config.get_value("global.info", fallback={"name": "No name was provided"}))
        if "icon" in info:
            del info["icon"]
        info_file.write(json.dumps(info, indent=" " * 4))

    icon_path = config.get_value("global.info.icon")
    if icon_path is not None:
        copy_file(config.get_path(icon_path, True),
                  config.get_path(os.path.join(out_dir, "mod_icon.png")))
    return 0
def task_build_package():
    import shutil
    config = get_make_config()
    output_dir = config.get_path(os.path.join("output/debug", config.get_mod_dir()))
    ensure_directory(config.get_path("output/release"))
    output_file = config.get_path("output/release/" + config.get_mod_dir() + ".icmod")
    output_file_tmp = config.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)

    if os.path.isfile(output_file):
        os.remove(output_file)
    if os.path.isfile(output_file_tmp):
        os.remove(output_file_tmp)

    shutil.make_archive(output_file_tmp[:-4], 'zip', output_dir)
    os.rename(output_file_tmp, output_file)
    return 0
def build_source(self, temp_path):
    result = os.system(f'tsc -p "{self.get_tsconfig()}"')

    declaration_path = f"{splitext(temp_path)[0]}.d.ts"
    if isfile(declaration_path):
        move_file(declaration_path, join(
            make_config.get_path("toolchain/build/project/declarations"),
            basename(declaration_path)))
    # result = 0
    return result
def assemble_assets():
    asset_directories = get_asset_directories()
    if asset_directories is None:
        print("some asset directories are invalid")
        return -1

    output_dir = make_config.get_path("output/assets")
    clear_directory(output_dir)
    for asset_dir in asset_directories:
        copy_directory(asset_dir, output_dir)
    return 0
def task_build_package():
    import shutil
    config = get_make_config()
    output_dir = config.get_path("output")
    mod_folder = config.get_value("make.modFolder")
    output_file = config.get_path(mod_folder + ".icmod")
    output_root_tmp = config.get_path("toolchain/build")
    output_dir_tmp = output_root_tmp + "/" + mod_folder
    output_file_tmp = output_root_tmp + "/mod.zip"
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)

    if os.path.isfile(output_file):
        os.remove(output_file)
    if os.path.isfile(output_file_tmp):
        os.remove(output_file_tmp)

    shutil.move(output_dir, output_dir_tmp)
    shutil.make_archive(output_file_tmp[:-4], 'zip', output_root_tmp, mod_folder)
    os.rename(output_file_tmp, output_file)
    shutil.move(output_dir_tmp, output_dir)
    return 0
def install(arch="arm", reinstall=False): if not reinstall and check_installed(arch): print("toolchain for " + arch + " is already installed, installation skipped") return True else: ndk_path = get_ndk_path() if ndk_path is None: print("failed to get ndk path") result = subprocess.call([ "python", os.path.join(os.path.join(ndk_path, "build\\tools\\make_standalone_toolchain.py")), "--arch", str(arch), "--install-dir", make_config.get_path("toolchain\\ndk\\" + str(arch)), "--force" ]) if result == 0: open(make_config.get_path("toolchain\\ndk\\.installed-" + str(arch)), 'tw').close() return True else: print("installation failed with result code:", result) return False
def create_tsconfig(self, temp_path):
    declarations = []
    declarations.extend(glob.glob(
        make_config.get_path("toolchain/declarations/**/*.d.ts"), recursive=True))
    declarations.extend(glob.glob(
        make_config.get_path("toolchain/build/project/declarations/**/*.d.ts"), recursive=True))

    # drop the declaration generated for the file that is being compiled itself
    currentName = splitext(basename(temp_path))[0]
    declarations = [declaration for declaration in declarations
                    if not declaration.endswith(f"{currentName}.d.ts")]

    template = {
        "compilerOptions": {
            "target": "ES5",
            "lib": ["ESNext"],
            "outFile": temp_path,
            "experimentalDecorators": True,
            "downlevelIteration": True,
            "allowJs": True
        },
        "exclude": [
            "**/node_modules/*",
            "dom",
            "webpack"
        ] + self.exclude,
        "include": self.include,
    }

    if len(declarations) > 0:
        template["files"] = declarations

    for key, value in self.params.items():
        template["compilerOptions"][key] = value

    with open(self.get_tsconfig(), "w") as tsconfig:
        json.dump(template, tsconfig, indent="\t")
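# Hypothetical usage sketch (not part of the toolchain sources): a typical flow for
# the Includes class defined in this module, assuming source_directory and temp_path
# are placeholders supplied by the caller.
#
# includes = Includes(source_directory)      # points at <source_directory>/.includes
# includes.read()                            # parse include/exclude lists and params
# includes.create_tsconfig(temp_path)        # write tsconfig.json for this build
# result = includes.build_source(temp_path)  # run tsc and collect the .d.ts output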
def build_source(self, temp_path):
    result = os.system(f'tsc -p "{self.get_tsconfig()}" --noEmitOnError')
    self.remove_flags(temp_path)
    self.place_flag(temp_path)

    declaration_path = f"{splitext(temp_path)[0]}.d.ts"
    if isfile(declaration_path):
        move_file(declaration_path, join(
            make_config.get_path("toolchain/build/typescript-headers"),
            basename(declaration_path)))
    return result
def build_java_directories(directories, cache_dir, classpath):
    ensure_directory(cache_dir)
    setup_gradle_project(cache_dir, directories, classpath)

    gradle_executable = make_config.get_path("toolchain/bin/gradlew.bat")
    result = subprocess.call([gradle_executable, "-p", cache_dir, "shadowJar"])
    if result != 0:
        print(f"java compilation failed with code {result}")
        return result

    result = subprocess.call([gradle_executable, "-p", cache_dir, "dex"])
    if result != 0:
        print(f"dex failed with code {result}")
        return result

    print('\033[1m' + '\033[92m' + "\n****SUCCESS****\n" + '\033[0m')
    return result
def require_compiler_executable(arch, install_if_required=False):
    ndk_dir = make_config.get_path("toolchain/ndk/" + str(arch))
    file = search_for_gcc_executable(ndk_dir)
    if install_if_required:
        install(arch=arch, reinstall=False)
        if file is None or not os.path.isfile(file):
            print("ndk installation is broken, trying to re-install")
            install(arch=arch, reinstall=True)
            file = search_for_gcc_executable(ndk_dir)
            if file is None or not os.path.isfile(file):
                print("re-install hasn't helped")
                return None
        return file
    else:
        return file
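# Hypothetical usage sketch (not part of the toolchain sources): resolve the gcc
# executable for an ABI before compiling, letting the helper above (re)install the
# standalone NDK toolchain when it is missing or broken. The "arm" value is an example.
#
# executable = require_compiler_executable("arm", install_if_required=True)
# if executable is None:
#     print("failed to acquire gcc executable for arm", file=sys.stderr)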
def task_build_package():
    import shutil
    config = get_make_config()
    output_dir = config.get_project_path("output")
    output_file = config.get_project_path(config.get_value("currentProject", "mod") + ".icmod")
    output_file_tmp = config.get_path("toolchain/build/mod.zip")
    ensure_directory(output_dir)
    ensure_file_dir(output_file_tmp)

    if os.path.isfile(output_file):
        os.remove(output_file)
    if os.path.isfile(output_file_tmp):
        os.remove(output_file_tmp)

    shutil.make_archive(output_file_tmp[:-4], 'zip', output_dir)
    os.rename(output_file_tmp, output_file)
    return 0
def assemble_additional_directories():
    result = 0
    output_dir = make_config.get_path("output")
    for additional_dir in make_config.get_value("additional", []):
        if "sources" not in additional_dir or "pushTo" not in additional_dir:
            print("invalidly formatted additional directory json", additional_dir)
            result = -1
            break
        dst_dir = os.path.join(output_dir, additional_dir["pushTo"])
        clear_directory(dst_dir)
        source_directories = get_path_set(additional_dir["sources"], error_sensitive=True)
        if source_directories is None:
            print("some additional directories are invalid")
            result = -1
            break
        for source_dir in source_directories:
            copy_directory(source_dir, dst_dir)
    return result
def task_build_additional():
    overall_result = 0
    config = get_make_config()
    for additional_dir in config.get_value("additional", fallback=[]):
        if "source" in additional_dir and "targetDir" in additional_dir:
            for additional_path in config.get_paths(additional_dir["source"]):
                if not os.path.exists(additional_path):
                    print("non-existing additional path: " + additional_path)
                    overall_result = 1
                    break
                target = config.get_path(os.path.join(
                    "output", "debug",
                    config.get_mod_dir(),
                    additional_dir["targetDir"],
                    os.path.basename(additional_path)
                ))
                if os.path.isdir(additional_path):
                    copy_directory(additional_path, target)
                else:
                    ensure_file_dir(target)
                    copy_file(additional_path, target)
    return overall_result
"noResolve": False, "noStrictGenericChecks": False, "noUnusedLocals": False, "noUnusedParameters": False, "preserveConstEnums": False, "preserveSymlinks": False, "pretty": True, "removeComments": False, "showConfig": False, "skipLibCheck": False, "sourceMap": False, "strict": False, "tsBuildInfoFile": ".tsbuildinfo" } temp_directory = make_config.get_path("toolchain/build/typescript") class Includes: def __init__(self, directory): self.file = join(directory, ".includes") self.directory = directory self.include = [] self.exclude = [] self.params = {} def read(self): with open(self.file, encoding="utf-8") as includes: for line in includes: line = line.strip() self.decode_line(line)
def task_push_everything():
    from push import push
    config = get_make_config()
    return push(config.get_path(os.path.join("output", "debug", config.get_mod_dir())))
def __init__(self, output_directory):
    self.directory = make_config.get_path(output_directory)
    self.targets = {}
    self.build_config = None
def setup_gradle_project(cache_dir, directories, classpath):
    file = open(os.path.join(cache_dir, "settings.gradle"), "w", encoding="utf-8")
    file.writelines([
        "include ':%s'\nproject(':%s').projectDir = file('%s')\n"
        % (os.path.basename(item), os.path.basename(item), item.replace("\\", "\\\\"))
        for item in directories
    ])
    file.close()

    for directory in directories:
        target_dir = mod_structure.new_build_target("java", os.path.basename(directory))
        clear_directory(target_dir)
        ensure_directory(target_dir)
        copy_file(os.path.join(directory, "manifest"),
                  os.path.join(target_dir, "manifest"))

        with open(os.path.join(directory, "manifest"), "r", encoding="utf-8") as file:
            manifest = json.load(file)
        source_dirs = manifest["source-dirs"]
        library_dirs = manifest["library-dirs"]
        build_dir = os.path.join(cache_dir, os.path.basename(target_dir), "classes")
        dex_dir = target_dir
        ensure_directory(build_dir)
        ensure_directory(dex_dir)

        if make_config.get_value("make.gradle.keepLibraries", True):
            for library_dir in library_dirs:
                src_dir = os.path.join(directory, library_dir)
                if os.path.isdir(src_dir):
                    copy_directory(src_dir, os.path.join(dex_dir, library_dir), clear_dst=True)

        if make_config.get_value("make.gradle.keepSources", False):
            for source_dir in source_dirs:
                src_dir = os.path.join(directory, source_dir)
                if os.path.isdir(src_dir):
                    copy_directory(src_dir, os.path.join(dex_dir, source_dir), clear_dst=True)

        with open(os.path.join(directory, "build.gradle"), "w", encoding="utf-8") as build_file:
            build_file.write("""
plugins {
    id 'com.github.johnrengelman.shadow' version '5.2.0'
    id "java"
}

dependencies {
    """ + ("""compile fileTree('""" + "', '".join(
                [path.replace("\\", "\\\\") for path in library_dirs]
            ) + """') { include '*.jar' }""" if len(library_dirs) > 0 else "") + """
}

sourceSets {
    main {
        java {
            srcDirs = ['""" + "', '".join(
                [path.replace("\\", "\\\\") for path in source_dirs]
            ) + """']
            buildDir = \"""" + os.path.join(
                build_dir, "${project.name}"
            ).replace("\\", "\\\\") + """\"
        }
        resources {
            srcDirs = []
        }
        compileClasspath += files('""" + "', '".join(
                [path.replace("\\", "\\\\") for path in classpath]
            ) + """')
    }
}

tasks.register("dex") {
    javaexec {
        main = "-jar";
        args = [
            \"""" + make_config.get_path(
                "toolchain/bin/dx.jar"
            ).replace("\\", "\\\\") + """\",
            "--dex", "--multi-dex",
            "--output=\\\"""" + os.path.join(
                dex_dir, "."
            ).replace("\\", "\\\\") + """\\\"",
            \"""" + os.path.join(
                build_dir, "${project.name}", "libs", "${project.name}-all.jar"
            ).replace("\\", "\\\\") + """\"
        ]
    }
}
""")
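# Sketch of the per-directory "manifest" file consumed by setup_gradle_project above.
# Only the "source-dirs" and "library-dirs" keys are read there; any other fields a
# real manifest may carry are omitted, and the directory names below are hypothetical.
#
# {
#     "source-dirs": ["src"],
#     "library-dirs": ["lib"]
# }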
def task_clear_output():
    config = get_make_config()
    clear_directory(config.get_path(os.path.join("output", "debug", config.get_mod_dir())))
    return 0
with open(filepath, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): total.update(chunk) """ return total.hexdigest() @staticmethod def get_file_hash(file): return md5(open(file, "rb").read()).hexdigest() def save(self): makedirs(dirname(self.file), exist_ok=True) with open(self.file, "w") as output: json.dump(self.hashes, output, indent="\t") def is_path_changed(self, path): key = self.path_to_key(path) hash = self.get_path_hash(path) return key not in self.last_hashes or self.last_hashes[key] != hash def path_to_key(self, path): # return relpath(path, self.file) return md5(path.encode("utf-8")).hexdigest() def clear(self): remove(self.file) return build_storage = HashStorage(make_config.get_path("toolchain/build/project/.buildhashes")) output_storage = HashStorage(make_config.get_path("toolchain/build/project/.outputhashes"))
def get_fake_so_dir(abi):
    fake_so_dir = make_config.get_path(os.path.join("toolchain/ndk/fakeso", abi))
    ensure_directory(fake_so_dir)
    return fake_so_dir
def install(arch="arm", reinstall=False): if not reinstall and check_installed(arch): print("toolchain for " + arch + " is already installed, installation skipped") return True else: ndk_path = get_ndk_path() if ndk_path is None: from urllib import request print("failed to get ndk path") ans = input("download ndk? (Y/N) ") if ans.lower() == "y": archive_path = make_config.get_path("toolchain\\temp\\ndk.zip") makedirs(dirname(archive_path), exist_ok=True) if not isfile(archive_path): url = "https://dl.google.com/android/repository/android-ndk-r16b-windows-x86_64.zip" with request.urlopen(url) as response: with open(archive_path, 'wb') as f: info = response.info() length = int(info["Content-Length"]) downloaded = 0 while True: buffer = response.read(8192) if not buffer: break downloaded += len(buffer) f.write(buffer) print_progress_bar( downloaded, length, suffix='Downloading...' if downloaded < length else "Complete!", length=50) print("extracting ndk...") extract_path = make_config.get_path("toolchain\\temp") with ZipFile(archive_path, 'r') as archive: archive.extractall(extract_path) ndk_path = search_ndk_path(extract_path, contains_ndk=True) else: print("aborting native compilation") return False print("installing...") result = subprocess.call([ "python", join(ndk_path, "build", "tools", "make_standalone_toolchain.py"), "--arch", str(arch), "--install-dir", make_config.get_path("toolchain\\ndk\\" + str(arch)), "--force" ]) if result == 0: open( make_config.get_path("toolchain\\ndk\\.installed-" + str(arch)), 'tw').close() print("removing temp files...") clear_directory(make_config.get_path("toolchain\\temp")) print("done!") return True else: print("installation failed with result code:", result) return False
def check_installed(arch):
    return isfile(make_config.get_path("toolchain\\ndk\\.installed-" + str(arch)))
"noResolve": False, "noStrictGenericChecks": False, "noUnusedLocals": False, "noUnusedParameters": False, "preserveConstEnums": False, "preserveSymlinks": False, "pretty": True, "removeComments": False, "showConfig": False, "skipLibCheck": False, "sourceMap": False, "strict": False, "tsBuildInfoFile": ".tsbuildinfo" } temp_directory = make_config.get_path("toolchain/build/project/sources") class Includes: def __init__(self, directory): self.file = join(directory, ".includes") self.directory = directory self.include = [] self.exclude = [] self.params = {} def read(self): with open(self.file, encoding="utf-8") as includes: for line in includes: line = line.strip()
import json
import os.path as path
import os
import urllib.request as request
from datetime import datetime, timezone

from make_config import make_config
import utils
from setup_commons import cleanup_if_required

date_format = "%Y-%m-%dT%H:%M:%SZ"
last_update_path = make_config.get_path("toolchain/bin/.last_update")


def set_last_update():
    with open(last_update_path, "w", encoding="utf-8") as last_update_file:
        last_update_file.write(datetime.now(timezone.utc).strftime(date_format))


def download_and_extract_toolchain(directory):
    import urllib.request
    import zipfile
    archive = path.join(directory, 'update.zip')

    if not path.exists(archive):
        url = "https://codeload.github.com/80LK/innercore-mod-toolchain/zip/master"
        print("downloading toolchain archive from " + url)
        urllib.request.urlretrieve(url, archive)
    else:
        print("toolchain archive already exists in " + directory)