def build_all_resources():
    """Rebuild every resource target declared in the make config.

    Clears the four known resource build targets, then registers and fills a
    build target for each valid entry of "resources". Returns 0 when every
    entry was processed, 1 if any entry had to be skipped.
    """
    for kind in ("resource_directory", "gui",
                 "minecraft_resource_pack", "minecraft_behavior_pack"):
        mod_structure.cleanup_build_target(kind)
    status = 0
    for resource in make_config.get_value("resources", fallback=[]):
        # Entries without both keys cannot be built at all.
        if "path" not in resource or "type" not in resource:
            print("skipped invalid source json", resource, file=sys.stderr)
            status = 1
            continue
        for source_path in make_config.get_paths(resource["path"]):
            if not exists(source_path):
                print("skipped non-existing resource path", resource["path"],
                      file=sys.stderr)
                status = 1
                continue
            res_type = resource["type"]
            if res_type not in ("resource_directory", "gui",
                                "minecraft_resource_pack",
                                "minecraft_behavior_pack"):
                print("skipped invalid resource with type", res_type,
                      file=sys.stderr)
                status = 1
                continue
            # "{}" suffix leaves room for an index appended by the structure.
            res_name = resource.get("target", basename(source_path)) + "{}"
            if res_type in ("resource_directory", "gui"):
                declared = "resource" if res_type == "resource_directory" else "gui"
                target = mod_structure.new_build_target(
                    res_type, res_name, declare={"type": declared})
            else:
                target = mod_structure.new_build_target(
                    res_type, res_name, exclude=True, declare_default={
                        "resourcePacksDir": mod_structure.get_target_directories("minecraft_resource_pack")[0],
                        "behaviorPacksDir": mod_structure.get_target_directories("minecraft_behavior_pack")[0]
                    })
            clear_directory(target)
            copy_directory(source_path, target)
    mod_structure.update_build_config_list("resources")
    return status
def _main_menu(first_time):
    """Show the main menu and dispatch the player's choice.

    Returns False when the player quits, True otherwise.
    Raises ValueError if an unrecognized choice slips past validation.
    """
    utils.clear_screen()
    mygame = None
    if first_time:
        print("Welcome to {}".format(con.GAME_NAME))
    choices = ', '.join(con.MAIN_MENU_CHOICES)
    print("Here are your choices: {}".format(choices))
    user_input = input("main loop > ").lower().strip()
    while user_input not in con.MAIN_MENU_CHOICES:
        print("Sorry, I didn’t recognize that: {}".format(user_input))
        user_input = input("> ").lower().strip()
    # ---------------------------------------------
    print("user input: {}".format(user_input))
    if user_input == "new":
        char_name, char_kind = utils.get_player_info()
        utils.save_char_info(char_name, char_kind)
        s = "{} the {}".format(char_name.upper(), char_kind.upper())
        print("You have created a character: {}".format(s))
        print("Now let's set up a world for you ...")
        utils.copy_directory(char_name, char_kind)
        mygame = Game(char_name, char_kind, zone_name="world")
    elif user_input == "load":
        print("LOAD needs to be implemented.")
    elif user_input == "quit":
        return False
    else:
        s = "Sorry, I don't recognize that: {}".format(user_input)
        raise ValueError(s)
    input("Press <Enter> to continue.")
    utils.clear_screen()
    # BUG FIX: the "load" branch leaves mygame as None, so the original
    # unconditional mygame.game_loop() raised AttributeError. Only enter
    # the game loop when a game was actually created.
    if mygame is not None:
        mygame.game_loop()
    return True
def download_and_extract_toolchain(directory):
    """Download the toolchain archive (unless cached) and unpack it in place.

    Fetches update.zip into *directory* if missing, extracts it, moves the
    inner toolchain-mod payload up one level, and removes the archive.
    Exits the process if the expected "toolchain" folder did not appear.
    """
    import urllib.request
    import zipfile
    archive = path.join(directory, 'update.zip')
    if path.exists(archive):
        print("toolchain archive already exists in " + directory)
    else:
        url = "https://codeload.github.com/80LK/innercore-mod-toolchain/zip/master"
        print("downloading toolchain archive from " + url)
        urllib.request.urlretrieve(url, archive)
    print("extracting toolchain to " + directory)
    with zipfile.ZipFile(archive, 'r') as bundle:
        bundle.extractall(directory)
    extracted_root = path.join(directory, "innercore-mod-toolchain-master")
    try:
        utils.copy_directory(path.join(extracted_root, "toolchain-mod"),
                             directory,
                             ignore=["make.json", "*/adb/*"],
                             ignoreEx=True)
        utils.clear_directory(extracted_root)
    except Exception as ex:
        # Best-effort: report the failure, the final existence check decides.
        print(ex)
    finally:
        os.remove(archive)
    if not path.exists(path.join(directory, "toolchain")):
        print(
            "an error occured while extracting toolchain archive, please, retry the operation"
        )
        exit()
def assemble_assets():
    """Merge every configured asset directory into output/assets.

    Returns 0 on success, -1 when the asset configuration is invalid.
    """
    directories = get_asset_directories()
    if directories is None:
        print("some asset directories are invalid")
        return -1
    destination = make_config.get_path("output/assets")
    clear_directory(destination)
    for directory in directories:
        copy_directory(directory, destination)
    return 0
def copy_additionals(source, destination):
    """Copy every entry of *source* not listed in root_files into
    destination/src/assets/root, preserving files and directories alike."""
    global root_files
    assets_root = os.path.join(destination, "src", "assets", "root")
    for entry in os.listdir(source):
        if entry in root_files:
            continue
        entry_path = os.path.join(source, entry)
        target_path = os.path.join(assets_root, entry)
        if os.path.isfile(entry_path):
            copy_file(entry_path, target_path)
        elif os.path.isdir(entry_path):
            copy_directory(entry_path, target_path)
def createProject(self, name, author="", version="1.0", description="", folder=None):
    """Create a new project from the simple-project template.

    Copies the template, writes the given metadata into the project's
    make.json, hides the new folder in the VSCode workspace settings and
    registers it. Returns the index of the newly created project.
    Raises IOError if the target folder already exists.
    """
    # FIX: compare against None with `is`, not `==` (PEP 8 identity check).
    if folder is None:
        folder = NameToFolderName(name)
    path = os.path.join(self.root_dir, folder)
    if os.path.exists(path):
        raise IOError(f"""Folder "{folder}" exists""")
    os.mkdir(path)
    copy_directory(self.config.get_path("toolchain/simple-project"), path, True)
    # Fill in project metadata in the template's make.json.
    make_path = os.path.join(path, "make.json")
    with open(make_path, "r", encoding="utf-8") as make_file:
        make_obj = json.loads(make_file.read())
    make_obj['info']["name"] = name
    make_obj['info']["author"] = author
    make_obj['info']["version"] = version
    make_obj['info']["description"] = description
    with open(make_path, "w", encoding="utf-8") as make_file:
        make_file.write(json.dumps(make_obj, indent=" " * 4))
    # Hide the new project folder from the VSCode file explorer.
    vsc_settings_path = self.config.get_path(".vscode/settings.json")
    with open(vsc_settings_path, "r", encoding="utf-8") as vsc_settings_file:
        vsc_settings_obj = json.loads(vsc_settings_file.read())
    vsc_settings_obj["files.exclude"][folder] = True
    self.__projects.append(folder)
    with open(vsc_settings_path, "w", encoding="utf-8") as vsc_settings_file:
        vsc_settings_file.write(
            json.dumps(vsc_settings_obj, indent=" " * 4))
    return self.countProjects() - 1
def task_build_additional():
    """Copy configured "additional" files and directories into the output tree.

    Returns 0 on success, 1 if some additional path does not exist.
    """
    status = 0
    for entry in get_make_config().get_value("additional", fallback=[]):
        # Silently skip entries that lack the required keys, as before.
        if "source" not in entry or "targetDir" not in entry:
            continue
        for additional_path in get_make_config().get_paths(entry["source"]):
            if not os.path.exists(additional_path):
                print("non existing additional path: " + additional_path)
                status = 1
                break
            target = get_make_config().get_path(os.path.join(
                "output", entry["targetDir"],
                os.path.basename(additional_path)))
            if os.path.isdir(additional_path):
                copy_directory(additional_path, target)
            else:
                ensure_file_dir(target)
                copy_file(additional_path, target)
    return status
def assemble_additional_directories():
    """Copy each configured additional directory into its output location.

    Returns 0 on success, -1 as soon as one entry is malformed or one of its
    source paths is invalid (processing stops at the first failure).
    """
    result = 0
    output_dir = make_config.get_path("output")
    for entry in make_config.get_value("additional", []):
        if "sources" not in entry or "pushTo" not in entry:
            print("invalid formatted additional directory json", entry)
            result = -1
            break
        destination = join(output_dir, entry["pushTo"])
        clear_directory(destination)
        sources = get_path_set(entry["sources"], error_sensitive=True)
        if sources is None:
            print("some additional directories are invalid")
            result = -1
            break
        for source_dir in sources:
            copy_directory(source_dir, destination)
    return result
def _copy_input_files(event, forward_run_dir, lasif_path, iteration_name,
                      specfem_root, mesh=False):
    """
    Copies the pre-generated input files from LASIF. Don't copy the Par_file
    or STF. Par_file will be copied later, from the LASIF/SUBMISSION
    directory.
    """
    compile_data = os.path.join(specfem_root, 'DATA')
    mesh_data = os.path.join(forward_run_dir, 'mesh', 'DATA')
    event_data = os.path.join(forward_run_dir, event, 'DATA')
    lasif_output = os.path.join(lasif_path, 'OUTPUT')
    # Renamed loop variable: `dir` shadowed the builtin.
    for output_name in os.listdir(lasif_output):
        if iteration_name in output_name and event in output_name \
                and 'input_files' in output_name:
            source = os.path.join(lasif_output, output_name)
            utils.copy_directory(source, event_data, exc=['Par_file', 'STF'])
            # This flag also copies the first event's data to the solver base
            # directory for compilation, and the mesh directory for meshing.
            if mesh:
                utils.copy_directory(source, mesh_data,
                                     exc=['Par_file', 'STF'])
                # BUG FIX: was exc=['Par_file, STF'] — one malformed string
                # instead of two entries, so Par_file and STF were copied
                # into the compile directory despite the docstring contract.
                utils.copy_directory(source, compile_data,
                                     exc=['Par_file', 'STF'])
def setup_new_iteration(params, old_iteration, new_iteration):
    """
    Sets up a new iteration, and links the mesh files from the old iteration
    to the new one.
    """
    forward_stage_dir = params['forward_stage_dir']
    event_list = params['event_list']
    old_base = os.path.join(forward_stage_dir, old_iteration)
    new_base = os.path.join(forward_stage_dir, new_iteration)
    # Point the shared parameter dict at the new iteration before building it.
    params.update({'iteration_name': new_iteration})
    params.update({'forward_run_dir': new_base})
    setup_solver(params)
    utils.print_ylw("Copying mesh information...")
    utils.copy_directory(os.path.join(old_base, 'mesh', 'DATABASES_MPI'),
                         os.path.join(new_base, 'mesh', 'DATABASES_MPI'))
    utils.print_ylw("Copying kernels...")
    utils.copy_directory(
        os.path.join(old_base, 'OPTIMIZATION', 'PROCESSED_KERNELS'),
        os.path.join(new_base, 'OPTIMIZATION', 'PROCESSED_KERNELS'),
        ends='smooth.bin')
    utils.print_ylw("Copying DATA files...")
    for event in event_list:
        utils.copy_directory(os.path.join(old_base, event, 'DATA'),
                             os.path.join(new_base, event, 'DATA'),
                             only=['Par_file', 'CMTSOLUTION', 'STATIONS'])
    utils.copy_directory(os.path.join(old_base, 'mesh', 'DATA'),
                         os.path.join(new_base, 'mesh', 'DATA'),
                         only=['Par_file', 'CMTSOLUTION', 'STATIONS'])
    utils.print_blu('Done.')
# Replace incorrect file
for entry in config['replace']:
    print('Replacing "%s" with "%s"' % (
        os.path.join(entry['dir'], entry['filename_from']),
        os.path.join(entry['dir'], entry['filename_to'])))
    # Drop the bad image, then bring in the corrected one from downloads.
    os.remove(
        os.path.join(original_path, entry['dir'], entry['filename_from']))
    shutil.copyfile(
        os.path.join(downloads_path, entry['filename_to']),
        os.path.join(original_path, entry['dir'], entry['filename_to']))
print('done.')

# Make a copy of the original dataset
copy_directory(original_path, clean_path)

# Remove duplicates, pictures depicting more than one cat, etc...
print('Cleaning...')
os.makedirs(removed_path)
for subdir in config['remove']:
    for filename in config['remove'][subdir]:
        path_from = os.path.join(clean_path, subdir, filename)
        path_to = os.path.join(removed_path, subdir + '_' + filename)
        # Move both the image and its .cat landmark file out of the set.
        os.rename(path_from, path_to)
        os.rename(path_from + '.cat', path_to + '.cat')
print('done.')

# Remove landmarks 3, 5, 6, 8 (zero-based) - 2 inner points of each ear
print('Removing inner ear landmarks...')
cnt = 0
def build_native_dir(directory, output_dir, cache_dir, abis, std_includes_path, rules: BaseConfig):
    """Compile one native module directory into shared libraries, one per ABI.

    Preprocesses every .cpp/.c file, recompiles only sources whose
    preprocessed output changed (compared with filecmp against the cached
    copy), then links the objects into output_dir/so/<abi>/lib<name>.so.
    Returns CODE_OK on success or the first failing error code.
    """
    # Resolve a compiler for every requested ABI up front so we fail fast
    # before touching the output directory.
    executables = {}
    for abi in abis:
        executable = prepare_compiler_executable(abi)
        if executable is None:
            print("failed to acquire GCC executable from NDK for abi " + abi)
            return CODE_FAILED_NO_GCC
        executables[abi] = executable
    try:
        manifest = get_manifest(directory)
        targets = {}
        soname = "lib" + manifest["shared"]["name"] + ".so"
        for abi in abis:
            targets[abi] = os.path.join(output_dir, "so/" + abi + "/" + soname)
    except Exception as err:
        print("failed to read manifest for directory " + directory + " error: " + str(err))
        return CODE_FAILED_INVALID_MANIFEST
    keep_sources = rules.get_value("keepSources", fallback=False)
    if keep_sources:
        # copy everything and clear build files
        copy_directory(directory, output_dir, clear_dst=True)
        clear_directory(os.path.join(output_dir, "so"))
        os.remove(os.path.join(output_dir, soname))
    else:
        clear_directory(output_dir)
        # copy manifest
        copy_file(os.path.join(directory, "manifest"), os.path.join(output_dir, "manifest"))
        # copy includes
        keep_includes = rules.get_value("keepIncludes", fallback=True)
        for include_path in manifest["shared"]["include"]:
            src_include_path = os.path.join(directory, include_path)
            output_include_path = os.path.join(output_dir, include_path)
            if keep_includes:
                copy_directory(src_include_path, output_include_path, clear_dst=True)
            else:
                clear_directory(output_include_path)
    # Collect the standard include directories shipped with the toolchain.
    std_includes = []
    for std_includes_dir in os.listdir(std_includes_path):
        std_includes.append(os.path.abspath(os.path.join(std_includes_path, std_includes_dir)))
    # compile for every abi
    overall_result = CODE_OK
    for abi in abis:
        printed_compilation_title = f"compiling {os.path.basename(directory)} for {abi}"
        print("\n")
        print(f"{'=' * (48 - len(printed_compilation_title) // 2)} {printed_compilation_title} {'=' * (48 - (1 + len(printed_compilation_title)) // 2)}")
        executable = executables[abi]
        gcc = [executable, "-std=c++11"]
        includes = []
        for std_includes_dir in std_includes:
            includes.append(f'-I{std_includes_dir}')
        # Link against stub ("fake") .so files so the linker can resolve
        # libraries that only exist on-device.
        dependencies = [f'-L{get_fake_so_dir(abi)}', "-landroid", "-lm", "-llog"]
        for link in rules.get_value("link", fallback=[]) + make_config.get_value("make.linkNative", fallback=[]) + ["horizon"]:
            add_fake_so(executable, abi, link)
            dependencies.append(f'-l{link}')
        if "depends" in manifest:
            search_dir = os.path.abspath(os.path.join(directory, ".."))  # always search for dependencies in current dir
            for dependency in manifest["depends"]:
                if dependency is not None:
                    add_fake_so(executable, abi, dependency)
                    dependencies.append("-l" + dependency)
                    dependency_dir = search_directory(search_dir, dependency)
                    if dependency_dir is not None:
                        try:
                            for include_dir in get_manifest(dependency_dir)["shared"]["include"]:
                                includes.append("-I" + os.path.join(dependency_dir, include_dir))
                        except KeyError:
                            # dependency manifest declares no shared includes
                            pass
                    else:
                        print(f"ERROR: dependency directory {dependency} is not found, it will be skipped")
        # prepare directories
        source_files = get_all_files(directory, extensions=(".cpp", ".c"))
        preprocessed_dir = os.path.abspath(os.path.join(cache_dir, "preprocessed", abi))
        ensure_directory(preprocessed_dir)
        object_dir = os.path.abspath(os.path.join(cache_dir, "object", abi))
        ensure_directory(object_dir)
        # pre-process and compile changes
        import filecmp
        object_files = []
        recompiled_count = 0
        for file in source_files:
            relative_file = relative_path(directory, file)
            sys.stdout.write("preprocessing " + relative_file + " " * 64 + "\r")
            object_file = os.path.join(object_dir, relative_file) + ".o"
            preprocessed_file = os.path.join(preprocessed_dir, relative_file)
            tmp_preprocessed_file = preprocessed_file + ".tmp"
            ensure_file_dir(preprocessed_file)
            ensure_file_dir(object_file)
            object_files.append(object_file)
            # Preprocess to a temp file first; only recompile when the
            # preprocessed output differs from the cached copy (or the
            # cached/object file is missing).
            result = subprocess.call(gcc + ["-E", file, "-o", tmp_preprocessed_file] + includes)
            if result == CODE_OK:
                if not os.path.isfile(preprocessed_file) or not os.path.isfile(object_file) or \
                        not filecmp.cmp(preprocessed_file, tmp_preprocessed_file):
                    if os.path.isfile(preprocessed_file):
                        os.remove(preprocessed_file)
                    os.rename(tmp_preprocessed_file, preprocessed_file)
                    if os.path.isfile(object_file):
                        os.remove(object_file)
                    sys.stdout.write("compiling " + relative_file + " " * 64 + "\n")
                    result = max(result, subprocess.call(gcc + ["-c", preprocessed_file, "-shared", "-o", object_file]))
                    if result != CODE_OK:
                        # Drop the stale object so the next run recompiles it.
                        if os.path.isfile(object_file):
                            os.remove(object_file)
                        overall_result = result
                    else:
                        recompiled_count += 1
            else:
                if os.path.isfile(object_file):
                    os.remove(object_file)
                overall_result = result
        print(" " * 128)
        if overall_result != CODE_OK:
            print("failed to compile", overall_result)
            return overall_result
        else:
            print(f"recompiled {recompiled_count}/{len(object_files)} files with result {overall_result}")
        # Link all object files for this ABI into the target .so.
        ensure_file_dir(targets[abi])
        command = []
        command += gcc
        command += object_files
        command.append("-shared")
        command.append("-Wl,-soname=" + soname)
        command.append("-o")
        command.append(targets[abi])
        command += includes
        command += dependencies
        print("linking object files...")
        result = subprocess.call(command)
        if result == CODE_OK:
            print("build successful")
        else:
            print("linker failed with result code", result)
            overall_result = result
            return overall_result
    return overall_result
def import_build_config(make_file, source, destination):
    """Import a legacy build.config from *source* into the new project layout.

    Reads build.config, translates its resources/compile/buildDirs sections
    into `make_file["resources"]` and `make_file["sources"]`, and copies the
    referenced assets, libraries and source folders into *destination*.
    Top-level folders that were consumed are recorded in the global
    `root_files` list so they are not copied again as plain assets.
    """
    global root_files
    root_files.append("build.config")
    build_config = os.path.join(source, "build.config")
    with open(build_config, "r", encoding="utf-8") as config_file:
        config_obj = json.loads(config_file.read())
        config = BaseConfig(config_obj)
        make_file["global"]["api"] = config.get_value("defaultConfig.api", "CoreEngine")
        src_dir = os.path.join(destination, "src")
        # clear assets folder
        assets_dir = os.path.join(src_dir, "assets")
        clear_directory(assets_dir)
        os.makedirs(assets_dir)
        # some pre-defined resource folders
        resources = [{
            "path": "src/assets/resource_packs/*",
            "type": "minecraft_resource_pack"
        }, {
            "path": "src/assets/behavior_packs/*",
            "type": "minecraft_behavior_pack"
        }]
        os.makedirs(os.path.join(assets_dir, "resource_packs"))
        os.makedirs(os.path.join(assets_dir, "behavior_packs"))
        # import assets
        for res_dir in config.get_filtered_list("resources", "resourceType", ("resource", "gui")):
            # Legacy "resource" type maps to the new "resource_directory".
            if res_dir["resourceType"] == "resource":
                res_dir["resourceType"] = "resource_directory"
            path_stripped = res_dir["path"].strip('/')
            path_parts = path_stripped.split('/')
            path = os.path.join(*path_parts)
            copy_directory(os.path.join(source, path), os.path.join(assets_dir, path), True)
            resources.append({
                "path": "src/assets/" + path_stripped,
                "type": res_dir["resourceType"]
            })
            root_files.append(path_parts[0])
        make_file["resources"] = resources
        # clear libraries folder and copy libraries from the old project
        libs_dir = os.path.join(destination, "src", "lib")
        clear_directory(libs_dir)
        clear_directory(os.path.join(destination, "src", "dev"))
        os.makedirs(libs_dir)
        old_libs = config.get_value("defaultConfig.libraryDir", "lib").strip('/')
        old_libs_parts = old_libs.split('/')
        old_libs_dir = os.path.join(source, *old_libs_parts)
        if os.path.isdir(old_libs_dir):
            root_files.append(old_libs_parts[0])
            copy_directory(old_libs_dir, libs_dir)
        # some pre-defined source folders
        sources = [{
            "source": "src/lib/*",
            "type": "library",
            "language": "javascript"
        }, {
            "source": "src/preloader/*",
            "type": "preloader",
            "language": "javascript"
        }]
        ensure_directory(os.path.join(src_dir, "preloader"))
        # import sources
        for source_dir in config.get_filtered_list("compile", "sourceType", ("mod", "launcher")):
            # Legacy "mod" source type maps to the new "main".
            if source_dir["sourceType"] == "mod":
                source_dir["sourceType"] = "main"
            sourceObj = {
                "type": source_dir["sourceType"],
                "language": "javascript"
            }
            source_parts = source_dir["path"].split('/')
            root_files.append(source_parts[0])
            # If the old project declared a build directory for this source,
            # copy the whole directory; otherwise copy the single file.
            build_dirs = config.get_filtered_list("buildDirs", "targetSource", (source_dir["path"]))
            if (len(build_dirs) > 0):
                old_build_path = build_dirs[0]["dir"].strip("/")
                old_path_parts = old_build_path.split('/')
                sourceObj["source"] = "src/" + old_build_path
                sourceObj["target"] = source_dir["path"]
                root_files.append(old_path_parts[0])
                copy_directory(os.path.join(source, *old_path_parts), os.path.join(src_dir, *old_path_parts), True)
            else:
                sourceObj["source"] = "src/" + source_dir["path"]
                copy_file(os.path.join(source, *source_parts), os.path.join(src_dir, *source_parts))
            sources.append(sourceObj)
        make_file["sources"] = sources
        return
    # NOTE(review): effectively unreachable — a failing open() above raises
    # before this line; kept for parity with the original code.
    exit("unable to read build.config")
def setup_solver(params):
    """Build the full forward-modelling directory tree and compile SPECFEM.

    Creates the mesh and OPTIMIZATION directory trees under
    params['forward_run_dir'], copies LASIF-generated input files for every
    event, compiles SPECFEM twice (once with params['compiler_suite'], once
    with 'cray.tomo' for the vectorized smoother), and distributes binaries,
    Par_file, submission scripts and topography data.

    Expects params to provide: forward_run_dir, forward_stage_dir,
    event_list, lasif_path, iteration_name, specfem_root, compiler_suite,
    project_name. Side effect: changes the process CWD to specfem_root.
    """
    # Setup local parameters.
    forward_run_dir = params['forward_run_dir']
    forward_stage_dir = params['forward_stage_dir']
    event_list = params['event_list']
    lasif_path = params['lasif_path']
    iteration_name = params['iteration_name']
    specfem_root = params['specfem_root']
    compiler_suite = params['compiler_suite']
    project_name = params['project_name']
    # Set up the mesh directory.
    _setup_dir_tree('mesh', forward_run_dir)
    # Set up the optimization directory.
    optimization_base = os.path.join(forward_run_dir, 'OPTIMIZATION')
    utils.mkdir_p(optimization_base)
    utils.mkdir_p(os.path.join(optimization_base, 'bin'))
    utils.mkdir_p(os.path.join(optimization_base, 'PROCESSED_KERNELS'))
    utils.mkdir_p(os.path.join(optimization_base, 'GRADIENT_INFO'))
    utils.mkdir_p(os.path.join(optimization_base, 'LOGS'))
    utils.mkdir_p(os.path.join(optimization_base, 'DATA'))
    utils.mkdir_p(os.path.join(optimization_base, 'VTK_FILES'))
    # Create the forward modelling directories. Also copy relevant parameter
    # files from the LASIF project. _copy_input_files also copies the input
    # files to the specfem_root directory if mesh == True.
    utils.print_ylw("Creating forward modelling directories...")
    # Only the first event carries mesh=True, so the solver base and mesh
    # directories are seeded exactly once.
    mesh = True
    for i, event in enumerate(event_list):
        _setup_dir_tree(event, forward_run_dir)
        _copy_input_files(event, forward_run_dir, lasif_path, iteration_name,
                          specfem_root, mesh=mesh)
        mesh = False
    # Copy the files in SUBMISSION to the specfem root directory.
    par_file = os.path.join(lasif_path, 'SUBMISSION', project_name,
                            'Par_file')
    dest = os.path.join(specfem_root, 'DATA')
    utils.safe_copy(par_file, dest)
    # Change to specfem root directory and compile.
    utils.print_ylw("Compiling...")
    os.chdir(specfem_root)
    with open('compilation_log.txt', 'w') as output:
        proc = subprocess.Popen(['./mk_daint.sh', compiler_suite, 'adjoint'],
                                stdout=output, stderr=output)
        proc.communicate()
        proc.wait()
    # Distribute binaries and Par_file to directories.
    utils.print_ylw('Copying compiled binaries...')
    bin_directory = os.path.join('./bin')
    opt_bin_directory = os.path.join(optimization_base, 'bin')
    opt_dat_directory = os.path.join(optimization_base, 'DATA')
    utils.copy_directory(bin_directory, opt_bin_directory)
    for event in os.listdir(forward_run_dir):
        event_bin = os.path.join(forward_run_dir, event, 'bin')
        event_dat = os.path.join(forward_run_dir, event, 'DATA')
        compile_par = os.path.join(specfem_root, 'DATA', 'Par_file')
        utils.safe_copy(compile_par, event_dat)
        utils.copy_directory(bin_directory, event_bin,
                             only=['xspecfem3D', 'xmeshfem3D'])
    # Also copy to the optimization directory. Recompile with vectorized cray
    # compiler.
    utils.print_ylw("Recompiling for vectorized smoother CRAY smoother...")
    with open('compilation_log_tomo.txt', 'w') as output:
        proc = subprocess.Popen(['./mk_daint.sh', 'cray.tomo', 'adjoint'],
                                stdout=output, stderr=output)
        proc.communicate()
        proc.wait()
    utils.copy_directory(bin_directory, opt_bin_directory)
    compile_par = os.path.join(specfem_root, 'DATA', 'Par_file')
    utils.safe_copy(compile_par, opt_dat_directory)
    # Copy jobarray script to base directory.
    utils.print_ylw('Copying jobarray sbatch script...')
    source = os.path.join(lasif_path, 'SUBMISSION', project_name,
                          'jobArray_solver_daint.sbatch')
    utils.safe_copy(source, forward_stage_dir)
    utils.mkdir_p(os.path.join(forward_stage_dir, 'logs'))
    # Copy mesh submission script.
    source = os.path.join(lasif_path, 'SUBMISSION', project_name,
                          'job_mesher_daint.sbatch')
    dest = os.path.join(forward_run_dir, 'mesh')
    utils.safe_copy(source, dest)
    # Copy topography information to mesh directory.
    utils.print_ylw('Copying topography information...')
    master_topo_path = os.path.join(specfem_root, 'DATA', 'topo_bathy')
    mesh_topo_path = os.path.join(forward_run_dir, 'mesh', 'DATA',
                                  'topo_bathy')
    utils.mkdir_p(mesh_topo_path)
    utils.copy_directory(master_topo_path, mesh_topo_path)
    utils.print_blu('Done.')