def help():
    """print 'clean' help"""
    # assemble the usage lines, then emit one colorized message
    usage = ("fips clean\n"
             "fips clean all\n"
             "fips clean [config]\n")
    desc = " clean generated build files for config"
    log.info(log.YELLOW + usage + log.DEF + desc)
def help():
    """print 'gdb' help"""
    usage = ("fips gdb [-- args]\n"
             "fips gdb [target] [-- args]\n"
             "fips gdb [target] [config] [-- args]\n")
    desc = " debug a single target in current or named config"
    log.info(log.YELLOW + usage + log.DEF + desc)
def help():
    """print 'valgrind' help"""
    usage = ("fips valgrind\n"
             "fips valgrind [target]\n"
             "fips valgrind [target] [config]\n")
    desc = " debug a single target in current or named config"
    log.info(log.YELLOW + usage + log.DEF + desc)
def check_local_changes(fips_dir, proj_dir):
    """this is a variation of check_imports which just checks
    for local (uncommitted or unpushed) changes.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns: True if checks were valid
    """
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_checked = 0
    for imp_proj_name, imp_info in imported_projects.items():
        imp_proj_dir = imp_info['proj_dir']
        # the top-level project itself is never git-checked
        if imp_proj_dir == proj_dir:
            continue
        num_checked += 1
        log.info("checking '{}':".format(imp_proj_name))
        if not os.path.isdir(imp_proj_dir):
            log.warn(" '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
        elif git.has_local_changes(imp_proj_dir):
            log.warn(" '{}' has local changes (uncommitted and/or unpushed)".format(imp_proj_dir))
        else:
            log.colored(log.GREEN, ' no local changes')
    if success and num_checked == 0:
        log.info(' none')
def help():
    """print config help"""
    usage = ("fips config\n"
             "fips config [config]\n")
    desc = (" configure the current or named build config\n"
            " (runs ccmake or cmake-gui)")
    log.info(log.YELLOW + usage + log.DEF + desc)
def help():
    """print help text for init verb"""
    usage = ("fips setup emscripten\n"
             "fips setup android\n")
    log.info(log.YELLOW + usage + log.DEF + " setup cross-platform SDK")
def write_git_ignore(proj_dir, entries):
    """modify or create the .gitignore file with fips-specific entries.
    fips entries will go into a special section marked with:
    #>fips #<fips

    :param proj_dir: absolute project directory
    :param entries: array of fips .gitignore strings
    """
    path = proj_dir + '/.gitignore'
    out_lines = []
    if os.path.isfile(path):
        # .gitignore already exists, read into lines array,
        # but drop everything between #>fips and #<fips
        with open(path, 'r') as f:
            in_lines = f.readlines()
        copy_line = True
        for l in in_lines:
            if '#>fips' in l:
                copy_line = False
            if copy_line:
                out_lines.append(l)
            if '#<fips' in l:
                copy_line = True
    # append the fips .gitignore entries
    out_lines.append('#>fips\n')
    out_lines.append('# this area is managed by fips, do not edit\n')
    # BUGFIX: the original used out_lines.extend(...) with a string
    # argument, which appended each CHARACTER as a separate list element
    # (only harmless because writelines() concatenates); append the
    # joined block as one element instead
    out_lines.append('\n'.join(entries) + '\n')
    out_lines.append('#<fips\n')
    # write back .gitignore file
    with open(path, 'w') as f:
        f.writelines(out_lines)
    log.info("wrote '{}'".format(path))
def copy_template_file(fips_dir, proj_dir, filename, values, silent=False):
    """copy a template file from fips/templates to the project
    directory and replace template values (e.g. the project name),
    ask for user permission if files exist

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param filename: filename to copy from fips/templates
    :param values: template key/value dictionary
    :param silent: if True, overwrite existing file and don't print status
    :returns: True file overwritten, False on not overwritten
    """
    src_path = fips_dir + '/templates/' + filename
    dst_path = proj_dir + '/' + filename
    if not os.path.isfile(src_path):
        # NOTE(review): execution would fall through to the open() below
        # if log.error() returns -- presumably log.error() terminates the
        # process; confirm.
        log.error("template src file '{}' doesn't exist".format(src_path))
    if not silent:
        # destination exists: ask the user before overwriting
        if os.path.isfile(dst_path):
            if not util.confirm("overwrite '{}'?".format(dst_path)):
                log.info("skipping '{}'".format(dst_path))
                return False
    # read the template, substitute $key placeholders, write the result
    content = None
    with open(src_path, 'r') as f:
        content = f.read()
    content = Template(content).substitute(values)
    with open(dst_path, 'w') as f:
        f.write(content)
    if not silent:
        log.info("wrote '{}'".format(dst_path))
    return True
def help():
    """print 'make' help"""
    usage = ("fips make\n"
             "fips make [target]\n"
             "fips make [target] [config]\n")
    desc = " build a single target in current or named config"
    log.info(log.YELLOW + usage + log.DEF + desc)
def check_imports(fips_dir, proj_dir):
    """do various checks on the imports of a project

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns: True if checks were valid
    """
    # check whether any imported projects are in sync with the remote git repo
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name in imported_projects:
        imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
        # the top-level project directory itself is never git-checked
        if imp_proj_dir == proj_dir:
            continue
        num_imports += 1
        log.info("git status of '{}':".format(imp_proj_name))
        if not os.path.isdir(imp_proj_dir):
            log.warn(" '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
        elif git.check_out_of_sync(imp_proj_dir):
            log.warn(" '{}' is out of sync with remote git repo".format(imp_proj_dir))
        else:
            log.colored(log.GREEN, ' uptodate')
    if success and num_imports == 0:
        log.info(' none')
    # gather imports, this will dump warnings
    gather_imports(fips_dir, proj_dir)
def help():
    """print run help"""
    usage = ("fips run [-- args]\n"
             "fips run [target] [-- args]\n"
             "fips run [target] [config] [-- args]\n")
    desc = " run a build target for current or named config"
    log.info(log.YELLOW + usage + log.DEF + desc)
def help():
    """print 'markdeep' help"""
    usage = ("fips markdeep build [proj]\n"
             "fips markdeep view [proj]\n")
    desc = (" Generate or view Markdeep documentation webpage.\n"
            " Parses all *.h files in a project, searches for special\n"
            " /*# #*/ comment blocks, and extracts them into Markdeep\n"
            " HTML files.")
    log.info(log.YELLOW + usage + log.DEF + desc)
def list_configs(fips_dir, proj_dir):
    """list available configs"""
    log.colored(log.YELLOW, '=== configs:')
    cfgs_by_folder = config.list(fips_dir, proj_dir, '*')
    for folder, cfg_names in cfgs_by_folder.items():
        log.colored(log.BLUE, 'from {}:'.format(folder))
        for cfg_name in cfg_names:
            log.info(' {}'.format(cfg_name))
def write_workspace_settings(fips_dir, proj_dir, cfg):
    '''write the CLion *.xml files required to open the project'''
    log.info("=== writing JetBrains CLion config files...")
    idea_dir = proj_dir + '/.idea'
    # make sure the .idea directory exists before writing into it
    if not os.path.isdir(idea_dir):
        os.makedirs(idea_dir)
    write_clion_module_files(fips_dir, proj_dir, cfg)
    write_clion_workspace_file(fips_dir, proj_dir, cfg)
def help():
    """print 'set' help"""
    usage = ("fips set config [config-name]\n"
             "fips set target [target-name]\n"
             "fips set jobs [num-build-jobs]\n"
             "fips set ccache [on|off]\n")
    desc = (" config: set active build config\n"
            " target: set active run target\n"
            " jobs: set number of parallel build jobs\n"
            " ccache: enable/disable using ccache")
    log.info(log.YELLOW + usage + log.DEF + desc)
def help():
    """print help for diag verb"""
    usage = ("fips diag\n"
             "fips diag all\n"
             "fips diag fips\n"
             "fips diag tools\n"
             "fips diag configs\n"
             "fips diag imports\n")
    log.info(log.YELLOW + usage + log.DEF + " run diagnostics and check for errors")
def list_settings(proj_dir):
    """list settings file content"""
    log.colored(log.YELLOW, '=== settings:')
    # guard clause: nothing to show outside a valid project directory
    if not util.is_valid_project_dir(proj_dir):
        log.info(' currently not in a valid project directory')
        return
    for key in ['config', 'target', 'jobs', 'ccache']:
        value = settings.get(proj_dir, key)
        if isinstance(value, bool):
            # render booleans the same way 'fips set ccache' accepts them
            value = 'on' if value else 'off'
        default = ' (default value)' if value == settings.get_default(key) else ''
        log.info(' {}{}:{} {}{}'.format(log.BLUE, key, log.DEF, value, default))
def check_configs(fips_dir, proj_dir):
    """find configs and check if they are valid

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    """
    log.colored(log.YELLOW, '=== configs:')
    # NOTE: removed the unused local 'dirs = [ fips_dir ]' (dead code)
    configs = config.load(fips_dir, proj_dir, '*')
    for cfg in configs:
        log.colored(log.BLUE, cfg['name'])
        valid, errors = config.check_config_valid(fips_dir, cfg)
        if valid:
            log.colored(log.GREEN, ' ok')
        else:
            for error in errors:
                log.info(' {}'.format(error))
def _rec_fetch_imports(fips_dir, proj_dir, handled):
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled:
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir):
                    # directory did not exist, do a fresh git clone;
                    # 'dep' is rebound from the key to the import descriptor dict here
                    dep = imports[dep_proj_name]
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git_commit:
                        if 'depth' in dep:
                            # when using rev, we may not want depth because the revision may not be reachable
                            log.colored(log.YELLOW, "=== 'depth' was ignored because parameter 'rev' is specified.")
                            dep['depth'] = None
                    # shallow-clone depth only applies when no specific revision is pinned
                    git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep['depth']
                    git_url = dep['git']
                    git_branch = dep['branch']
                    if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir):
                        if git_commit:
                            # a pinned revision was requested: check it out after the clone
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                    else:
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else:
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True
                # recurse into the dependency's own imports
                if dep_ok:
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a settings.json for CMakeTools plugin settings'''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    settings = {
        'cmake.buildDirectory': build_dir,
        'cmake.configureSettings': {
            # NOTE(review): the trailing ':' in this key looks suspicious
            # (a cmake cache entry would normally be 'FIPS_CONFIG' or
            # 'FIPS_CONFIG:STRING') -- confirm against the CMakeTools
            # configureSettings schema before changing.
            'FIPS_CONFIG:': cfg['name']
        }
    }
    settings_path = vscode_dir + '/settings.json'
    log.info(' writing {}'.format(settings_path))
    with open(settings_path, 'w') as f:
        json.dump(settings, f, indent=1, separators=(',',':'))
def remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg):
    '''walks through the dependencies, and deletes the .vscode/tasks.json
    and .vscode/launch.json files
    '''
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        # both generated json files get the same treatment
        for json_name in ['tasks.json', 'launch.json']:
            json_path = dep_proj_dir + '/.vscode/' + json_name
            if os.path.exists(json_path):
                log.info(' deleting {}'.format(json_path))
                os.remove(json_path)
def confirm(question):
    """ask user to confirm (y/N)

    :param question: the question to confirm
    :return: True: user pressed 'y', False: user pressed 'n'
    """
    validAnswers = {"": False, "yes": True, "ye": True, "y": True, "no": False, "n": False}
    while True:
        sys.stdout.write(question + " [y/N]: ")
        # BUGFIX: raw_input() only exists on Python 2 and raises a
        # NameError on Python 3; dispatch on the interpreter version
        # (same pattern as the other confirm() implementation in fips)
        if sys.version_info[0] >= 3:
            choice = str(input()).lower()
        else:
            choice = raw_input().lower()
        if choice in validAnswers:
            return validAnswers[choice]
        else:
            log.info("please respond with 'y', 'yes', 'n' or 'no'")
def help():
    """print help text for list verb"""
    usage = ("fips list\n"
             "fips list all\n"
             "fips list configs\n"
             "fips list build-tools\n"
             "fips list build-types\n"
             "fips list generators\n"
             "fips list registry\n"
             "fips list settings\n"
             "fips list targets [config]\n")
    log.info(log.YELLOW + usage + log.DEF + " list available configs, build-tools, etc...")
def run(fips_dir, proj_dir, args):
    """run the get verb"""
    if not args:
        log.error("expected one arg [git-url]")
    else:
        name = args[0]
        # check project registry to resolve git url
        if registry.exists(fips_dir, name):
            url = registry.lookup_url(fips_dir, name)
            log.info("registry lookup: {} => {}".format(name, url))
        else:
            url = name
            log.info("'{}' not in fips registry, trying as git url".format(url))
        project.clone(fips_dir, url)
def unset(proj_dir, key):
    """delete a settings value from the project-local settings file

    :param proj_dir: absolute project directory
    :param key: settings key
    """
    util.ensure_valid_project_dir(proj_dir)
    cur_settings = load(proj_dir)
    # only rewrite the settings file if the key was actually present
    if key in cur_settings:
        del cur_settings[key]
        save(proj_dir, cur_settings)
    proj_name = util.get_project_name_from_dir(proj_dir)
    log.info("'{}' unset in project '{}'".format(key, proj_name))
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file'''
    vscode_dir = proj_dir + '/.vscode'
    # dependencies go in reverse order, so that the main project is first
    folders = [{'path': util.get_project_dir(fips_dir, name)} for name in reversed(impex)]
    ws = {'folders': folders, 'settings': {}}
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    log.info(' writing {}'.format(ws_path))
    with open(ws_path, 'w') as f:
        json.dump(ws, f, indent=1, separators=(',',':'))
def check_exists(fips_dir):
    """test if cmake is in the path and has the required version

    :returns: True if cmake found and is the required version
    """
    try:
        out = subprocess.check_output(['cmake', '--version'], universal_newlines=True)
        # third whitespace-token of 'cmake version X.Y.Z' is the version
        ver = out.split()[2].split('.')
        too_old = int(ver[0]) < major or (int(ver[0]) == major and int(ver[1]) < minor)
        if too_old:
            log.info('{}NOTE{}: cmake must be at least version {}.{} (found: {}.{}.{})'.format(
                log.RED, log.DEF, major, minor, ver[0],ver[1],ver[2]))
            return False
        return True
    except (OSError, subprocess.CalledProcessError):
        # cmake binary missing or returned a non-zero exit code
        return False
def run(fips_path, proj_path, args):
    """fips main entry: dispatch command line args to a verb module"""
    fips_path = util.fix_path(fips_path)
    proj_path = util.fix_path(proj_path)
    verb.import_verbs(fips_path, proj_path)
    # guard clause: no verb given at all
    if len(args) < 2:
        print("run 'fips help' for more info")
        return
    verb_name, verb_args = args[1], args[2:]
    if verb_name in ['help', '--help', '-help']:
        show_help(verb_args)
    elif verb_name == '--version':
        log.info(VERSION)
    elif verb_name in verb.verbs:
        verb.verbs[verb_name].run(fips_path, proj_path, verb_args)
    else:
        log.error("unknown verb '{}'".format(verb_name))
def loader():
    """discover and import all modules under the 'mod' package, then run
    the registered onload callbacks.

    Fixes over the original:
    - __import__(..., level=-1) is Python-2-only (level must be >= 0 on
      Python 3); level=0 requests an absolute import, which is what
      "mod.<name>" is.
    - bare 'except:' clauses narrowed to 'except Exception:' so that
      SystemExit/KeyboardInterrupt still propagate.
    """
    modulenames = []
    for node in listdir("mod"):
        path = join("mod", node)
        if isfile(path):
            name, ext = splitext(node)
            if ext != ".py":
                continue
            modulename = name
        elif isdir(path):
            modulename = node
        else:
            continue
        # skip the loader itself and the package marker
        if modulename == "loader" or modulename == "__init__":
            continue
        modulenames += [modulename]
    # Sort names to make errors more predictable and output the same
    modulenames.sort()
    log.info("Found modules: %s", ", ".join("mod." + x for x in modulenames))
    modules = []
    log.debug("Loading modules...")
    for name in modulenames:
        log.debug("Loading mod.%s", name)
        try:
            # level=0: absolute import (the original's -1 is invalid on py3)
            module = __import__("mod." + name, globals(), locals(), [], 0).__getattribute__(name)
        except Exception:
            log.exception("Error loading module mod.%s", name)
            exit()
        modules.append(module)
    log.debug("Running onloads")
    for func in onloads:
        try:
            func()
        except Exception:
            log.exception("Error when running onload %s.%s", func.__name__, func.__module__)
            exit()
def run(fips_path, proj_path, args):
    """fips main entry: dispatch the command line to a verb module"""
    fips_path = util.fix_path(fips_path)
    proj_path = util.fix_path(proj_path)
    # a space in the project path breaks generated build files
    if ' ' in proj_path:
        log.warn("whitespace in project path detected, fips will not work correctly")
    verb.import_verbs(fips_path, proj_path)
    if len(args) < 2:
        print("run 'fips help' for more info")
        return
    verb_name = args[1]
    verb_args = args[2:]
    if verb_name in ['help', '--help', '-help']:
        show_help(verb_args)
    elif verb_name == '--version':
        log.info(VERSION)
    elif verb_name in verb.verbs:
        verb.verbs[verb_name].run(fips_path, proj_path, verb_args)
    else:
        log.error("unknown verb '{}'".format(verb_name))
def check_imports(fips_dir, proj_dir):
    """do various checks on the imports of a project

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns: True if checks were valid
    """
    # check whether any imported projects are in sync with the remote git repo
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name, imp_info in imported_projects.items():
        imp_proj_dir = imp_info['proj_dir']
        # the top-level project directory itself is never git-checked
        if imp_proj_dir == proj_dir:
            continue
        num_imports += 1
        log.info("git status of '{}':".format(imp_proj_name))
        if not os.path.isdir(imp_proj_dir):
            log.warn(
                " '{}' does not exist, please run 'fips fetch'".format(
                    imp_proj_dir))
        elif not os.path.isdir("{}/.git".format(imp_proj_dir)):
            # present on disk but not under git control: nothing to sync
            log.colored(
                log.GREEN,
                " '{}' is not a git repository".format(imp_proj_dir))
        elif git.check_out_of_sync(imp_proj_dir):
            log.warn(" '{}' is out of sync with remote git repo".
                     format(imp_proj_dir))
        else:
            log.colored(log.GREEN, ' uptodate')
    if success and num_imports == 0:
        log.info(' none')
    # gather imports, this will dump warnings
    gather_imports(fips_dir, proj_dir)
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file'''
    vscode_dir = proj_dir + '/.vscode'
    ws = {'folders': [], 'settings': {}}
    excludes = cfg['vscode_exclude_from_workspace'] if 'vscode_exclude_from_workspace' in cfg else []
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        # skip dependencies the build config explicitly excludes
        if dep_proj_name not in excludes:
            ws['folders'].append({'path': dep_proj_dir})
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    log.info(' writing {}'.format(ws_path))
    with open(ws_path, 'w') as f:
        json.dump(ws, f, indent=1, separators=(',',':'))
def read_cmake_defines(fips_dir, proj_dir, cfg):
    '''reads the fips_defines.yml file which was created during
    "fips gen" and returns map of unique top-level defines.
    '''
    result = []
    success, defs = util.get_cfg_defines_by_target(fips_dir, proj_dir, cfg)
    if success:
        log.info(' defines from cmake:')
        # flatten per-target define lists into one de-duplicated list
        for val in defs.values():
            for define in (val or []):
                if define not in result:
                    result.append(define)
                    log.info(' {}'.format(define))
    if 'vscode_additional_defines' in cfg:
        log.info(' defines from build config (vscode_additional_defines):')
        for define in cfg['vscode_additional_defines']:
            if define not in result:
                result.append(define)
                log.info(' {}'.format(define))
    else:
        log.info(' no additional defines from build config (vscode_additional_defines)')
    return result
def cleanup(fips_dir, proj_dir):
    '''goes through all dependencies and deletes the .vscode directory'''
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import project directories, please run 'fips fetch'")
    # first pass: show the user what would be deleted
    log.info(log.RED + 'Please confirm to delete the following directories:' + log.DEF)
    for dep_proj_name in reversed(impex):
        candidate = util.get_project_dir(fips_dir, dep_proj_name) + '/.vscode/'
        if os.path.isdir(candidate):
            log.info(' {}'.format(candidate))
    # second pass: actually delete, but only after confirmation
    if util.confirm(log.RED + 'Delete those directories?' + log.DEF):
        for dep_proj_name in reversed(impex):
            candidate = util.get_project_dir(fips_dir, dep_proj_name) + '/.vscode/'
            if os.path.isdir(candidate):
                log.info(' deleting {}'.format(candidate))
                shutil.rmtree(candidate)
        log.info('Done.')
    else:
        log.info('Nothing deleted, done.')
def write_workspace_settings(fips_dir, proj_dir, cfg):
    '''write the VSCode launch.json, tasks.json and
    c_cpp_properties.json files from cmake output files
    '''
    log.info("=== writing Visual Studio Code config files...")
    vscode_dir = proj_dir + '/.vscode'
    if not os.path.isdir(vscode_dir):
        os.makedirs(vscode_dir)
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import project directories, please run 'fips fetch'")
    # detect whether the CMakeTools extension is installed
    has_cmake_tools = any('vector-of-bool.cmake-tools' in ext for ext in list_extensions())
    remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg)
    write_tasks_json(fips_dir, proj_dir, vscode_dir, cfg)
    write_launch_json(fips_dir, proj_dir, vscode_dir, cfg)
    if has_cmake_tools:
        write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg)
    else:
        write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg)
    write_code_workspace_file(fips_dir, proj_dir, impex, cfg)
def gen_project(fips_dir, proj_dir, cfg, force):
    """private: generate build files for one config"""
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    # cmake defines handed to the generator run
    defines = {}
    defines['FIPS_USE_CCACHE'] = 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
    defines['FIPS_AUTO_IMPORT'] = 'OFF' if dep.get_policy(proj_dir, 'no_auto_import') else 'ON'
    if cfg['build_tool'] == 'vscode_cmake':
        # VSCode needs a compile_commands.json from cmake
        defines['CMAKE_EXPORT_COMPILE_COMMANDS'] = 'ON'
    if cfg['platform'] == 'ios':
        # optional iOS signing team id from the project settings
        ios_team_id = settings.get(proj_dir, 'iosteam')
        if ios_team_id:
            defines['FIPS_IOS_TEAMID'] = ios_team_id
    # (re)generate when forced, or when no cmake cache exists yet
    do_it = force
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    if not os.path.isfile(build_dir + '/CMakeCache.txt'):
        do_it = True
    if do_it:
        # if Ninja build tool and on Windows, need to copy
        # the precompiled ninja.exe to the build dir
        log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
        log.info("config file: {}".format(cfg['path']))
        toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
        if toolchain_path:
            log.info("Using Toolchain File: {}".format(toolchain_path))
        if cfg['build_tool'] == 'ninja':
            ninja.prepare_ninja_tool(fips_dir, build_dir)
        cmake_result = cmake.run_gen(cfg, fips_dir, proj_dir, build_dir, toolchain_path, defines)
        # build-tool-specific IDE workspace files go last
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.write_workspace_settings(fips_dir, proj_dir, cfg)
        if cfg['build_tool'] == 'clion':
            clion.write_workspace_settings(fips_dir, proj_dir, cfg)
        return cmake_result
    else:
        return True
def gen_project(fips_dir, proj_dir, cfg, force):
    """private: generate build files for one config"""
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    defines = {'FIPS_USE_CCACHE': 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'}
    do_it = force
    # a freshly created build dir always needs a generator run
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
        do_it = True
    if not do_it:
        return True
    # if Ninja build tool and on Windows, need to copy
    # the precompiled ninja.exe to the build dir
    if cfg['build_tool'] == 'ninja':
        ninja.prepare_ninja_tool(fips_dir, build_dir)
    log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
    toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
    if toolchain_path:
        log.info("Using Toolchain File: {}".format(toolchain_path))
    return cmake.run_gen(cfg, proj_dir, build_dir, toolchain_path, defines)
def clean(fips_dir, proj_dir, cfg_name):
    """clean build files

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute project path
    :param cfg_name: config name (or pattern)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs:
        log.error("No valid configs found for '{}'".format(cfg_name))
        return
    for cfg in configs:
        log.colored(log.YELLOW, "=== clean: {}".format(cfg['name']))
        # both the build dir and the deploy dir get wiped
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg)
        for dir_path in [build_dir, deploy_dir]:
            if os.path.isdir(dir_path):
                shutil.rmtree(dir_path)
                log.info(" deleted '{}'".format(dir_path))
def confirm(question):
    """ask user to confirm (y/N)

    :param question: the question to confirm
    :return: True: user pressed 'y', False: user pressed 'n'
    """
    answers = {
        '': False,
        'yes': True, 'ye': True, 'y': True,
        'no': False, 'n': False
    }
    while True:
        sys.stdout.write(question + ' [y/N]: ')
        # raw_input() only exists on Python 2, input() is the py3 form
        raw = input() if sys.version_info[0] >= 3 else raw_input()
        choice = str(raw).lower()
        if choice in answers:
            return answers[choice]
        log.info("please respond with 'y', 'yes', 'n' or 'no'")
def setup(fips_dir, proj_dir):
    """setup the emscripten SDK from scratch"""
    log.colored(log.YELLOW, '=== setup emscripten SDK:')
    ensure_sdk_dirs(fips_dir)
    # download SDK archive (skipped if already present)
    if not os.path.isfile(get_archive_path(fips_dir)):
        # NOTE(review): urllib.urlretrieve is Python-2-only; on Python 3
        # this is urllib.request.urlretrieve -- confirm target interpreter
        log.info("downloading '{}'...".format(get_archive_name()))
        urllib.urlretrieve(get_sdk_url(), get_archive_path(fips_dir), util.url_download_hook)
    else:
        log.info("'{}' already exists".format(get_archive_name()))
    # uncompress SDK archive
    log.info("uncompressing '{}'...".format(get_archive_name()))
    uncompress(get_archive_path(fips_dir), get_sdk_dir(fips_dir), 'emsdk_portable')
    # setup SDK
    log.info("setup emscripten SDK...")
    finish(get_emsdk_dir(fips_dir))
    log.colored(log.GREEN, "done.")
def setup(fips_dir, proj_dir):
    """main setup function"""
    log.colored(log.YELLOW, '=== setup NaCl SDK:')
    ensure_sdk_dirs(fips_dir)
    # download SDK archive (skipped if already present)
    if not os.path.isfile(get_archive_path(fips_dir)):
        # NOTE(review): urllib.urlretrieve is Python-2-only; on Python 3
        # this is urllib.request.urlretrieve -- confirm target interpreter
        log.info("downloading...")
        urllib.urlretrieve(get_sdk_url(), get_archive_path(fips_dir), util.url_download_hook)
    else:
        log.info("'nacl_sdk.zip' already exists")
    # uncompress SDK archive
    log.info("unpacking...")
    uncompress(fips_dir, get_archive_path(fips_dir))
    # setup SDK
    log.info("setup NaCl SDK...")
    update_nacl_sdk(fips_dir)
    log.colored(log.GREEN, "done.")
def list_targets(fips_dir, proj_dir, args):
    """list the build targets of the current project, grouped by
    target type

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: optional config name as first element
    """
    log.colored(log.YELLOW, "=== targets:")
    if util.is_valid_project_dir(proj_dir):
        # get config name (explicit arg wins over the project setting)
        if len(args) == 0:
            cfg_name = settings.get(proj_dir, 'config')
        else:
            cfg_name = args[0]
        log.info('{} config:{} {}'.format(log.BLUE, log.DEF, cfg_name))
        # get the target list
        success, targets = project.get_target_list(fips_dir, proj_dir, cfg_name)
        if success:
            # split targets by type
            # ('type' renamed to 'target_type': it shadowed the builtin)
            for target_type in ['lib', 'module', 'sharedlib', 'app']:
                type_targets = [tgt for tgt in targets if targets[tgt] == target_type]
                if len(type_targets) > 0:
                    log.colored(log.BLUE, ' {}:'.format(target_type))
                    for tgt in type_targets:
                        log.info(' ' + tgt)
        else:
            log.info(" can't fetch project target list, please run 'fips gen' first!")
    else:
        log.info(' currently not in a valid project directory')
def check_out_of_sync(proj_dir):
    """check whether the git repo in proj_dir is out of sync with its
    remote: either uncommitted local files exist, or a local branch's
    revision differs from its tracked remote branch's revision.

    NOTE(review): the original docstring claimed an array of branch
    names is returned; the code actually returns a single bool.

    :param proj_dir: a git repo directory
    :returns: True if the repo is out of sync
    """
    if not check_exists():
        log.error("git not found, please run and fix './fips diag tools'")
        return False
    out_of_sync = False
    # first check whether there are uncommitted changes
    status, status_output = has_uncommitted_files(proj_dir)
    if status:
        out_of_sync = True
        log.warn("'{}' has uncommitted changes:".format(proj_dir))
        log.info(status_output)
    # check whether local and remote branch are out of sync
    branches_out_of_sync = False
    branches = get_branches(proj_dir)
    if not branches:
        log.warn("'{}' no remote branches found".format(proj_dir))
    for local_branch in branches:
        remote_branch = branches[local_branch]
        remote_rev = get_remote_rev(proj_dir, remote_branch)
        # remote_rev can be None if the remote branch doesn't exists,
        # this is not an error
        if remote_rev:
            local_rev = get_local_rev(proj_dir, local_branch)
            if remote_rev != local_rev:
                out_of_sync = True
                if not branches_out_of_sync:
                    # only show this once
                    log.warn("'{}' branches out of sync:".format(proj_dir))
                    branches_out_of_sync = True
                log.info(" {}: {}".format(local_branch, local_rev))
                log.info(" {}: {}".format(remote_branch, remote_rev))
    return out_of_sync
def list_imports(fips_dir, proj_dir):
    """list project imports"""
    log.colored(log.YELLOW, '=== imports:')
    if not util.is_valid_project_dir(proj_dir):
        log.info(' currently not in a valid project directory')
        return
    success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import project directories, please run 'fips fetch'")
    for dep_proj_name in result:
        # top level project is in result, but has no URL set, filter
        # this from the output
        log.colored(log.BLUE, "project '{}' imports:".format(dep_proj_name))
        imports = result[dep_proj_name]['imports']
        if not imports:
            log.info(' nothing')
            continue
        for imp_proj in imports:
            git_url = imports[imp_proj]['git']
            git_branch = imports[imp_proj]['branch']
            log.info(" '{}' from '{}' at branch '{}'".format(imp_proj, git_url, git_branch))
def run(fips_dir, proj_dir, args):
    """run the 'nebula' verb

    NOTE(review): Windows-only -- uses the _winreg module (the Python 2
    name; Python 3 calls it winreg) and the Windows registry under
    HKEY_CURRENT_USER; confirm target interpreter/platform.
    """
    if len(args) > 0:
        noun = args[0]
        if noun == 'set':
            # 'fips nebula set <setting> <value>'
            if len(args) > 2:
                setKey(args[1], args[2])
            else:
                log.error("expected setting and value")
        elif noun == 'get':
            # 'fips nebula get <setting>': print the registry value
            if len(args) > 1:
                key = argToKey(args[1])
                if key != "":
                    reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, base_reg, 0, _winreg.KEY_READ)
                    keyval, regtype = _winreg.QueryValueEx(reg_key, key)
                    _winreg.CloseKey(reg_key)
                    log.info(keyval)
                else:
                    log.error("invalid setting")
            else:
                log.error("expected setting name")
        elif noun == 'cleannidl':
            # delete the generated nidl files of the active config
            proj = util.get_project_name_from_dir(proj_dir)
            cfg = settings.get(proj_dir, 'config')
            path = util.get_build_dir(fips_dir, proj, cfg) + "/nidl"
            # ignore_errors=True: best-effort delete
            shutil.rmtree(path, True)
        else:
            # unknown noun: dump the current registry settings
            try:
                reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, base_reg, 0, _winreg.KEY_READ)
                workval, regtype = _winreg.QueryValueEx(reg_key, "workdir")
                rootval, regtype = _winreg.QueryValueEx(reg_key, "path")
                _winreg.CloseKey(reg_key)
                log.info(log.YELLOW + "Current settings:\n"
                         "Project directory: " + workval + "\n"
                         "Nebula root directory: " + rootval + "\n")
            except WindowsError:
                log.info(log.YELLOW + "No Nebula settings in registry\n")
def deploy_webpage(fips_dir, proj_dir, webpage_dir):
    """assemble the samples webpage: thumbnail gallery, HTML/CSS from
    templates, emscripten build artifacts and asset files.

    NOTE(review): driven by the module-level tables 'items', 'systems'
    and 'asset_dirs' and the helpers to_webp_ext()/cwebp() -- their
    exact shapes are defined elsewhere in this module.
    """
    emsc_deploy_dir = util.get_deploy_dir(fips_dir, 'chips-test', BuildConfig)
    # build the thumbnail gallery
    content = ''
    for item in items:
        # 'test' items are internal and don't get a thumbnail
        if item['type'] == 'test':
            continue
        title = item['title']
        system = item['system']
        url = item['url']
        ui_url = url.replace(".html", "-ui.html")
        image = to_webp_ext(item['img'])
        note = item['note']
        log.info('> adding thumbnail for {}'.format(url))
        content += '<div class="thumb-frame">\n'
        content += ' <div class="thumb-title">{}</div>\n'.format(title)
        # only emit the UI button if the -ui build variant exists
        if os.path.exists(emsc_deploy_dir + '/' + system + '-ui.js'):
            content += '<a class="ui-btn-link" href="{}"><div class="ui-btn">UI</div></a>'.format(ui_url)
        content += ' <a class="image-frame" href="{}"><img class="image" src="{}"></img></a>\n'.format(url, image)
        if note != '':
            content += ' <div class="thumb-footer">{}</div>\n'.format(note)
        content += '</div>\n'
    # populate the html template, and write to the build directory
    with open(proj_dir + '/webpage/index.html', 'r') as f:
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content)
    with open(webpage_dir + '/index.html', 'w') as f:
        f.write(html)
    # and the same with the CSS template
    with open(proj_dir + '/webpage/style.css', 'r') as f:
        templ = Template(f.read())
    css = templ.safe_substitute()
    with open(webpage_dir + '/style.css', 'w') as f:
        f.write(css)
    # copy other required files
    for name in ['favicon.png', 'help.html']:
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/webpage/' + name, webpage_dir + '/' + name)
    # generate emu HTML pages
    for system in systems:
        log.info('> generate emscripten HTML page: {}'.format(system))
        # copy the wasm/js build artifacts next to the generated page
        for ext in ['wasm', 'js']:
            src_path = '{}/{}.{}'.format(emsc_deploy_dir, system, ext)
            if os.path.isfile(src_path):
                shutil.copy(src_path, '{}/'.format(webpage_dir))
        with open(proj_dir + '/webpage/emsc.html', 'r') as f:
            templ = Template(f.read())
        html = templ.safe_substitute(name=system, prog=system)
        with open('{}/{}.html'.format(webpage_dir, system), 'w') as f:
            f.write(html)
    # copy data files and images
    for asset_dir in asset_dirs:
        src_dir = proj_dir + '/webpage/' + asset_dir
        dst_dir = webpage_dir + '/' + asset_dir
        if os.path.isdir(src_dir):
            # recreate the destination dir from scratch
            if os.path.isdir(dst_dir):
                shutil.rmtree(dst_dir)
            os.makedirs(dst_dir)
            for filename in os.listdir(src_dir):
                src_file = src_dir + '/' + filename
                dst_file = dst_dir + '/' + filename
                if filename.endswith('.jpg'):
                    # jpg assets are converted to webp on the fly
                    dst_file = to_webp_ext(dst_file)
                    cwebp(src_file, dst_file)
                else:
                    print('> copy {} => {}'.format(src_file, dst_file))
                    shutil.copyfile(src_file, dst_file)
def help():
    """Print help text for the 'vscode clean' verb."""
    msg = log.YELLOW + "fips vscode clean\n" + log.DEF
    msg += " delete .vscode directories (useful before git operations)"
    log.info(msg)
def help():
    """Print help text for the 'update' verb."""
    usage = (
        "fips update\n"
        "fips update [proj|fips]\n"
    )
    detail = (
        " update external dependencies for current or named project,\n"
        " or update fips itself"
    )
    log.info(log.YELLOW + usage + log.DEF + detail)
def deploy_webpage(fips_dir, proj_dir, webpage_dir) :
    """builds the final webpage under fips-deploy/sokol-webpage

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param webpage_dir: absolute output directory for the generated page
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    wasm_deploy_dir = '{}/fips-deploy/sokol-samples/{}'.format(ws_dir, BuildConfig)
    # create directories
    for platform in ['wasm'] :
        platform_dir = '{}/{}'.format(webpage_dir, platform)
        if not os.path.isdir(platform_dir) :
            os.makedirs(platform_dir)
    # build the thumbnail gallery
    content = ''
    for sample in samples :
        name = sample[0]
        log.info('> adding thumbnail for {}'.format(name))
        url = "wasm/{}-sapp.html".format(name)
        ui_url = "wasm/{}-sapp-ui.html".format(name)
        img_name = name + '.jpg'
        img_path = proj_dir + '/webpage/' + img_name
        if not os.path.exists(img_path):
            img_name = 'dummy.jpg'
            # FIX: the fallback path was missing the '/' separator
            # (was: proj_dir + 'webpage/dummy.jpg')
            img_path = proj_dir + '/webpage/dummy.jpg'
        content += '<div class="thumb">\n'
        content += ' <div class="thumb-title">{}</div>\n'.format(name)
        # only add a UI button if the UI version was actually deployed
        if os.path.exists(wasm_deploy_dir + '/' + name + '-sapp-ui.js'):
            content += '<a class="img-btn-link" href="{}"><div class="img-btn">UI</div></a>'.format(ui_url)
        content += ' <div class="img-frame"><a href="{}"><img class="image" src="{}"></img></a></div>\n'.format(url, img_name)
        content += '</div>\n'
    # populate the html template, and write to the build directory
    with open(proj_dir + '/webpage/index.html', 'r') as f :
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content)
    with open(webpage_dir + '/index.html', 'w') as f :
        f.write(html)
    # copy other required files
    for name in ['dummy.jpg', 'favicon.png'] :
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/webpage/' + name, webpage_dir + '/' + name)
    # generate WebAssembly HTML pages (plain and -ui variants)
    if emscripten.check_exists(fips_dir) :
        for sample in samples :
            name = sample[0]
            source = sample[1]
            log.info('> generate wasm HTML page: {}'.format(name))
            for postfix in ['sapp', 'sapp-ui']:
                for ext in ['wasm', 'js'] :
                    src_path = '{}/{}-{}.{}'.format(wasm_deploy_dir, name, postfix, ext)
                    if os.path.isfile(src_path) :
                        shutil.copy(src_path, '{}/wasm/'.format(webpage_dir))
                with open(proj_dir + '/webpage/wasm.html', 'r') as f :
                    templ = Template(f.read())
                src_url = GitHubSamplesURL + source
                html = templ.safe_substitute(name=name, prog=name+'-'+postfix, source=src_url)
                with open('{}/wasm/{}-{}.html'.format(webpage_dir, name, postfix), 'w') as f :
                    f.write(html)
    # copy the screenshots
    for sample in samples :
        img_name = sample[0] + '.jpg'
        img_path = proj_dir + '/webpage/' + img_name
        if os.path.exists(img_path):
            log.info('> copy screenshot: {}'.format(img_name))
            shutil.copy(img_path, webpage_dir + '/' + img_name)
def deploy_webpage(fips_dir, proj_dir, webpage_dir):
    """Build the final webpage under fips-deploy/chips-webpage.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param webpage_dir: absolute output directory for the generated page
    """
    ws_dir = util.get_workspace_dir(fips_dir)

    # build the thumbnail gallery
    content = ''
    for entry in items:
        title = entry['title']
        system = entry['system']
        url = entry['url']
        image = entry['img']
        note = entry['note']
        log.info('> adding thumbnail for {}'.format(url))
        content += '<div class="thumb">\n'
        content += ' <div class="thumb-title">{}</div>\n'.format(title)
        content += ' <div class="img-frame"><a href="{}"><img class="image" src="{}"></img></a></div>\n'.format(url, image)
        content += ' <div class="thumb-bar">{}</div>\n'.format(note)
        content += '</div>\n'

    # populate the html template, and write to the build directory
    with open(proj_dir + '/webpage/index.html', 'r') as f:
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content)
    with open(webpage_dir + '/index.html', 'w') as f:
        f.write(html)

    # ...and the same for the CSS template
    with open(proj_dir + '/webpage/style.css', 'r') as f:
        templ = Template(f.read())
    css = templ.safe_substitute()
    with open(webpage_dir + '/style.css', 'w') as f:
        f.write(css)

    # copy other required files
    for name in ['emsc.js', 'favicon.png', 'help.html']:
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/webpage/' + name, webpage_dir + '/' + name)

    # generate emu HTML pages (only for items of type 'emu')
    emsc_deploy_dir = '{}/fips-deploy/chips-test/{}'.format(ws_dir, BuildConfig)
    for entry in items:
        if entry['type'] != 'emu':
            continue
        name = os.path.splitext(entry['url'])[0]
        title = entry['title']
        log.info('> generate emscripten HTML page: {}'.format(name))
        for ext in ['wasm', 'js']:
            src_path = '{}/{}.{}'.format(emsc_deploy_dir, name, ext)
            if os.path.isfile(src_path):
                shutil.copy(src_path, '{}/'.format(webpage_dir))
        with open(proj_dir + '/webpage/emsc.html', 'r') as f:
            templ = Template(f.read())
        src_url = GitHubSamplesURL + name
        html = templ.safe_substitute(name=title, prog=name, source=src_url)
        with open('{}/{}.html'.format(webpage_dir, name), 'w') as f:
            f.write(html)

    # copy per-system data files and images
    for system in systems:
        src_dir = proj_dir + '/webpage/' + system
        if os.path.exists(src_dir):
            dst_dir = webpage_dir + '/' + system
            if os.path.isdir(dst_dir):
                shutil.rmtree(dst_dir)
            shutil.copytree(src_dir, dst_dir)
def help():
    """Print help text for the 'webpage' verb."""
    msg = log.YELLOW
    msg += 'fips webpage build\n'
    msg += 'fips webpage rebuild\n'
    msg += 'fips webpage serve\n'
    msg += log.DEF
    msg += ' build learnopengl examples webpage'
    log.info(msg)
def help():
    """Print help text for the 'wasmtests' verb."""
    msg = (log.YELLOW +
           'fips wasmtests build\n' +
           'fips wasmtests serve\n' +
           log.DEF +
           ' build WebAssembly build-suite samples (https://github.com/WebAssembly/build-suite)')
    log.info(msg)
def help():
    """Print help text for the 'clone' verb."""
    msg = log.YELLOW + "fips clone [project]\n" + log.DEF
    msg += " fetch a project directory from a git repo, project is either\n"
    msg += " a direct git-url, or a project name in the fips registry"
    log.info(msg)
def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
    '''Write the .vscode/c_cpp_properties.json files for the main project
    and all dependent projects.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param impex: imports/exports info (iterable of project names)
    :param cfg: the build config
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    defines = read_cmake_defines(fips_dir, proj_dir, cfg)
    compile_commands_path = build_dir + '/compile_commands.json'
    has_compile_command_json = os.path.isfile(compile_commands_path)
    # header search paths are only needed when no compile_commands.json
    # is available (otherwise VSCode reads them from there)
    inc_paths = None
    if not has_compile_command_json:
        inc_paths = read_cmake_headerdirs(fips_dir, proj_dir, cfg)
    props = {'configurations': [], 'version': 3}
    for platform in ['Mac', 'Linux', 'Win32']:
        entry = {
            'name': platform,
            'browse': {
                'limitSymbolsToIncludedHeaders': True,
                'databaseFilename': '{}/browse.VS.code'.format(build_dir)
            }
        }
        plat_incl_paths = None
        compiler_path = None
        intellisense_mode = 'clang-x64'
        if platform == 'Mac':
            if not has_compile_command_json:
                plat_incl_paths = get_cc_header_paths()
            plat_defines = ['_DEBUG', '__GNUC__', '__APPLE__', '__clang__']
            compiler_path = '/usr/bin/c++'
        elif platform == 'Linux':
            if not has_compile_command_json:
                plat_incl_paths = get_cc_header_paths()
            plat_defines = ['_DEBUG', '__GNUC__']
            compiler_path = '/usr/bin/c++'
        else:
            if not has_compile_command_json:
                plat_incl_paths = get_vs_header_paths(fips_dir, proj_dir, cfg)
            intellisense_mode = 'msvc-x64'
            plat_defines = ['_DEBUG', '_WIN32']
        # merge cmake-derived paths/defines into the per-platform lists
        if inc_paths:
            plat_incl_paths.extend(inc_paths)
        plat_defines.extend(defines)
        if compiler_path:
            entry['compilerPath'] = compiler_path
        if has_compile_command_json:
            entry['compileCommands'] = compile_commands_path
        if plat_incl_paths:
            entry['includePath'] = plat_incl_paths
        entry['defines'] = plat_defines
        if plat_incl_paths:
            entry['browse']['path'] = plat_incl_paths
        entry['intelliSenseMode'] = intellisense_mode
        props['configurations'].append(entry)
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        vscode_dir = dep_proj_dir + '/.vscode'
        if not os.path.isdir(vscode_dir):
            os.makedirs(vscode_dir)
        prop_path = vscode_dir + '/c_cpp_properties.json'
        log.info(' writing {}'.format(prop_path))
        with open(prop_path, 'w') as f:
            json.dump(props, f, indent=1, separators=(',', ':'))
def write_launch_json(fips_dir, proj_dir, vscode_dir, cfg):
    '''Write the .vscode/launch.json file with debugger configurations
    for all executable targets, plus python debug configs for the fips
    code generator and project-local fips verbs.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param vscode_dir: the .vscode directory to write into
    :param cfg: the build config
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    exe_targets = read_cmake_targets(fips_dir, proj_dir, cfg, ['app'])
    deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    # each target gets 4 configs: {build, skip-build} x {run, stop-at-entry}
    pre_launch_build_options = [('', True), (' [Skip Build]', False)]
    stop_at_entry_options = [('', False), (' [Stop At Entry]', True)]
    launch = {'version': '0.2.0', 'configurations': []}
    for tgt in exe_targets:
        for build_opt in pre_launch_build_options:
            for stop_opt in stop_at_entry_options:
                path = deploy_dir + '/' + tgt
                if util.get_host_platform() == 'win':
                    path += '.exe'
                cwd = os.path.dirname(path)
                # on macOS, debug the binary inside the .app bundle if present
                osx_path = path + '.app/Contents/MacOS/' + tgt
                osx_cwd = os.path.dirname(osx_path)
                if os.path.isdir(osx_cwd):
                    path = osx_path
                    cwd = osx_cwd
                cfg_name = tgt + build_opt[0] + stop_opt[0]
                if util.get_host_platform() == 'win':
                    c = {
                        'name': cfg_name,
                        'type': 'cppvsdbg',
                        'request': 'launch',
                        'program': path,
                        'args': [],
                        'stopAtEntry': stop_opt[1],
                        'cwd': cwd,
                        'environment': [],
                        'externalConsole': False,
                        'preLaunchTask': tgt if build_opt[1] else ''
                    }
                elif util.get_host_platform() == 'linux':
                    c = {
                        'name': cfg_name,
                        'type': 'cppdbg',
                        'request': 'launch',
                        'program': path,
                        'args': [],
                        'stopAtEntry': stop_opt[1],
                        'cwd': cwd,
                        'externalConsole': False,
                        'MIMode': 'gdb',
                        'preLaunchTask': tgt if build_opt[1] else ''
                    }
                else:
                    c = {
                        'name': cfg_name,
                        'type': 'cppdbg',
                        'request': 'launch',
                        'program': path,
                        'args': [],
                        'stopAtEntry': stop_opt[1],
                        'cwd': cwd,
                        'externalConsole': False,
                        'MIMode': 'lldb',
                        'preLaunchTask': tgt if build_opt[1] else ''
                    }
                launch['configurations'].append(c)
    # add a python code-generator debug config
    c = {
        'name': 'fips codegen',
        'type': 'python',
        'request': 'launch',
        'stopOnEntry': True,
        'pythonPath': '${config:python.pythonPath}',
        'program': build_dir + '/fips-gen.py',
        'args': [build_dir + '/fips_codegen.yml'],
        "cwd": proj_dir,
        "debugOptions": ["WaitOnAbnormalExit", "WaitOnNormalExit", "RedirectOutput"]
    }
    launch['configurations'].append(c)
    # add a python debug config for each project-local fips verb
    for verb_name, verb_mod in verb.verbs.items():
        # ignore standard verbs (those that live in the fips dir itself)
        if fips_dir not in inspect.getfile(verb_mod):
            c = {
                'name': 'fips {}'.format(verb_name),
                'type': 'python',
                'request': 'launch',
                'stopOnEntry': True,
                'pythonPath': '${config:python.pythonPath}',
                'program': proj_dir + '/fips',
                'args': [verb_name],
                'cwd': proj_dir,
                "debugOptions": ["WaitOnAbnormalExit", "WaitOnNormalExit", "RedirectOutput"]
            }
            launch['configurations'].append(c)
    launch_path = vscode_dir + '/launch.json'
    log.info(' writing {}'.format(launch_path))
    with open(launch_path, 'w') as f:
        json.dump(launch, f, indent=1, separators=(',', ':'))
def help():
    """Print help text for the 'open' verb."""
    msg = log.YELLOW
    msg += "fips open\n"
    msg += "fips open [config]\n"
    msg += log.DEF
    msg += " open IDE for current or named config"
    log.info(msg)
def help():
    """Print help text for the 'webpage' verb."""
    msg = log.YELLOW
    msg += 'fips webpage build\n'
    msg += 'fips webpage rebuild\n'
    msg += 'fips webpage serve\n'
    msg += log.DEF
    msg += ' build chips samples webpage'
    log.info(msg)
def deploy_webpage(fips_dir, proj_dir, webpage_dir):
    """builds the final webpage under fips-deploy/oryol-webpage

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param webpage_dir: absolute output directory for the generated page
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    # load the websamples.yml file, should have been created during the last build
    with open(webpage_dir + '/websamples.yml', 'r') as f:
        # FIX: yaml.load() without an explicit Loader is deprecated since
        # PyYAML 5.1 and can execute arbitrary code on malicious input;
        # the file only contains plain scalars/lists/dicts, so safe_load
        # is sufficient and behaves identically here
        samples = yaml.safe_load(f.read())
    # build the thumbnail gallery ('__end__' is a sentinel entry)
    content = ''
    for sample in samples:
        if sample['name'] != '__end__':
            name = sample['name']
            imgPath = sample['image']
            types = sample['type']
            desc = sample['desc']
            head, tail = os.path.split(imgPath)
            if tail == 'none':
                imgFileName = 'dummy.jpg'
            else:
                imgFileName = tail
            content += '<div class="thumb">\n'
            content += ' <div class="thumb-title">{}</div>\n'.format(name)
            content += ' <div class="img-frame"><a href="{}.html"><img class="image" src="{}" title="{}"></img></a></div>\n'.format(name, imgFileName, desc)
            content += ' <div class="thumb-bar">\n'
            content += ' <ul class="thumb-list">\n'
            if 'emscripten' in types:
                content += ' <li class="thumb-item"><a class="thumb-link" href="{}.html">emsc</a></li>\n'.format(name)
            if 'pnacl' in types:
                content += ' <li class="thumb-item"><a class="thumb-link" href="{}_pnacl.html">pnacl</a></li>\n'.format(name)
            if 'android' in types:
                content += ' <li class="thumb-item"><a class="thumb-link" href="{}-debug.apk">apk</a></li>\n'.format(name)
            content += ' </ul>\n'
            content += ' </div>\n'
            content += '</div>\n'
    # populate the html template, and write to the build directory
    with open(proj_dir + '/web/index.html', 'r') as f:
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content)
    with open(webpage_dir + '/index.html', 'w') as f:
        f.write(html)
    # copy other required files
    for name in ['style.css', 'dummy.jpg', 'emsc.js', 'pnacl.js', 'about.html', 'favicon.png']:
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/web/' + name, webpage_dir + '/' + name)
    # generate emscripten HTML pages
    if BuildEmscripten and emscripten.check_exists(fips_dir):
        emsc_deploy_dir = '{}/fips-deploy/oryol/emsc-make-release'.format(ws_dir)
        for sample in samples:
            name = sample['name']
            if name != '__end__' and 'emscripten' in sample['type']:
                log.info('> generate emscripten HTML page: {}'.format(name))
                for ext in ['js', 'html.mem']:
                    src_path = '{}/{}.{}'.format(emsc_deploy_dir, name, ext)
                    if os.path.isfile(src_path):
                        shutil.copy(src_path, webpage_dir)
                with open(proj_dir + '/web/emsc.html', 'r') as f:
                    templ = Template(f.read())
                src_url = GitHubSamplesURL + sample['src']
                html = templ.safe_substitute(name=name, source=src_url)
                with open('{}/{}.html'.format(webpage_dir, name), 'w') as f:
                    f.write(html)
    # copy PNaCl HTML pages
    if BuildPNaCl and nacl.check_exists(fips_dir):
        pnacl_deploy_dir = '{}/fips-deploy/oryol/pnacl-make-release'.format(ws_dir)
        for sample in samples:
            name = sample['name']
            if name != '__end__' and 'pnacl' in sample['type']:
                log.info('> generate PNaCl HTML page: {}'.format(name))
                for ext in ['nmf', 'pexe']:
                    shutil.copy('{}/{}.{}'.format(pnacl_deploy_dir, name, ext), webpage_dir)
                with open(proj_dir + '/web/pnacl.html', 'r') as f:
                    templ = Template(f.read())
                src_url = GitHubSamplesURL + sample['src']
                html = templ.safe_substitute(name=name, source=src_url)
                with open('{}/{}_pnacl.html'.format(webpage_dir, name), 'w') as f:
                    f.write(html)
    # copy the screenshots
    for sample in samples:
        if sample['name'] != '__end__':
            img_path = sample['image']
            head, tail = os.path.split(img_path)
            if tail != 'none':
                log.info('> copy screenshot: {}'.format(tail))
                shutil.copy(img_path, webpage_dir + '/' + tail)
    # copy the Android sample files over
    if BuildAndroid and android.check_exists(fips_dir):
        android_deploy_dir = '{}/fips-deploy/oryol/android-make-release'.format(ws_dir)
        for sample in samples:
            if sample['name'] != '__end__' and 'android' in sample['type']:
                log.info('> copy android sample files: {}'.format(sample['name']))
                shutil.copy('{}/{}-debug.apk'.format(android_deploy_dir, sample['name']), webpage_dir)
def setup(fips_dir, proj_dir) :
    """Set up the Android SDK and NDK (download, unpack, update).

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    """
    log.colored(log.YELLOW, '=== setup Android SDK/NDK :')
    # java must be present, otherwise the Android SDK setup will finish
    # without errors but is not actually usable
    if not java.check_exists(fips_dir) :
        log.error("please install java first (see './fips diag tools')")
    ensure_sdk_dirs(fips_dir)
    # download and unpack the Android SDK
    sdk_archive_path = get_androidsdk_archive_path(fips_dir)
    if not os.path.isfile(sdk_archive_path) :
        sdk_url = get_sdk_url()
        log.info("downloading '{}'...".format(sdk_url))
        # NOTE(review): urllib.urlretrieve is the Python-2 API; under
        # Python 3 this is urllib.request.urlretrieve — confirm which
        # interpreter this script targets
        urllib.urlretrieve(sdk_url, sdk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(sdk_archive_path))
    log.info("\nunpacking '{}'...".format(sdk_archive_path))
    uncompress(fips_dir, sdk_archive_path)
    log.info("downloading additional SDK files...")
    update_android_sdk(fips_dir, proj_dir)
    # download and unpack the Android NDK
    ndk_archive_path = get_androidndk_archive_path(fips_dir)
    if not os.path.isfile(ndk_archive_path) :
        ndk_url = get_ndk_url()
        log.info("downloading '{}'...".format(ndk_url))
        urllib.urlretrieve(ndk_url, ndk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(ndk_archive_path))
    log.info("\nunpacking '{}'...".format(ndk_archive_path))
    uncompress(fips_dir, ndk_archive_path)
    log.colored(log.GREEN, "done.")
def deploy_webpage(fips_dir, proj_dir, webpage_dir):
    """builds the final webpage under fips-deploy/learnopengl-examples

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param webpage_dir: absolute output directory for the generated page
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    wasm_deploy_dir = '{}/fips-deploy/learnopengl-examples/{}'.format(ws_dir, BuildConfig)
    # create directories
    if not os.path.exists(webpage_dir):
        os.makedirs(webpage_dir)
    # build the thumbnail gallery (chapters -> lessons -> examples)
    content = ''
    for chapter in items:
        chapter_title = chapter[0]
        lessons = chapter[1]
        content += '<h2>{}</i></h2>\n'.format(chapter_title)
        for lesson in lessons:
            lesson_title = lesson[0]
            lesson_link = lesson[1]
            examples = lesson[3]
            content += '<article>\n'
            content += '<section class="header"><h3><a href="{}">{} <i class="icon-link-ext"></i></a></h3></section>\n'.format(lesson_link, lesson_title)
            content += '<section class="group examples">\n'
            for example in examples:
                name = example[0]
                filename = example[1]
                log.info('> adding thumbnail for {}'.format(filename))
                url = "{}.html".format(filename)
                img_name = filename + '.jpg'
                img_path = proj_dir + '/webpage/' + img_name
                if not os.path.exists(img_path):
                    img_name = 'dummy.jpg'
                    # FIX: the fallback path was missing the '/' separator
                    # (was: proj_dir + 'webpage/dummy.jpg')
                    img_path = proj_dir + '/webpage/dummy.jpg'
                content += '<figure class="col-15">\n'
                content += '<figcaption><h4>{}</h4></figcaption>\n'.format(name)
                content += '<div><img class="responsive" src="{}" alt=""></div>\n'.format(img_name)
                content += '<a href="{}">Read More</a>\n'.format(url)
                content += '</figure>\n'
            content += '</section>\n'
            content += '</article>\n'
            content += '<hr>\n'
    # populate the html template, and write to the build directory
    with open(proj_dir + '/webpage/index.html', 'r') as f:
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content, date=datetime.date.today().strftime("%B %d %Y"))
    with open(webpage_dir + '/index.html', 'w') as f:
        f.write(html)
    # copy other required files
    for name in ['dummy.jpg', 'favicon.png', 'fontello.woff', 'fontello.woff2']:
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/webpage/' + name, webpage_dir + '/' + name)
    # generate WebAssembly HTML pages
    if emscripten.check_exists(fips_dir):
        for chapter in items:
            lessons = chapter[1]
            for lesson in lessons:
                # renamed from 'dir' to avoid shadowing the builtin
                lesson_dir = lesson[2]
                examples = lesson[3]
                for example in examples:
                    filename = example[1]
                    source = example[2]
                    glsl = example[3]
                    log.info('> generate wasm HTML page: {}'.format(filename))
                    for ext in ['wasm', 'js']:
                        src_path = '{}/{}.{}'.format(wasm_deploy_dir, filename, ext)
                        if os.path.isfile(src_path):
                            shutil.copy(src_path, '{}/'.format(webpage_dir))
                    with open(proj_dir + '/webpage/wasm.html', 'r') as f:
                        templ = Template(f.read())
                    src_url = '{}/{}/{}'.format(GitHubExamplesURL, lesson_dir, source)
                    if glsl is None:
                        # no separate shader source: hide the GLSL link
                        glsl_url = "."
                        glsl_hidden = "hidden"
                    else:
                        glsl_url = '{}/{}/{}'.format(GitHubExamplesURL, lesson_dir, glsl)
                        glsl_hidden = ""
                    html = templ.safe_substitute(name=filename, prog=filename, source=src_url, glsl=glsl_url, hidden=glsl_hidden)
                    with open('{}/{}.html'.format(webpage_dir, filename), 'w') as f:
                        f.write(html)
    # copy assets from deploy directory
    for asset in assets:
        log.info('> copy asset file: {}'.format(asset))
        src_path = '{}/{}'.format(wasm_deploy_dir, asset)
        if os.path.isfile(src_path):
            shutil.copy(src_path, webpage_dir)
    # copy the screenshots
    for chapter in items:
        lessons = chapter[1]
        for lesson in lessons:
            examples = lesson[3]
            for example in examples:
                img_name = example[1] + '.jpg'
                img_path = proj_dir + '/webpage/' + img_name
                if os.path.exists(img_path):
                    log.info('> copy screenshot: {}'.format(img_name))
                    shutil.copy(img_path, webpage_dir + '/' + img_name)