def _rec_update_imports(fips_dir, proj_dir, handled) :
    """same as _rec_fetch_imports() but for updating the imported projects

    Recursively walks the import graph, git-updating each imported project
    directory that exists and has no local changes. The 'handled' list of
    already-visited project names breaks cyclic dependencies.

    :param fips_dir: absolute fips directory
    :param proj_dir: current project directory
    :param handled: list of already handled project names (mutated in place)
    :returns: updated handled list
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if os.path.isdir(dep_proj_dir) :
                    # directory exists: git-pull it (and re-check-out a pinned
                    # 'rev' if one is configured), but never touch a checkout
                    # that has local modifications
                    dep = imports[dep_proj_name]
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git.has_local_changes(dep_proj_dir) :
                        log.warn(" '{}' has local changes, skipping...".format(dep_proj_dir))
                    else :
                        log.colored(log.BLUE, " updating '{}'...".format(dep_proj_dir))
                        git.update(dep_proj_dir)
                        if git_commit:
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                else :
                    # missing dependency dirs are not cloned here; that is
                    # the job of the fetch operation
                    log.warn(" '{}' does not exist, please run 'fips fetch'".format(dep_proj_dir))
                # recurse into the dependency, but only if it updated cleanly
                if dep_ok :
                    handled = _rec_update_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def check_imports(fips_dir, proj_dir) :
    """check whether imported projects are in sync with their remote
    git repos, then run gather_imports() which dumps any import warnings

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    """
    ok, projs = get_all_imports_exports(fips_dir, proj_dir)
    num_checked = 0
    for name in projs :
        dep_dir = util.get_project_dir(fips_dir, name)
        # the top-level project directory itself is not git-checked
        if dep_dir == proj_dir :
            continue
        num_checked += 1
        log.info("git status of '{}':".format(name))
        if not os.path.isdir(dep_dir) :
            log.warn(" '{}' does not exist, please run 'fips fetch'".format(dep_dir))
        elif git.check_out_of_sync(dep_dir) :
            log.warn(" '{}' is out of sync with remote git repo".format(dep_dir))
        else :
            log.colored(log.GREEN, ' uptodate')
    if ok and num_checked == 0 :
        log.info(' none')
    # gather imports, this will dump warnings
    gather_imports(fips_dir, proj_dir)
def check_imports(fips_dir, proj_dir): """do various checks on the imports of a project :param fips_dir: absolute fips directory :param proj_dir: absolute project directory :returns: True if checks were valid """ # check whether any imported projects are in sync with the remote git repo success, imported_projects = get_all_imports_exports(fips_dir, proj_dir) num_imports = 0 for imp_proj_name in imported_projects: imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name) # don't git-check the top-level project directory if imp_proj_dir != proj_dir: num_imports += 1 log.info("git status of '{}':".format(imp_proj_name)) if os.path.isdir(imp_proj_dir): if git.check_out_of_sync(imp_proj_dir): log.warn( " '{}' is out of sync with remote git repo".format( imp_proj_dir)) else: log.colored(log.GREEN, ' uptodate') else: log.warn( " '{}' does not exist, please run 'fips fetch'".format( imp_proj_dir)) if success and num_imports == 0: log.info(' none') # gather imports, this will dump warnings gather_imports(fips_dir, proj_dir)
def check_local_changes(fips_dir, proj_dir):
    """variation of check_imports() that only looks for local
    (uncommitted or unpushed) git changes in imported projects

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    """
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_checked = 0
    for name in imported_projects:
        dep_dir = util.get_project_dir(fips_dir, name)
        # skip the top-level project directory itself
        if dep_dir == proj_dir:
            continue
        num_checked += 1
        log.info("checking '{}':".format(name))
        if not os.path.isdir(dep_dir):
            log.warn(
                " '{}' does not exist, please run 'fips fetch'".format(
                    dep_dir))
        elif git.has_local_changes(dep_dir):
            log.warn(
                " '{}' has local changes (uncommitted and/or unpushed)"
                .format(dep_dir))
        else:
            log.colored(log.GREEN, ' no local changes')
    if success and num_checked == 0:
        log.info(' none')
def _rec_get_all_imports_exports(fips_dir, proj_dir, result):
    """recursively get all imported projects, their exported and
    imported modules in a dictionary object:

        project-1:
            url: git-url (not valid for first, top-level project)
            exports:
                header-dirs: [ ]
                conditional-header-dirs:
                    dir: cmake-if condition string
                lib-dirs: [ ]
                defines:
                    def-key: def-val
                    ...
                modules:
                    mod: dir
                    mod: dir
                    ...
            imports:
                name:
                    git: [git-url]
                    branch: [optional: branch or tag]
                    cond: [optional: cmake-if condition string conditionally including the dependency]
                name:
                    ...
                ...
        ...

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param result: in/out current result
    :returns: bool success, and modified result dictionary
    """
    # fix: removed unused locals 'ws_dir' and 'dep_url' from the original
    success = True
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in result:
        imports = get_imports(fips_dir, proj_dir)
        exports = get_exports(proj_dir)
        # depth-first recursion so that dependencies are recorded before
        # the projects that import them
        for dep_proj_name in imports:
            if dep_proj_name not in result:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                success, result = _rec_get_all_imports_exports(
                    fips_dir, dep_proj_dir, result)
                # break recursion on error
                if not success:
                    return success, result
        result[proj_name] = {}
        result[proj_name]['proj_dir'] = proj_dir
        result[proj_name]['imports'] = imports
        result[proj_name]['exports'] = exports
    # done
    return success, result
def run(fips_dir, proj_dir, args) :
    """fetch external project imports

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :args: additional args
    """
    # an optional first arg selects a project other than the current one
    if args :
        proj_dir = util.get_project_dir(fips_dir, args[0])
    dep.fetch_imports(fips_dir, proj_dir)
def _rec_get_all_imports_exports(fips_dir, proj_dir, result) :
    """recursively get all imported projects, their exported and
    imported modules in a dictionary object:

        project-1:
            url: git-url (not valid for first, top-level project)
            exports:
                header-dirs: [ ]
                lib-dirs: [ ]
                defines:
                    def-key: def-val
                    ...
                modules:
                    mod: dir
                    mod: dir
                    ...
            imports:
                name:
                    git: [git-url]
                    branch: [optional: branch or tag]
                    cond: [optional: cmake-if condition string conditionally including the dependency]
                name:
                    ...
                ...
        ...

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param result: in/out current result
    :returns: bool success, and modified result dictionary
    """
    # fix: removed unused locals 'ws_dir' and 'dep_url' from the original
    success = True
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in result :
        imports = get_imports(fips_dir, proj_dir)
        exports = get_exports(proj_dir)
        # depth-first recursion so that dependencies are recorded before
        # the projects that import them
        for dep_proj_name in imports :
            if dep_proj_name not in result :
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                success, result = _rec_get_all_imports_exports(fips_dir, dep_proj_dir, result)
                # break recursion on error
                if not success :
                    return success, result
        result[proj_name] = {}
        result[proj_name]['proj_dir'] = proj_dir
        result[proj_name]['imports'] = imports
        result[proj_name]['exports'] = exports
    # done
    return success, result
def run(fips_dir, proj_dir, args):
    """update fips itself, or the imports of a project

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: optional ['fips'] or [project-name]
    """
    if args and args[0] == 'fips':
        # special case: git-update the fips directory itself, but
        # never clobber local modifications
        if git.has_local_changes(fips_dir):
            log.warn(" '{}' has local changes, skipping...".format(fips_dir))
        else:
            log.colored(log.BLUE, " updating '{}'...".format(fips_dir))
            git.update(fips_dir)
        return
    # optional project name selects a project other than the current one
    if args:
        proj_dir = util.get_project_dir(fips_dir, args[0])
    dep.update_imports(fips_dir, proj_dir)
def get_toolchain(fips_dir, proj_dir, cfg):
    """resolve the cmake toolchain file path for a config; an explicit
    'cmake-toolchain' attribute wins, otherwise the file name is derived
    from the platform name (cross-compiling platforms only).

    Search order for the toolchain file:
      1. fips-toolchains/ in the current project directory
      2. fips-toolchains/ in every imported project
      3. cmake-toolchains/ in the fips directory

    :param fips_dir: absolute path to fips
    :param proj_dir: absolute path to the current project
    :param cfg: config dictionary
    :returns: path to toolchain file or None for non-cross-compiling
    """
    # native target platforms need no toolchain file
    if 'platform' in cfg:
        if cfg['platform'] in native_platforms:
            return None
    else:
        log.error("config has no 'platform' attribute!'")
    # determine the toolchain file name
    if 'cmake-toolchain' in cfg:
        toolchain = cfg['cmake-toolchain']
    else:
        toolchain = '{}.toolchain.cmake'.format(cfg['platform'])
    # build the candidate path list in priority order
    candidates = ['{}/fips-toolchains/{}'.format(proj_dir, toolchain)]
    _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
    for name in imported_projs:
        candidates.append('{}/fips-toolchains/{}'.format(
            util.get_project_dir(fips_dir, name), toolchain))
    candidates.append('{}/cmake-toolchains/{}'.format(fips_dir, toolchain))
    # first existing candidate wins
    for path in candidates:
        if os.path.isfile(path):
            return path
    # fallthrough: no toolchain file found
    return None
def cleanup(fips_dir, proj_dir):
    '''goes through all dependencies and deletes the .vscode directory'''
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import project directories, please run 'fips fetch'")

    def _existing_vscode_dirs():
        # yield the .vscode dirs of all dependencies, main project last-in-list first
        for name in reversed(impex):
            d = util.get_project_dir(fips_dir, name) + '/.vscode/'
            if os.path.isdir(d):
                yield d

    log.info(log.RED + 'Please confirm to delete the following directories:' + log.DEF)
    for d in _existing_vscode_dirs():
        log.info(' {}'.format(d))
    if util.confirm(log.RED + 'Delete those directories?' + log.DEF):
        for d in _existing_vscode_dirs():
            log.info(' deleting {}'.format(d))
            shutil.rmtree(d)
        log.info('Done.')
    else:
        log.info('Nothing deleted, done.')
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file'''
    workspace = {'folders': [], 'settings': {}}
    # reversed dependency order puts the main project first in the workspace
    for name in reversed(impex):
        workspace['folders'].append({'path': util.get_project_dir(fips_dir, name)})
    proj_name = util.get_project_name_from_dir(proj_dir)
    out_path = '{}/{}.code-workspace'.format(proj_dir + '/.vscode', proj_name)
    log.info(' writing {}'.format(out_path))
    with open(out_path, 'w') as fp:
        json.dump(workspace, fp, indent=1, separators=(',', ':'))
def _rec_fetch_imports(fips_dir, proj_dir, handled) :
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir) :
                    # directory did not exist, do a fresh git clone
                    dep = imports[dep_proj_name]
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git_commit :
                        if 'depth' in dep :
                            # when using rev, we may not want depth because the revision may not be reachable
                            log.colored(log.YELLOW, "=== 'depth' was ignored because parameter 'rev' is specified.")
                            dep['depth'] = None
                    # bugfix: use dep.get('depth') instead of dep['depth'];
                    # the original raised KeyError when 'rev' was specified
                    # without a 'depth' key (the dep['depth'] = None assignment
                    # above only happens when 'depth' was already present)
                    git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep.get('depth')
                    git_url = dep['git']
                    git_branch = dep['branch']
                    if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir) :
                        if git_commit :
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else :
                            dep_ok = True
                    else :
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else :
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True
                # recurse into the freshly cloned dependency
                if dep_ok :
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg):
    '''walks through the dependencies, and deletes the .vscode/tasks.json
    and .vscode/launch.json files
    '''
    for name in reversed(impex):
        vscode_dir = util.get_project_dir(fips_dir, name) + '/.vscode'
        # delete both generated files if present
        for path in (vscode_dir + '/tasks.json', vscode_dir + '/launch.json'):
            if os.path.exists(path):
                log.info(' deleting {}'.format(path))
                os.remove(path)
def get_toolchain(fips_dir, proj_dir, cfg) : """get the toolchain path location for a config, this first checks for a 'cmake-toolchain' attribute, and if this does not exist, builds a xxx.toolchain.cmake file from the platform name (only for cross- compiling platforms). Toolchain files are searched in the following locations: - a fips-toolchains subdirectory in the project directory - a fips-toolchains subdirectory in all imported projects - finally in the cmake-toolchains subdirectory of the fips directory :param fips_dir: absolute path to fips :param plat: the target platform name :returns: path to toolchain file or None for non-cross-compiling """ # ignore native target platforms if 'platform' in cfg : if cfg['platform'] in native_platforms : return None else : log.error("config has no 'platform' attribute!'") # build toolchain file name toolchain = None if 'cmake-toolchain' in cfg : toolchain = cfg['cmake-toolchain'] else : toolchain = '{}.toolchain.cmake'.format(cfg['platform']) # look for toolchain file in current project directory toolchain_path = '{}/fips-toolchains/{}'.format(proj_dir, toolchain) if os.path.isfile(toolchain_path) : return toolchain_path else : # look for toolchain in all imported directories _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir) for imported_proj_name in imported_projs : imported_proj_dir = util.get_project_dir(fips_dir, imported_proj_name) toolchain_path = '{}/fips-toolchains/{}'.format(imported_proj_dir, toolchain) if os.path.isfile(toolchain_path) : return toolchain_path else : # toolchain is not in current project or imported projects, # try the fips directory toolchain_path = '{}/cmake-toolchains/{}'.format(fips_dir, toolchain) if os.path.isfile(toolchain_path) : return toolchain_path # fallthrough: no toolchain file found return None
def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
    '''write the .vscode/c_cpp_properties.json files for main project
    and all dependent projects

    Builds one intellisense configuration per host platform (Mac, Linux,
    Win32) from the cmake-generated header dirs and defines, then writes
    the identical properties file into the .vscode dir of every project
    in the import/export set.
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    # header dirs and defines extracted from the cmake build config
    inc_paths = read_cmake_headerdirs(fips_dir, proj_dir, cfg)
    defines = read_cmake_defines(fips_dir, proj_dir, cfg)
    props = {'configurations': [], 'version': 3}
    for config_name in ['Mac', 'Linux', 'Win32']:
        c = {
            'name': config_name,
            'browse': {
                'limitSymbolsToIncludeHeaders': True,
                'databaseFilename': '{}/browse.VS.code'.format(build_dir)
            }
        }
        config_incl_paths = []
        intellisense_mode = 'clang-x64'
        # platform-specific compiler header paths and builtin defines
        if config_name == 'Mac':
            config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG', '__GNUC__', '__APPLE__', '__clang__']
        elif config_name == 'Linux':
            config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG', '__GNUC__']
        else:
            intellisense_mode = 'msvc-x64'
            config_incl_paths = get_vs_header_paths(fips_dir, proj_dir, cfg)
            config_defines = ['_DEBUG', '_WIN32']
        # append the project-specific paths/defines after the platform ones
        config_incl_paths.extend(inc_paths)
        config_defines.extend(defines)
        c['includePath'] = config_incl_paths
        c['defines'] = config_defines
        c['browse']['path'] = config_incl_paths
        c['intelliSenseMode'] = intellisense_mode
        props['configurations'].append(c)
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        vscode_dir = dep_proj_dir + '/.vscode'
        if not os.path.isdir(vscode_dir):
            os.makedirs(vscode_dir)
        prop_path = vscode_dir + '/c_cpp_properties.json'
        log.info(' writing {}'.format(prop_path))
        with open(prop_path, 'w') as f:
            json.dump(props, f, indent=1, separators=(',', ':'))
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file

    Writes <proj_dir>/.vscode/<proj_name>.code-workspace containing one
    workspace folder per project in the import/export set.
    '''
    vscode_dir = proj_dir + '/.vscode'
    ws = {
        'folders': [],
        'settings': {}
    }
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        ws['folders'].append({ 'path': dep_proj_dir })
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    log.info(' writing {}'.format(ws_path))
    with open(ws_path, 'w') as f:
        json.dump(ws, f, indent=1, separators=(',',':'))
def run(fips_dir, proj_dir, args):
    """run the markdeep verb: 'build' or 'view' the markdeep docs,
    optionally for another project given as second arg
    """
    if not args:
        log.error("expected 'build' or 'view' arg")
        return
    # an optional second arg selects a different project
    if len(args) > 1:
        proj_name = args[1]
        proj_dir = util.get_project_dir(fips_dir, proj_name)
        if not util.is_valid_project_dir(proj_dir):
            log.error('{} is not a valid fips project!'.format(proj_name))
    cmd = args[0]
    if cmd == 'build':
        markdeep.build(fips_dir, proj_dir)
    elif cmd == 'view':
        # 'view' also builds the markdown docs first
        markdeep.build(fips_dir, proj_dir)
        markdeep.view(fips_dir, proj_dir)
    else:
        log.error("expected 'build' or 'view' arg")
def write_code_workspace_file(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a multiroot-workspace config file'''
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import project directories, please run 'fips fetch'")
    # reversed dependency order puts the main project first in the workspace
    folders = [{'path': util.get_project_dir(fips_dir, name)}
               for name in reversed(impex)]
    ws = {'folders': folders, 'settings': {}}
    proj_name = util.get_project_name_from_dir(proj_dir)
    with open('{}/{}.code-workspace'.format(vscode_dir, proj_name), 'w') as fp:
        json.dump(ws, fp, indent=1, separators=(',', ':'))
def import_verbs(fips_dir, proj_dir) :
    """import verbs from local and imported projects, populates the
    'verbs' and 'proj_verbs' dictionaries

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    """
    # verbs shipped with fips itself always come first
    import_verbs_from('fips', fips_dir, fips_dir + '/verbs')
    # when invoked from a project, also pull in verbs from each import
    if fips_dir == proj_dir :
        return
    _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
    for name in imported_projs :
        d = util.get_project_dir(fips_dir, name)
        import_verbs_from(name, d, d + '/fips-verbs')
def run(fips_dir, proj_dir, args):
    """run the markdeep verb: 'build' or 'view' the markdeep docs,
    optionally for another project given as second arg

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: ['build'|'view'] [optional: project-name]
    """
    if len(args) > 0:
        # an optional second arg selects a different project
        if len(args) > 1:
            proj_name = args[1]
            proj_dir = util.get_project_dir(fips_dir, proj_name)
            if not util.is_valid_project_dir(proj_dir):
                log.error('{} is not a valid fips project!'.format(proj_name))
        if args[0] == 'build':
            markdeep.build(fips_dir, proj_dir)
        elif args[0] == 'view':
            # view also builds the markdown docs first
            markdeep.build(fips_dir, proj_dir)
            markdeep.view(fips_dir, proj_dir)
        else:
            # bugfix: message said "'build' or 'serve'" but the accepted
            # verbs are 'build' and 'view'
            log.error("expected 'build' or 'view' arg")
    else:
        log.error("expected 'build' or 'view' arg")
def import_verbs(fips_dir, proj_dir): """import verbs from local and imported projects, populates the 'verbs' and 'proj_verbs' dictionaries :param fipsdir: absolute fips directory :param proj_dir: absolute project directory """ # first import verbs from fips directory import_verbs_from('fips', fips_dir, fips_dir + '/verbs') # now go through all imported projects if fips_dir != proj_dir: _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir) for imported_proj_name in imported_projs: imported_proj_dir = util.get_project_dir(fips_dir, imported_proj_name) import_verbs_from(imported_proj_name, imported_proj_dir, imported_proj_dir + '/fips-verbs')
def get_config_dirs(fips_dir, proj_dir) :
    """return list of config directories, including all imports

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns: list of all directories with config files
    """
    # the fips directory always contributes its own configs dir
    dirs = [ fips_dir + '/configs' ]
    if fips_dir != proj_dir :
        ok, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
        if not ok :
            log.warn("missing import directories, please run 'fips fetch'")
        else :
            # collect the fips-configs dir of every import that has one
            for name in impex :
                cfg_dir = util.get_project_dir(fips_dir, name) + '/fips-configs'
                if os.path.isdir(cfg_dir) :
                    dirs.append(cfg_dir)
    return dirs
def get_config_dirs(fips_dir, proj_dir):
    """return list of config directories, including all imports

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns: list of all directories with config files
    """
    # the fips directory itself always contributes its configs dir
    dirs = [fips_dir + '/configs']
    if fips_dir != proj_dir:
        success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
        if success:
            # collect the fips-configs dir of every import that has one
            for dep_proj_name in result:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                dep_configs_dir = dep_proj_dir + '/fips-configs'
                if os.path.isdir(dep_configs_dir):
                    dirs.append(dep_configs_dir)
        else:
            log.warn("missing import directories, please run 'fips fetch'")
    return dirs
def _rec_fetch_imports(fips_dir, proj_dir, handled):
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    # guard clause breaks cyclic dependencies
    if proj_name in handled:
        return handled
    handled.append(proj_name)
    imports = get_imports(fips_dir, proj_dir)
    for dep_name, dep_info in imports.items():
        if dep_name in handled:
            continue
        dep_dir = util.get_project_dir(fips_dir, dep_name)
        log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_name))
        if os.path.isdir(dep_dir):
            # directory already exists, nothing to clone
            log.info("dir '{}' exists".format(dep_dir))
            fetched = True
        elif git.clone(dep_info['git'], dep_info['branch'], dep_name, ws_dir):
            # directory did not exist, fresh git clone succeeded
            fetched = True
        else:
            fetched = False
            log.error('failed to git clone {} into {}'.format(dep_info['git'], dep_dir))
        # recurse into the dependency only if it is available
        if fetched:
            handled = _rec_fetch_imports(fips_dir, dep_dir, handled)
    return handled
def init(fips_dir, proj_name):
    """initialize an existing project directory as a fips directory by
    copying essential files and creating or updating .gitignore

    :param fips_dir: absolute path to fips
    :param proj_name: project directory name (dir must exist)
    :returns: True if the project was successfully initialized
    """
    # fix: removed unused local 'ws_dir'
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    if os.path.isdir(proj_dir):
        templ_values = {'project': proj_name}
        for f in ['CMakeLists.txt', 'fips', 'fips.cmd', 'fips.yml']:
            template.copy_template_file(fips_dir, proj_dir, f, templ_values)
        # the 'fips' bootstrap shell script must be executable
        os.chmod(proj_dir + '/fips', 0o744)
        gitignore_entries = ['.fips-*', '*.pyc', '.vscode/', '.idea/']
        template.write_git_ignore(proj_dir, gitignore_entries)
        # bugfix: return True on success as documented (the original fell
        # through and returned None)
        return True
    else:
        log.error("project dir '{}' does not exist".format(proj_dir))
        return False
def init(fips_dir, proj_name) :
    """initialize an existing project directory as a fips directory by
    copying essential files and creating or updating .gitignore

    :param fips_dir: absolute path to fips
    :param proj_name: project directory name (dir must exist)
    :returns: True if the project was successfully initialized
    """
    # fix: removed unused local 'ws_dir'
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    if os.path.isdir(proj_dir) :
        templ_values = { 'project': proj_name }
        for f in ['CMakeLists.txt', 'fips', 'fips.cmd', 'fips.yml'] :
            template.copy_template_file(fips_dir, proj_dir, f, templ_values)
        # the 'fips' bootstrap shell script must be executable
        os.chmod(proj_dir + '/fips', 0o744)
        gitignore_entries = ['.fips-*', '*.pyc']
        template.write_git_ignore(proj_dir, gitignore_entries)
        # bugfix: return True on success as documented (the original fell
        # through and returned None)
        return True
    else :
        log.error("project dir '{}' does not exist".format(proj_dir))
        return False
def _rec_fetch_imports(fips_dir, proj_dir, handled) :
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir) :
                    # directory did not exist, do a fresh git clone
                    git_url = imports[dep_proj_name]['git']
                    git_branch = imports[dep_proj_name]['branch']
                    if git.clone(git_url, git_branch, dep_proj_name, ws_dir) :
                        dep_ok = True
                    else :
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else :
                    # directory already exists, nothing to clone
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True
                # recurse into the dependency only if it is available
                if dep_ok :
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def clone(fips_dir, url) :
    """clone an existing fips project with git, do NOT fetch dependencies

    :param fips_dir: absolute path to fips
    :param url: git url to clone from (may contain branch name separated by '#')
    :return: True if project was successfully cloned
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_url(url)
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    # guard: refuse to clone over an existing project directory
    if os.path.isdir(proj_dir) :
        log.error("project dir '{}' already exists".format(proj_dir))
        return False
    git_url = util.get_giturl_from_url(url)
    git_branch = util.get_gitbranch_from_url(url)
    if not git.clone(git_url, git_branch, proj_name, ws_dir) :
        log.error("failed to 'git clone {}' into '{}'".format(url, proj_dir))
        return False
    # fetch imports
    dep.fetch_imports(fips_dir, proj_dir)
    return True
def clone(fips_dir, url) :
    """clone an existing fips project with git, do NOT fetch dependencies

    :param fips_dir: absolute path to fips
    :param url: git url to clone from (may contain branch name separated by '#')
    :return: True if project was successfully cloned
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_url(url)
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    if not os.path.isdir(proj_dir) :
        git_url = util.get_giturl_from_url(url)
        git_branch = util.get_gitbranch_from_url(url)
        # shallow clone with the default clone depth
        if git.clone(git_url, git_branch, git.clone_depth, proj_name, ws_dir) :
            # fetch imports
            dep.fetch_imports(fips_dir, proj_dir)
            return True
        else :
            log.error("failed to 'git clone {}' into '{}'".format(url, proj_dir))
            return False
    else :
        # refuse to clone over an existing project directory
        log.error("project dir '{}' already exists".format(proj_dir))
        return False
def write_imports(fips_dir, proj_dir, imported):
    """write the big imports directory created with 'gather_imports'
    to a .fips-imports.cmake file in the current project

    :params fips_dir: absolute path to fips
    :params proj_dir: absolute path to current project
    :params imported: the imports dictionary created with 'gather_imports'
    """
    if imported:
        # track already-emitted entries so nothing is written twice
        unique_hdrdirs = []
        unique_libdirs = []
        unique_defines = {}
        unique_modules = {}
        # write a temporary .fips-imports.cmake.tmp file,
        # this will replace the old file, but only if the
        # content is different, this will prevent an unnecessary
        # cmake run if the imports haven't changed
        import_filename = proj_dir + '/.fips-imports.cmake'
        import_tmp_filename = import_filename + '.tmp'
        with open(import_tmp_filename, 'w') as f:
            f.write(
                "#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n"
            )
            for imp_proj_name in imported:
                imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
                # add include and lib search paths
                if imp_proj_dir != proj_dir:
                    f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(
                        imp_proj_dir))
                    f.write(' include("{}/fips-include.cmake")\n'.format(
                        imp_proj_dir))
                    f.write('endif()\n')
                    f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.
                            format(imp_proj_dir))
                    f.write(
                        ' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'
                        .format(imp_proj_dir))
                    f.write('endif()\n')
                # add header search paths
                for hdrdir in imported[imp_proj_name]['hdrdirs']:
                    if hdrdir not in unique_hdrdirs:
                        f.write('include_directories("{}")\n'.format(hdrdir))
                        unique_hdrdirs.append(hdrdir)
                # add lib search paths
                for libdir in imported[imp_proj_name]['libdirs']:
                    if libdir not in unique_libdirs:
                        f.write('link_directories("{}")\n'.format(libdir))
                        unique_libdirs.append(libdir)
                # add defines (string values get quoted)
                for define in imported[imp_proj_name]['defines']:
                    value = imported[imp_proj_name]['defines'][define]
                    if define not in unique_defines:
                        unique_defines[define] = value
                        if type(value) is str:
                            f.write('add_definitions(-D{}="{}")\n'.format(
                                define, value))
                        else:
                            f.write('add_definitions(-D{}={})\n'.format(
                                define, value))
                # add modules (grouped under 'Imports' in IDE project views)
                if len(imported[imp_proj_name]['modules']) > 0:
                    f.write('fips_ide_group("Imports")\n')
                    for module in imported[imp_proj_name]['modules']:
                        module_path = imported[imp_proj_name]['modules'][
                            module]
                        if module not in unique_modules:
                            unique_modules[module] = module_path
                            f.write('add_subdirectory("{}" "{}")\n'.format(
                                module, module_path))
                    f.write('fips_ide_group("")\n')
        # check content of old and new file, only replace if changed
        imports_dirty = True
        if os.path.isfile(import_filename):
            if filecmp.cmp(import_filename, import_tmp_filename,
                           shallow=False):
                imports_dirty = False
        if imports_dirty:
            if os.path.isfile(import_filename):
                os.remove(import_filename)
            os.rename(import_tmp_filename, import_filename)
        else:
            os.remove(import_tmp_filename)
        # write the .fips-imports.py file (copy from template), containing
        # the generator search paths of fips, the project, and all imports
        gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
        if os.path.isdir("{}/fips-generators".format(proj_dir)):
            gen_search_paths += '"{}","{}/fips-generators",\n'.format(
                proj_dir, proj_dir)
        for imp_proj_name in imported:
            gen_dir = util.get_project_dir(fips_dir,
                                           imp_proj_name) + '/fips-generators'
            if os.path.isdir(gen_dir):
                gen_search_paths += '"' + gen_dir + '",\n'
        template.copy_template_file(fips_dir, proj_dir, '.fips-gen.py',
                                    {'genpaths': gen_search_paths}, True)
def write_imports(fips_dir, proj_dir, cfg_name, imported) :
    """write the big imports map created with 'gather_imports'
    to a .fips-imports.cmake file in the current project

    :params fips_dir: absolute path to fips
    :params proj_dir: absolute path to current project
    :params cfg_name: build config name (used to locate the build dir)
    :params imported: the imports dictionary created with 'gather_imports'
    """
    if imported :
        # track already-emitted entries so each header dir, lib dir,
        # define and module is only written once across all imports
        unique_hdrdirs = []
        unique_libdirs = []
        unique_defines = {}
        unique_modules = {}
        # write a temporary .fips-imports.cmake.tmp file,
        # this will replace the old file, but only if the
        # content is different, this will prevent an unnecessary
        # cmake run if the imports haven't changed
        import_filename = proj_dir + '/.fips-imports.cmake'
        import_tmp_filename = import_filename + '.tmp'
        with open(import_tmp_filename, 'w') as f :
            f.write("#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n")
            for imp_proj_name in imported :
                imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
                imp = imported[imp_proj_name]
                # optionally wrap this import in a cmake condition
                if imp['cond']:
                    f.write('if ({})\n'.format(imp['cond']))
                # add include and lib search paths
                if imp_proj_dir != proj_dir :
                    f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include("{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write('elseif (EXISTS "{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include ("{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                    f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write(' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                # add header search paths
                for hdrdir in imp['hdrdirs'] :
                    if hdrdir not in unique_hdrdirs :
                        f.write('include_directories("{}")\n'.format(hdrdir))
                        unique_hdrdirs.append(hdrdir)
                # add lib search paths
                for libdir in imp['libdirs'] :
                    if libdir not in unique_libdirs :
                        f.write('link_directories("{}")\n'.format(libdir))
                        unique_libdirs.append(libdir)
                # add defines
                for define, value in imp['defines'].items() :
                    if define not in unique_defines :
                        unique_defines[define] = value
                        # string values are quoted in the generated cmake code
                        if isinstance(value, str) :
                            f.write('add_definitions(-D{}="{}")\n'.format(define, value))
                        else :
                            f.write('add_definitions(-D{}={})\n'.format(define, value))
                # add import modules
                if len(imp['modules']) > 0 :
                    import_functions = []
                    # first add all module import functions
                    for module, module_path in imp['modules'].items() :
                        if module not in unique_modules :
                            unique_modules[module] = module_path
                            import_func = 'fips_import_{}'.format(module_path).replace('-','_')
                            import_functions.append(import_func)
                            f.write('macro({})\n'.format(import_func))
                            f.write(' set(FIPS_IMPORT 1)\n')
                            f.write(' add_subdirectory("{}" "{}")\n'.format(module, module_path))
                            f.write(' set(FIPS_IMPORT)\n')
                            f.write('endmacro()\n')
                    # if auto-import is enabled, also actually import all modules
                    f.write('if (FIPS_AUTO_IMPORT)\n')
                    group = "Imports"
                    if imp['group'] :
                        group += "/" + imp['group']
                    if len(imp['modules']) > 3 :
                        group += "/" + imp_proj_name
                    f.write(' fips_ide_group("{}")\n'.format(group))
                    for import_func in import_functions :
                        f.write(' {}()\n'.format(import_func))
                    f.write(' fips_ide_group("")\n')
                    f.write('endif()\n')
                if imp['cond']:
                    f.write('endif()\n')
        # check content of old and new file, only replace if changed
        imports_dirty = True
        if os.path.isfile(import_filename) :
            if filecmp.cmp(import_filename, import_tmp_filename, shallow=False) :
                imports_dirty = False
        if imports_dirty :
            if os.path.isfile(import_filename) :
                os.remove(import_filename)
            os.rename(import_tmp_filename, import_filename)
        else :
            os.remove(import_tmp_filename)
        # write the fips-gen.py file (copy from template) into the build dir
        gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
        proj_gen_dir = util.get_generators_dir(proj_dir)
        if proj_gen_dir:
            gen_search_paths += '"{}","{}",\n'.format(proj_dir, proj_gen_dir)
        for imp_proj_name in imported :
            gen_dir = util.get_generators_dir(util.get_project_dir(fips_dir, imp_proj_name))
            if gen_dir:
                gen_search_paths += '"' + gen_dir + '",\n'
        proj_name = util.get_project_name_from_dir(proj_dir)
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name)
        if not os.path.isdir(build_dir):
            os.makedirs(build_dir)
        template.copy_template_file(fips_dir, build_dir, 'fips-gen.py', {'genpaths': gen_search_paths}, True)
def run(fips_dir, proj_dir, args) :
    """run the 'update' verb: update the imports of a project

    :param fips_dir: absolute path to fips
    :param proj_dir: absolute path to the current project
    :param args: command line args; an optional first arg is a project
                 name which overrides proj_dir
    """
    if args :
        proj_name = args[0]
        proj_dir = util.get_project_dir(fips_dir, proj_name)
    dep.update_imports(fips_dir, proj_dir)
def write_imports(fips_dir, proj_dir, imported) :
    """write the big imports directory created with 'gather_imports'
    to a .fips-imports.cmake file in the current project

    :params fips_dir: absolute path to fips
    :params proj_dir: absolute path to current project
    :params imported: the imports dictionary created with 'gather_imports'
    """
    if imported :
        # track already-emitted entries so each header dir, lib dir,
        # define and module is only written once across all imports
        unique_hdrdirs = []
        unique_libdirs = []
        unique_defines = {}
        unique_modules = {}
        # write a temporary .fips-imports.cmake.tmp file,
        # this will replace the old file, but only if the
        # content is different, this will prevent an unnecessary
        # cmake run if the imports haven't changed
        import_filename = proj_dir + '/.fips-imports.cmake'
        import_tmp_filename = import_filename + '.tmp'
        with open(import_tmp_filename, 'w') as f :
            f.write("#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n")
            for imp_proj_name in imported :
                imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
                imp = imported[imp_proj_name]
                # add include and lib search paths
                if imp_proj_dir != proj_dir :
                    f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include("{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                    f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write(' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                # add header search paths
                for hdrdir in imp['hdrdirs'] :
                    if hdrdir not in unique_hdrdirs :
                        f.write('include_directories("{}")\n'.format(hdrdir))
                        unique_hdrdirs.append(hdrdir)
                # add lib search paths
                for libdir in imp['libdirs'] :
                    if libdir not in unique_libdirs :
                        f.write('link_directories("{}")\n'.format(libdir))
                        unique_libdirs.append(libdir)
                # add defines
                for define, value in imp['defines'].items() :
                    if define not in unique_defines :
                        unique_defines[define] = value
                        # string values are quoted in the generated cmake code
                        if isinstance(value, str) :
                            f.write('add_definitions(-D{}="{}")\n'.format(define, value))
                        else :
                            f.write('add_definitions(-D{}={})\n'.format(define, value))
                # add modules
                if len(imp['modules']) > 0 :
                    f.write('fips_ide_group("Imports")\n')
                    for module, module_path in imp['modules'].items() :
                        if module not in unique_modules :
                            unique_modules[module] = module_path
                            f.write('add_subdirectory("{}" "{}")\n'.format(module, module_path))
                    f.write('fips_ide_group("")\n')
        # check content of old and new file, only replace if changed
        imports_dirty = True
        if os.path.isfile(import_filename) :
            if filecmp.cmp(import_filename, import_tmp_filename, shallow=False) :
                imports_dirty = False
        if imports_dirty :
            if os.path.isfile(import_filename) :
                os.remove(import_filename)
            os.rename(import_tmp_filename, import_filename)
        else :
            os.remove(import_tmp_filename)
        # write the .fips-gen.py file (copy from template)
        gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
        if os.path.isdir("{}/fips-generators".format(proj_dir)) :
            gen_search_paths += '"{}","{}/fips-generators",\n'.format(proj_dir, proj_dir)
        for imp_proj_name in imported :
            gen_dir = util.get_project_dir(fips_dir, imp_proj_name) + '/fips-generators'
            if os.path.isdir(gen_dir) :
                gen_search_paths += '"' + gen_dir + '",\n'
        template.copy_template_file(fips_dir, proj_dir, '.fips-gen.py', {'genpaths': gen_search_paths}, True)
def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
    '''write the .vscode/c_cpp_properties.json files for main project
    and all dependent projects

    :param fips_dir: absolute path to fips
    :param proj_dir: absolute path to current project
    :param impex: imports/exports map, iterated in reverse so the main
                  project comes first
    :param cfg: build config, its 'name' entry is used to find the build dir
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    defines = read_cmake_defines(fips_dir, proj_dir, cfg)
    # if a compile_commands.json exists in the build dir, point the VSCode
    # C/C++ extension at it instead of listing include paths explicitly
    compile_commands_path = build_dir + '/compile_commands.json'
    has_compile_command_json = os.path.isfile(compile_commands_path)
    inc_paths = None
    if not has_compile_command_json:
        inc_paths = read_cmake_headerdirs(fips_dir, proj_dir, cfg)
    props = {
        'configurations': [],
        'version': 3
    }
    # build one configuration entry per supported platform
    for config_name in ['Mac','Linux','Win32']:
        c = {
            'name': config_name,
            'browse': {
                'limitSymbolsToIncludedHeaders': True,
                'databaseFilename': '{}/browse.VS.code'.format(build_dir)
            }
        }
        config_incl_paths = None
        compiler_path = None
        intellisense_mode = 'clang-x64'
        if config_name == 'Mac':
            if not has_compile_command_json:
                config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG','__GNUC__','__APPLE__','__clang__']
            compiler_path = '/usr/bin/c++'
        elif config_name == 'Linux':
            if not has_compile_command_json:
                config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG','__GNUC__']
            compiler_path = '/usr/bin/c++'
        else:
            # Win32
            if not has_compile_command_json:
                config_incl_paths = get_vs_header_paths(fips_dir, proj_dir, cfg)
            intellisense_mode = 'msvc-x64'
            config_defines = ['_DEBUG','_WIN32']
        # inc_paths is only non-None when no compile_commands.json exists,
        # in which case config_incl_paths was also initialized above
        if inc_paths:
            config_incl_paths.extend(inc_paths)
        config_defines.extend(defines)
        if compiler_path:
            c['compilerPath'] = compiler_path
        if has_compile_command_json:
            c['compileCommands'] = compile_commands_path
        if config_incl_paths:
            c['includePath'] = config_incl_paths
        c['defines'] = config_defines
        if config_incl_paths:
            c['browse']['path'] = config_incl_paths
        c['intelliSenseMode'] = intellisense_mode
        props['configurations'].append(c)
    # write the same properties object into every project's .vscode dir;
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        vscode_dir = dep_proj_dir + '/.vscode'
        if not os.path.isdir(vscode_dir):
            os.makedirs(vscode_dir)
        prop_path = vscode_dir + '/c_cpp_properties.json'
        log.info(' writing {}'.format(prop_path))
        with open(prop_path, 'w') as f:
            json.dump(props, f, indent=1, separators=(',',':'))
def write_imports(fips_dir, proj_dir, cfg_name, imported) :
    """write the big imports map created with 'gather_imports'
    to a .fips-imports.cmake file in the current project

    :params fips_dir: absolute path to fips
    :params proj_dir: absolute path to current project
    :params cfg_name: build config name (used to locate the build dir)
    :params imported: the imports dictionary created with 'gather_imports'
    """
    if imported is not None:
        # track already-emitted entries so each header dir, lib dir,
        # define and module is only written once across all imports
        unique_hdrdirs = []
        unique_condhdrdirs = {}
        unique_libdirs = []
        unique_defines = {}
        unique_modules = {}
        # write a temporary .fips-imports.cmake.tmp file,
        # this will replace the old file, but only if the
        # content is different, this will prevent an unnecessary
        # cmake run if the imports haven't changed
        import_filename = proj_dir + '/.fips-imports.cmake'
        import_tmp_filename = import_filename + '.tmp'
        with open(import_tmp_filename, 'w') as f :
            f.write("#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n")
            for imp_proj_name in imported :
                imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
                imp = imported[imp_proj_name]
                # optionally wrap this import in a cmake condition
                if imp['cond']:
                    f.write('if ({})\n'.format(imp['cond']))
                # add include and lib search paths
                if imp_proj_dir != proj_dir :
                    f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include("{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write('elseif (EXISTS "{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include ("{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                    f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write(' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                # add header search paths
                for hdrdir in imp['hdrdirs'] :
                    if hdrdir not in unique_hdrdirs :
                        f.write('include_directories("{}")\n'.format(hdrdir))
                        unique_hdrdirs.append(hdrdir)
                # add conditional header search paths (guarded by a cmake condition)
                for hdrdir, cond in imp['condhdrdirs'].items() :
                    if hdrdir not in unique_condhdrdirs :
                        unique_condhdrdirs[hdrdir] = cond
                        f.write('if ({})\n'.format(cond))
                        f.write(' include_directories("{}")\n'.format(hdrdir))
                        f.write('endif()\n')
                # add lib search paths
                for libdir in imp['libdirs'] :
                    if libdir not in unique_libdirs :
                        f.write('link_directories("{}")\n'.format(libdir))
                        unique_libdirs.append(libdir)
                # add defines
                for define, value in imp['defines'].items() :
                    if define not in unique_defines :
                        unique_defines[define] = value
                        # string values are quoted in the generated cmake code
                        if isinstance(value, str) :
                            f.write('add_definitions(-D{}="{}")\n'.format(define, value))
                        else :
                            f.write('add_definitions(-D{}={})\n'.format(define, value))
                # add import modules
                if len(imp['modules']) > 0 :
                    import_functions = []
                    # first add all module import functions
                    for module, module_path in imp['modules'].items() :
                        if module not in unique_modules :
                            unique_modules[module] = module_path
                            import_func = 'fips_import_{}'.format(module_path).replace('-','_')
                            import_functions.append(import_func)
                            f.write('macro({})\n'.format(import_func))
                            f.write(' set(FIPS_IMPORT 1)\n')
                            f.write(' add_subdirectory("{}" "{}")\n'.format(module, module_path))
                            f.write(' set(FIPS_IMPORT)\n')
                            f.write('endmacro()\n')
                    # if auto-import is enabled, also actually import all modules
                    f.write('if (FIPS_AUTO_IMPORT)\n')
                    group = "Imports"
                    if imp['group'] :
                        group += "/" + imp['group']
                    if len(imp['modules']) > 3 :
                        group += "/" + imp_proj_name
                    f.write(' fips_ide_group("{}")\n'.format(group))
                    for import_func in import_functions :
                        f.write(' {}()\n'.format(import_func))
                    f.write(' fips_ide_group("")\n')
                    f.write('endif()\n')
                if imp['cond']:
                    f.write('endif()\n')
        # check content of old and new file, only replace if changed
        imports_dirty = True
        if os.path.isfile(import_filename) :
            if filecmp.cmp(import_filename, import_tmp_filename, shallow=False) :
                imports_dirty = False
        if imports_dirty :
            if os.path.isfile(import_filename) :
                os.remove(import_filename)
            os.rename(import_tmp_filename, import_filename)
        else :
            os.remove(import_tmp_filename)
        # write the fips-gen.py file (copy from template) into the build dir
        gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
        proj_gen_dir = util.get_generators_dir(proj_dir)
        if proj_gen_dir:
            gen_search_paths += '"{}","{}",\n'.format(proj_dir, proj_gen_dir)
        for imp_proj_name in imported :
            gen_dir = util.get_generators_dir(util.get_project_dir(fips_dir, imp_proj_name))
            if gen_dir:
                gen_search_paths += '"' + gen_dir + '",\n'
        proj_name = util.get_project_name_from_dir(proj_dir)
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name)
        if not os.path.isdir(build_dir):
            os.makedirs(build_dir)
        template.copy_template_file(fips_dir, build_dir, 'fips-gen.py', {'genpaths': gen_search_paths}, True)