def _rec_update_imports(fips_dir, proj_dir, handled) :
    """same as _rec_fetch_imports() but for updating the imported projects

    Recursively walks the import graph and runs a 'git pull'-style update
    on every imported project that is already cloned; 'handled' breaks
    cyclic dependencies.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory to start from
    :param handled: array of project names already processed
    :returns: updated 'handled' array
    """
    # NOTE(review): ws_dir is computed but never used in this function
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if os.path.isdir(dep_proj_dir) :
                    # directory exists: update it (but never clobber local edits)
                    dep = imports[dep_proj_name]
                    # optional pinned revision from the import spec
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git.has_local_changes(dep_proj_dir) :
                        log.warn(" '{}' has local changes, skipping...".format(dep_proj_dir))
                    else :
                        log.colored(log.BLUE, " updating '{}'...".format(dep_proj_dir))
                        git.update(dep_proj_dir)
                        if git_commit:
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                else :
                    # cannot update what has not been cloned yet
                    log.warn(" '{}' does not exist, please run 'fips fetch'".format(dep_proj_dir))
                # recurse
                if dep_ok :
                    handled = _rec_update_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None) : """debug a single target with gdb""" # prepare proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) if configs : for cfg in configs : # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True) if config_valid : deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name']) log.colored(log.YELLOW, "=== gdb: {}".format(cfg['name'])) cmdLine = ['gdb', "-ex", "run", "--args", target] if target_args : cmdLine.extend(target_args) try: subprocess.call(args = cmdLine, cwd = deploy_dir) except OSError : log.error("Failed to execute gdb (not installed?)") else : log.error("Config '{}' not valid in this environment".format(cfg['name'])) else : log.error("No valid configs found for '{}'".format(cfg_name)) return True
def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None):
    """debug a single target with gdb

    For every config matching cfg_name that is valid in this environment,
    launch gdb on the target from inside that config's deploy directory.
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs:
        log.error("No valid configs found for '{}'".format(cfg_name))
        return True
    for cfg in configs:
        valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
        if not valid:
            log.error("Config '{}' not valid in this environment".format(cfg['name']))
            continue
        deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
        log.colored(log.YELLOW, "=== gdb: {}".format(cfg['name']))
        cmd = ['gdb', "-ex", "run", "--args", target]
        if target_args:
            cmd += target_args
        try:
            subprocess.call(args=cmd, cwd=deploy_dir)
        except OSError:
            log.error("Failed to execute gdb (not installed?)")
    return True
def run(proj_dir):
    """Open the project's VSCode code-workspace with the detected executable."""
    exe = exe_name()
    proj_name = util.get_project_name_from_dir(proj_dir)
    workspace = '.vscode/{}.code-workspace'.format(proj_name)
    try:
        # shell=True so the platform resolves the editor executable
        subprocess.call('{} {}'.format(exe, workspace), cwd=proj_dir, shell=True)
    except OSError:
        log.error("Failed to run Visual Studio Code as '{}'".format(exe))
def write_clion_module_files(fips_dir, proj_dir, cfg):
    '''write misc.xml, modules.xml, *.iml'''
    proj_name = util.get_project_name_from_dir(proj_dir)
    idea_dir = '{}/.idea'.format(proj_dir)
    iml_path = '{}/{}.iml'.format(idea_dir, proj_name)
    # the .iml acts as a marker: if present, all files were written before
    if os.path.exists(iml_path):
        return
    iml_content = ('<?xml version="1.0" encoding="UTF-8"?>\n'
                   '<module classpath="CMake" type="CPP_MODULE" version="4" />')
    with open(iml_path, 'w') as f:
        f.write(iml_content)
    misc_content = ('<?xml version="1.0" encoding="UTF-8"?>\n'
                    '<project version="4">\n'
                    ' <component name="CMakeWorkspace" IGNORE_OUTSIDE_FILES="true" PROJECT_DIR="$PROJECT_DIR$" />\n'
                    '</project>')
    with open('{}/misc.xml'.format(idea_dir), 'w') as f:
        f.write(misc_content)
    modules_content = ('<?xml version="1.0" encoding="UTF-8"?>\n'
                       '<project version="4">\n'
                       ' <component name="ProjectModuleManager">\n'
                       ' <modules>\n'
                       ' <module fileurl="file://$PROJECT_DIR$/.idea/{}.iml" filepath="$PROJECT_DIR$/.idea/{}.iml" />\n'
                       ' </modules>\n'
                       ' </component>\n'
                       '</project>').format(proj_name, proj_name)
    with open('{}/modules.xml'.format(idea_dir), 'w') as f:
        f.write(modules_content)
def write_clion_module_files(fips_dir, proj_dir, cfg):
    '''write misc.xml, modules.xml, *.iml

    Writes the CLion project metadata files into proj_dir/.idea.
    The *.iml file doubles as a marker: if it already exists, all
    three files are assumed present and nothing is written.
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    iml_path = '{}/.idea/{}.iml'.format(proj_dir, proj_name)
    if os.path.exists(iml_path):
        return
    # per-project module file
    with open(iml_path, 'w') as f:
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write('<module classpath="CMake" type="CPP_MODULE" version="4" />')
    # misc.xml declares the CMake workspace root
    ws_path = '{}/.idea/misc.xml'.format(proj_dir)
    with open(ws_path, 'w') as f:
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write('<project version="4">\n')
        f.write(' <component name="CMakeWorkspace" IGNORE_OUTSIDE_FILES="true" PROJECT_DIR="$PROJECT_DIR$" />\n')
        f.write('</project>')
    # modules.xml registers the module file written above
    ws_path = '{}/.idea/modules.xml'.format(proj_dir)
    with open(ws_path, 'w') as f:
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write('<project version="4">\n')
        f.write(' <component name="ProjectModuleManager">\n')
        f.write(' <modules>\n')
        f.write(' <module fileurl="file://$PROJECT_DIR$/.idea/{}.iml" filepath="$PROJECT_DIR$/.idea/{}.iml" />\n'.format(proj_name, proj_name))
        f.write(' </modules>\n')
        f.write(' </component>\n')
        f.write('</project>')
def get_vs_header_paths(fips_dir, proj_dir, cfg):
    '''hacky way to find the header search path in the latest installed
    Windows 10 Kit and Visual Studio instance

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param cfg: config dictionary object
    :returns: list of include directories (empty on non-Windows hosts)
    '''
    if util.get_host_platform() != 'win':
        return []
    # Windows system headers are in 2 locations, first find the latest Windows Kit
    result = []
    kits = glob.glob('C:/Program Files (x86)/Windows Kits/10/Include/*/')
    if kits:
        # max() on the version-named dirs picks the newest kit
        latest = max(kits).replace('\\', '/')
        subdirs = glob.glob(latest + '/*/')
        for d in subdirs:
            result.append(d.replace('\\', '/'))
    # next get the used active Visual Studio instance from the cmake cache
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    outp = subprocess.check_output(['cmake', '-LA', '.'], cwd=build_dir).decode("utf-8")
    for line in outp.splitlines():
        if line.startswith('CMAKE_LINKER:FILEPATH='):
            bin_index = line.find('/bin/')
            if bin_index > 0:
                # 22 == len('CMAKE_LINKER:FILEPATH='): strip the cache-key
                # prefix, keep the VS install root up to (and incl.) the
                # slash before 'bin', and append its include dir
                result.append(line[22:bin_index + 1] + 'include')
    return result
def get_vs_header_paths(fips_dir, proj_dir, cfg):
    '''hacky way to find the header search path in the latest installed
    Windows 10 Kit and Visual Studio instance
    '''
    if util.get_host_platform() != 'win':
        return []
    # Windows system headers are in 2 locations, first find the latest Windows Kit
    result = []
    kits = glob.glob('C:/Program Files (x86)/Windows Kits/10/Include/*/')
    if kits:
        newest_kit = max(kits).replace('\\', '/')
        for subdir in glob.glob(newest_kit + '/*/'):
            result.append(subdir.replace('\\', '/'))
    # next get the used active Visual Studio instance from the cmake cache
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    outp = subprocess.check_output(['cmake', '-LA', '.'], cwd=build_dir).decode("utf-8")
    prefix = 'CMAKE_LINKER:FILEPATH='
    for line in outp.splitlines():
        if not line.startswith(prefix):
            continue
        bin_index = line.find('/bin/')
        if bin_index > 0:
            # VS install root up to (and incl.) the slash before 'bin'
            result.append(line[len(prefix):bin_index + 1] + 'include')
    return result
def gen_project(fips_dir, proj_dir, cfg, force) :
    """private: generate build files for one config

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param cfg: config dictionary object
    :param force: regenerate even if the build dir already exists
    :returns: cmake.run_gen() result when generation ran, otherwise True
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    # project settings are forwarded to cmake as -D defines
    defines = {}
    defines['FIPS_USE_CCACHE'] = 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
    defines['FIPS_AUTO_IMPORT'] = 'OFF' if dep.get_policy(proj_dir, 'no_auto_import') else 'ON'
    if cfg['platform'] == 'ios':
        # iOS builds need a signing team id
        ios_team_id = settings.get(proj_dir, 'iosteam')
        if ios_team_id:
            defines['FIPS_IOS_TEAMID'] = ios_team_id
    do_it = force
    if not os.path.isdir(build_dir) :
        os.makedirs(build_dir)
        do_it = True
    if do_it :
        log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
        log.info("config file: {}".format(cfg['path']))
        toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
        if toolchain_path :
            log.info("Using Toolchain File: {}".format(toolchain_path))
        # if Ninja build tool and on Windows, need to copy
        # the precompiled ninja.exe to the build dir
        if cfg['build_tool'] == 'ninja' :
            ninja.prepare_ninja_tool(fips_dir, build_dir)
        cmake_result = cmake.run_gen(cfg, fips_dir, proj_dir, build_dir, toolchain_path, defines)
        # VSCode projects additionally need workspace settings written
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.write_workspace_settings(fips_dir, proj_dir, cfg)
        return cmake_result
    else :
        return True
def gen_project(fips_dir, proj_dir, cfg, force):
    """private: generate build files for one config"""
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    defines = {
        'FIPS_USE_CCACHE': 'ON' if settings.get(proj_dir, 'ccache') else 'OFF',
    }
    needs_gen = force
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
        needs_gen = True
    if not needs_gen:
        # nothing to do, build dir exists and no forced regeneration
        return True
    # if Ninja build tool and on Windows, need to copy
    # the precompiled ninja.exe to the build dir
    if cfg['build_tool'] == 'ninja':
        ninja.prepare_ninja_tool(fips_dir, build_dir)
    log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
    toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
    if toolchain_path:
        log.info("Using Toolchain File: {}".format(toolchain_path))
    return cmake.run_gen(cfg, fips_dir, proj_dir, build_dir, toolchain_path, defines)
def configure(fips_dir, proj_dir, cfg_name) :
    """run ccmake or cmake-gui on the provided project and config

    :param fips_dir: absolute fips path
    :param proj_dir: absolute project dir
    :param cfg_name: build config name
    """
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir)
    # load configs, if more than one, only use first one
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        cfg = configs[0]
        log.colored(log.YELLOW, '=== configuring: {}'.format(cfg['name']))
        # generate build files
        if not gen_project(fips_dir, proj_dir, cfg, True) :
            log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
        # run ccmake or cmake-gui
        # NOTE(review): the whole cfg dict is passed here, while several
        # other call sites in this file pass cfg['name'] -- confirm which
        # signature this version of util.get_build_dir expects
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        if ccmake.check_exists(fips_dir) :
            ccmake.run(build_dir)
        elif cmake_gui.check_exists(fips_dir) :
            cmake_gui.run(build_dir)
        else :
            log.error("Neither 'ccmake' nor 'cmake-gui' found (run 'fips diag')")
    else :
        log.error("No configs found for '{}'".format(cfg_name))
def configure(fips_dir, proj_dir, cfg_name):
    """run ccmake or cmake-gui on the provided project and config

    :param fips_dir: absolute fips path
    :param proj_dir: absolute project dir
    :param cfg_name: build config name
    """
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)
    # load configs, if more than one, only use first one
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        cfg = configs[0]
        log.colored(log.YELLOW, '=== configuring: {}'.format(cfg['name']))
        # generate build files (force=True: always regenerate before
        # handing control to the interactive cmake frontend)
        if not gen_project(fips_dir, proj_dir, cfg, True):
            log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
        # run ccmake or cmake-gui, whichever is available
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if ccmake.check_exists(fips_dir):
            ccmake.run(build_dir)
        elif cmake_gui.check_exists(fips_dir):
            cmake_gui.run(build_dir)
        else:
            log.error("Neither 'ccmake' nor 'cmake-gui' found (run 'fips diag')")
    else:
        log.error("No configs found for '{}'".format(cfg_name))
def clean(fips_dir, proj_dir, cfg_name):
    """clean build files

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute project path
    :param cfg_name: config name (or pattern)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs:
        log.error("No valid configs found for '{}'".format(cfg_name))
        return
    num_cleaned_configs = 0
    for cfg in configs:
        # build dir first, then deploy dir
        cfg_dirs = [util.get_build_dir(fips_dir, proj_name, cfg['name']),
                    util.get_deploy_dir(fips_dir, proj_name, cfg['name'])]
        existing = [d for d in cfg_dirs if os.path.isdir(d)]
        if existing:
            log.colored(log.YELLOW, "=== clean: {}".format(cfg['name']))
            num_cleaned_configs += 1
            for d in existing:
                shutil.rmtree(d)
                log.info(" deleted '{}'".format(d))
    if num_cleaned_configs == 0:
        log.colored(log.YELLOW, "=== clean: nothing to clean for {}".format(cfg_name))
def build(fips_dir, proj_dir, cfg_name, target=None, build_tool_args=None): """perform a build of config(s) in project :param fips_dir: absolute path of fips :param proj_dir: absolute path of project dir :param cfg_name: config name or pattern :param target: optional target name (build all if None) :param build_tool_args: optional string array of cmdline args forwarded to the build tool :returns: True if build was successful """ # prepare dep.fetch_imports(fips_dir, proj_dir) proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) num_valid_configs = 0 if configs: for cfg in configs: # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True) if config_valid: log.colored(log.YELLOW, "=== building: {}".format(cfg['name'])) if not gen_project(fips_dir, proj_dir, cfg, False): log.error("Failed to generate '{}' of project '{}'".format( cfg['name'], proj_name)) # select and run build tool build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name']) num_jobs = settings.get(proj_dir, 'jobs') result = cmake.run_build(fips_dir, target, cfg['build_type'], build_dir, num_jobs, build_tool_args) if result: num_valid_configs += 1 else: log.error( "Failed to build config '{}' of project '{}'".format( cfg['name'], proj_name)) else: log.error("Config '{}' not valid in this environment".format( cfg['name'])) else: log.error("No valid configs found for '{}'".format(cfg_name)) if num_valid_configs != len(configs): log.error('{} out of {} configs failed!'.format( len(configs) - num_valid_configs, len(configs))) return False else: log.colored(log.GREEN, '{} configs built'.format(num_valid_configs)) return True
def run(fips_dir, proj_dir, args):
    """run the 'physx' verb

    Supported nouns:
      build  - generate and compile the PhysX SDK (Windows only)
      deploy - copy the built PhysX DLLs into the project's deploy dir

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: verb args, args[0] is the noun, args[1] the compiler target
    """
    if len(args) > 0:
        noun = args[0]
        if noun == 'build':
            # FIXME all of this only works on windows at the moment and is super hacky
            if len(args) != 2:
                log.error("expected compiler target (win-vs15, win-vs16)")
            # copy the matching fips preset into the PhysX source tree
            preset = util.fix_path(os.path.dirname(os.path.abspath(__file__))) + "/physx-presets/" + "fips" + args[1] + ".xml"
            if not os.path.isfile(preset):
                log.error("unrecognized compiler target")
            shutil.copy2(preset, proj_dir + "/../physx/physx/buildtools/presets/")
            subprocess.call(proj_dir + "/../physx/physx/generate_projects.bat fips" + args[1])
            # figure out a version number for vswhere
            version = args[1][6:]
            version_next = str(int(version) + 1)
            version = version + ".0," + version_next + ".0"
            # use vswhere to figure out where vs is
            devenvPath = subprocess.check_output(proj_dir + "/../physx/externals/vswhere/vswhere -version [" + version + "] -property productPath").decode("utf-8").rstrip()
            devenvPath = util.fix_path(devenvPath)
            if not os.path.isfile(devenvPath):
                log.error("could not detect visual studio installation")
            # BUGFIX: log message was missing the space before the path
            log.info("Using Visual Studio from " + devenvPath)
            log.info("Compiling PhysX, this might take a while")
            log.info("Building debug version")
            subprocess.call(devenvPath + " " + proj_dir + "/../physx/physx/compiler/fips" + args[1] + "/PhysXSDK.sln /Build debug /Project INSTALL")
            log.info("Building release version")
            subprocess.call(devenvPath + " " + proj_dir + "/../physx/physx/compiler/fips" + args[1] + "/PhysXSDK.sln /Build release /Project INSTALL")
        if noun == 'deploy':
            ps_deploy = util.get_workspace_dir(fips_dir) + "/fips-deploy/physx/bin/"
            cur_cfg = settings.get(proj_dir, "config")
            cfg = config.load(fips_dir, proj_dir, cur_cfg)[0]
            px_target = cfg['defines']['PX_TARGET']
            target_dir = util.get_deploy_dir(fips_dir, util.get_project_name_from_dir(proj_dir), cur_cfg)
            # copy both debug and release DLLs into the deploy dir
            for dll in glob.glob(ps_deploy + px_target + "/debug/*.dll"):
                shutil.copy2(dll, target_dir)
            for dll in glob.glob(ps_deploy + px_target + "/release/*.dll"):
                shutil.copy2(dll, target_dir)
    else:
        # BUGFIX: the original else-branch contained a nested, never-called
        # 'def run(...)' wrapping this log call, so invoking the verb with
        # no args silently did nothing
        log.error("Not supported")
def run(proj_dir):
    """Launch VSCode on the project's generated code-workspace file."""
    try:
        proj_name = util.get_project_name_from_dir(proj_dir)
        cmd = 'code .vscode/{}.code-workspace'.format(proj_name)
        subprocess.call(cmd, cwd=proj_dir, shell=True)
    except OSError:
        log.error("Failed to run Visual Studio Code as 'code'")
def build(fips_dir, proj_dir, cfg_name, target=None) :
    """perform a build of config(s) in project

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param cfg_name: config name or pattern
    :param target: optional target name (build all if None)
    :returns: True if build was successful
    """
    # prepare
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir)
    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs :
        for cfg in configs :
            # check if config is valid
            # NOTE(review): other versions of this function in this file call
            # check_config_valid(fips_dir, proj_dir, cfg, ...) -- confirm which
            # signature this version of the config module expects
            config_valid, _ = config.check_config_valid(fips_dir, cfg, print_errors=True)
            if config_valid :
                log.colored(log.YELLOW, "=== building: {}".format(cfg['name']))
                if not gen_project(fips_dir, proj_dir, cfg, False) :
                    log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
                # select and run build tool
                build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
                num_jobs = settings.get(proj_dir, 'jobs')
                result = False
                if cfg['build_tool'] == make.name :
                    result = make.run_build(fips_dir, target, build_dir, num_jobs)
                elif cfg['build_tool'] == ninja.name :
                    result = ninja.run_build(fips_dir, target, build_dir, num_jobs)
                elif cfg['build_tool'] == xcodebuild.name :
                    result = xcodebuild.run_build(fips_dir, target, cfg['build_type'], build_dir, num_jobs)
                else :
                    # fall back to generic 'cmake --build'
                    result = cmake.run_build(fips_dir, target, cfg['build_type'], build_dir)
                if result :
                    num_valid_configs += 1
                else :
                    log.error("Failed to build config '{}' of project '{}'".format(cfg['name'], proj_name))
            else :
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
    else :
        log.error("No valid configs found for '{}'".format(cfg_name))
    if num_valid_configs != len(configs) :
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False
    else :
        log.colored(log.GREEN, '{} configs built'.format(num_valid_configs))
        return True
def _rec_get_all_imports_exports(fips_dir, proj_dir, result):
    """recursively get all imported projects, their exported and
    imported modules in a dictionary object:

        project-1:
            proj_dir: absolute project directory
            exports:
                header-dirs: [ ]
                conditional-header-dirs:
                    dir: cmake-if condition string
                lib-dirs: [ ]
                defines:
                    def-key: def-val
                modules:
                    mod: dir
            imports:
                name:
                    git:    [git-url]
                    branch: [optional: branch or tag]
                    cond:   [optional: cmake-if condition string]
        project-2:
            ...

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param result: in/out current result
    :returns: bool success, and modified result dictionary
    """
    # CLEANUP: removed unused locals 'ws_dir' and 'dep_url' that were
    # assigned but never read
    success = True
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in result:
        imports = get_imports(fips_dir, proj_dir)
        exports = get_exports(proj_dir)
        # depth-first: record dependencies before this project
        for dep_proj_name in imports:
            if dep_proj_name not in result:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                success, result = _rec_get_all_imports_exports(fips_dir, dep_proj_dir, result)
                # break recursion on error
                if not success:
                    return success, result
        result[proj_name] = {
            'proj_dir': proj_dir,
            'imports': imports,
            'exports': exports,
        }
    # done
    return success, result
def run(fips_dir, proj_dir, args):
    """run the 'open' verb (opens project in IDE)

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: optional, args[0] is the config name
    """
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    cfg_name = None
    if len(args) > 0:
        cfg_name = args[0]
    if not cfg_name:
        # fall back to the project's active config
        cfg_name = settings.get(proj_dir, 'config')
    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]
        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if not os.path.isdir(build_dir):
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])
        # first check if this is a VSCode project
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.run(proj_dir)
            return
        # check if this is a CLion project
        if cfg['build_tool'] == 'clion':
            clion.run(proj_dir)
            return
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj:
            subprocess.call('open "{}"'.format(proj[0]), shell=True)
            return
        # try to open as VS project
        proj = glob.glob(build_dir + '/*.sln')
        if proj:
            subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            return
        # try to open as eclipse project
        proj = glob.glob(build_dir + '/.cproject')
        if proj:
            # headless import first, then launch the IDE itself
            subprocess.call('eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'.format(build_dir), shell=True)
            subprocess.call('eclipse', shell=True)
            return
        log.error("don't know how to open a '{}' project in {}".format(cfg['generator'], build_dir))
    else:
        log.error("config '{}' not found".format(cfg_name))
def write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a settings.json for CMakeTools plugin settings

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param vscode_dir: target .vscode directory to write into
    :param cfg: config dictionary object
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    # NOTE(review): the whole cfg dict is passed here, other versions of this
    # function in the file pass cfg['name'] -- confirm the expected
    # get_build_dir signature
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    # NOTE: local 'settings' shadows the fips settings module if imported
    settings = {
        'cmake.buildDirectory': build_dir,
        'cmake.configureSettings': {
            'FIPS_CONFIG:': cfg['name']
        }
    }
    with open(vscode_dir + '/settings.json', 'w') as f:
        json.dump(settings, f, indent=1, separators=(',',':'))
def _rec_get_all_imports_exports(fips_dir, proj_dir, result) :
    """recursively get all imported projects, their exported and
    imported modules in a dictionary object:

        project-1:
            proj_dir: absolute project directory
            exports:
                header-dirs: [ ]
                lib-dirs: [ ]
                defines:
                    def-key: def-val
                modules:
                    mod: dir
            imports:
                name:
                    git:    [git-url]
                    branch: [optional: branch or tag]
                    cond:   [optional: cmake-if condition string]
        project-2:
            ...

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param result: in/out current result
    :returns: bool success, and modified result dictionary
    """
    success = True
    # NOTE(review): ws_dir is assigned but never used
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in result :
        imports = get_imports(fips_dir, proj_dir)
        exports = get_exports(proj_dir)
        # depth-first: record all dependencies before this project
        for dep_proj_name in imports :
            if dep_proj_name not in result :
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                # NOTE(review): dep_url is assigned but never used
                dep_url = imports[dep_proj_name]['git']
                success, result = _rec_get_all_imports_exports(fips_dir, dep_proj_dir, result)
                # break recursion on error
                if not success :
                    return success, result
        result[proj_name] = {}
        result[proj_name]['proj_dir'] = proj_dir
        result[proj_name]['imports'] = imports
        result[proj_name]['exports'] = exports
    # done
    return success, result
def get_imports(fips_dir, proj_dir):
    """get the imports from the fips.yml file in proj_dir

    Normalizes the imports to the new dict format: old-style list imports
    are converted via the registry, and missing optional keys ('branch',
    'cond', 'group') are filled with defaults.

    :param fips_dir: absolute fips directory (needed for registry lookups)
    :param proj_dir: the project directory
    :returns: dictionary object with imports (can be empty)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    imports = {}
    if util.is_valid_project_dir(proj_dir):
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic:
            imports = dic['imports']
        # warn if this is an old-style list instead of new style dict
        if imports:
            if type(imports) is list:
                log.warn("imports in '{}/fips.yml' uses obsolete array format".format(proj_dir))
                # convert old style to new dict format
                # FIXME: should be removed after a while
                new_imports = {}
                for dep in imports:
                    # resolve shorthand name to a git url via the registry
                    dep_url = registry.get_url(fips_dir, dep)
                    if not util.is_git_url(dep_url):
                        log.error("'{}' cannot be resolved into a git url (in project '{}')".format(dep_url, proj_name))
                    dep_proj_name = util.get_project_name_from_url(dep_url)
                    new_imports[dep_proj_name] = {}
                    new_imports[dep_proj_name]['git'] = util.get_giturl_from_url(dep_url)
                    new_imports[dep_proj_name]['branch'] = util.get_gitbranch_from_url(dep_url)
                imports = new_imports
            elif type(imports) is dict:
                # fill in defaults for optional keys
                for dep in imports:
                    if not 'branch' in imports[dep]:
                        imports[dep]['branch'] = 'master'
                    if not 'cond' in imports[dep]:
                        imports[dep]['cond'] = None
                    if not 'git' in imports[dep]:
                        log.error("no git URL in import '{}' in '{}/fips.yml'!\n".format(dep, proj_dir))
                    if not 'group' in imports[dep]:
                        imports[dep]['group'] = None
            else:
                log.error("imports in '{}/fips.yml' must be a dictionary!".format(proj_dir))
    return imports
def run(fips_dir, proj_dir, args) :
    """run the 'open' verb (opens project in IDE)

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :param args: optional, args[0] is the config name
    """
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        # fall back to the project's active config
        cfg_name = settings.get(proj_dir, 'config')
    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]
        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if not os.path.isdir(build_dir) :
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])
        # first check if this is a VSCode project
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.run(proj_dir)
            return
        # check if this is a CLion project
        if cfg['build_tool'] == 'clion':
            clion.run(proj_dir)
            return
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj :
            subprocess.call('open "{}"'.format(proj[0]), shell=True)
            return
        # try to open as VS project
        proj = glob.glob(build_dir + '/*.sln')
        if proj :
            subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            return
        # try to open as eclipse project
        proj = glob.glob(build_dir + '/.cproject')
        if proj :
            # headless import first, then launch the IDE itself
            subprocess.call('eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'.format(build_dir), shell=True)
            subprocess.call('eclipse', shell=True)
            return
        log.error("don't know how to open a '{}' project in {}".format(cfg['generator'], build_dir))
    else :
        log.error("config '{}' not found".format(cfg_name))
def view(fips_dir, proj_dir):
    """Open the generated markdeep docs in the platform's default browser."""
    proj_name = util.get_project_name_from_dir(proj_dir)
    out_dir = util.get_workspace_dir(fips_dir)+'/fips-deploy/'+proj_name+'-markdeep'
    index_path = out_dir+'/index.html'
    if not os.path.isfile(index_path):
        log.error('no generated index.html found: {}'.format(index_path))
        return
    # per-platform shell command that opens a file in the default browser
    openers = {'osx': 'open', 'win': 'start', 'linux': 'xdg-open'}
    host = util.get_host_platform()
    if host in openers:
        subprocess.call('{} index.html'.format(openers[host]), cwd=out_dir, shell=True)
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file'''
    vscode_dir = proj_dir + '/.vscode'
    # add dependencies in reverse order, so that main project is first
    folders = [{'path': util.get_project_dir(fips_dir, dep_name)}
               for dep_name in reversed(impex)]
    ws = {'folders': folders, 'settings': {}}
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    log.info(' writing {}'.format(ws_path))
    with open(ws_path, 'w') as f:
        json.dump(ws, f, indent=1, separators=(',', ':'))
def _rec_fetch_imports(fips_dir, proj_dir, handled) :
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir) :
                    # directory did not exist, do a fresh git clone
                    dep = imports[dep_proj_name]
                    # optional pinned revision from the import spec
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git_commit :
                        if 'depth' in dep :
                            # when using rev, we may not want depth because the revision may not be reachable
                            log.colored(log.YELLOW, "=== 'depth' was ignored because parameter 'rev' is specified.")
                            dep['depth'] = None
                    git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep['depth']
                    git_url = dep['git']
                    git_branch = dep['branch']
                    if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir) :
                        if git_commit :
                            # pinned revision: check it out after the clone
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else :
                            dep_ok = True
                    else :
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else :
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True
                # recurse
                if dep_ok :
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def make_clean(fips_dir, proj_dir, cfg_name):
    """perform a 'make clean' on the project

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param cfg_name: config name or pattern
    :returns: True if all matching configs cleaned successfully
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs:
        for cfg in configs:
            config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
            if config_valid:
                log.colored(log.YELLOW, "=== cleaning: {}".format(cfg['name']))
                build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
                # dispatch to the config's build tool; fall back to cmake
                result = False
                if cfg['build_tool'] == make.name:
                    result = make.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == ninja.name:
                    result = ninja.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == xcodebuild.name:
                    result = xcodebuild.run_clean(fips_dir, build_dir)
                else:
                    result = cmake.run_clean(fips_dir, build_dir)
                if result:
                    num_valid_configs += 1
                else:
                    log.error("Failed to clean config '{}' of project '{}'".format(cfg['name'], proj_name))
            else:
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))
    if num_valid_configs != len(configs):
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False
    else:
        log.colored(log.GREEN, '{} configs cleaned'.format(num_valid_configs))
        return True
def write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a settings.json for CMakeTools plugin settings'''
    cfg_name = cfg['name']
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name)
    settings_path = vscode_dir + '/settings.json'
    log.info(' writing {}'.format(settings_path))
    # CMakeTools reads the build directory and the fips config name from here
    with open(settings_path, 'w') as fp:
        json.dump({
            'cmake.buildDirectory': build_dir,
            'cmake.configureSettings': {
                'FIPS_CONFIG:': cfg_name
            }
        }, fp, indent=1, separators=(',',':'))
def unset(proj_dir, key) :
    """delete a settings value from the project-local settings file

    :param proj_dir: absolute project directory
    :param key: settings key
    """
    util.ensure_valid_project_dir(proj_dir)
    cur_settings = load(proj_dir)
    # only persist when the key was actually present
    if key in cur_settings :
        del cur_settings[key]
        save(proj_dir, cur_settings)
    proj_name = util.get_project_name_from_dir(proj_dir)
    log.info("'{}' unset in project '{}'".format(key, proj_name))
def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
    '''write the .vscode/c_cpp_properties.json files for main project
    and all dependent projects
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    # include paths and preprocessor defines extracted from the cmake build
    inc_paths = read_cmake_headerdirs(fips_dir, proj_dir, cfg)
    defines = read_cmake_defines(fips_dir, proj_dir, cfg)
    props = {'configurations': [], 'version': 3}
    # one IntelliSense configuration per supported host platform
    for config_name in ['Mac', 'Linux', 'Win32']:
        c = {
            'name': config_name,
            'browse': {
                'limitSymbolsToIncludeHeaders': True,
                'databaseFilename': '{}/browse.VS.code'.format(build_dir)
            }
        }
        config_incl_paths = []
        intellisense_mode = 'clang-x64'
        if config_name == 'Mac':
            config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG', '__GNUC__', '__APPLE__', '__clang__']
        elif config_name == 'Linux':
            config_incl_paths = get_cc_header_paths()
            config_defines = ['_DEBUG', '__GNUC__']
        else:
            # Win32: query header paths from the Visual Studio installation
            intellisense_mode = 'msvc-x64'
            config_incl_paths = get_vs_header_paths(fips_dir, proj_dir, cfg)
            config_defines = ['_DEBUG', '_WIN32']
        # cmake-derived paths/defines are shared by all platform configs
        config_incl_paths.extend(inc_paths)
        config_defines.extend(defines)
        c['includePath'] = config_incl_paths
        c['defines'] = config_defines
        c['browse']['path'] = config_incl_paths
        c['intelliSenseMode'] = intellisense_mode
        props['configurations'].append(c)
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        vscode_dir = dep_proj_dir + '/.vscode'
        if not os.path.isdir(vscode_dir):
            os.makedirs(vscode_dir)
        prop_path = vscode_dir + '/c_cpp_properties.json'
        log.info(' writing {}'.format(prop_path))
        # same properties file is written into every dependent project
        with open(prop_path, 'w') as f:
            json.dump(props, f, indent=1, separators=(',', ':'))
def gen_project(fips_dir, proj_dir, cfg, force):
    """private: generate build files for one config

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param cfg: config dictionary object
    :param force: regenerate even if build files already exist
    :returns: result of cmake.run_gen, or True if generation was skipped
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    # NOTE(review): deploy_dir appears unused in this function — confirm
    deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    # collect cmake -D defines for this generation run
    defines = {}
    defines['FIPS_USE_CCACHE'] = 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
    defines['FIPS_AUTO_IMPORT'] = 'OFF' if dep.get_policy(
        proj_dir, 'no_auto_import') else 'ON'
    if cfg['generator'] in ['Ninja', 'Unix Makefiles']:
        # compile_commands.json for IDE/clang tooling integration
        defines['CMAKE_EXPORT_COMPILE_COMMANDS'] = 'ON'
    # platform-specific SDK locations
    if cfg['platform'] == 'ios':
        defines['CMAKE_OSX_SYSROOT'] = xcrun.get_ios_sdk_sysroot()
        ios_team_id = settings.get(proj_dir, 'iosteam')
        if ios_team_id:
            defines['FIPS_IOS_TEAMID'] = ios_team_id
    if cfg['platform'] == 'osx':
        defines['CMAKE_OSX_SYSROOT'] = xcrun.get_macos_sdk_sysroot()
    if cfg['platform'] == 'emscripten':
        defines['EMSCRIPTEN_ROOT'] = emsdk.get_emscripten_root(fips_dir)
    if cfg['platform'] == 'wasisdk':
        defines['WASISDK_ROOT'] = wasisdk.get_wasisdk_root(fips_dir)
    do_it = force
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    # also (re-)generate when the dir exists but no cmake cache was written yet
    if not os.path.isfile(build_dir + '/CMakeCache.txt'):
        do_it = True
    if do_it:
        # if Ninja build tool and on Windows, need to copy
        # the precompiled ninja.exe to the build dir
        log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
        log.info("config file: {}".format(cfg['path']))
        toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
        if toolchain_path:
            log.info("Using Toolchain File: {}".format(toolchain_path))
        is_local_build = settings.get(proj_dir, 'local')
        cmake_result = cmake.run_gen(cfg, fips_dir, proj_dir, build_dir,
                                     is_local_build, toolchain_path, defines)
        # write IDE workspace settings when the build tool matches
        if vscode.match(cfg['build_tool']):
            vscode.write_workspace_settings(
                fips_dir, proj_dir, cfg, settings.get_all_settings(proj_dir))
        if clion.match(cfg['build_tool']):
            clion.write_workspace_settings(fips_dir, proj_dir, cfg)
        return cmake_result
    else:
        return True
def gen(fips_dir, proj_dir, cfg_name) : """generate build files with cmake :param fips_dir: absolute path to fips :param proj_dir: absolute path to project :param cfg_name: config name or pattern (e.g. osx-make-debug) :returns: True if successful """ # prepare dep.fetch_imports(fips_dir, proj_dir) proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) fips_yml_defines = dep.get_fips_yml_defines(proj_dir) num_valid_configs = 0 if configs : for cfg in configs : # Merge fips.yml defines into config defines if fips_yml_defines: if 'defines' in cfg and cfg['defines']: cfg['defines'].update(fips_yml_defines) else: cfg['defines'] = fips_yml_defines # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True) if config_valid : if gen_project(fips_dir, proj_dir, cfg, True) : num_valid_configs += 1 else : log.error("failed to generate build files for config '{}'".format(cfg['name']), False) else : log.error("'{}' is not a valid config".format(cfg['name']), False) else : log.error("No configs found for '{}'".format(cfg_name)) if num_valid_configs != len(configs) : log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs))) return False else : log.colored(log.GREEN, '{} configs generated'.format(num_valid_configs)) return True
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
    '''write a multiroot-workspace config file'''
    vscode_dir = proj_dir + '/.vscode'
    # add dependencies in reverse order, so that main project is first
    ws = {
        'folders': [{'path': util.get_project_dir(fips_dir, name)} for name in reversed(impex)],
        'settings': {}
    }
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    log.info(' writing {}'.format(ws_path))
    with open(ws_path, 'w') as fp:
        json.dump(ws, fp, indent=1, separators=(',',':'))
def write_code_workspace_file(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a multiroot-workspace config file'''
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success :
        # best effort: still write the workspace with whatever was resolved
        log.warn("missing import project directories, please run 'fips fetch'")
    # add dependencies in reverse order, so that main project is first
    folders = []
    for dep_proj_name in reversed(impex):
        folders.append({'path': util.get_project_dir(fips_dir, dep_proj_name)})
    proj_name = util.get_project_name_from_dir(proj_dir)
    with open('{}/{}.code-workspace'.format(vscode_dir, proj_name), 'w') as fp:
        json.dump({'folders': folders, 'settings': {}}, fp, indent=1, separators=(',',':'))
def valgrind(fips_dir, proj_dir, cfg_name, target, target_args): """debug a single target with valgrind""" # prepare proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) if configs: for cfg in configs: # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True) if config_valid: deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name']) valgrind_bin = settings.get(proj_dir, 'valgrind') if not valgrind_bin: valgrind_bin = 'valgrind' log.colored( log.YELLOW, "=== valgrind: {} ({})".format(cfg['name'], valgrind_bin)) cmd_line = valgrind_bin if target_args: cmd_line += ' ' + ' '.join(target_args) else: cmd_line += ' ' + '--leak-check=no' cmd_line += ' ' + '--show-reachable=yes' cmd_line += ' ' + '--track-fds=yes' cmd_line += ' ' + '--run-libc-freeres=no' cmd_line += ' ' + "--log-file={}/valgrind-{}.log".format( proj_dir, target) cmd_line += ' ' + "./{}".format(target) #log.colored(log.GREEN, "cmdline: {}".format(cmd_line)) subprocess.call(args=cmd_line, cwd=deploy_dir, shell=True) else: log.error("Config '{}' not valid in this environment".format( cfg['name'])) else: log.error("No valid configs found for '{}'".format(cfg_name)) return True
def get_imports(fips_dir, proj_dir) :
    """get the imports from the fips.yml file in proj_dir

    :param fips_dir: absolute path to fips
    :param proj_dir: the project directory
    :returns: dictionary object with imports (can be empty)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    imports = {}
    if util.is_valid_project_dir(proj_dir) :
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic :
            imports = dic['imports']
        # warn if this is an old-style list instead of new style dict
        if imports :
            if type(imports) is list :
                log.warn("imports in '{}/fips.yml' uses obsolete array format".format(proj_dir))
                # convert old style to new dict format
                # FIXME: should be removed after a while
                new_imports = {}
                for dep in imports :
                    # resolve registry short names to full git URLs
                    dep_url = registry.get_url(fips_dir, dep)
                    if not util.is_git_url(dep_url) :
                        log.error("'{}' cannot be resolved into a git url (in project '{}')".format(dep_url, proj_name))
                    dep_proj_name = util.get_project_name_from_url(dep_url)
                    new_imports[dep_proj_name] = {}
                    new_imports[dep_proj_name]['git'] = util.get_giturl_from_url(dep_url)
                    new_imports[dep_proj_name]['branch'] = util.get_gitbranch_from_url(dep_url)
                imports = new_imports
            elif type(imports) is dict :
                # normalize: fill in defaults for optional keys
                for dep in imports :
                    if not 'branch' in imports[dep] :
                        imports[dep]['branch'] = 'master'
                    if not 'cond' in imports[dep] :
                        imports[dep]['cond'] = None
                    if not 'git' in imports[dep] :
                        # 'git' is the only mandatory key of an import entry
                        log.error("no git URL in import '{}' in '{}/fips.yml'!\n".format(dep, proj_dir))
                    if not 'group' in imports[dep] :
                        imports[dep]['group'] = None
            else :
                log.error("imports in '{}/fips.yml' must be a dictionary!".format(proj_dir))
    return imports
def build_deploy_webpage(fips_dir, proj_dir, chip):
    """build the NextSim emscripten target and deploy the web page artefacts

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param chip: name used for the '[chip]-webpage' deploy directory
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    webpage_dir = '{}/fips-deploy/{}-webpage'.format(ws_dir, chip)
    if not os.path.isdir(webpage_dir):
        os.makedirs(webpage_dir)
    # generate and build with the module-level BuildConfig
    project.gen(fips_dir, proj_dir, BuildConfig)
    project.build(fips_dir, proj_dir, BuildConfig)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    src_dir = '{}/fips-deploy/NextSim/{}'.format(ws_dir, BuildConfig)
    dst_dir = webpage_dir
    ns = "NextSim"
    # the .html is renamed to index.html, .wasm/.js keep their names
    # (cleaned up: was a run of copy-pasted shutil.copy calls)
    shutil.copy(src_dir + '/' + ns + '.html', dst_dir + '/index.html')
    for ext in ['.wasm', '.js']:
        shutil.copy(src_dir + '/' + ns + ext, dst_dir + '/' + ns + ext)
    shutil.copy(proj_dir + '/src/res/favicon.png', dst_dir + '/favicon.png')
    log.colored(log.GREEN, 'Generated Samples web page under {}.'.format(webpage_dir))
def make_clean(fips_dir, proj_dir, cfg_name) :
    """perform a 'make clean' on the project

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param cfg_name: config name or pattern
    :returns: True if all matching configs were cleaned successfully
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs :
        for cfg in configs :
            config_valid, _ = config.check_config_valid(fips_dir, cfg, print_errors=True)
            if config_valid :
                log.colored(log.YELLOW, "=== cleaning: {}".format(cfg['name']))
                build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
                result = False
                # dispatch to the clean runner of the config's build tool,
                # cmake is the generic fallback
                if cfg['build_tool'] == make.name :
                    result = make.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == ninja.name :
                    result = ninja.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == xcodebuild.name :
                    result = xcodebuild.run_clean(fips_dir, build_dir)
                else :
                    result = cmake.run_clean(fips_dir, build_dir)
                if result :
                    num_valid_configs += 1
                else :
                    log.error("Failed to clean config '{}' of project '{}'".format(cfg['name'], proj_name))
            else :
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
    else :
        log.error("No valid configs found for '{}'".format(cfg_name))
    if num_valid_configs != len(configs) :
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False
    else :
        log.colored(log.GREEN, '{} configs cleaned'.format(num_valid_configs))
        return True
def _rec_fetch_imports(fips_dir, proj_dir, handled):
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name in handled:
        # cycle breaker: this project was already processed
        return handled
    handled.append(proj_name)
    imports = get_imports(fips_dir, proj_dir)
    for dep_proj_name in imports:
        if dep_proj_name in handled:
            continue
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
        if os.path.isdir(dep_proj_dir):
            # directory already exists
            log.info("dir '{}' exists".format(dep_proj_dir))
            fetched = True
        else:
            # directory did not exist, do a fresh git clone
            git_url = imports[dep_proj_name]['git']
            git_branch = imports[dep_proj_name]['branch']
            fetched = git.clone(git_url, git_branch, dep_proj_name, ws_dir)
            if not fetched:
                log.error('failed to git clone {} into {}'.format(
                    git_url, dep_proj_dir))
        # recurse into the successfully fetched dependency
        if fetched:
            handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    return handled
def set(proj_dir, key, value) :
    """update a settings value by key and save
    project-local .fips-settings file

    :param proj_dir: absolute project directory
    :param key: settings key
    :param value: new value associated with key
    """
    util.ensure_valid_project_dir(proj_dir)
    cur_settings = load(proj_dir)
    cur_settings[key] = value
    save(proj_dir, cur_settings)
    proj_name = util.get_project_name_from_dir(proj_dir)
    # bools are logged as 'on'/'off', everything else via str()
    value_str = ('on' if value else 'off') if type(value) is bool else str(value)
    log.info("'{}' set to '{}' in project '{}'".format(key, value_str, proj_name))
def write_clion_workspace_file(fips_dir, proj_dir, cfg):
    '''write bare-bone workspace.xml config file'''
    ws_path = '{}/.idea/workspace.xml'.format(proj_dir)
    # do not overwrite existing .xml
    if os.path.exists(ws_path):
        return
    proj_name = util.get_project_name_from_dir(proj_dir)
    gen_options = '-DFIPS_CONFIG={}'.format(cfg['name'])
    gen_dir = '$PROJECT_DIR$/../fips-build/{}/{}'.format(proj_name, cfg['name'])
    xml_lines = [
        '<?xml version="1.0" encoding="UTF-8"?>\n',
        '<project version="4">\n',
        # TODO: CMakeRunConfigurationManager
        ' <component name="CMakeSettings">\n',
        ' <configurations>\n',
        ' <configuration PROFILE_NAME="Debug" CONFIG_NAME="Debug" GENERATION_OPTIONS="{}" GENERATION_DIR="{}" />\n'.format(gen_options, gen_dir),
        ' </configurations>\n',
        ' </component>\n',
        # TODO: RunManager
        '</project>',
    ]
    with open(ws_path, 'w') as f:
        f.writelines(xml_lines)
def set(proj_dir, key, value) :
    """update a settings value by key and save
    project-local .fips-settings file

    :param proj_dir: absolute project directory
    :param key: settings key
    :param value: new value associated with key
    """
    util.ensure_valid_project_dir(proj_dir)
    settings = load(proj_dir)
    settings[key] = value
    save(proj_dir, settings)
    proj_name = util.get_project_name_from_dir(proj_dir)
    # bools are logged as 'on'/'off' for readability
    if type(value) is bool :
        # fixed: removed stray trailing semicolon (non-idiomatic in Python)
        value_str = 'on' if value else 'off'
    else :
        value_str = str(value)
    log.info("'{}' set to '{}' in project '{}'".format(key, value_str, proj_name))
def gen_project(fips_dir, proj_dir, cfg, force) :
    """private: generate build files for one config"""
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    defines = {
        'FIPS_USE_CCACHE': 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
    }
    needs_gen = force
    if not os.path.isdir(build_dir) :
        os.makedirs(build_dir)
        # a freshly created build dir always needs generation
        needs_gen = True
    if not needs_gen :
        # build files exist and regeneration was not forced
        return True
    # if Ninja build tool and on Windows, need to copy
    # the precompiled ninja.exe to the build dir
    if cfg['build_tool'] == 'ninja' :
        ninja.prepare_ninja_tool(fips_dir, build_dir)
    log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
    toolchain_path = config.get_toolchain_for_platform(fips_dir, cfg['platform'])
    return cmake.run_gen(cfg, proj_dir, build_dir, toolchain_path, defines)
def _rec_fetch_imports(fips_dir, proj_dir, handled) :
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir: current project directory
    :param handled: array of already handled dirs
    :returns: updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir) :
                    # directory did not exist, do a fresh git clone
                    git_url = imports[dep_proj_name]['git']
                    git_branch = imports[dep_proj_name]['branch']
                    if git.clone(git_url, git_branch, dep_proj_name, ws_dir) :
                        dep_ok = True
                    else :
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else :
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True
                # recuse
                # (recurse into the dependency only if it was fetched or present)
                if dep_ok :
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
def gen(fips_dir, proj_dir, cfg_name) :
    """generate build files with cmake

    :param fips_dir: absolute path to fips
    :param proj_dir: absolute path to project
    :param cfg_name: config name or pattern (e.g. osx-make-debug)
    :returns: True if successful
    """
    # prepare
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir)
    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_ok = 0
    if not configs :
        log.error("No configs found for '{}'".format(cfg_name))
    else :
        for cfg in configs :
            # check if config is valid
            cfg_valid, _ = config.check_config_valid(fips_dir, cfg, print_errors = True)
            if not cfg_valid :
                log.error("'{}' is not a valid config".format(cfg['name']), False)
            elif gen_project(fips_dir, proj_dir, cfg, True) :
                num_ok += 1
            else :
                log.error("failed to generate build files for config '{}'".format(cfg['name']), False)
    if num_ok != len(configs) :
        log.error('{} out of {} configs failed!'.format(len(configs) - num_ok, len(configs)))
        return False
    log.colored(log.GREEN, '{} configs generated'.format(num_ok))
    return True
def run(fips_dir, proj_dir, args) :
    """run the 'open' verb (opens project in IDE)

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param args: command line args; args[0] may be a config name
    """
    if not util.is_valid_project_dir(proj_dir) :
        # NOTE(review): no return after this — presumably log.error aborts
        # the process; confirm against the log module
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    # config name comes from args, or falls back to the project default
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')
    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]
        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        if not os.path.isdir(build_dir) :
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj :
            # NOTE(review): unquoted path in a shell command — breaks on
            # paths containing spaces
            subprocess.call('open {}'.format(proj[0]), shell=True)
        else :
            # try to open as VS project
            proj = glob.glob(build_dir + '/*.sln')
            if proj :
                subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            else :
                log.error("don't know how to open a '{}' project".format(cfg['generator']))
    else :
        log.error("config '{}' not found".format(cfg_name))
def valgrind(fips_dir, proj_dir, cfg_name, target, target_args) : """debug a single target with valgrind""" # prepare proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) if configs : for cfg in configs : # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True) if config_valid : deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name']) valgrind_bin = settings.get(proj_dir, 'valgrind') if not valgrind_bin : valgrind_bin = 'valgrind' log.colored(log.YELLOW, "=== valgrind: {} ({})".format(cfg['name'], valgrind_bin)) cmd_line = valgrind_bin if target_args : cmd_line += ' ' + ' '.join(target_args) else : cmd_line += ' ' + '--leak-check=no' cmd_line += ' ' + '--show-reachable=yes' cmd_line += ' ' + '--track-fds=yes' cmd_line += ' ' + '--run-libc-freeres=no' cmd_line += ' ' + "--log-file={}/valgrind-{}.log".format(proj_dir, target) cmd_line += ' ' + "./{}".format(target) #log.colored(log.GREEN, "cmdline: {}".format(cmd_line)) subprocess.call(args = cmd_line, cwd = deploy_dir, shell = True) else : log.error("Config '{}' not valid in this environment".format(cfg['name'])) else : log.error("No valid configs found for '{}'".format(cfg_name)) return True
def get_target_list(fips_dir, proj_dir, cfg_name) :
    """get project targets config name, only works
    if a cmake run was performed before

    :param fips_dir: absolute path to fips
    :param proj_dir: absolute project path
    :param cfg_name: the config name
    :returns: (success, targets)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        # only look at the first matching config
        cfg = configs[0]
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        targets_path = build_dir + '/fips_targets.yml'
        if os.path.isfile(targets_path) :
            targets = []
            with open(targets_path) as f :
                # NOTE(review): yaml.load without an explicit Loader is
                # deprecated/unsafe on newer PyYAML; fips_targets.yml is
                # generated by fips itself, so input is trusted here
                targets = yaml.load(f)
            return True, targets
        else :
            return False, []
    else :
        log.error("No valid configs found for '{}'".format(cfg_name))
        # bugfix: previously fell through and implicitly returned None,
        # breaking callers that unpack the documented (success, targets) tuple
        return False, []
def clean(fips_dir, proj_dir, cfg_name) :
    """clean build files

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute project path
    :param cfg_name: config name (or pattern)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs :
        log.error("No valid configs found for '{}'".format(cfg_name))
        return
    for cfg in configs :
        log.colored(log.YELLOW, "=== clean: {}".format(cfg['name']))
        # remove both the build dir and the deploy dir of this config
        for path in (util.get_build_dir(fips_dir, proj_name, cfg),
                     util.get_deploy_dir(fips_dir, proj_name, cfg)) :
            if os.path.isdir(path) :
                shutil.rmtree(path)
                log.info(" deleted '{}'".format(path))
def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd) :
    """run a build target executable

    :param fips_dir: absolute path of fips
    :param proj_dir: absolute path of project dir
    :param cfg_name: config name or pattern
    :param target_name: the target name
    :param target_args: command line arguments for build target
    :param target_cwd: working directory or None
    :returns: exit code of the target process (0 when a web app
              is stopped with Ctrl-C, 10 if nothing was executed)
    """
    retcode = 10
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        for cfg in configs :
            log.colored(log.YELLOW, "=== run '{}' (config: {}, project: {}):".format(target_name, cfg['name'], proj_name))
            # find deploy dir where executables live
            deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg)
            if not target_cwd :
                target_cwd = deploy_dir
            # bugfix: cmd_line must be initialized here, otherwise the
            # emscripten/pnacl branch falls through to 'if cmd_line'
            # with the name unbound (NameError)
            cmd_line = None
            if cfg['platform'] in ['emscripten', 'pnacl'] :
                # special case: emscripten/pnacl app is served over a local
                # http server and opened in the default browser
                if cfg['platform'] == 'emscripten' :
                    html_name = target_name + '.html'
                else :
                    html_name = target_name + '_pnacl.html'
                if util.get_host_platform() == 'osx' :
                    try :
                        subprocess.call(
                            'open http://localhost:8000/{} ; python {}/mod/httpserver.py'.format(html_name, fips_dir),
                            cwd = target_cwd, shell=True)
                    except KeyboardInterrupt :
                        return 0
                elif util.get_host_platform() == 'win' :
                    try :
                        cmd = 'cmd /c start http://localhost:8000/{} && python {}/mod/httpserver.py'.format(html_name, fips_dir)
                        subprocess.call(cmd, cwd = target_cwd, shell=True)
                    except KeyboardInterrupt :
                        return 0
                elif util.get_host_platform() == 'linux' :
                    try :
                        subprocess.call(
                            'xdg-open http://localhost:8000/{}; python {}/mod/httpserver.py'.format(html_name, fips_dir),
                            cwd = target_cwd, shell=True)
                    except KeyboardInterrupt :
                        return 0
                else :
                    log.error("don't know how to start HTML app on this platform")
            elif os.path.isdir('{}/{}.app'.format(deploy_dir, target_name)) :
                # special case: Mac app
                cmd_line = '{}/{}.app/Contents/MacOS/{}'.format(deploy_dir, target_name, target_name)
            else :
                cmd_line = '{}/{}'.format(deploy_dir, target_name)
            if cmd_line :
                if target_args :
                    cmd_line += ' ' + ' '.join(target_args)
                try:
                    retcode = subprocess.call(args=cmd_line, cwd=target_cwd, shell=True)
                # bugfix: 'except OSError, e:' is Python-2-only syntax; the
                # 'as' form works on Python 2.6+ and Python 3
                except OSError as e:
                    log.error("Failed to execute '{}' with '{}'".format(target_name, e.strerror))
    # bugfix: the captured exit code was never returned to the caller
    return retcode
def write_imports(fips_dir, proj_dir, cfg_name, imported) :
    """write the big imports map created with 'gather_imports'
    to a .fips-imports.cmake file in the current project

    :params fips_dir: absolute path to fips
    :params proj_dir: absolute path to current project
    :params imported: the imports dictionary created with 'gather_imports'
    """
    if imported :
        # de-duplication state: each path/define/module is only emitted once
        unique_hdrdirs = []
        unique_libdirs = []
        unique_defines = {}
        unique_modules = {}
        # write a temporary .fips-imports.cmake.tmp file,
        # this will replace the old file, but only if the
        # content is different, this will prevent an unnecessary
        # cmake run if the imports haven't changed
        import_filename = proj_dir + '/.fips-imports.cmake'
        import_tmp_filename = import_filename + '.tmp'
        with open(import_tmp_filename, 'w') as f :
            f.write("#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n")
            for imp_proj_name in imported :
                imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
                # wrap this import in a cmake if() when a condition is set
                if imported[imp_proj_name]['cond']:
                    f.write('if ({})\n'.format(imported[imp_proj_name]['cond']))
                # add include and lib search paths
                if imp_proj_dir != proj_dir :
                    f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include("{}/fips-include.cmake")\n'.format(imp_proj_dir))
                    f.write('elseif (EXISTS "{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write(' include ("{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                    f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write(' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
                    f.write('endif()\n')
                # add header search paths
                for hdrdir in imported[imp_proj_name]['hdrdirs'] :
                    if hdrdir not in unique_hdrdirs :
                        f.write('include_directories("{}")\n'.format(hdrdir))
                        unique_hdrdirs.append(hdrdir)
                # add lib search paths
                for libdir in imported[imp_proj_name]['libdirs'] :
                    if libdir not in unique_libdirs :
                        f.write('link_directories("{}")\n'.format(libdir))
                        unique_libdirs.append(libdir)
                # add defines (string values are quoted, others written as-is)
                for define in imported[imp_proj_name]['defines'] :
                    value = imported[imp_proj_name]['defines'][define]
                    if define not in unique_defines :
                        unique_defines[define] = value
                        if type(value) is str :
                            f.write('add_definitions(-D{}="{}")\n'.format(define, value))
                        else :
                            f.write('add_definitions(-D{}={})\n'.format(define, value))
                # add import modules
                if len(imported[imp_proj_name]['modules']) > 0 :
                    import_functions = []
                    # first add all module import functions
                    for module in imported[imp_proj_name]['modules'] :
                        module_path = imported[imp_proj_name]['modules'][module]
                        if module not in unique_modules :
                            unique_modules[module] = module_path
                            # one fips_import_* macro per module
                            import_func = 'fips_import_{}'.format(module_path).replace('-','_')
                            import_functions.append(import_func)
                            f.write('macro({})\n'.format(import_func))
                            f.write(' set(FIPS_IMPORT 1)\n')
                            f.write(' add_subdirectory("{}" "{}")\n'.format(module, module_path))
                            f.write(' set(FIPS_IMPORT)\n')
                            f.write('endmacro()\n')
                    # if auto-import is enabled, also actually import all modules
                    f.write('if (FIPS_AUTO_IMPORT)\n')
                    group = "Imports"
                    if imported[imp_proj_name]['group'] :
                        group += "/" + imported[imp_proj_name]['group']
                    # projects with many modules get their own IDE sub-group
                    if len(imported[imp_proj_name]['modules']) > 3 :
                        group += "/" + imp_proj_name
                    f.write(' fips_ide_group("{}")\n'.format(group))
                    for import_func in import_functions :
                        f.write(' {}()\n'.format(import_func))
                    f.write(' fips_ide_group("")\n')
                    f.write('endif()\n')
                if imported[imp_proj_name]['cond']:
                    f.write('endif()\n')
        # check content of old and new file, only replace if changed
        imports_dirty = True
        if os.path.isfile(import_filename) :
            if filecmp.cmp(import_filename, import_tmp_filename, shallow=False) :
                imports_dirty = False
        if imports_dirty :
            if os.path.isfile(import_filename) :
                os.remove(import_filename)
            os.rename(import_tmp_filename, import_filename)
        else :
            os.remove(import_tmp_filename)
    # write the .fips-imports.py file (copy from template)
    # gen_search_paths is rendered into the template as python source
    gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
    proj_gen_dir = util.get_generators_dir(proj_dir)
    if proj_gen_dir:
        gen_search_paths += '"{}","{}",\n'.format(proj_dir, proj_gen_dir)
    for imp_proj_name in imported :
        gen_dir = util.get_generators_dir(util.get_project_dir(fips_dir, imp_proj_name))
        if gen_dir:
            gen_search_paths += '"' + gen_dir + '",\n'
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name);
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    template.copy_template_file(fips_dir, build_dir, 'fips-gen.py', { 'genpaths': gen_search_paths}, True)
def build(fips_dir, proj_dir):
    """Extract embedded markdeep documentation from C headers.

    Walks proj_dir for .h files, captures comment blocks delimited by
    '/*# ... #*/', and writes one .html page per header plus a toplevel
    index.html into 'fips-deploy/[proj]-markdeep' in the workspace.
    Logs an error if no header contains a markdeep block.
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    # target directory will be 'fips-deploy/[proj]-markdeep
    out_dir = util.get_workspace_dir(fips_dir) + '/fips-deploy/' + proj_name + '-markdeep'
    log.info('building to: {}...'.format(out_dir))
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)

    # collect all .h files under the project directory
    header_paths = []
    for cur_root, _, cur_files in os.walk(proj_dir):
        for cur_file in fnmatch.filter(cur_files, '*.h'):
            header_paths.append(os.path.join(cur_root, cur_file).replace('\\', '/'))

    begin_pattern = re.compile(r'/\*#\s')
    generated = []
    for header in header_paths:
        log.info('  parsing {}'.format(header))
        inside_block = False
        captured = []
        with open(header, 'r') as src:
            for text_line in src.readlines():
                # an end marker closes the currently open block
                if inside_block and "#*/" in text_line:
                    inside_block = False
                if inside_block:
                    # strip one level of leading indentation (4 spaces or a tab)
                    if text_line.startswith('    '):
                        text_line = text_line[4:]
                    elif text_line.startswith('\t'):
                        text_line = text_line[1:]
                    captured.append(text_line)
                # a begin marker opens a block; the marker line itself
                # is not captured (checked after the append above)
                if not inside_block and begin_pattern.match(text_line):
                    inside_block = True
        if captured:
            generated.append(header)
            dst_path = out_dir + '/' + os.path.relpath(header, proj_dir) + '.html'
            log.info('  markdeep block(s) found, writing: {}'.format(dst_path))
            dst_dir = os.path.dirname(dst_path)
            if not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
            with open(dst_path, 'w') as dst:
                dst.write("<meta charset='utf-8' emacsmode='-*- markdown -*-'>\n")
                dst.write("<link rel='stylesheet' href='https://casual-effects.com/markdeep/latest/apidoc.css?'>\n")
                for text_line in captured:
                    dst.write(text_line)
                dst.write("<script>markdeepOptions={tocStyle:'medium'};</script>")
                dst.write("<!-- Markdeep: --><script src='https://casual-effects.com/markdeep/latest/markdeep.min.js?'></script>")

    # write a toplevel index.html linking all generated pages
    if generated:
        generated = sorted(generated)
        dst_path = out_dir + '/index.html'
        log.info('writing toc file: {}'.format(dst_path))
        with open(dst_path, 'w') as dst:
            dst.write("<meta charset='utf-8' emacsmode='-*- markdown -*-'>\n")
            dst.write("<link rel='stylesheet' href='https://casual-effects.com/markdeep/latest/apidoc.css?'>\n")
            dst.write('# {}\n'.format(proj_name))
            for header in generated:
                rel_path = os.path.relpath(header, proj_dir)
                dst.write('- [{}]({})\n'.format(rel_path, rel_path + '.html'))
            dst.write("<script>markdeepOptions={tocStyle:'medium'};</script>")
            dst.write("<!-- Markdeep: --><script src='https://casual-effects.com/markdeep/latest/markdeep.min.js?'></script>")
    else:
        log.error("no headers with embedded markdeep found in '{}'!".format(proj_dir))
def write_launch_json(fips_dir, proj_dir, vscode_dir, cfg):
    '''write the .vscode/launch.json file

    Generates VSCode debug configurations:
      - per executable target: 4 combos of [build / skip build] x
        [run / stop at entry], with a platform-specific debugger type
        (cppvsdbg on Windows, cppdbg+gdb on Linux, cppdbg+lldb otherwise)
      - one python config for the fips code-generator
      - one python config per non-standard fips verb
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    exe_targets = read_cmake_targets(fips_dir, proj_dir, cfg, ['app'])
    deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    pre_launch_build_options = [('', True), (' [Skip Build]', False)]
    stop_at_entry_options = [('', False), (' [Stop At Entry]', True)]
    launch = {
        'version': '0.2.0',
        'configurations': []
    }
    # hoisted: host platform is loop-invariant
    host = util.get_host_platform()
    for tgt in exe_targets:
        # resolve executable path and working dir once per target
        # (previously recomputed for each of the 4 option combos)
        path = deploy_dir + '/' + tgt
        if host == 'win':
            path += '.exe'
        cwd = os.path.dirname(path)
        # prefer the macOS app-bundle executable if it exists
        osx_path = path + '.app/Contents/MacOS/' + tgt
        osx_cwd = os.path.dirname(osx_path)
        if os.path.isdir(osx_cwd):
            path = osx_path
            cwd = osx_cwd
        for pre_launch_build in pre_launch_build_options:
            for stop_at_entry in stop_at_entry_options:
                # keys shared by all platforms
                c = {
                    'name': tgt + pre_launch_build[0] + stop_at_entry[0],
                    'request': 'launch',
                    'program': path,
                    'args': [],
                    'stopAtEntry': stop_at_entry[1],
                    'cwd': cwd,
                    'externalConsole': False,
                    'preLaunchTask': tgt if pre_launch_build[1] else ''
                }
                # platform-specific debugger settings
                if host == 'win':
                    c['type'] = 'cppvsdbg'
                    c['environment'] = []
                elif host == 'linux':
                    c['type'] = 'cppdbg'
                    c['MIMode'] = 'gdb'
                else:
                    c['type'] = 'cppdbg'
                    c['MIMode'] = 'lldb'
                launch['configurations'].append(c)
    # add a python code-generator debug config
    c = {
        'name': 'fips codegen',
        'type': 'python',
        'request': 'launch',
        'stopOnEntry': True,
        'pythonPath': '${config:python.pythonPath}',
        'program': build_dir + '/fips-gen.py',
        'args': [build_dir + '/fips_codegen.yml'],
        "cwd": proj_dir,
        "debugOptions": ["WaitOnAbnormalExit", "WaitOnNormalExit", "RedirectOutput"]
    }
    launch['configurations'].append(c)
    # add a python debug config for each fips verb
    for verb_name, verb_mod in verb.verbs.items():
        # ignore standard verbs (those living inside the fips dir itself)
        if fips_dir not in inspect.getfile(verb_mod):
            c = {
                'name': 'fips {}'.format(verb_name),
                'type': 'python',
                'request': 'launch',
                'stopOnEntry': True,
                'pythonPath': '${config:python.pythonPath}',
                'program': proj_dir + '/fips',
                'args': [verb_name],
                'cwd': proj_dir,
                "debugOptions": ["WaitOnAbnormalExit", "WaitOnNormalExit", "RedirectOutput"]
            }
            launch['configurations'].append(c)
    launch_path = vscode_dir + '/launch.json'
    log.info('  writing {}'.format(launch_path))
    with open(launch_path, 'w') as f:
        json.dump(launch, f, indent=1, separators=(',', ':'))
def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
    '''write the .vscode/c_cpp_properties.json files for main project
       and all dependent projects
    '''
    proj_name = util.get_project_name_from_dir(proj_dir)
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
    defines = read_cmake_defines(fips_dir, proj_dir, cfg)
    compile_commands_path = build_dir + '/compile_commands.json'
    # if cmake exported a compile-commands db, point VSCode at it instead
    # of gathering explicit header search paths
    has_compile_command_json = os.path.isfile(compile_commands_path)
    inc_paths = read_cmake_headerdirs(fips_dir, proj_dir, cfg) if not has_compile_command_json else None
    props = {
        'configurations': [],
        'version': 3
    }
    for platform_name in ['Mac', 'Linux', 'Win32']:
        entry = {
            'name': platform_name,
            'browse': {
                'limitSymbolsToIncludedHeaders': True,
                'databaseFilename': '{}/browse.VS.code'.format(build_dir)
            }
        }
        platform_incl_paths = None
        compiler_path = None
        intellisense_mode = 'clang-x64'
        # per-platform compiler defines and header search paths
        if platform_name == 'Mac':
            if not has_compile_command_json:
                platform_incl_paths = get_cc_header_paths()
            platform_defines = ['_DEBUG', '__GNUC__', '__APPLE__', '__clang__']
            compiler_path = '/usr/bin/c++'
        elif platform_name == 'Linux':
            if not has_compile_command_json:
                platform_incl_paths = get_cc_header_paths()
            platform_defines = ['_DEBUG', '__GNUC__']
            compiler_path = '/usr/bin/c++'
        else:
            if not has_compile_command_json:
                platform_incl_paths = get_vs_header_paths(fips_dir, proj_dir, cfg)
            intellisense_mode = 'msvc-x64'
            platform_defines = ['_DEBUG', '_WIN32']
        # merge in the project-wide header paths and cmake defines
        if inc_paths:
            platform_incl_paths.extend(inc_paths)
        platform_defines.extend(defines)
        if compiler_path:
            entry['compilerPath'] = compiler_path
        if has_compile_command_json:
            entry['compileCommands'] = compile_commands_path
        if platform_incl_paths:
            entry['includePath'] = platform_incl_paths
        entry['defines'] = platform_defines
        if platform_incl_paths:
            entry['browse']['path'] = platform_incl_paths
        entry['intelliSenseMode'] = intellisense_mode
        props['configurations'].append(entry)
    # write the same file into every project's .vscode dir;
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        vscode_dir = dep_proj_dir + '/.vscode'
        if not os.path.isdir(vscode_dir):
            os.makedirs(vscode_dir)
        prop_path = vscode_dir + '/c_cpp_properties.json'
        log.info('  writing {}'.format(prop_path))
        with open(prop_path, 'w') as f:
            json.dump(props, f, indent=1, separators=(',', ':'))