Example #1
def run(fips_dir, proj_dir, args) :
    """run diagnostics

    :param fips_dir:    absolute path to fips directory
    :param proj_dir:    absolute path to current project
    :param args:       command line args
    """
    noun = 'all'
    ok = False
    if len(args) > 0 :
        noun = args[0]
    if noun in ['all', 'configs'] :
        check_configs(fips_dir, proj_dir)
        ok = True
    if noun in ['all', 'imports'] :
        check_imports(fips_dir, proj_dir)
        ok = True
    if noun in ['all', 'tools'] :
        check_tools(fips_dir)
        ok = True
    if noun in ['all', 'fips'] :
        check_fips(fips_dir)
        ok = True
    if not ok :
        log.error("invalid noun '{}'".format(noun))
Example #2
def copy_template_file(fips_dir, proj_dir, filename, values, silent=False) :
    """copy a template file from fips/templates to the project 
    directory and replace template values (e.g. the project name),
    ask for user permission if files exist

    :param fips_dir:    absolute fips directory
    :param proj_dir:    absolute project directory
    :param filename:    filename to copy from fips/templates
    :param values:      template key/value dictionary
    :param silent:      if True, overwrite existing file and don't print status
    :returns:           True if the file was written, False if it was skipped
    """
    
    src_path = fips_dir + '/templates/' + filename
    dst_path = proj_dir + '/' + filename

    if not os.path.isfile(src_path) :
        log.error("template src file '{}' doesn't exist".format(src_path))
    
    if not silent :
        if os.path.isfile(dst_path) :
            if not util.confirm("overwrite '{}'?".format(dst_path)) :
                log.info("skipping '{}'".format(dst_path))
                return False

    content = None
    with open(src_path, 'r') as f :
        content = f.read()
    content = Template(content).substitute(values)
    with open(dst_path, 'w') as f :
        f.write(content)

    if not silent :
        log.info("wrote '{}'".format(dst_path))
    return True
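A quick aside (not part of fips): the 'values' dict above feeds Python's string.Template, which replaces '$key' placeholders in the template file; a minimal sketch:

from string import Template

tpl = Template('project name: $project\n')
print(tpl.substitute({'project': 'my-game'}))   # -> 'project name: my-game'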
Example #3
def run(fips_dir, proj_dir, args) :
    """run the init verb"""
    if len(args) > 0 :
        proj_name = args[0]
        project.init(fips_dir, proj_name)
    else :
        log.error("expected one arg [project]")
Example #4
def get_include_path() :
    """return the global shader header search path"""
    include_path = '{}/bgfx/src'.format(proj_path)
    include_path = os.path.normpath(include_path)
    if not os.path.isdir(include_path) :
        log.error("could not find bgfx shader include search path at '{}'".format(include_path))
    return include_path
Example #5
def load(fips_dir, proj_dir, pattern) :
    """load one or more matching configs from fips and current project dir

    :param fips_dir:    absolute fips directory
    :param proj_dir:    absolute project directory
    :param pattern:     config name pattern (e.g. 'linux-make-*')
    :returns:   an array of loaded config objects
    """
    dirs = get_config_dirs(fips_dir, proj_dir)
    configs = []
    for curDir in dirs :
        paths = glob.glob('{}/{}.yml'.format(curDir, pattern))
        for path in paths :
            try :
                with open(path, 'r') as f :
                    cfg = yaml.load(f)
                folder, fname = os.path.split(path)

                # patch path, folder, and name
                cfg['path'] = path
                cfg['folder'] = folder
                cfg['name'] = os.path.splitext(fname)[0]
                if 'generator' not in cfg :
                    cfg['generator'] = 'Default'
                if 'generator-platform' not in cfg :
                    cfg['generator-platform'] = None
                if 'generator-toolset' not in cfg :
                    cfg['generator-toolset'] = None
                if 'defines' not in cfg :
                    cfg['defines'] = None
                configs.append(cfg)
            except yaml.error.YAMLError as e:
                log.error('YML parse error: {}'.format(e))
    return configs
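For reference, a loaded config object might look roughly like this after load() has patched it (illustrative values only; real config files define their own fields):

cfg = {
    'platform': 'linux',          # from the .yml file
    'build_tool': 'make',
    'build_type': 'Debug',
    'path': '/abs/path/linux-make-debug.yml',   # patched in by load()
    'folder': '/abs/path',
    'name': 'linux-make-debug',
    'generator': 'Default',       # defaulted when missing
    'generator-platform': None,
    'generator-toolset': None,
    'defines': None,
}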
Example #6
def gather_and_write_imports(fips_dir, proj_dir) :
    """first does and gather_imports, then a write_imports with the result"""
    imports = gather_imports(fips_dir, proj_dir)
    if imports is not None :
        write_imports(fips_dir, proj_dir, imports)
    else :
        log.error("project imports are incomplete, please run 'fips fetch'")
Example #7
def ensure_valid_project_dir(proj_dir):
    """test if project dir is valid, if not, dump error and abort

    :param proj_dir:    absolute project directory to check
    """
    if not is_valid_project_dir(proj_dir):
        log.error("'{}' is not a valid project directory".format(proj_dir))
Example #8
def run(fips_dir, proj_dir, args) :
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    if len(args) > 0:
        if args[0] == 'clean':
            vscode.cleanup(fips_dir, proj_dir)
        else:
            log.error("invalid noun '{}' (expected: clean)".format(noun))
Example #9
def check_config_valid(fips_dir, cfg, print_errors=False) :
    """check if provided config is valid, and print errors if not

    :param cfg:     a loaded config object
    :returns:       (True, [ errors ]) tuple with result and error messages
    """
    errors = []
    valid = True

    # check whether all required fields are present
    # (NOTE: name and folder should always be present since they are appended
    # during loading)
    required_fields = ['name', 'folder', 'platform', 'generator', 'build_tool', 'build_type']
    for field in required_fields :
        if field not in cfg :
            errors.append("missing field '{}' in '{}'".format(field, cfg['path']))
            valid = False
    
    # check if the platform string is valid
    if not valid_platform(cfg['platform']) :
        errors.append("invalid platform name '{}' in '{}'".format(cfg['platform'], cfg['path']))
        valid = False

    # check if the platform can be built on current host platform
    if cfg['platform'] not in target_platforms[util.get_host_platform()] :
        errors.append("'{}' is not a valid target platform for host '{}'".format(cfg['platform'], util.get_host_platform()))
        valid = False

    # check if the target platform SDK is installed
    if not check_sdk(fips_dir, cfg['platform']) :
        errors.append("platform sdk for '{}' not installed (see './fips help setup')".format(cfg['platform']))
        valid = False

    # check if the generator name is valid
    if not valid_generator(cfg['generator']) :
        errors.append("invalid generator name '{}' in '{}'".format(cfg['generator'], cfg['path']))
        valid = False

    # check if build tool is valid
    if not valid_build_tool(cfg['build_tool']) :
        errors.append("invalid build_tool name '{}' in '{}'".format(cfg['build_tool'], cfg['path']))
        valid = False

    # check if the build tool can be found
    if not check_build_tool(fips_dir, cfg['build_tool']) :
        errors.append("build tool '{}' not found".format(cfg['build_tool']))
        valid = False

    # check if build type is valid (Debug, Release, Profiling)
    if not valid_build_type(cfg['build_type']) :
        errors.append("invalid build_type '{}' in '{}'".format(cfg['build_type'], cfg['path']))
        valid = False

    if print_errors :
        for error in errors :
            log.error(error, False)

    return (valid, errors)
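A possible call-site sketch (assuming a cfg loaded via config.load()):

valid, errors = check_config_valid(fips_dir, cfg, print_errors=False)
if not valid:
    for err in errors:
        log.error(err, False)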
Example #10
def build(fips_dir, proj_dir, cfg_name, target=None) :
    """perform a build of config(s) in project

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute path of project dir
    :param cfg_name:    config name or pattern
    :param target:      optional target name (build all if None)
    :returns:           True if build was successful
    """

    # prepare
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir)

    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs :
        for cfg in configs :
            # check if config is valid
            config_valid, _ = config.check_config_valid(fips_dir, cfg, print_errors=True)
            if config_valid :
                log.colored(log.YELLOW, "=== building: {}".format(cfg['name']))

                if not gen_project(fips_dir, proj_dir, cfg, False) :
                    log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))

                # select and run build tool
                build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
                num_jobs = settings.get(proj_dir, 'jobs')
                result = False
                if cfg['build_tool'] == make.name :
                    result = make.run_build(fips_dir, target, build_dir, num_jobs)
                elif cfg['build_tool'] == ninja.name :
                    result = ninja.run_build(fips_dir, target, build_dir, num_jobs)
                elif cfg['build_tool'] == xcodebuild.name :
                    result = xcodebuild.run_build(fips_dir, target, cfg['build_type'], build_dir, num_jobs)
                else :
                    result = cmake.run_build(fips_dir, target, cfg['build_type'], build_dir)
                
                if result :
                    num_valid_configs += 1
                else :
                    log.error("Failed to build config '{}' of project '{}'".format(cfg['name'], proj_name))
            else :
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
    else :
        log.error("No valid configs found for '{}'".format(cfg_name))

    if num_valid_configs != len(configs) :
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False      
    else :
        log.colored(log.GREEN, '{} configs built'.format(num_valid_configs))
        return True
Example #11
File: git.py Project: floooh/fips
def push(proj_dir):
    """runs a 'git push' in the provided git repo
    
    :param proj_dir:    path to git repo
    """
    check_exists_with_error()
    try:
        res = subprocess.check_call('git push', cwd=proj_dir, shell=True)
    except subprocess.CalledProcessError as e:
        log.error("'git push' failed with '{}'".format(e.returncode))
Example #12
File: git.py Project: floooh/fips
def commit(proj_dir, msg):
    """runs a 'git commit -m msg' in the provided git repo

    :param proj_dir:    path to a git repo
    """
    check_exists_with_error()
    try:
        subprocess.check_call('git commit -m "{}"'.format(msg), cwd=proj_dir, shell=True)
    except subprocess.CalledProcessError as e:
        log.error("'git commit' failed with '{}'".format(e.returncode))
Example #13
def run(fips_dir, proj_dir, args) :
    """build fips project"""
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')
    project.build(fips_dir, proj_dir, cfg_name)
Example #14
def run(fips_dir, proj_dir, args) :
    if len(args) > 0 :
        if args[0] == 'build' :
            build_deploy_webpage(fips_dir, proj_dir)
        elif args[0] == 'serve' :
            serve_webpage(fips_dir, proj_dir)
        else :
            log.error("Invalid param '{}', expected 'build' or 'serve'".format(args[0]))
    else :
        log.error("Param 'build' or 'serve' expected")
Example #15
def get_toolchain(fips_dir, proj_dir, cfg) :
    """get the toolchain path location for a config, this first checks
    for a 'cmake-toolchain' attribute, and if this does not exist, builds
    an 'xxx.toolchain.cmake' file from the platform name (only for cross-
    compiling platforms). Toolchain files are searched in the
    following locations:
    - a fips-files/toolchains subdirectory in the project directory
    - a fips-files/toolchains subdirectory in all imported projects
    - finally in the cmake-toolchains subdirectory of the fips directory

    :param fips_dir:    absolute path to fips
    :param proj_dir:    absolute project directory
    :param cfg:         the loaded config object
    :returns:           path to toolchain file or None for non-cross-compiling
    """

    # ignore native target platforms
    if 'platform' in cfg :
        if cfg['platform'] in native_platforms :
            return None
    else :
        log.error("config has no 'platform' attribute!'")

    # build toolchain file name
    toolchain = None
    if 'cmake-toolchain' in cfg :
        toolchain = cfg['cmake-toolchain']
    else :
        toolchain = '{}.toolchain.cmake'.format(cfg['platform'])
    
    # look for toolchain file in current project directory
    toolchain_dir = util.get_toolchains_dir(proj_dir)
    toolchain_path = None
    if toolchain_dir:
        toolchain_path = toolchain_dir + '/' + toolchain
    if toolchain_path and os.path.isfile(toolchain_path) :
        return toolchain_path
    else :
        # look for toolchain in all imported directories
        _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
        for imported_proj_name in imported_projs :
            imported_proj_dir = imported_projs[imported_proj_name]['proj_dir']
            toolchain_dir = util.get_toolchains_dir(imported_proj_dir)
            toolchain_path = None
            if toolchain_dir:
                toolchain_path = toolchain_dir + '/' + toolchain
            if toolchain_path and os.path.isfile(toolchain_path):
                return toolchain_path
        else :
            # toolchain is not in current project or imported projects, 
            # try the fips directory
            toolchain_path = '{}/cmake-toolchains/{}'.format(fips_dir, toolchain)
            if os.path.isfile(toolchain_path) :
                return toolchain_path
    # fallthrough: no toolchain file found
    return None
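Note that the bare 'else :' above belongs to the 'for' loop (for/else): it runs only when the loop finishes without a 'break', while an early 'return' leaves the function and skips it. A tiny illustration:

for name in ['a', 'b']:
    if name == 'x':
        break
else:
    print('no match, fall back')   # printed, since the loop never hit break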
Example #16
def check_config_valid(fips_dir, proj_dir, cfg, print_errors=False) :
    """check if provided config is valid, and print errors if not

    :param cfg:     a loaded config object
    :returns:       (True, [ messages ]) tuple with result and error messages
    """
    messages = []
    valid = True

    # check whether all required fields are present
    # (NOTE: name and folder should always be present since they are appended
    # during loading)
    required_fields = ['name', 'folder', 'platform', 'generator', 'build_tool', 'build_type']
    for field in required_fields :
        if field not in cfg :
            messages.append("missing field '{}' in '{}'".format(field, cfg['path']))
            valid = False
    
    # check if the target platform SDK is installed
    if not check_sdk(fips_dir, cfg['platform']) :
        messages.append("platform sdk for '{}' not installed (see './fips help setup')".format(cfg['platform']))
        valid = False

    # check if the generator name is valid
    if not valid_generator(cfg['generator']) :
        messages.append("invalid generator name '{}' in '{}'".format(cfg['generator'], cfg['path']))
        valid = False

    # check if build tool is valid
    if not valid_build_tool(cfg['build_tool']) :
        messages.append("invalid build_tool name '{}' in '{}'".format(cfg['build_tool'], cfg['path']))
        valid = False

    # check if the build tool can be found
    if not check_build_tool(fips_dir, cfg['build_tool']) :
        messages.append("build tool '{}' not found".format(cfg['build_tool']))
        valid = False

    # check if build type is valid (Debug, Release, Profiling)
    if not valid_build_type(cfg['build_type']) :
        messages.append("invalid build_type '{}' in '{}'".format(cfg['build_type'], cfg['path']))
        valid = False

    # check if the toolchain file can be found (if this is a crosscompiling toolchain)
    if cfg['platform'] not in native_platforms :
        toolchain_path = get_toolchain(fips_dir, proj_dir, cfg)
        if not toolchain_path :
            messages.append("toolchain file not found for config '{}'!".format(cfg['name']))
            valid = False

    if print_errors :
        for msg in messages :
            log.error(msg, False)

    return (valid, messages)
Example #17
def run(fips_dir, proj_dir, args) :
    """run the 'unset' verb"""
    if len(args) > 0 :
        noun = args[0]
        if noun in valid_nouns :
            settings.unset(proj_dir, noun)
        else :
            log.error("invalid noun '{}', must be: {}".format(
                noun, ', '.join(valid_nouns)))
    else :
        log.error("expected noun: {}".format(', '.join(valid_nouns)))
Example #18
File: git.py Project: floooh/fips
def commit_allow_empty(proj_dir, msg):
    """same as commit(), but uses the --allow-empty arg so that the
    commit doesn't fail if there's nothing to commit.

    :param proj_dir:    path to a git repo
    """
    check_exists_with_error()
    try:
        subprocess.check_call('git commit --allow-empty -m "{}"'.format(msg), cwd=proj_dir, shell=True)
    except subprocess.CalledProcessError as e:
        log.error("'git commit' failed with '{}'".format(e.returncode))
Example #19
def get_shaderc_path() :
    """find shaderc compiler, fail if not exists"""
    shaderc_path = os.path.abspath('{}/shaderc'.format(deploy_path))
    if not os.path.isfile(shaderc_path) :
        os_name = platform.system().lower()
        shaderc_path = '{}/bgfx/tools/bin/{}/shaderc'.format(proj_path, os_name)
        shaderc_path = os.path.normpath(shaderc_path)
        if not os.path.isfile(shaderc_path) :
            log.error("bgfx shaderc executable not found, please run 'make tools' in bgfx directory")

    return shaderc_path
Example #20
File: setup.py Project: floooh/fips
def run(fips_dir, proj_dir, args) :
    """run the 'setup' verb"""
    sdk_name = None
    if len(args) > 0 :
        sdk_name = args[0]
    if sdk_name == 'emscripten' :
        emscripten.setup(fips_dir, proj_dir)
    elif sdk_name == 'android' :
        android.setup(fips_dir, proj_dir)
    else :
        log.error("invalid SDK name (must be 'emscripten' or 'android')")
Example #21
File: git.py Project: floooh/fips
def add(proj_dir, update=False):
    """runs a 'git add .' in the provided git repo

    :param proj_dir:    path to a git repo
    :param update:      if True, runs 'git add -u' instead of 'git add .'
    """
    check_exists_with_error()
    cmd = 'git add ' + ('-u' if update else '.')
    try:
        subprocess.check_call(cmd, cwd=proj_dir, shell=True)
    except subprocess.CalledProcessError as e:
        log.error("'{}' failed with '{}'".format(cmd, e.returncode))
Example #22
def run(fips_dir, proj_dir, args) :
    """clean generated files"""
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')
    if cfg_name == 'all' :
        cfg_name = '*'
    project.clean(fips_dir, proj_dir, cfg_name)
Example #23
File: git.py Project: fungos/fips
def checkout(proj_dir, revision):
    """checkout a specific revision hash of a repository

    :param proj_dir:    a git repo dir
    :param revision:    SHA1 hash of the commit
    :returns:           True if git returns successful
    """
    try:
        output = subprocess.check_output("git checkout {}".format(revision), cwd=proj_dir, shell=True)
        return output.split(":")[0] != "error"
    except subprocess.CalledProcessError:
        log.error("failed to call 'git checkout'")
        return None
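Keep in mind that under Python 3 subprocess.check_output() returns bytes, which is why the later checkout() variant decodes before splitting; a minimal sketch:

import subprocess

out = subprocess.check_output('echo done', shell=True).decode('utf-8')
ok = out.split(':')[0] != 'error'   # True for this output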
Example #24
def get_local_rev(proj_dir, local_branch) :
    """get the head rev of a local branch

    :param proj_dir:        a git repo dir
    :param local_branch:    local branch name (e.g. master)
    :returns:               the revision string of the local branch head or None
    """
    try :
        output = subprocess.check_output('git rev-parse {}'.format(local_branch), cwd=proj_dir, shell=True)
        return output.rstrip()
    except subprocess.CalledProcessError :
        log.error("failed to call 'git rev-parse'")
        return None
Example #25
File: git.py Project: floooh/fips
def update_submodule(proj_dir):
    """runs a 'git submodule sync --recursive' followed by a
    'git submodule update --recursive' on the provided git repo,
    unconditionally (it will *not* check for local changes)

    :param proj_dir:    a git repo dir
    """
    check_exists_with_error()
    try:
        subprocess.call('git submodule sync --recursive', cwd=proj_dir, shell=True)
        subprocess.call('git submodule update --recursive', cwd=proj_dir, shell=True)
    except subprocess.CalledProcessError:
        log.error("Failed to call 'git submodule sync/update'")
Example #26
def view(fips_dir, proj_dir):
    proj_name = util.get_project_name_from_dir(proj_dir)
    out_dir = util.get_workspace_dir(fips_dir)+'/fips-deploy/'+proj_name+'-markdeep'
    if os.path.isfile(out_dir+'/index.html'):
        p = util.get_host_platform()
        if p == 'osx':
            subprocess.call('open index.html', cwd=out_dir, shell=True)
        elif p == 'win':
            subprocess.call('start index.html', cwd=out_dir, shell=True)
        elif p == 'linux':
            subprocess.call('xdg-open index.html', cwd=out_dir, shell=True)
    else:
        log.error('no generated index.html found: {}'.format(out_dir+'/index.html'))
Example #27
File: dep.py Project: XoDeR/Amstel
def _rec_fetch_imports(fips_dir, proj_dir, handled) :
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir:    current project directory
    :param handled:     array of already handled dirs
    :returns:           updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)

        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir) :
                    # directory did not exist, do a fresh git clone
                    dep = imports[dep_proj_name]
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git_commit :
                        if 'depth' in dep :
                            # when using rev, we may not want depth because the revision may not be reachable
                            log.colored(log.YELLOW, "=== 'depth' was ignored because parameter 'rev' is specified.")
                        dep['depth'] = None
                    git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep['depth']
                    git_url = dep['git']
                    git_branch = dep['branch']
                    if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir) :
                        if git_commit :
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else :
                            dep_ok = True
                    else :
                        log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
                else :
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True

                # recurse
                if dep_ok :
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir, handled)

    # done, return the new handled array
    return handled
Example #28
File: git.py Project: floooh/fips
def checkout(proj_dir, revision) :
    """checkout a specific revision hash of a repository

    :param proj_dir:    a git repo dir
    :param revision:    SHA1 hash of the commit
    :returns:           True if git returns successful
    """
    try :
        output = subprocess.check_output('git checkout {}'.format(revision), cwd=proj_dir, shell=True).decode("utf-8")
        update_submodule(proj_dir)
        return output.split(':')[0] != 'error'
    except subprocess.CalledProcessError :
        log.error("failed to call 'git checkout'")
        return None
Example #29
def generate(input_file, out_src, out_hdr) :
    """
    :param input_file:  bgfx .sc file
    :param out_src:     must be None
    :param out_hdr:     path of output header file
    """
    if not os.path.isfile(out_hdr) or genutil.isDirty(Version, [input_file], [out_hdr]):
        # deduce shader type
        base_file = os.path.basename(input_file)
        shader_type = None
        if base_file.startswith("vs_"):
            shader_type = "vertex"
        if base_file.startswith("fs_"):
            shader_type = "fragment"
        if base_file.startswith("cs_"):
            shader_type = "compute"

        if not shader_type:
            log.error("Could not identify shader type, please use prefix vs_, fs_ or cs_ on file " + input_file)
            return

        # source to bgfx shader compiler
        shaderc_path = get_shaderc_path()
        include_path = get_include_path()
        basename = get_basename(input_file)

        glsl = BuildShaderTask(input_file, 'glsl', 'linux', shader_type, None, basename)
        mtl = BuildShaderTask(input_file, 'mtl', 'ios', shader_type, None, basename)
        dx9 = BuildShaderTask(input_file, 'dx9', 'windows', shader_type,
                'vs_3_0' if shader_type == 'vertex' else 'ps_3_0', basename)
        dx11 = BuildShaderTask(input_file, 'dx11', 'windows', shader_type,
                'vs_4_0' if shader_type == 'vertex' else 
                'cs_5_0' if shader_type == 'compute' else 'ps_4_0', basename)

        glsl.start()
        mtl.start()
        dx9.start()
        dx11.start()

        glsl.join()
        mtl.join()
        dx9.join()
        dx11.join()

        contents = glsl.contents + mtl.contents + dx9.contents + dx11.contents
        if len(contents):
            with open(out_hdr, 'w') as f:
                contents = "// #version:{}#\n".format(Version) + contents
                f.write(contents)
Example #30
def read_cmake_targets(fips_dir, proj_dir, cfg, types):
    '''Reads the fips_targets.yml file which was created during
    "fips gen", and extract targets matching the target
    types in the types array, or if types is None, all targets.
    '''
    success, targets = util.get_cfg_target_list(fips_dir, proj_dir, cfg)
    if success:
        if types:
            matching_targets = [tgt for tgt in targets if targets[tgt] in types]
        else:
            matching_targets = targets.keys()
        return matching_targets
    else:
        log.error('Failed to read fips_targets.yml from build dir')
        return None
Example #31
def clone(fips_dir, url):
    """clone an existing fips project with git, do NOT fetch dependencies

    :param fips_dir:    absolute path to fips
    :param url:         git url to clone from (may contain branch name separated by '#')
    :return:            True if project was successfully cloned
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_url(url)
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    if not os.path.isdir(proj_dir):
        git_url = util.get_giturl_from_url(url)
        git_branch = util.get_gitbranch_from_url(url)
        if git.clone(git_url, git_branch, proj_name, ws_dir):
            # fetch imports
            dep.fetch_imports(fips_dir, proj_dir)
            return True
        else:
            log.error("failed to 'git clone {}' into '{}'".format(
                url, proj_dir))
            return False
    else:
        log.error("project dir '{}' already exists".format(proj_dir))
        return False
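The '#' convention mentioned in the docstring could be handled along these lines (hypothetical helper; the real util.get_giturl_from_url()/get_gitbranch_from_url() may differ):

def split_git_url(url):
    # 'https://github.com/floooh/fips#master' -> ('https://github.com/floooh/fips', 'master')
    if '#' in url:
        git_url, branch = url.split('#', 1)
        return git_url, branch
    return url, None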
Example #32
File: open.py Project: rbxnk/fips
def run(fips_dir, proj_dir, args):
    """run the 'open' verb (opens project in IDE)"""
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    cfg_name = None
    if len(args) > 0:
        cfg_name = args[0]
    if not cfg_name:
        cfg_name = settings.get(proj_dir, 'config')

    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]

        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if not os.path.isdir(build_dir):
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])

        # first check if this is a VSCode project
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.run(proj_dir)
            return
        # check if this is a CLion project
        if cfg['build_tool'] == 'clion':
            clion.run(proj_dir)
            return
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj:
            subprocess.call('open "{}"'.format(proj[0]), shell=True)
            return
        # try to open as VS project
        proj = glob.glob(build_dir + '/*.sln')
        if proj:
            subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            return
        # try to open as eclipse project
        proj = glob.glob(build_dir + '/.cproject')
        if proj:
            subprocess.call(
                'eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'
                .format(build_dir),
                shell=True)
            subprocess.call('eclipse', shell=True)
            return

        log.error("don't know how to open a '{}' project in {}".format(
            cfg['generator'], build_dir))
    else:
        log.error("config '{}' not found".format(cfg_name))
Example #33
def clean(fips_dir, proj_dir, cfg_name):
    """clean build files

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute project path
    :param cfg_name:    config name (or pattern)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        for cfg in configs:
            log.colored(log.YELLOW, "=== clean: {}".format(cfg['name']))

            build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
            if os.path.isdir(build_dir):
                shutil.rmtree(build_dir)
                log.info("  deleted '{}'".format(build_dir))

            deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg)
            if os.path.isdir(deploy_dir):
                shutil.rmtree(deploy_dir)
                log.info("  deleted '{}'".format(deploy_dir))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))
Example #34
def get_target_list(fips_dir, proj_dir, cfg_name):
    """get project targets config name, only works
    if a cmake run was performed before

    :param fips_dir:        absolute path to fips
    :param proj_dir:        absolute project path
    :param cfg_name:        the config name
    :returns:   (success, targets)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        cfg = configs[0]
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        targets_path = build_dir + '/fips_targets.yml'
        if os.path.isfile(targets_path):
            targets = []
            with open(targets_path) as f:
                targets = yaml.load(f)
            return True, targets
        else:
            return False, []
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))
Example #35
def gen(fips_dir, proj_dir, cfg_name):
    """generate build files with cmake

    :param fips_dir:    absolute path to fips
    :param proj_dir:    absolute path to project
    :param cfg_name:    config name or pattern (e.g. osx-make-debug)
    :returns:           True if successful
    """

    # prepare
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)

    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs:
        for cfg in configs:
            # check if config is valid
            config_valid, _ = config.check_config_valid(fips_dir,
                                                        proj_dir,
                                                        cfg,
                                                        print_errors=True)
            if config_valid:
                if gen_project(fips_dir, proj_dir, cfg, True):
                    num_valid_configs += 1
                else:
                    log.error(
                        "failed to generate build files for config '{}'".
                        format(cfg['name']), False)
            else:
                log.error("'{}' is not a valid config".format(cfg['name']),
                          False)
    else:
        log.error("No configs found for '{}'".format(cfg_name))

    if num_valid_configs != len(configs):
        log.error('{} out of {} configs failed!'.format(
            len(configs) - num_valid_configs, len(configs)))
        return False
    else:
        log.colored(log.GREEN,
                    '{} configs generated'.format(num_valid_configs))
        return True
Example #36
def setup(fips_dir, proj_dir) :
    """setup the Android SDK and NDK"""
    log.colored(log.YELLOW, '=== setup Android SDK/NDK :')

    # first make sure that java is present, otherwise the Android
    # SDK setup will finish without errors, but is not actually usable
    if not java.check_exists(fips_dir) :
        log.error("please install java first (see './fips diag tools')")

    ensure_sdk_dirs(fips_dir)

    # download and setup the Android SDK
    sdk_archive_path = get_androidsdk_archive_path(fips_dir)
    if not os.path.isfile(sdk_archive_path) :        
        sdk_url = get_sdk_url()
        log.info("downloading '{}'...".format(sdk_url))
        urllib.urlretrieve(sdk_url, sdk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(sdk_archive_path))
    log.info("\nunpacking '{}'...".format(sdk_archive_path))
    uncompress(fips_dir, sdk_archive_path)
    log.info("downloading additional SDK files...")
    update_android_sdk(fips_dir, proj_dir)

    # download the Android NDK
    ndk_archive_path = get_androidndk_archive_path(fips_dir)
    if not os.path.isfile(ndk_archive_path) :
        ndk_url = get_ndk_url()
        log.info("downloading '{}'...".format(ndk_url))
        urllib.urlretrieve(ndk_url, ndk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(ndk_archive_path))
    log.info("\nunpacking '{}'...".format(ndk_archive_path))
    uncompress(fips_dir, ndk_archive_path)

    log.colored(log.GREEN, "done.")
Example #37
def run(fips_dir, proj_dir, args) :
    
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    
    proj_name = util.get_project_name_from_dir(proj_dir)

    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')

    build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name)

    cache_file = build_dir + '/CMakeCache.txt'
    if not os.path.isfile(cache_file) :
        log.error('generate project first!')

    if sys.platform == "win32" :
        os.startfile(cache_file)
    else :
        opener ="open" if sys.platform == "darwin" else "xdg-open"
        subprocess.call([opener, cache_file])
Example #38
def make_clean(fips_dir, proj_dir, cfg_name) :
    """perform a 'make clean' on the project

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute path of project dir
    :param cfg_name:    config name or pattern
    """

    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs :
        for cfg in configs :
            config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
            if config_valid :
                log.colored(log.YELLOW, "=== cleaning: {}".format(cfg['name']))

                build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
                result = False
                if cfg['build_tool'] == make.name :
                    result = make.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == ninja.name :
                    result = ninja.run_clean(fips_dir, build_dir)
                elif cfg['build_tool'] == xcodebuild.name :
                    result = xcodebuild.run_clean(fips_dir, build_dir)
                else :
                    result = cmake.run_clean(fips_dir, build_dir)
                    
                if result :
                    num_valid_configs += 1
                else :
                    log.error("Failed to clean config '{}' of project '{}'".format(cfg['name'], proj_name))
            else :
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
    else :
        log.error("No valid configs found for '{}'".format(cfg_name))

    if num_valid_configs != len(configs) :
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False      
    else :
        log.colored(log.GREEN, '{} configs cleaned'.format(num_valid_configs))
        return True
Example #39
def get_imports(fips_dir, proj_dir):
    """get the imports from the fips.yml file in proj_dir

    :param proj_dir:    the project directory
    :returns:           dictionary object with imports (can be empty)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    imports = {}
    if util.is_valid_project_dir(proj_dir):
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic:
            imports = dic['imports']

        # warn if this is an old-style list instead of new style dict
        if imports:
            if type(imports) is list:
                log.warn("imports in '{}/fips.yml' uses obsolete array format".
                         format(proj_dir))

                # convert old style to new dict format
                # FIXME: should be removed after a while
                new_imports = {}
                for dep in imports:
                    dep_url = registry.get_url(fips_dir, dep)
                    if not util.is_git_url(dep_url):
                        log.error(
                            "'{}' cannot be resolved into a git url (in project '{}')"
                            .format(dep_url, proj_name))
                    dep_proj_name = util.get_project_name_from_url(dep_url)
                    new_imports[dep_proj_name] = {}
                    new_imports[dep_proj_name][
                        'git'] = util.get_giturl_from_url(dep_url)
                    new_imports[dep_proj_name][
                        'branch'] = util.get_gitbranch_from_url(dep_url)
                imports = new_imports
            elif type(imports) is dict:
                for dep in imports:
                    if not 'branch' in imports[dep]:
                        imports[dep]['branch'] = 'master'
                    if not 'cond' in imports[dep]:
                        imports[dep]['cond'] = None
                    if not 'git' in imports[dep]:
                        log.error(
                            "no git URL in import '{}' in '{}/fips.yml'!\n".
                            format(dep, proj_dir))
                    if not 'group' in imports[dep]:
                        imports[dep]['group'] = None
            else:
                log.error(
                    "imports in '{}/fips.yml' must be a dictionary!".format(
                        proj_dir))
    return imports
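Roughly, the two fips.yml import styles handled above look like this once parsed (illustrative values):

old_style_imports = ['https://github.com/floooh/fips-glfw']   # obsolete list of git URLs
new_style_imports = {
    'fips-glfw': {
        'git':    'https://github.com/floooh/fips-glfw',
        'branch': 'master',   # defaulted to 'master' when missing
        'cond':   None,       # defaulted when missing
        'group':  None,       # defaulted when missing
    }
}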
Example #40
def run(fips_dir, proj_dir, args):
    if len(args) > 0:
        if len(args) > 1:
            proj_name = args[1]
            proj_dir = util.get_project_dir(fips_dir, proj_name)
        if not util.is_valid_project_dir(proj_dir):
            log.error('{} is not a valid fips project!'.format(proj_name))
        if args[0] == 'build':
            markdeep.build(fips_dir, proj_dir)
        elif args[0] == 'view':
            # 'view' also builds the markdown docs first
            markdeep.build(fips_dir, proj_dir)
            markdeep.view(fips_dir, proj_dir)
        else:
            log.error("expected 'build' or 'serve' arg")
    else:
        log.error("expected 'build' or 'serve' arg")
Example #41
File: clion.py Project: yazici/fips
def run(proj_dir):
    host = util.get_host_platform()
    if host == 'linux':
        try:
            if find_executable("clion.sh") is not None:
                subprocess.Popen('clion.sh {}'.format(proj_dir), cwd=proj_dir, shell=True)
            else:
                subprocess.Popen('clion {}'.format(proj_dir), cwd=proj_dir, shell=True)
        except OSError:
            log.error("Failed to run JetBrains CLion as 'clion' or 'clion.sh'")
    elif host == 'osx':
        try:
            subprocess.Popen('open /Applications/CLion.app --args {}'.format(proj_dir), cwd=proj_dir, shell=True)
        except OSError:
            log.error("Failed to run JetBrains CLion as '/Applications/CLion.app'")
    else:
        log.error("Not supported on this platform")
Example #42
def configure(fips_dir, proj_dir, cfg_name) :
    """run ccmake or cmake-gui on the provided project and config

    :param fips_dir:    absolute fips path
    :param proj_dir:    absolute project dir
    :param cfg_name:    build config name
    """

    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)

    # load configs, if more than one, only use the first one
    configs = config.load(fips_dir, proj_dir, cfg_name)
    fips_yml_defines = dep.get_fips_yml_defines(proj_dir)

    if configs :
        cfg = configs[0]
        log.colored(log.YELLOW, '=== configuring: {}'.format(cfg['name']))

        # Merge fips.yml defines into config defines
        if fips_yml_defines:
            if 'defines' in cfg and cfg['defines']:
                cfg['defines'].update(fips_yml_defines)
            else:
                cfg['defines'] = fips_yml_defines

        # generate build files
        if not gen_project(fips_dir, proj_dir, cfg, True) :
            log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))

        # run ccmake or cmake-gui
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if ccmake.check_exists(fips_dir) :
            ccmake.run(build_dir)
        elif cmake_gui.check_exists(fips_dir) :
            cmake_gui.run(build_dir)
        else :
            log.error("Neither 'ccmake' nor 'cmake-gui' found (run 'fips diag')")
    else :
        log.error("No configs found for '{}'".format(cfg_name))
Example #43
def run(fips_dir, proj_dir, args):
    """run the 'nebula' verb"""
    if len(args) > 0:
        noun = args[0]
        if noun == 'set':
            if len(args) > 2:
                setKey(args[1], args[2])
            else:
                log.error("Expected setting and value")
        elif noun == 'get':
            if len(args) > 1:
                key = argToKey(args[1])
                if key != "":
                    reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                                              base_reg, 0,
                                              _winreg.KEY_READ)
                    keyval, regtype = _winreg.QueryValueEx(reg_key, key)
                    _winreg.CloseKey(reg_key)
                    log.info(keyval)
                else:
                    log.error("Invalid setting")
            else:
                log.error("Expected setting name")
        elif noun == 'cleannidl':
            proj = util.get_project_name_from_dir(proj_dir)
            cfg = settings.get(proj_dir, 'config')
            path = util.get_build_dir(fips_dir, proj, cfg) + "/nidl"
            shutil.rmtree(path, True)
    else:
        try:
            reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, base_reg,
                                      0, _winreg.KEY_READ)
            workval, regtype = _winreg.QueryValueEx(reg_key, "workdir")
            rootval, regtype = _winreg.QueryValueEx(reg_key, "path")
            _winreg.CloseKey(reg_key)
            log.colored(log.YELLOW, "Current settings")
            log.optional("Project directory", workval)
            log.optional("Nebula root directory", rootval)
        except WindowsError:
            log.colored(log.YELLOW, "No Nebula settings in registry\n")
Example #44
def run(fips_dir, proj_dir, args):
    """run the 'open' verb (opens project in IDE)"""
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    cfg_name = None
    if len(args) > 0:
        cfg_name = args[0]
    if not cfg_name:
        cfg_name = settings.get(proj_dir, 'config')

    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]

        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        if not os.path.isdir(build_dir):
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])

        if 'Xcode' in cfg['generator']:
            # find the Xcode project
            proj = glob.glob(build_dir + '/*.xcodeproj')
            subprocess.call(['open', proj[0]])
        elif 'Visual Studio' in cfg['generator']:
            # find the VisualStudio project file
            proj = glob.glob(build_dir + '/*.sln')
            subprocess.call(['cmd', '/c', 'start', proj[0]])
        else:
            log.error("don't know how to open a '{}' project".format(
                cfg['generator']))
    else:
        log.error("config '{}' not found".format(cfg_name))
Example #45
def run(fips_dir, proj_dir, args):
    """run the 'set' verb"""

    # FIXME: this shouldn't be as hardwired as it is, see help() function
    if len(args) > 0:
        noun = args[0]
        if noun == 'config':
            if len(args) > 1:
                cfg_name = args[1]
                settings.set(proj_dir, 'config', cfg_name)
            else:
                log.error('expected config name')
        elif noun == 'target':
            if len(args) > 1:
                target_name = args[1]
                settings.set(proj_dir, 'target', target_name)
            else:
                log.error('expected target name')
        elif noun == 'jobs':
            if len(args) > 1:
                num_jobs = args[1]
                if num_jobs.isdigit():
                    settings.set(proj_dir, 'jobs', int(num_jobs))
                else:
                    log.error("value for setting 'jobs' must be a number")
            else:
                log.error('expected number of build jobs value')
        elif noun == 'ccache':
            if len(args) > 1:
                use_ccache = args[1]
                if use_ccache == 'on':
                    settings.set(proj_dir, 'ccache', True)
                elif use_ccache == 'off':
                    settings.set(proj_dir, 'ccache', False)
                else:
                    log.error(
                        "value for setting 'ccache' must be 'on' or 'off")
        elif noun == 'local':
            if len(args) > 1:
                is_local = args[1]
                if is_local == 'on':
                    settings.set(proj_dir, 'local', True)
                elif is_local == 'off':
                    settings.set(proj_dir, 'local', False)
                else:
                    log.error(
                        "value for setting 'local' must be 'on' or 'off'")
        elif noun == 'vscode-launch-configs':
            if len(args) > 1 and args[1] in ['all', 'minimal', 'skip-build']:
                settings.set(proj_dir, noun, args[1])
            else:
                log.error("expected one of 'all', 'minimal' or 'skip-build'")
        else:
            settings.set(proj_dir, noun, args[1])
    else:
        log.error("expected noun 'config' or 'target'")
Example #46
def generate(input_file, out_src, out_hdr, args):
    """
    :param input_file:  bgfx .sc file
    :param out_src:     must be None
    :param out_hdr:     path of output header file
    :param args:        generator args dict (provides 'shadercPath')
    """
    global shaderc_path
    shaderc_path = args['shadercPath']

    if not os.path.isfile(out_hdr) or genutil.isDirty(Version, [input_file],
                                                      [out_hdr]):
        # deduce shader type
        base_file = os.path.basename(input_file)
        shader_type = None
        if base_file.startswith("vs_"):
            shader_type = "vertex"
        if base_file.startswith("fs_"):
            shader_type = "fragment"
        if base_file.startswith("cs_"):
            shader_type = "compute"

        if not shader_type:
            log.error(
                "Could not identify shader type, please use prefix vs_, fs_ or cs_ on file "
                + input_file)
            return

        # source to bgfx shader compiler
        #shaderc_path = get_shaderc_path()
        include_path = get_include_path()
        basename = get_basename(input_file)

        emsc = BuildShaderTask(input_file, 'glsl', 'asm.js', shader_type, None,
                               basename)

        android = BuildShaderTask(input_file, 'glsl', 'android', shader_type,
                                  None, basename)
        mtl = BuildShaderTask(input_file, 'mtl', 'ios', shader_type, None,
                              basename)

        dx9 = BuildShaderTask(
            input_file, 'dx9', 'windows', shader_type,
            'vs_3_0' if shader_type == 'vertex' else 'ps_3_0', basename)
        dx11 = BuildShaderTask(
            input_file, 'dx11', 'windows', shader_type,
            'vs_4_0' if shader_type == 'vertex' else
            'cs_5_0' if shader_type == 'compute' else 'ps_4_0', basename)
        linux = BuildShaderTask(input_file, 'glsl', 'linux', shader_type, None,
                                basename)
        osx = BuildShaderTask(input_file, 'glsl', 'osx', shader_type, None,
                              basename)

        emsc.start()
        android.start()
        mtl.start()
        dx9.start()
        dx11.start()
        linux.start()
        osx.start()

        emsc.join()
        android.join()
        mtl.join()
        dx9.join()
        dx11.join()
        linux.join()
        osx.join()

        contents = emsc.contents + android.contents + mtl.contents + dx9.contents + dx11.contents + linux.contents + osx.contents
        if len(contents):
            with open(out_hdr, 'w') as f:
                contents = "// #version:{}#\n".format(Version) + contents
                f.write(contents)
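The start()/join() calls above follow the standard threading.Thread pattern; a minimal sketch of what a BuildShaderTask-like worker might look like (assumption: the real class shells out to shaderc and stores the compiler output):

import threading

class ShaderTaskSketch(threading.Thread):
    def __init__(self, lang):
        super(ShaderTaskSketch, self).__init__()
        self.lang = lang
        self.contents = ''
    def run(self):
        # the real task would invoke shaderc here and capture its output
        self.contents = '// compiled for {}\n'.format(self.lang)

tasks = [ShaderTaskSketch(l) for l in ('glsl', 'mtl', 'dx11')]
for t in tasks:
    t.start()
for t in tasks:
    t.join()
combined = ''.join(t.contents for t in tasks)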
Example #47
def check_config_valid(fips_dir, cfg, print_errors=False):
    """check if provided config is valid, and print errors if not

    :param cfg:     a loaded config object
    :returns:       (True, [ errors ]) tuple with result and error messages
    """
    errors = []
    valid = True

    # check whether all required fields are present
    # (NOTE: name and folder should always be present since they are appended
    # during loading)
    required_fields = [
        'name', 'folder', 'platform', 'generator', 'build_tool', 'build_type'
    ]
    for field in required_fields:
        if field not in cfg:
            errors.append("missing field '{}' in '{}'".format(
                field, cfg['path']))
            valid = False

    # check if the platform string is valid
    if not valid_platform(cfg['platform']):
        errors.append("invalid platform name '{}' in '{}'".format(
            cfg['platform'], cfg['path']))
        valid = False

    # check if the platform can be built on current host platform
    if cfg['platform'] not in target_platforms[util.get_host_platform()]:
        errors.append(
            "'{}' is not a valid target platform for host '{}'".format(
                cfg['platform'], util.get_host_platform()))
        valid = False

    # check if the target platform SDK is installed
    if not check_sdk(fips_dir, cfg['platform']):
        errors.append(
            "platform sdk for '{}' not installed (see './fips help setup')".
            format(cfg['platform']))
        valid = False

    # check if the generator name is valid
    if not valid_generator(cfg['generator']):
        errors.append("invalid generator name '{}' in '{}'".format(
            cfg['generator'], cfg['path']))
        valid = False

    # check if build tool is valid
    if not valid_build_tool(cfg['build_tool']):
        errors.append("invalid build_tool name '{}' in '{}'".format(
            cfg['build_tool'], cfg['path']))
        valid = False

    # check if the build tool can be found
    if not check_build_tool(fips_dir, cfg['build_tool']):
        errors.append("build tool '{}' not found".format(cfg['build_tool']))
        valid = False

    # check if build type is valid (Debug, Release, Profiling)
    if not valid_build_type(cfg['build_type']):
        errors.append("invalid build_type '{}' in '{}'".format(
            cfg['build_type'], cfg['path']))
        valid = False

    if print_errors:
        for error in errors:
            log.error(error, False)

    return (valid, errors)
Example #48
def check_config_valid(fips_dir, proj_dir, cfg, print_errors=False):
    """check if provided config is valid, and print errors if not

    :param cfg:     a loaded config object
    :returns:       (True, [ messages ]) tuple with result and error messages
    """
    messages = []
    valid = True

    # check whether all required fields are present
    # (NOTE: name and folder should always be present since they are appended
    # during loading)
    required_fields = [
        'name', 'folder', 'platform', 'generator', 'build_tool', 'build_type'
    ]
    for field in required_fields:
        if field not in cfg:
            messages.append("missing field '{}' in '{}'".format(
                field, cfg['path']))
            valid = False

    # check if the target platform SDK is installed
    if not check_sdk(fips_dir, cfg['platform']):
        messages.append(
            "platform sdk for '{}' not installed (see './fips help setup')".
            format(cfg['platform']))
        valid = False

    # check if the generator name is valid
    if not valid_generator(cfg['generator']):
        messages.append("invalid generator name '{}' in '{}'".format(
            cfg['generator'], cfg['path']))
        valid = False

    # check if build tool is valid
    if not valid_build_tool(cfg['build_tool']):
        messages.append("invalid build_tool name '{}' in '{}'".format(
            cfg['build_tool'], cfg['path']))
        valid = False

    # check if the build tool can be found
    if not check_build_tool(fips_dir, cfg['build_tool']):
        messages.append("build tool '{}' not found".format(cfg['build_tool']))
        valid = False

    # check if build type is valid (Debug, Release, Profiling)
    if not valid_build_type(cfg['build_type']):
        messages.append("invalid build_type '{}' in '{}'".format(
            cfg['build_type'], cfg['path']))
        valid = False

    # check if the toolchain file can be found (if this is a crosscompiling toolchain)
    if cfg['platform'] not in native_platforms:
        toolchain_path = get_toolchain(fips_dir, proj_dir, cfg)
        if not toolchain_path:
            messages.append("toolchain file not found for config '{}'!".format(
                cfg['name']))
            valid = False

    if print_errors:
        for msg in messages:
            log.error(msg, False)

    return (valid, messages)
Example #49
0
def build(fips_dir, proj_dir, cfg_name, target=None):
    """perform a build of config(s) in project

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute path of project dir
    :param cfg_name:    config name or pattern
    :param target:      optional target name (build all if None)
    :returns:           True if build was successful
    """

    # prepare
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)

    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    num_valid_configs = 0
    if configs:
        for cfg in configs:
            # check if config is valid
            config_valid, _ = config.check_config_valid(fips_dir,
                                                        proj_dir,
                                                        cfg,
                                                        print_errors=True)
            if config_valid:
                log.colored(log.YELLOW, "=== building: {}".format(cfg['name']))

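                # make sure the build files for this config are generated before running the build tool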
                if not gen_project(fips_dir, proj_dir, cfg, False):
                    log.error("Failed to generate '{}' of project '{}'".format(
                        cfg['name'], proj_name))

                # select and run build tool
                build_dir = util.get_build_dir(fips_dir, proj_name,
                                               cfg['name'])
                num_jobs = settings.get(proj_dir, 'jobs')
                result = False
                if cfg['build_tool'] == make.name:
                    result = make.run_build(fips_dir, target, build_dir,
                                            num_jobs)
                elif cfg['build_tool'] == ninja.name:
                    result = ninja.run_build(fips_dir, target, build_dir,
                                             num_jobs)
                elif cfg['build_tool'] == xcodebuild.name:
                    result = xcodebuild.run_build(fips_dir, target,
                                                  cfg['build_type'], build_dir,
                                                  num_jobs)
                else:
                    result = cmake.run_build(fips_dir, target,
                                             cfg['build_type'], build_dir,
                                             num_jobs)

                if result:
                    num_valid_configs += 1
                else:
                    log.error(
                        "Failed to build config '{}' of project '{}'".format(
                            cfg['name'], proj_name))
            else:
                log.error("Config '{}' not valid in this environment".format(
                    cfg['name']))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))

    if num_valid_configs != len(configs):
        log.error('{} out of {} configs failed!'.format(
            len(configs) - num_valid_configs, len(configs)))
        return False
    else:
        log.colored(log.GREEN, '{} configs built'.format(num_valid_configs))
        return True
Example #50
0
def run(fips_dir, proj_dir, args):
    log.error("Not supported")
Example #51
0
def run(proj_dir):
    try:
        subprocess.call('code .', cwd=proj_dir, shell=True)
    except OSError:
        log.error("Failed to run Visual Studio Code as 'code'")
Example #52
0
def run(fips_dir, proj_dir, cfg_name, target_name, target_args):
    """run a build target executable

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute path of project dir
    :param cfg_name:    config name or pattern
    :param target_name: the target name
    :param target_args: command line arguments for build target
    """

    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)

    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        for cfg in configs:
            log.colored(
                log.YELLOW, "=== run '{}' (config: {}, project: {}):".format(
                    target_name, cfg['name'], proj_name))

            # find deploy dir where executables live
            deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg)

            cmd_line = []
            if cfg['platform'] in ['emscripten', 'pnacl']:
                # special case: emscripten app
                if cfg['platform'] == 'emscripten':
                    html_name = target_name + '.html'
                else:
                    html_name = target_name + '_pnacl.html'
                if util.get_host_platform() == 'osx':
                    try:
                        subprocess.call([
                            'open http://localhost:8000/{} ; python {}/mod/httpserver.py'
                            .format(html_name, fips_dir)
                        ],
                                        cwd=deploy_dir,
                                        shell=True)
                    except KeyboardInterrupt:
                        pass
                elif util.get_host_platform() == 'win':
                    try:
                        cmd = [
                            'cmd /c start http://localhost:8000/{} && python {}/mod/httpserver.py'
                            .format(html_name, fips_dir)
                        ]
                        subprocess.call(cmd, cwd=deploy_dir, shell=True)
                    except KeyboardInterrupt:
                        pass
                elif util.get_host_platform() == 'linux':
                    try:
                        subprocess.call([
                            'xdg-open http://localhost:8000/{}; python {}/mod/httpserver.py'
                            .format(html_name, fips_dir)
                        ],
                                        cwd=deploy_dir,
                                        shell=True)
                    except KeyboardInterrupt:
                        pass
                else:
                    log.error(
                        "don't know how to start HTML app on this platform")
            elif os.path.isdir('{}/{}.app'.format(deploy_dir, target_name)):
                # special case: Mac app
                cmd_line = [
                    'open', '{}/{}.app'.format(deploy_dir, target_name)
                ]
                if target_args:
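                    # everything after '--args' is forwarded by 'open' to the launched app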
                    cmd_line.append('--args')
            else:
                cmd_line = ['{}/{}'.format(deploy_dir, target_name)]
            if cmd_line:
                if target_args:
                    cmd_line.extend(target_args)
                try:
                    subprocess.call(args=cmd_line, cwd=deploy_dir)
                except OSError as e:
                    log.error("Failed to execute '{}' with '{}'".format(
                        target_name, e.strerror))
Example #53
0
def check_exists_with_error():
    """checks if git exists, and if not throws a fatal error"""
    if not check_exists():
        log.error("git not found, please run and fix './fips diag tools'")
        return False
    return True
Example #54
0
def run(fips_dir, proj_dir, args):
    """run the 'set' verb"""
    if len(args) > 0:
        noun = args[0]
        if noun == 'config':
            if len(args) > 1:
                cfg_name = args[1]
                settings.set(proj_dir, 'config', cfg_name)
            else:
                log.error('expected config name')
        elif noun == 'target':
            if len(args) > 1:
                target_name = args[1]
                settings.set(proj_dir, 'target', target_name)
            else:
                log.error('expected target name')
        elif noun == 'jobs':
            if len(args) > 1:
                num_jobs = args[1]
                if num_jobs.isdigit():
                    settings.set(proj_dir, 'jobs', int(num_jobs))
                else:
                    log.error("value for setting 'jobs' must be a number")
            else:
                log.error('expected number of build jobs value')
        elif noun == 'ccache':
            if len(args) > 1:
                use_ccache = args[1]
                if use_ccache == 'on':
                    settings.set(proj_dir, 'ccache', True)
                elif use_ccache == 'off':
                    settings.set(proj_dir, 'ccache', False)
                else:
                    log.error(
                        "value for setting 'ccache' must be 'on' or 'off'")
        else:
            if len(args) > 1:
                settings.set(proj_dir, noun, args[1])
            else:
                log.error("expected a value for setting '{}'".format(noun))
    else:
        log.error("expected noun 'config' or 'target'")
Example #55
0
def run(proj_dir):
    try:
        proj_name = util.get_project_name_from_dir(proj_dir)
        subprocess.call('code .vscode/{}.code-workspace'.format(proj_name), cwd=proj_dir, shell=True)
    except OSError:
        log.error("Failed to run Visual Studio Code as 'code'") 
Example #56
0
                cmd_line = '{}/{}.app/Contents/MacOS/{}'.format(
                    deploy_dir, target_name, target_name)
            else:
                cmd_line = '{}/{}'.format(deploy_dir, target_name)
            if cmd_line:
                if target_args:
                    cmd_line += ' ' + ' '.join(target_args)
                try:
                    retcode = subprocess.call(args=cmd_line,
                                              cwd=target_cwd,
                                              shell=True)
                except OSError as e:
                    log.error("Failed to execute '{}' with '{}'".format(
                        target_name, e.strerror))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))

    return retcode


#-------------------------------------------------------------------------------
def clean(fips_dir, proj_dir, cfg_name):
    """clean build files

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute project path
    :param cfg_name:    config name (or pattern)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
Example #57
0
def build(fips_dir, proj_dir):
    # target directory will be 'fips-deploy/[proj]-markdeep'
    proj_name = util.get_project_name_from_dir(proj_dir)
    out_dir = util.get_workspace_dir(
        fips_dir) + '/fips-deploy/' + proj_name + '-markdeep'
    log.info('building to: {}...'.format(out_dir))
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)

    # check all .h files for embedded documentation
    hdrs = []
    for root, dirnames, filenames in os.walk(proj_dir):
        for filename in fnmatch.filter(filenames, '*.h'):
            hdrs.append(os.path.join(root, filename).replace('\\', '/'))
    markdeep_files = []
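    # a markdeep doc block starts with '/*# ' and ends with '#*/'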
    capture_begin = re.compile(r'/\*#\s')
    for hdr in hdrs:
        log.info('  parsing {}'.format(hdr))
        capturing = False
        markdeep_lines = []
        with open(hdr, 'r') as src:
            lines = src.readlines()
            for line in lines:
                if "#*/" in line and capturing:
                    capturing = False
                if capturing:
                    # strip one level of leading indentation (4 spaces or a tab)
                    if line.startswith('    '):
                        line = line[4:]
                    elif line.startswith('\t'):
                        line = line[1:]
                    markdeep_lines.append(line)
                if capture_begin.match(line) and not capturing:
                    capturing = True
        if markdeep_lines:
            markdeep_files.append(hdr)
            dst_path = out_dir + '/' + os.path.relpath(hdr, proj_dir) + '.html'
            log.info(
                '    markdeep block(s) found, writing: {}'.format(dst_path))
            dst_dir = os.path.dirname(dst_path)
            if not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
            with open(dst_path, 'w') as dst:
                dst.write(
                    "<meta charset='utf-8' emacsmode='-*- markdown -*-'>\n")
                dst.write(
                    "<link rel='stylesheet' href='https://casual-effects.com/markdeep/latest/apidoc.css?'>\n"
                )
                for line in markdeep_lines:
                    dst.write(line)
                dst.write(
                    "<script>markdeepOptions={tocStyle:'medium'};</script>")
                dst.write(
                    "<!-- Markdeep: --><script src='https://casual-effects.com/markdeep/latest/markdeep.min.js?'></script>"
                )

    # write a toplevel index.html
    if markdeep_files:
        markdeep_files = sorted(markdeep_files)
        dst_path = out_dir + '/index.html'
        log.info('writing toc file: {}'.format(dst_path))
        with open(dst_path, 'w') as dst:
            dst.write("<meta charset='utf-8' emacsmode='-*- markdown -*-'>\n")
            dst.write(
                "<link rel='stylesheet' href='https://casual-effects.com/markdeep/latest/apidoc.css?'>\n"
            )
            dst.write('# {}\n'.format(proj_name))
            for hdr in markdeep_files:
                rel_path = os.path.relpath(hdr, proj_dir)
                dst.write('- [{}]({})\n'.format(rel_path, rel_path + '.html'))
            dst.write("<script>markdeepOptions={tocStyle:'medium'};</script>")
            dst.write(
                "<!-- Markdeep: --><script src='https://casual-effects.com/markdeep/latest/markdeep.min.js?'></script>"
            )
    else:
        log.error("no headers with embedded markdeep found in '{}'!".format(
            proj_dir))
Example #58
0
def _rec_fetch_imports(fips_dir, proj_dir, handled):
    """internal recursive function to fetch project imports,
    keeps an array of already handled dirs to break cyclic dependencies

    :param proj_dir:    current project directory
    :param handled:     array of already handled dirs
    :returns:           updated array of handled dirs
    """
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled:
        handled.append(proj_name)

        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW,
                            "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if not os.path.isdir(dep_proj_dir):
                    # directory did not exist, do a fresh git clone
                    dep = imports[dep_proj_name]
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git_commit:
                        if 'depth' in dep:
                            # when using rev, we may not want depth because the revision may not be reachable
                            log.colored(
                                log.YELLOW,
                                "=== 'depth' was ignored because parameter 'rev' is specified."
                            )
                        dep['depth'] = None
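                    # use the default clone depth unless a revision is pinned or the import sets its own depth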
                    git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep[
                        'depth']
                    git_url = dep['git']
                    git_branch = dep['branch']
                    if git.clone(git_url, git_branch, git_depth, dep_proj_name,
                                 ws_dir):
                        if git_commit:
                            log.colored(
                                log.YELLOW,
                                "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                    else:
                        log.error('failed to git clone {} into {}'.format(
                            git_url, dep_proj_dir))
                else:
                    # directory already exists
                    log.info("dir '{}' exists".format(dep_proj_dir))
                    dep_ok = True

                # recurse into the dependency's own imports
                if dep_ok:
                    handled = _rec_fetch_imports(fips_dir, dep_proj_dir,
                                                 handled)

    # done, return the new handled array
    return handled
Example #59
0
def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd):
    """run a build target executable

    :param fips_dir:    absolute path of fips
    :param proj_dir:    absolute path of project dir
    :param cfg_name:    config name or pattern
    :param target_name: the target name
    :param target_args: command line arguments for build target
    :param target_cwd:  working directory or None
    """

    retcode = 10
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)

    # load the config(s)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        for cfg in configs:
            log.colored(
                log.YELLOW, "=== run '{}' (config: {}, project: {}):".format(
                    target_name, cfg['name'], proj_name))

            # find deploy dir where executables live
            deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
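            # run from the deploy dir unless an explicit working directory was given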
            if not target_cwd:
                target_cwd = deploy_dir

            if cfg['platform'] == 'emscripten':
                # special case: emscripten app
                html_name = target_name + '.html'
                if util.get_host_platform() == 'osx':
                    try:
                        subprocess.call(
                            'open http://localhost:8080/{} ; http-server -c-1 -g'
                            .format(html_name),
                            cwd=target_cwd,
                            shell=True)
                    except KeyboardInterrupt:
                        return 0
                elif util.get_host_platform() == 'win':
                    try:
                        cmd = 'cmd /c start http://localhost:8080/{} && http-server -c-1 -g'.format(
                            html_name)
                        subprocess.call(cmd, cwd=target_cwd, shell=True)
                    except KeyboardInterrupt:
                        return 0
                elif util.get_host_platform() == 'linux':
                    try:
                        subprocess.call(
                            'xdg-open http://localhost:8080/{}; http-server -c-1 -g'
                            .format(html_name),
                            cwd=target_cwd,
                            shell=True)
                    except KeyboardInterrupt:
                        return 0
                else:
                    log.error(
                        "don't know how to start HTML app on this platform")
            elif cfg['platform'] == 'android':
                try:
                    adb_path = android.get_adb_path(fips_dir)
                    pkg_name = android.target_to_package_name(target_name)
                    # Android: first re-install the apk...
                    cmd = '{} install -r {}.apk'.format(adb_path, target_name)
                    subprocess.call(cmd, shell=True, cwd=deploy_dir)
                    # ...then start the apk
                    cmd = '{} shell am start -n {}/android.app.NativeActivity'.format(
                        adb_path, pkg_name)
                    subprocess.call(cmd, shell=True)
                    # ...then run adb logcat
                    cmd = '{} logcat'.format(adb_path)
                    subprocess.call(cmd, shell=True)
                    return 0
                except KeyboardInterrupt:
                    return 0

            elif os.path.isdir('{}/{}.app'.format(deploy_dir, target_name)):
                # special case: Mac app
                cmd_line = '{}/{}.app/Contents/MacOS/{}'.format(
                    deploy_dir, target_name, target_name)
            else:
                cmd_line = '{}/{}'.format(deploy_dir, target_name)
            if cmd_line:
                if target_args:
                    cmd_line += ' ' + ' '.join(target_args)
                try:
                    retcode = subprocess.call(args=cmd_line,
                                              cwd=target_cwd,
                                              shell=True)
                except OSError as e:
                    log.error("Failed to execute '{}' with '{}'".format(
                        target_name, e.strerror))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))

    return retcode
Example #60
0
def run(fips_dir, proj_dir, args):
    """run the 'physx' verb"""
    if len(args) > 0:
        noun = args[0]
        if noun == 'build':
            if sys.platform == "win32":
                # FIXME all of this only works on windows at the moment and is super hacky

                if len(args) != 2:
                    log.error("expected compiler target (win-vs15, win-vs16)")

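                # copy the matching fips preset into the PhysX buildtools and generate the Visual Studio projects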
                preset = util.fix_path(
                    os.path.dirname(os.path.abspath(__file__))
                ) + "/physx-presets/" + "fips" + args[1] + ".xml"
                if not os.path.isfile(preset):
                    log.error("Unrecognized compiler target")
                shutil.copy2(preset,
                             proj_dir + "/../physx/physx/buildtools/presets/")
                subprocess.call(proj_dir +
                                "/../physx/physx/generate_projects.bat fips" +
                                args[1])

                # figure out a version number for vswhere
                version = args[1][6:]
                version_next = str(int(version) + 1)
                version = version + ".0," + version_next + ".0"
                # use vswhere to figure out where Visual Studio is installed
                devenvPath = subprocess.check_output(
                    proj_dir +
                    "/../physx/externals/vswhere/vswhere -version [" +
                    version +
                    "] -property productPath").decode("utf-8").rstrip()
                devenvPath = util.fix_path(devenvPath)
                if not os.path.isfile(devenvPath):
                    # Visual Studio not found, try to find MSBuild instead
                    log.info(
                        "Could not find Visual Studio at '" + devenvPath +
                        "', trying to find the latest version of MSBuild...")
                    devenvPath = subprocess.check_output(
                        proj_dir +
                        "/../physx/externals/vswhere/vswhere -version [" +
                        version +
                        "] -products * -requires Microsoft.Component.MSBuild -property installationPath"
                    ).decode("utf-8").rstrip()
                    devenvPath = util.fix_path(
                        devenvPath + "/MSBuild/Current/Bin/MSBuild.exe")
                    if not os.path.isfile(devenvPath):
                        log.error(
                            "Could not detect Visual Studio installation.")
                    log.optional("Using MSBuild from", devenvPath)

                    log.info("Compiling PhysX, this might take a while.")
                    log.info("Building debug version...")
                    #-noConsoleLogger
                    retcode = subprocess.call(
                        devenvPath + " " + proj_dir +
                        "/../physx/physx/compiler/fips" + args[1] +
                        "/INSTALL.vcxproj /verbosity:quiet /noLogo /noConsoleLogger /property:Configuration=Debug"
                    )
                    if retcode == 0:
                        log.colored(log.GREEN, "PhysX debug build completed.")
                    else:
                        log.colored(log.RED, "PhysX debug build failed!")

                    log.info("Building release version...")
                    retcode = subprocess.call(
                        devenvPath + " " + proj_dir +
                        "/../physx/physx/compiler/fips" + args[1] +
                        "/INSTALL.vcxproj /verbosity:quiet /noLogo /noConsoleLogger /property:Configuration=Release"
                    )
                    if retcode == 0:
                        log.colored(log.GREEN,
                                    "PhysX release build completed.")
                    else:
                        log.colored(log.RED, "PhysX release build failed!")
                else:
                    log.optional("Using Visual Studio from", devenvPath)

                    log.info("Compiling PhysX, this might take a while...")
                    log.info("Building debug version...")
                    retcode = subprocess.call(
                        devenvPath + " " + proj_dir +
                        "/../physx/physx/compiler/fips" + args[1] +
                        "/PhysXSDK.sln /Build debug /Project INSTALL")
                    if retcode == 0:
                        log.colored(log.GREEN, "PhysX debug build completed.")
                    else:
                        log.colored(log.RED, "PhysX debug build failed!")

                    log.info("Building release version...")
                    retcode = subprocess.call(
                        devenvPath + " " + proj_dir +
                        "/../physx/physx/compiler/fips" + args[1] +
                        "/PhysXSDK.sln /Build release /Project INSTALL")
                    if retcode == 0:
                        log.colored(log.GREEN,
                                    "PhysX release build completed.")
                    else:
                        log.colored(log.RED, "PhysX release build failed!")
            else:
                preset = util.fix_path(
                    os.path.dirname(os.path.abspath(
                        __file__))) + "/physx-presets/fipslinux.xml"
                shutil.copy2(preset,
                             proj_dir + "/../physx/physx/buildtools/presets/")
                subprocess.run([
                    proj_dir + "/../physx/physx/generate_projects.sh",
                    "fipslinux"
                ])
                subprocess.run(["make", "-j", "10"],
                               cwd=proj_dir +
                               "/../physx/physx/compiler/fipslinux-checked")
                subprocess.run(["make", "install"],
                               cwd=proj_dir +
                               "/../physx/physx/compiler/fipslinux-checked")
        if noun == 'deploy':
            if sys.platform == "win32":
                cur_cfg = None
                if len(args) > 1:
                    cur_cfg = args[1]
                if not cur_cfg:
                    cur_cfg = settings.get(proj_dir, 'config')

                ps_deploy = util.get_workspace_dir(
                    fips_dir) + "/fips-deploy/physx/bin/"
                cfg = config.load(fips_dir, proj_dir, cur_cfg)[0]
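                # the PX_TARGET define in the config selects which PhysX binary folder to deploy from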
                px_target = cfg['defines']['PX_TARGET']
                target_dir = util.get_deploy_dir(
                    fips_dir, util.get_project_name_from_dir(proj_dir),
                    cur_cfg)

                dllFiles = glob.glob(ps_deploy + px_target + "/debug/*.dll")
                log.info("Looking for PhysX dlls in '{}/'".format(ps_deploy +
                                                                  px_target))
                if not dllFiles:
                    log.error(
                        "PhysX debug dlls not found! Have you built them? (fips physx build [compiler target])"
                    )
                else:
                    for dll in dllFiles:
                        shutil.copy2(dll, target_dir)
                    log.colored(
                        log.GREEN,
                        "Deployed PhysX debug binaries to '{}'".format(
                            target_dir))

                dllFiles = glob.glob(ps_deploy + px_target + "/release/*.dll")
                if not dllFiles:
                    log.error(
                        "PhysX release dlls not found! Have you built them? (fips physx build [compiler target])"
                    )
                else:
                    for dll in dllFiles:
                        shutil.copy2(dll, target_dir)
                    log.colored(
                        log.GREEN,
                        "Deployed PhysX release binaries to '{}'".format(
                            target_dir))
    else:
        log.error("expected noun 'build' or 'deploy'")