Ejemplo n.º 1
0
def check_imports(fips_dir, proj_dir) :
    """do various checks on the imports of a project

    Logs the git sync status of every imported project, then runs
    gather_imports() which dumps additional warnings.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:   None (results are only logged)
    """

    # check whether any imported projects are in sync with the remote git repo
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name in imported_projects :
        # resolve the import name to its directory inside the workspace
        imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)

        # don't git-check the top-level project directory
        if imp_proj_dir != proj_dir :
            num_imports += 1
            log.info("git status of '{}':".format(imp_proj_name))
            if os.path.isdir(imp_proj_dir) :
                if git.check_out_of_sync(imp_proj_dir) :
                    log.warn("  '{}' is out of sync with remote git repo".format(imp_proj_dir))
                else :
                    log.colored(log.GREEN, '  uptodate')
            else :
                log.warn("  '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
    # nothing was checked: only the top-level project itself was found
    if success and num_imports == 0 :
        log.info('  none')

    # gather imports, this will dump warnings
    gather_imports(fips_dir, proj_dir)
Ejemplo n.º 2
0
def _rec_update_imports(fips_dir, proj_dir, handled) :
    """same as _rec_fetch_imports() but for updating the imported projects

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory of the current project
    :param handled:  list of project names already processed (guards
                     against cycles in the import graph)
    :returns:        the updated 'handled' list
    """
    # NOTE(review): ws_dir is computed but never used here
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if os.path.isdir(dep_proj_dir) :
                    # directory exists, try to update it (a missing
                    # directory is NOT cloned here, see warning below)
                    dep = imports[dep_proj_name]
                    # an optional 'rev' entry pins the import to a git revision
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git.has_local_changes(dep_proj_dir) :
                        log.warn("  '{}' has local changes, skipping...".format(dep_proj_dir))
                    else :
                        log.colored(log.BLUE, "  updating '{}'...".format(dep_proj_dir))
                        git.update(dep_proj_dir)
                        if git_commit:
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                else :
                    log.warn("  '{}' does not exist, please run 'fips fetch'".format(dep_proj_dir))
                # recurse into the dependency's own imports
                if dep_ok :
                    handled = _rec_update_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
Ejemplo n.º 3
0
def check_fips(fips_dir) :
    """check whether the fips directory itself is up to date with its
    remote 'master' branch and log the result

    :param fips_dir: absolute fips directory
    """
    log.colored(log.YELLOW, '=== fips:')
    if git.check_branch_out_of_sync(fips_dir, 'master') :
        # fixed garbled message ("is not update" -> "is not up to date")
        log.warn("'fips' is not up to date, please run 'git pull' in fips directory!")
    else :
        log.colored(log.GREEN, '  uptodate')
Ejemplo n.º 4
0
def check_imports(fips_dir, proj_dir) :
    """do various checks on the imports of a project

    Logs the git sync status of every imported project (skipping imports
    that are not git repositories), then runs gather_imports() which
    dumps additional warnings.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:   None (results are only logged)
    """

    # check whether any imported projects are in sync with the remote git repo
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name in imported_projects :
        imp_proj_dir = imported_projects[imp_proj_name]['proj_dir']

        # don't git-check the top-level project directory
        if imp_proj_dir != proj_dir :
            num_imports += 1
            log.info("git status of '{}':".format(imp_proj_name))
            if os.path.isdir(imp_proj_dir) :
                # only run git checks on actual git work trees
                if os.path.isdir("{}/.git".format(imp_proj_dir)) :
                    if git.check_out_of_sync(imp_proj_dir) :
                        log.warn("  '{}' is out of sync with remote git repo".format(imp_proj_dir))
                    else :
                        log.colored(log.GREEN, '  uptodate')
                else :
                    log.colored(log.GREEN, "  '{}' is not a git repository".format(imp_proj_dir))
            else :
                log.warn("  '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
    # nothing was checked: only the top-level project itself was found
    if success and num_imports == 0 :
        log.info('  none')

    # gather imports, this will dump warnings
    gather_imports(fips_dir, proj_dir)
Ejemplo n.º 5
0
def check_local_changes(fips_dir, proj_dir):
    """check if any imports have local, uncommitted changes"""
    log.colored(log.YELLOW, '=== local changes:')
    # guard clause: only meaningful inside a fips project
    if not util.is_valid_project_dir(proj_dir):
        log.warn('currently not in a project directory')
        return
    dep.check_local_changes(fips_dir, proj_dir)
Ejemplo n.º 6
0
def check_fips(fips_dir):
    """check whether the fips directory itself is up to date with its
    remote 'master' branch and log the result

    :param fips_dir: absolute fips directory
    """
    log.colored(log.YELLOW, '=== fips:')
    if git.check_branch_out_of_sync(fips_dir, 'master'):
        # fixed garbled message ("is not update" -> "is not up to date")
        log.warn("'fips' is not up to date, please run 'fips update fips'!")
    else:
        log.colored(log.GREEN, '  uptodate')
Ejemplo n.º 7
0
def check_imports(fips_dir, proj_dir):
    """recursively check imports"""
    log.colored(log.YELLOW, '=== imports:')
    # bail out early when not inside a fips project
    if not util.is_valid_project_dir(proj_dir):
        log.warn('currently not in a project directory')
        return
    dep.check_imports(fips_dir, proj_dir)
Ejemplo n.º 8
0
Archivo: dep.py Proyecto: floooh/fips
def _rec_update_imports(fips_dir, proj_dir, handled) :
    """same as _rec_fetch_imports() but for updating the imported projects

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory of the current project
    :param handled:  list of project names already processed (guards
                     against cycles in the import graph)
    :returns:        the updated 'handled' list
    """
    # NOTE(review): ws_dir is computed but never used here
    ws_dir = util.get_workspace_dir(fips_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    if proj_name not in handled :
        handled.append(proj_name)
        imports = get_imports(fips_dir, proj_dir)
        for dep in imports:
            dep_proj_name = dep
            if dep not in handled:
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
                dep_ok = False
                if os.path.isdir(dep_proj_dir) :
                    # directory exists, try to update it (a missing
                    # directory is NOT cloned here, see warning below)
                    dep = imports[dep_proj_name]
                    # an optional 'rev' entry pins the import to a git revision
                    git_commit = None if 'rev' not in dep else dep['rev']
                    if git.has_local_changes(dep_proj_dir) :
                        log.warn("  '{}' has local changes, skipping...".format(dep_proj_dir))
                    else :
                        log.colored(log.BLUE, "  updating '{}'...".format(dep_proj_dir))
                        git.update(dep_proj_dir)
                        if git_commit:
                            log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
                            dep_ok = git.checkout(dep_proj_dir, git_commit)
                        else:
                            dep_ok = True
                else :
                    log.warn("  '{}' does not exist, please run 'fips fetch'".format(dep_proj_dir))
                # recurse into the dependency's own imports
                if dep_ok :
                    handled = _rec_update_imports(fips_dir, dep_proj_dir, handled)
    # done, return the new handled array
    return handled
Ejemplo n.º 9
0
Archivo: dep.py Proyecto: floooh/fips
def check_local_changes(fips_dir, proj_dir) :
    """this is a variation of check_imports which just checks for local
    (uncommitted or unpushed) changes.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:   None (results are only logged)
    """
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name in imported_projects :
        imp_proj_dir = imported_projects[imp_proj_name]['proj_dir']

        # don't git-check the top-level project directory
        if imp_proj_dir != proj_dir :
            num_imports += 1
            log.info("checking '{}':".format(imp_proj_name))
            if os.path.isdir(imp_proj_dir) :
                if git.has_local_changes(imp_proj_dir) :
                    log.warn("  '{}' has local changes (uncommitted and/or unpushed)".format(imp_proj_dir))
                else :
                    log.colored(log.GREEN, '  no local changes')
            else :
                log.warn("  '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
    # nothing was checked: only the top-level project itself was found
    if success and num_imports == 0 :
        log.info('  none')
Ejemplo n.º 10
0
def setup(fips_dir) :
    """setup the Android SDK and NDK"""
    log.colored(log.YELLOW, '=== setup Android SDK/NDK :')

    # first make sure that java is present, otherwise the Android
    # SDK setup will finish without errors, but is not actually usable
    if not java.check_exists(fips_dir) or not javac.check_exists(fips_dir) :
        log.error("please install Java JDK 8 (see './fips diag tools')")
    ensure_sdk_dirs(fips_dir)

    # download the command line tools archive
    tools_archive_path = get_tools_archive_path(fips_dir)
    tools_url = get_tools_url()
    log.info("downloading '{}'...".format(tools_url))
    urlretrieve(tools_url, tools_archive_path, util.url_download_hook)
    log.info("\nunpacking '{}'...".format(tools_archive_path))
    uncompress(fips_dir, tools_archive_path)

    # install the required SDK components through sdkmanager
    install_package(fips_dir, '"platforms;android-28"')
    install_package(fips_dir, '"build-tools;29.0.3"')
    install_package(fips_dir, 'platform-tools')
    install_package(fips_dir, 'ndk-bundle')

    # check for potentially breaking changes in build setup
    fips_cmake = fips_dir + '/cmake-toolchains/android.toolchain.orig'
    ndk_cmake = get_sdk_dir(fips_dir) + '/ndk-bundle/build/cmake/android.toolchain.cmake'
    if compute_sha256(ndk_cmake, strip_whitespace) != compute_sha256(fips_cmake, strip_whitespace) :
        log.warn('android.toolchain.cmake in fips might be outdated...')

    log.colored(log.GREEN, "done.")
Ejemplo n.º 11
0
def check_local_changes(fips_dir, proj_dir) :
    """this is a variation of check_imports which just checks for local
    (uncommitted or unpushed) changes.

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:   None (results are only logged)
    """
    success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
    num_imports = 0
    for imp_proj_name in imported_projects :
        imp_proj_dir = imported_projects[imp_proj_name]['proj_dir']

        # don't git-check the top-level project directory
        if imp_proj_dir != proj_dir :
            num_imports += 1
            log.info("checking '{}':".format(imp_proj_name))
            if os.path.isdir(imp_proj_dir) :
                if git.has_local_changes(imp_proj_dir) :
                    log.warn("  '{}' has local changes (uncommitted and/or unpushed)".format(imp_proj_dir))
                else :
                    log.colored(log.GREEN, '  no local changes')
            else :
                log.warn("  '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
    # nothing was checked: only the top-level project itself was found
    if success and num_imports == 0 :
        log.info('  none')
Ejemplo n.º 12
0
def check_imports(fips_dir, proj_dir) :
    """recursively check imports"""
    log.colored(log.YELLOW, '=== imports:')
    in_project = util.is_valid_project_dir(proj_dir)
    if not in_project :
        log.warn('currently not in a project directory')
    else :
        dep.check_imports(fips_dir, proj_dir)
Ejemplo n.º 13
0
Archivo: open.py Proyecto: rbxnk/fips
def run(fips_dir, proj_dir, args):
    """run the 'open' verb (opens project in IDE)

    Resolves the active build config (first arg, or the project's
    default), generates the build dir if missing, and opens the project
    in the IDE matching the config: VSCode, CLion, Xcode, Visual Studio
    or Eclipse (checked in that order).
    """
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    # config name from first arg, falling back to the project default
    cfg_name = None
    if len(args) > 0:
        cfg_name = args[0]
    if not cfg_name:
        cfg_name = settings.get(proj_dir, 'config')

    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs:
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]

        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if not os.path.isdir(build_dir):
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])

        # first check if this is a VSCode project
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.run(proj_dir)
            return
        # check if this is a CLion project
        if cfg['build_tool'] == 'clion':
            clion.run(proj_dir)
            return
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj:
            # NOTE(review): shell=True with an interpolated path; paths with
            # shell metacharacters could misbehave -- consider shell=False
            subprocess.call('open "{}"'.format(proj[0]), shell=True)
            return
        # try to open as VS project
        proj = glob.glob(build_dir + '/*.sln')
        if proj:
            subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            return
        # try to open as eclipse project
        proj = glob.glob(build_dir + '/.cproject')
        if proj:
            # headless import first, then launch the IDE itself
            subprocess.call(
                'eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'
                .format(build_dir),
                shell=True)
            subprocess.call('eclipse', shell=True)
            return

        log.error("don't know how to open a '{}' project in {}".format(
            cfg['generator'], build_dir))
    else:
        log.error("config '{}' not found".format(cfg_name))
Ejemplo n.º 14
0
def uninstall(fips_dir):
    """uninstall the WASI SDK after asking the user for confirmation

    :param fips_dir: absolute fips directory
    """
    log.colored(log.YELLOW, "=== uninstalling WASI SDK")
    if sdk_dir_exists(fips_dir):
        sdk_dir = get_sdk_dir(fips_dir)
        # fixed: the closing quote around the directory name was missing
        if util.confirm(log.RED + "Delete directory '{}'?".format(sdk_dir) +
                        log.DEF):
            log.info("Deleting '{}'...".format(sdk_dir))
            shutil.rmtree(sdk_dir)
        else:
            log.info("'No' selected, nothing deleted")
    else:
        log.warn("WASI SDK is not installed, nothing to do")
Ejemplo n.º 15
0
def run(fips_dir, proj_dir, args):
    """run the 'update' verb: 'fips update fips' updates fips itself,
    otherwise the current (or named) project's imports are updated"""
    updating_fips = len(args) > 0 and args[0] == 'fips'
    if updating_fips:
        if git.has_local_changes(fips_dir):
            log.warn("  '{}' has local changes, skipping...".format(fips_dir))
        else:
            log.colored(log.BLUE, "  updating '{}'...".format(fips_dir))
            git.update(fips_dir)
    else:
        # an optional project name overrides the current project dir
        if len(args) > 0:
            proj_dir = util.get_project_dir(fips_dir, args[0])
        dep.update_imports(fips_dir, proj_dir)
Ejemplo n.º 16
0
Archivo: open.py Proyecto: floooh/fips
def run(fips_dir, proj_dir, args) :
    """run the 'open' verb (opens project in IDE)

    Resolves the active build config (first arg, or the project's
    default), generates the build dir if missing, and opens the project
    in the IDE matching the config: VSCode, CLion, Xcode, Visual Studio
    or Eclipse (checked in that order).
    """
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    # config name from first arg, falling back to the project default
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')

    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]

        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if not os.path.isdir(build_dir) :
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])

        # first check if this is a VSCode project
        if cfg['build_tool'] == 'vscode_cmake':
            vscode.run(proj_dir)
            return
        # check if this is a CLion project
        if cfg['build_tool'] == 'clion':
            clion.run(proj_dir)
            return
        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj :
            # NOTE(review): shell=True with an interpolated path; paths with
            # shell metacharacters could misbehave -- consider shell=False
            subprocess.call('open "{}"'.format(proj[0]), shell=True)
            return
        # try to open as VS project
        proj = glob.glob(build_dir + '/*.sln')
        if proj :
            subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
            return
        # try to open as eclipse project
        proj = glob.glob(build_dir + '/.cproject')
        if proj :
            # headless import first, then launch the IDE itself
            subprocess.call('eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'.format(build_dir), shell=True)
            subprocess.call('eclipse', shell=True)
            return

        log.error("don't know how to open a '{}' project in {}".format(cfg['generator'], build_dir))
    else :
        log.error("config '{}' not found".format(cfg_name))
Ejemplo n.º 17
0
def get_imports(fips_dir, proj_dir):
    """get the imports from the fips.yml file in proj_dir

    The returned dict is normalized: old-style list imports are converted
    to the dict format, and missing 'branch'/'cond'/'group' keys are
    filled with defaults.

    :param proj_dir:    the project directory
    :returns:           dictionary object with imports (can be empty)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    imports = {}
    if util.is_valid_project_dir(proj_dir):
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic:
            imports = dic['imports']

        # warn if this is an old-style list instead of new style dict
        if imports:
            if type(imports) is list:
                log.warn("imports in '{}/fips.yml' uses obsolete array format".
                         format(proj_dir))

                # convert old style to new dict format
                # FIXME: should be removed after a while
                new_imports = {}
                for dep in imports:
                    # each list entry is a name resolved through the registry
                    dep_url = registry.get_url(fips_dir, dep)
                    if not util.is_git_url(dep_url):
                        log.error(
                            "'{}' cannot be resolved into a git url (in project '{}')"
                            .format(dep_url, proj_name))
                    dep_proj_name = util.get_project_name_from_url(dep_url)
                    new_imports[dep_proj_name] = {}
                    new_imports[dep_proj_name][
                        'git'] = util.get_giturl_from_url(dep_url)
                    new_imports[dep_proj_name][
                        'branch'] = util.get_gitbranch_from_url(dep_url)
                imports = new_imports
            elif type(imports) is dict:
                # fill in defaults for optional keys (mutates in place)
                for dep in imports:
                    if not 'branch' in imports[dep]:
                        imports[dep]['branch'] = 'master'
                    if not 'cond' in imports[dep]:
                        imports[dep]['cond'] = None
                    if not 'git' in imports[dep]:
                        log.error(
                            "no git URL in import '{}' in '{}/fips.yml'!\n".
                            format(dep, proj_dir))
                    if not 'group' in imports[dep]:
                        imports[dep]['group'] = None
            else:
                log.error(
                    "imports in '{}/fips.yml' must be a dictionary!".format(
                        proj_dir))
    return imports
Ejemplo n.º 18
0
def uninstall(fips_dir):
    """uninstall the emscripten SDK after asking the user for confirmation

    :param fips_dir: absolute fips directory
    """
    emsdk_dir = get_emsdk_dir(fips_dir)
    # fixed: the headline string had a dangling .format() call with no
    # placeholder, which was a no-op
    log.colored(log.YELLOW, "=== uninstalling emscripten SDK")
    # check for any "old" SDK installation
    remove_old_sdks(fips_dir)
    if emsdk_dir_exists(fips_dir):
        if util.confirm(log.RED + "Delete emsdk directory at '{}'?".format(emsdk_dir) + log.DEF):
            log.info("Deleting '{}'...".format(emsdk_dir))
            # onerror handler clears read-only flags set by the SDK installer
            shutil.rmtree(emsdk_dir, onerror=remove_readonly)
        else:
            log.info("'No' selected, nothing deleted")
    else:
        log.warn('emscripten SDK is not installed, nothing to do')
Ejemplo n.º 19
0
def check_branch_out_of_sync(proj_dir, branch) :
    """check whether a local branch differs from its remote counterpart

    Returns True when out of sync; False otherwise (also False when no
    matching remote branch exists, after logging a warning).
    """
    check_exists_with_error()

    remote_branches = get_branches(proj_dir)
    local_rev = get_local_rev(proj_dir, branch)
    if branch not in remote_branches :
        log.warn("'{}' no remote branch found for '{}'".format(proj_dir, branch))
        return False
    remote_rev = get_remote_rev(proj_dir, remote_branches[branch])
    return remote_rev != local_rev
Ejemplo n.º 20
0
def update(proj_dir):
    """runs a 'git pull' followed by a submodule update on the provided
    git repo, but only if the repo has no local changes

    (note: the pull is a plain 'git pull', not 'git pull --rebase')

    :param proj_dir:    a git repo dir
    :returns:           True if the update ran, False if it was skipped
    """
    check_exists_with_error()
    if not has_local_changes(proj_dir):
        subprocess.call('git pull', cwd=proj_dir, shell=True)
        update_submodule(proj_dir)
        return True
    else:
        log.warn('skipping {}, uncommitted or unpushed changes!'.format(proj_dir))
        return False
Ejemplo n.º 21
0
def is_valid_project_dir(proj_dir):
    """test if the provided directory is a valid fips project (has a
    fips.yml file)

    :param proj_dir:    absolute project directory to check
    :returns:           True if a valid fips project
    """
    # a missing directory is warned about and never valid
    if not os.path.isdir(proj_dir):
        log.warn("project dir '{}' does not exist".format(proj_dir))
        return False
    # valid exactly when the directory contains a fips.yml file
    return os.path.isfile(proj_dir + '/fips.yml')
Ejemplo n.º 22
0
Archivo: git.py Proyecto: floooh/fips
def update(proj_dir):
    """runs a 'git pull' followed by a submodule update on the provided
    git repo, but only if the repo has no local changes

    (note: the pull is a plain 'git pull', not 'git pull --rebase')

    :param proj_dir:    a git repo dir
    :returns:           True if the update ran, False if it was skipped
    """
    check_exists_with_error()
    if not has_local_changes(proj_dir):
        subprocess.call('git pull', cwd=proj_dir, shell=True)
        update_submodule(proj_dir)
        return True
    else:
        log.warn('skipping {}, uncommitted or unpushed changes!'.format(proj_dir))
        return False
Ejemplo n.º 23
0
Archivo: git.py Proyecto: floooh/fips
def check_branch_out_of_sync(proj_dir, branch) :
    """check if a single branch is out of sync with remote repo

    :param proj_dir: absolute path of the git repository
    :param branch:   name of the branch to check
    :returns:        True if local and remote revisions differ
                     (False when no matching remote branch exists)
    """
    check_exists_with_error()

    out_of_sync = False
    remote_branches = get_branches(proj_dir)
    local_rev = get_local_rev(proj_dir, branch)
    if branch in remote_branches :
        # compare local revision against the remote tracking revision
        remote_rev = get_remote_rev(proj_dir, remote_branches[branch])
        out_of_sync = remote_rev != local_rev
    else :
        log.warn("'{}' no remote branch found for '{}'".format(proj_dir, branch))

    return out_of_sync
Ejemplo n.º 24
0
def uninstall(fips_dir):
    """uninstall the Dawn SDK after asking the user for confirmation

    :param fips_dir: absolute fips directory
    """
    # fixed: the headline string had a no-op .format() call without a
    # placeholder; also hoist the duplicate get_sdk_dir() call
    sdk_dir = get_sdk_dir(fips_dir)
    log.colored(log.YELLOW, "=== uninstalling Dawn SDK")
    if os.path.isdir(sdk_dir):
        if util.confirm(log.RED +
                        "Delete Dawn SDK directory at '{}'?".format(sdk_dir) +
                        log.DEF):
            log.info("Deleting '{}'...".format(sdk_dir))
            shutil.rmtree(sdk_dir)
        else:
            log.info("'No' selected, nothing deleted")
    else:
        log.warn('Dawn SDK is not installed, nothing to do')
Ejemplo n.º 25
0
def check_branch_out_of_sync(proj_dir, branch) :
    """check if a single branch is out of sync with remote repo

    Returns False (with an error log) when git itself is missing, and
    False (with a warning) when no matching remote branch exists.
    """
    if not check_exists() :
        log.error("git not found, please run and fix './fips diag tools'")
        return False

    remote_branches = get_branches(proj_dir)
    local_rev = get_local_rev(proj_dir, branch)
    if branch not in remote_branches :
        log.warn("'{}' no remote branch found for '{}'".format(proj_dir, branch))
        return False
    return get_remote_rev(proj_dir, remote_branches[branch]) != local_rev
Ejemplo n.º 26
0
Archivo: git.py Proyecto: XoDeR/RE
def check_branch_out_of_sync(proj_dir, branch):
    """check if a single branch is out of sync with remote repo

    :param proj_dir: absolute path of the git repository
    :param branch:   name of the branch to check
    :returns:        True if local and remote revisions differ (False
                     when git is missing or no remote branch exists)
    """
    if not check_exists():
        log.error("git not found, please run and fix './fips diag tools'")
        return False

    out_of_sync = False
    remote_branches = get_branches(proj_dir)
    local_rev = get_local_rev(proj_dir, branch)
    if branch in remote_branches:
        # compare local revision against the remote tracking revision
        remote_rev = get_remote_rev(proj_dir, remote_branches[branch])
        out_of_sync = remote_rev != local_rev
    else:
        log.warn("'{}' no remote branch found for '{}'".format(
            proj_dir, branch))

    return out_of_sync
Ejemplo n.º 27
0
def write_code_workspace_file(fips_dir, proj_dir, vscode_dir, cfg):
    '''write a multiroot-workspace config file'''
    workspace = {
        'folders': [],
        'settings': {}
    }
    # fetch all project dependencies
    success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success :
        log.warn("missing import project directories, please run 'fips fetch'")
    # add dependencies in reverse order, so that main project is first
    for dep_proj_name in reversed(impex):
        folder_path = util.get_project_dir(fips_dir, dep_proj_name)
        workspace['folders'].append({ 'path': folder_path })
    proj_name = util.get_project_name_from_dir(proj_dir)
    ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
    with open(ws_path, 'w') as f:
        json.dump(workspace, f, indent=1, separators=(',',':'))
Ejemplo n.º 28
0
def get_config_dirs(fips_dir, proj_dir):
    """return list of config directories, including all imports

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:        list of all directories with config files
    """
    # fips' own configs dir is always searched
    config_dirs = [fips_dir + '/configs']
    if fips_dir == proj_dir:
        return config_dirs
    success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success:
        log.warn("missing import directories, please run 'fips fetch'")
        return config_dirs
    # pick up an optional 'fips-configs' dir from each import
    for dep_proj_name in result:
        candidate = result[dep_proj_name]['proj_dir'] + '/fips-configs'
        if os.path.isdir(candidate):
            config_dirs.append(candidate)
    return config_dirs
Ejemplo n.º 29
0
Archivo: dep.py Proyecto: floooh/fips
def get_imports(fips_dir, proj_dir) :
    """get the imports from the fips.yml file in proj_dir

    The returned dict is normalized: old-style list imports are converted
    to the dict format, and missing 'branch'/'cond'/'group' keys are
    filled with defaults.

    :param proj_dir:    the project directory
    :returns:           dictionary object with imports (can be empty)
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    imports = {}
    if util.is_valid_project_dir(proj_dir) :
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic :
            imports = dic['imports']

        # warn if this is an old-style list instead of new style dict
        if imports :
            if type(imports) is list :
                log.warn("imports in '{}/fips.yml' uses obsolete array format".format(proj_dir))

                # convert old style to new dict format
                # FIXME: should be removed after a while
                new_imports = {}
                for dep in imports :
                    # each list entry is a name resolved through the registry
                    dep_url = registry.get_url(fips_dir, dep)
                    if not util.is_git_url(dep_url) :
                        log.error("'{}' cannot be resolved into a git url (in project '{}')".format(dep_url, proj_name))
                    dep_proj_name = util.get_project_name_from_url(dep_url)
                    new_imports[dep_proj_name] = {}
                    new_imports[dep_proj_name]['git']    = util.get_giturl_from_url(dep_url)
                    new_imports[dep_proj_name]['branch'] = util.get_gitbranch_from_url(dep_url)
                imports = new_imports
            elif type(imports) is dict :
                # fill in defaults for optional keys (mutates in place)
                for dep in imports :
                    if not 'branch' in imports[dep] :
                        imports[dep]['branch'] = 'master'
                    if not 'cond' in imports[dep] :
                        imports[dep]['cond'] = None
                    if not 'git' in imports[dep] :
                        log.error("no git URL in import '{}' in '{}/fips.yml'!\n".format(dep, proj_dir))
                    if not 'group' in imports[dep] :
                        imports[dep]['group'] = None
            else :
                log.error("imports in '{}/fips.yml' must be a dictionary!".format(proj_dir))
    return imports
Ejemplo n.º 30
0
Archivo: fips.py Proyecto: yazici/fips
def run(fips_path, proj_path, args) :
    """top-level entry point: normalize paths, import all verbs and
    dispatch the command line to the matching verb"""
    fips_path = util.fix_path(fips_path)
    proj_path = util.fix_path(proj_path)
    if ' ' in proj_path:
        log.warn("whitespace in project path detected, fips will not work correctly")
    verb.import_verbs(fips_path, proj_path)
    # without a verb argument, just print a hint
    if len(args) <= 1:
        print("run 'fips help' for more info")
        return
    verb_name = args[1]
    verb_args = args[2:]
    if verb_name in ['help', '--help', '-help'] :
        show_help(verb_args)
    elif verb_name == '--version' :
        log.info(VERSION)
    elif verb_name in verb.verbs :
        verb.verbs[verb_name].run(fips_path, proj_path, verb_args)
    else :
        log.error("unknown verb '{}'".format(verb_name))
Ejemplo n.º 31
0
def get_config_dirs(fips_dir, proj_dir) :
    """return list of config directories, including all imports

    :param fips_dir: absolute fips directory
    :param proj_dir: absolute project directory
    :returns:        list of all directories with config files
    """
    # fips' own configs dir is always searched
    dirs = [ fips_dir + '/configs' ]
    if fips_dir != proj_dir :
        success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
        if success :
            for dep_proj_name in result :
                # resolve each import to its workspace directory and pick
                # up an optional 'fips-configs' subdirectory
                dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
                dep_configs_dir = dep_proj_dir + '/fips-configs'
                if os.path.isdir(dep_configs_dir) :
                    dirs.append(dep_configs_dir)
        else :
            log.warn("missing import directories, please run 'fips fetch'")
    return dirs
Ejemplo n.º 32
0
Archivo: list.py Proyecto: yazici/fips
def list_exports(fips_dir, proj_dir):
    """list project exports

    Logs the exported modules, header search dirs, lib search dirs and
    defines of the current project and all its imports.
    """
    log.colored(log.YELLOW, '=== exports:')
    if util.is_valid_project_dir(proj_dir):
        success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
        if not success:
            # fixed typo in user-facing message: "please un" -> "please run"
            log.warn(
                "missing import project directories, please run 'fips fetch'")
        for dep_proj_name in result:
            cur_dep = result[dep_proj_name]
            log.colored(log.BLUE,
                        "project '{}' exports:".format(dep_proj_name))

            cur_modules = cur_dep['exports']['modules']
            cur_hdrs = cur_dep['exports']['header-dirs']
            cur_libs = cur_dep['exports']['lib-dirs']
            cur_defs = cur_dep['exports']['defines']

            if not (cur_modules or cur_hdrs or cur_libs or cur_defs):
                log.info("    nothing")

            if cur_modules:
                log.info("  modules:")
                for mod in cur_modules:
                    log.info("    {} => {}".format(mod, cur_modules[mod]))

            if cur_hdrs:
                log.info("  header search dirs:")
                for hdr in cur_hdrs:
                    log.info("    {}".format(hdr))

            if cur_libs:
                log.info("  lib search dirs:")
                for lib in cur_libs:
                    log.info("    {}".format(lib))

            if cur_defs:
                log.info("  defines:")
                for define in cur_defs:
                    log.info("    {} => {}".format(define, cur_defs[define]))
    else:
        log.info('  currently not in a valid project directory')
Ejemplo n.º 33
0
def list_imports(fips_dir, proj_dir) :
    """print the imports of each dependency of the current project"""
    log.colored(log.YELLOW, '=== imports:')
    if not util.is_valid_project_dir(proj_dir) :
        log.info('  currently not in a valid project directory')
        return
    success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not success :
        log.warn("missing import project directories, please run 'fips fetch'")
    for proj, info in result.items() :
        # the top-level project also appears in result (without a URL),
        # its import list is simply printed like the others
        log.colored(log.BLUE, "project '{}' imports:".format(proj))
        imports = info['imports']
        if not imports :
            log.info("    nothing")
            continue
        for name, attrs in imports.items() :
            log.info("  '{}' from '{}' at branch '{}'".format(name, attrs['git'], attrs['branch']))
Ejemplo n.º 34
0
def list_imports(fips_dir, proj_dir) :
    """show the import list of every project dependency"""
    log.colored(log.YELLOW, '=== imports:')
    if util.is_valid_project_dir(proj_dir) :
        success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
        if not success :
            log.warn("missing import project directories, please run 'fips fetch'")
        for proj_name, info in result.items() :
            # the top-level project shows up in result without a URL,
            # it is listed the same way as real dependencies
            log.colored(log.BLUE, "project '{}' imports:".format(proj_name))
            imports = info['imports']
            if imports :
                for name in imports :
                    entry = imports[name]
                    log.info("  '{}' from '{}' at branch '{}'".format(name, entry['git'], entry['branch']))
            else :
                log.info("    nothing")
    else :
        log.info('  currently not in a valid project directory')
Ejemplo n.º 35
0
def list_exports(fips_dir, proj_dir) :
    """list the exports (modules, header/lib search dirs, defines) of
    every project dependency

    :param fips_dir: absolute path of fips directory
    :param proj_dir: absolute project directory
    """
    log.colored(log.YELLOW, '=== exports:')
    if util.is_valid_project_dir(proj_dir) :
        success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
        if not success :
            # FIX: message typo 'please un' -> 'please run'
            log.warn("missing import project directories, please run 'fips fetch'")
        for dep_proj_name in result :
            cur_dep = result[dep_proj_name]
            log.colored(log.BLUE, "project '{}' exports:".format(dep_proj_name))

            cur_modules = cur_dep['exports']['modules']
            cur_hdrs = cur_dep['exports']['header-dirs']
            cur_libs = cur_dep['exports']['lib-dirs']
            cur_defs = cur_dep['exports']['defines']

            if not (cur_modules or cur_hdrs or cur_libs or cur_defs) :
                log.info("    nothing")

            if cur_modules :
                log.info("  modules:")
                for mod in cur_modules :
                    log.info("    {} => {}".format(mod, cur_modules[mod]))

            if cur_hdrs :
                log.info("  header search dirs:")
                for hdr in cur_hdrs :
                    log.info("    {}".format(hdr))

            if cur_libs :
                log.info("  lib search dirs:")
                for lib in cur_libs :
                    log.info("    {}".format(lib))

            if cur_defs :
                log.info("  defines:")
                for define in cur_defs :
                    log.info("    {} => {}".format(define, cur_defs[define]))
    else :
        log.info('  currently not in a valid project directory')
Ejemplo n.º 36
0
def cleanup(fips_dir, proj_dir):
    '''delete the .vscode directory of the project and all dependencies
    after asking for confirmation'''
    # resolve all project dependencies
    ok, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not ok:
        log.warn("missing import project directories, please run 'fips fetch'")
    log.info(log.RED + 'Please confirm to delete the following directories:' + log.DEF)
    # candidate .vscode dirs, dependencies last-to-first
    vscode_dirs = [util.get_project_dir(fips_dir, name) + '/.vscode/'
                   for name in reversed(impex)]
    for cur_dir in vscode_dirs:
        if os.path.isdir(cur_dir):
            log.info('  {}'.format(cur_dir))
    if not util.confirm(log.RED + 'Delete those directories?' + log.DEF):
        log.info('Nothing deleted, done.')
        return
    for cur_dir in vscode_dirs:
        # re-check existence right before deleting
        if os.path.isdir(cur_dir):
            log.info('  deleting {}'.format(cur_dir))
            shutil.rmtree(cur_dir)
    log.info('Done.')
Ejemplo n.º 37
0
def write_workspace_settings(fips_dir, proj_dir, cfg, proj_settings):
    '''generate the VSCode config files (launch.json, tasks.json,
    c_cpp_properties.json / cmake-tools settings) from cmake output files
    '''
    log.info("=== writing Visual Studio Code config files...")
    vscode_dir = proj_dir + '/.vscode'
    # create the .vscode dir on demand
    os.makedirs(vscode_dir, exist_ok=True)
    # resolve all project dependencies
    ok, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not ok:
        log.warn("missing import project directories, please run 'fips fetch'")
    installed_exts = list_extensions()
    cmake_tools_installed = any('vector-of-bool.cmake-tools' in ext for ext in installed_exts)
    remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg)
    write_tasks_json(fips_dir, proj_dir, vscode_dir, cfg)
    write_launch_json(fips_dir, proj_dir, vscode_dir, cfg, proj_settings)
    # prefer the cmake-tools extension config when the extension is installed
    if cmake_tools_installed:
        write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg)
    else:
        write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg)
    write_code_workspace_file(fips_dir, proj_dir, impex, cfg)
Ejemplo n.º 38
0
def write_workspace_settings(fips_dir, proj_dir, cfg):
    '''generate the VSCode config files (launch.json, tasks.json,
    c_cpp_properties.json / cmake-tools settings) from cmake output files
    '''
    log.info("=== writing Visual Studio Code config files...")
    vscode_dir = proj_dir + '/.vscode'
    # create the .vscode dir on demand
    os.makedirs(vscode_dir, exist_ok=True)
    # resolve all project dependencies
    ok, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not ok:
        log.warn("missing import project directories, please run 'fips fetch'")
    installed_exts = list_extensions()
    cmake_tools_installed = any('vector-of-bool.cmake-tools' in ext for ext in installed_exts)
    remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg)
    write_tasks_json(fips_dir, proj_dir, vscode_dir, cfg)
    write_launch_json(fips_dir, proj_dir, vscode_dir, cfg)
    # prefer the cmake-tools extension config when the extension is installed
    if cmake_tools_installed:
        write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg)
    else:
        write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg)
    write_code_workspace_file(fips_dir, proj_dir, impex, cfg)
Ejemplo n.º 39
0
def cleanup(fips_dir, proj_dir):
    '''ask for confirmation, then remove the .vscode directory from the
    project and every dependency'''
    # resolve all project dependencies
    ok, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
    if not ok:
        log.warn("missing import project directories, please run 'fips fetch'")
    log.info(log.RED + 'Please confirm to delete the following directories:' + log.DEF)
    # candidate .vscode dirs, dependencies last-to-first
    candidates = [util.get_project_dir(fips_dir, name) + '/.vscode/'
                  for name in reversed(impex)]
    for path in candidates:
        if os.path.isdir(path):
            log.info('  {}'.format(path))
    if not util.confirm(log.RED + 'Delete those directories?' + log.DEF):
        log.info('Nothing deleted, done.')
        return
    for path in candidates:
        # re-check existence right before deleting
        if os.path.isdir(path):
            log.info('  deleting {}'.format(path))
            shutil.rmtree(path)
    log.info('Done.')
Ejemplo n.º 40
0
def run(fips_dir, proj_dir, args):
    """run the 'open' verb (opens project in IDE)"""
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    # config name from first arg, falling back to the project setting
    cfg_name = args[0] if args else None
    if not cfg_name:
        cfg_name = settings.get(proj_dir, 'config')

    # resolve the config; 'open' only makes sense for a single config,
    # so just the first match is considered
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs:
        log.error("config '{}' not found".format(cfg_name))
        return
    cfg = configs[0]

    # make sure a build dir exists, generate it on demand
    build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
    if not os.path.isdir(build_dir):
        log.warn("build dir not found, generating...")
        project.gen(fips_dir, proj_dir, cfg['name'])

    generator = cfg['generator']
    if 'Xcode' in generator:
        # locate and open the Xcode project
        xcodeproj = glob.glob(build_dir + '/*.xcodeproj')
        subprocess.call(['open', xcodeproj[0]])
    elif 'Visual Studio' in generator:
        # locate and open the Visual Studio solution
        sln = glob.glob(build_dir + '/*.sln')
        subprocess.call(['cmd', '/c', 'start', sln[0]])
    else:
        log.error("don't know how to open a '{}' project".format(
            generator))
Ejemplo n.º 41
0
def run(fips_dir, proj_dir, args) :
    """run the 'open' verb (opens project in IDE)

    :param fips_dir: absolute path of fips directory
    :param proj_dir: absolute project directory
    :param args:     command line args (optional config name in args[0])
    """
    if not util.is_valid_project_dir(proj_dir) :
        log.error('must be run in a project directory')
    proj_name = util.get_project_name_from_dir(proj_dir)
    cfg_name = None
    if len(args) > 0 :
        cfg_name = args[0]
    if not cfg_name :
        cfg_name = settings.get(proj_dir, 'config')

    # check the cmake generator of this config
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if configs :
        # hmm, only look at first match, 'open' doesn't
        # make sense with config-patterns
        cfg = configs[0]

        # find build dir, if it doesn't exist, generate it
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg)
        if not os.path.isdir(build_dir) :
            log.warn("build dir not found, generating...")
            project.gen(fips_dir, proj_dir, cfg['name'])

        # try to open as Xcode project
        proj = glob.glob(build_dir + '/*.xcodeproj')
        if proj :
            # FIX: pass args as a list without shell=True, so paths with
            # spaces or shell metacharacters don't break (or worse)
            subprocess.call(['open', proj[0]])
        else :
            # try to open as VS project
            proj = glob.glob(build_dir + '/*.sln')
            if proj :
                subprocess.call(['cmd', '/c', 'start', proj[0]])
            else :
                log.error("don't know how to open a '{}' project".format(cfg['generator']))
    else :
        log.error("config '{}' not found".format(cfg_name))
Ejemplo n.º 42
0
def check_out_of_sync(proj_dir) :
    """check whether the git repo in proj_dir is out of sync with its
    remote: either there are uncommitted local changes, or a local
    branch revision differs from its remote branch revision

    (NOTE: despite what an older version of this doc said, the return
    value is a bool, not an array of branch names)

    :param proj_dir:    a git repo directory
    :returns:           True if out-of-sync, False if everything is up-to-date
    """
    if not check_exists() :
        log.error("git not found, please run and fix './fips diag tools'")
        return False

    out_of_sync = False

    # first check whether there are uncommitted changes
    status, status_output = has_uncommitted_files(proj_dir)
    if status :
        out_of_sync = True
        log.warn("'{}' has uncommitted changes:".format(proj_dir))
        log.info(status_output)

    # check whether local and remote branch are out of sync
    branches_out_of_sync = False
    branches = get_branches(proj_dir)
    if not branches :
        log.warn("'{}' no remote branches found".format(proj_dir))
    for local_branch in branches :
        remote_branch = branches[local_branch]
        remote_rev = get_remote_rev(proj_dir, remote_branch)

        # remote_rev can be None if the remote branch doesn't exists,
        # this is not an error
        if remote_rev :
            local_rev = get_local_rev(proj_dir, local_branch)
            if remote_rev != local_rev :
                out_of_sync = True
                if not branches_out_of_sync:
                    # only show this once
                    log.warn("'{}' branches out of sync:".format(proj_dir))
                    branches_out_of_sync = True
                log.info("  {}: {}".format(local_branch, local_rev))
                log.info("  {}: {}".format(remote_branch, remote_rev))
                    
    return out_of_sync
Ejemplo n.º 43
0
Archivo: git.py Proyecto: XoDeR/RE
def check_out_of_sync(proj_dir):
    """check whether the git repo in proj_dir is out of sync with its
    remote: either there are uncommitted local changes, or a local
    branch revision differs from its remote branch revision

    :param proj_dir:    a git repo directory
    :returns:           True if out-of-sync, False if up-to-date
    """
    if not check_exists():
        log.error("git not found, please run and fix './fips diag tools'")
        return False

    out_of_sync = False

    # uncommitted local files make the repo out-of-sync
    dirty, dirty_output = has_uncommitted_files(proj_dir)
    if dirty:
        out_of_sync = True
        log.warn("'{}' has uncommitted changes:".format(proj_dir))
        log.info(dirty_output)

    # compare each local branch revision against its remote branch
    branches = get_branches(proj_dir)
    if not branches:
        log.warn("'{}' no remote branches found".format(proj_dir))
    header_shown = False
    for local_branch, remote_branch in branches.items():
        remote_rev = get_remote_rev(proj_dir, remote_branch)
        # a missing remote branch is not an error, just skip it
        if not remote_rev:
            continue
        local_rev = get_local_rev(proj_dir, local_branch)
        if remote_rev == local_rev:
            continue
        out_of_sync = True
        if not header_shown:
            # only show this once
            log.warn("'{}' branches out of sync:".format(proj_dir))
            header_shown = True
        log.info("  {}: {}".format(local_branch, local_rev))
        log.info("  {}: {}".format(remote_branch, remote_rev))

    return out_of_sync
Ejemplo n.º 44
0
def gather_imports(fips_dir, proj_dir) :
    """resolve imports of proj_dir and returns a big dictionary
    with all imported data, which can then be written with write_imports()

    :param fips_dir:    absolute fips directory
    :param proj_dir:    absolute project directory
    :returns:           a big dictionary which can be written with write_imports()
                        or None on error
    """
    imported = OrderedDict()
    ws_dir = util.get_workspace_dir(fips_dir)
    success, deps = get_all_imports_exports(fips_dir, proj_dir)

    # track defines/modules seen so far to warn about collisions
    # between different imported projects
    unique_defines = {}
    unique_modules = {}
    
    if success :
        
        # for each project:
        for proj_name in deps :

            imports = deps[proj_name]['imports']
            exports = deps[proj_name]['exports']
            # for each imported project:
            for imp_proj_name in imports :
                
                imported[imp_proj_name] = {}
                imported[imp_proj_name]['modules'] = OrderedDict()
                imported[imp_proj_name]['hdrdirs'] = []
                imported[imp_proj_name]['libdirs'] = []
                imported[imp_proj_name]['defines'] = {}

                # add header search paths
                for imp_hdr in deps[imp_proj_name]['exports']['header-dirs'] :
                    hdr_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_hdr)
                    if not os.path.isdir(hdr_path) :
                        log.warn("header search path '{}' not found in project '{}'".format(hdr_path, imp_proj_name))
                    imported[imp_proj_name]['hdrdirs'].append(hdr_path)

                # add lib search paths
                for imp_lib in deps[imp_proj_name]['exports']['lib-dirs'] :
                    lib_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_lib)
                    if not os.path.isdir(lib_path) :
                        log.warn("lib search path '{}' not found in project '{}'".format(lib_path, imp_proj_name))
                    imported[imp_proj_name]['libdirs'].append(lib_path)

                # add defines
                for imp_def in deps[imp_proj_name]['exports']['defines'] :
                    value = deps[imp_proj_name]['exports']['defines'][imp_def]
                    imported[imp_proj_name]['defines'][imp_def] = value
                    if imp_def in unique_defines :
                        if unique_defines[imp_def] != value :
                            # FIX: was 'unique_defines[define]' which raised
                            # NameError ('define' is undefined here)
                            log.warn("C define collision: '{}={}' in '{}' collides with '{}={}' in earlier import".format(
                                imp_def, value, imp_proj_name, imp_def, unique_defines[imp_def]))
                    # FIX: record unconditionally; was nested inside the
                    # 'in unique_defines' branch, so the map was never
                    # populated and collisions were never detected
                    unique_defines[imp_def] = value

                # for each imported module:
                for imp_mod in deps[imp_proj_name]['exports']['modules'] :
                    imp_mod_src = deps[imp_proj_name]['exports']['modules'][imp_mod]
                    # import module source directory (where module's CMakeLists.txt is)
                    src_dir = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_mod_src)
                    # cmake build subdirectory
                    build_dir = '{}_{}'.format(imp_proj_name, imp_mod)
                    imported[imp_proj_name]['modules'][src_dir] = build_dir
                    if imp_mod in unique_modules :
                        if unique_modules[imp_mod] != src_dir :
                            log.warn("Import module '{}=>{}' in '{}' collides with '{}=>{}' in earlier import".format(
                                imp_mod, src_dir, imp_proj_name, imp_mod, unique_modules[imp_mod]))
                    # FIX: record unconditionally (same dead-code bug as above)
                    unique_modules[imp_mod] = src_dir

        # NOTE: return the imports in reversed order, so that for instance header paths
        # are defined before the modules that need the header paths
        reverseImported = OrderedDict()
        for entry in reversed(imported) :
            reverseImported[entry] = imported[entry]
        return reverseImported

    else :
        log.warn("imports are incomplete, please run 'fips fetch'")
        return None
Ejemplo n.º 45
0
def deploy_webpage(fips_dir, proj_dir, webpage_dir):
    """build the samples webpage into webpage_dir: thumbnail gallery,
    per-sample WebAssembly HTML pages, assets and screenshots

    :param fips_dir:    absolute fips directory
    :param proj_dir:    absolute project directory
    :param webpage_dir: output directory for the generated webpage
    """
    wasm_deploy_dir = util.get_deploy_dir(fips_dir, 'sokol-samples',
                                          BuildConfig)

    # build the thumbnail gallery
    content = ''
    for sample in samples:
        name = sample[0]
        log.info('> adding thumbnail for {}'.format(name))
        url = "{}-sapp.html".format(name)
        ui_url = "{}-sapp-ui.html".format(name)
        img_name = name + '.jpg'
        img_path = proj_dir + '/webpage/' + img_name
        if not os.path.exists(img_path):
            # fall back to a placeholder thumbnail
            img_name = 'dummy.jpg'
            # FIX: was missing the '/' separator ("proj_dir + 'webpage/...'"),
            # which produced a broken path like '/foo/projwebpage/dummy.jpg'
            img_path = proj_dir + '/webpage/dummy.jpg'
        content += '<div class="thumb">'
        content += '  <div class="thumb-title">{}</div>'.format(name)
        if os.path.exists(wasm_deploy_dir + '/' + name + '-sapp-ui.js'):
            content += '<a class="img-btn-link" href="{}"><div class="img-btn">UI</div></a>'.format(
                ui_url)
        content += '  <div class="img-frame"><a href="{}"><img class="image" src="{}"></img></a></div>'.format(
            url, img_name)
        content += '</div>\n'

    # populate the html template, and write to the build directory
    with open(proj_dir + '/webpage/index.html', 'r') as f:
        templ = Template(f.read())
    html = templ.safe_substitute(samples=content)
    with open(webpage_dir + '/index.html', 'w') as f:
        f.write(html)

    # copy other required files
    for name in ['dummy.jpg', 'favicon.png']:
        log.info('> copy file: {}'.format(name))
        shutil.copy(proj_dir + '/webpage/' + name, webpage_dir + '/' + name)

    # generate WebAssembly HTML pages
    for sample in samples:
        name = sample[0]
        source = sample[1]
        glsl = sample[2]
        log.info('> generate wasm HTML page: {}'.format(name))
        for postfix in ['sapp', 'sapp-ui']:
            for ext in ['wasm', 'js']:
                src_path = '{}/{}-{}.{}'.format(wasm_deploy_dir, name, postfix,
                                                ext)
                if os.path.isfile(src_path):
                    shutil.copy(src_path, '{}/'.format(webpage_dir))
                with open(proj_dir + '/webpage/wasm.html', 'r') as f:
                    templ = Template(f.read())
                src_url = GitHubSamplesURL + source
                # hide the GLSL source link when the sample has no shader
                if glsl is None:
                    glsl_url = "."
                    glsl_hidden = "hidden"
                else:
                    glsl_url = GitHubSamplesURL + glsl
                    glsl_hidden = ""
                html = templ.safe_substitute(name=name,
                                             prog=name + '-' + postfix,
                                             source=src_url,
                                             glsl=glsl_url,
                                             hidden=glsl_hidden)
                with open('{}/{}-{}.html'.format(webpage_dir, name, postfix),
                          'w') as f:
                    f.write(html)

    # copy assets from deploy directory
    for asset in assets:
        log.info('> copy asset file: {}'.format(asset))
        src_path = '{}/{}'.format(wasm_deploy_dir, asset)
        if os.path.isfile(src_path):
            shutil.copy(src_path, webpage_dir)
        else:
            log.warn('!!! file {} not found!'.format(src_path))

    # copy the screenshots
    for sample in samples:
        img_name = sample[0] + '.jpg'
        img_path = proj_dir + '/webpage/' + img_name
        if os.path.exists(img_path):
            log.info('> copy screenshot: {}'.format(img_name))
            shutil.copy(img_path, webpage_dir + '/' + img_name)
Ejemplo n.º 46
0
def gather_imports(fips_dir, proj_dir) :
    """resolve imports of proj_dir and returns a big dictionary
    with all imported data, which can then be written with write_imports()

    :param fips_dir:    absolute fips directory
    :param proj_dir:    absolute project directory
    :returns:           a big dictionary which can be written with write_imports()
                        or None on error
    """
    imported = OrderedDict()
    ws_dir = util.get_workspace_dir(fips_dir)
    success, deps = get_all_imports_exports(fips_dir, proj_dir)

    # track defines/modules seen so far to warn about collisions
    # between different imported projects
    unique_defines = {}
    unique_modules = {}
    
    if success :
        
        # for each project:
        for proj_name in deps :

            imports = deps[proj_name]['imports']
            exports = deps[proj_name]['exports']
            # for each imported project:
            for imp_proj_name in sorted(imports) :
                
                imported[imp_proj_name] = {}
                imported[imp_proj_name]['modules'] = OrderedDict()
                imported[imp_proj_name]['hdrdirs'] = []
                imported[imp_proj_name]['condhdrdirs'] = {}
                imported[imp_proj_name]['libdirs'] = []
                imported[imp_proj_name]['defines'] = {}
                imported[imp_proj_name]['cond'] = imports[imp_proj_name]['cond']
                imported[imp_proj_name]['group'] = imports[imp_proj_name]['group']

                # add header search paths
                for imp_hdr in deps[imp_proj_name]['exports']['header-dirs'] :
                    hdr_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_hdr)
                    hdr_path = os.path.normpath(hdr_path).replace('\\', '/')
                    if not os.path.isdir(hdr_path) :
                        log.warn("header search path '{}' not found in project '{}'".format(hdr_path, imp_proj_name))
                    imported[imp_proj_name]['hdrdirs'].append(hdr_path)

                # add conditional header search paths
                for imp_hdr in deps[imp_proj_name]['exports']['conditional-header-dirs'] :
                    if not 'path' in imp_hdr :
                        log.warn("no 'path' key in conditional-header-dirs in project {}".format(imp_proj_name))
                    elif not 'cond' in imp_hdr :
                        log.warn("no 'cond' key in conditional-header-dirs in project {}".format(imp_proj_name))
                    else :
                        hdr_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_hdr['path'])
                        hdr_path = os.path.normpath(hdr_path).replace('\\', '/')
                        if not os.path.isdir(hdr_path) :
                            log.warn("conditional header search path '{}' not found in project '{}'".format(hdr_path, imp_proj_name))
                        value = imp_hdr['cond']
                        imported[imp_proj_name]['condhdrdirs'][hdr_path] = value

                # add lib search paths
                for imp_lib in deps[imp_proj_name]['exports']['lib-dirs'] :
                    lib_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_lib)
                    lib_path = os.path.normpath(lib_path).replace('\\', '/')
                    if not os.path.isdir(lib_path) :
                        log.warn("lib search path '{}' not found in project '{}'".format(lib_path, imp_proj_name))
                    imported[imp_proj_name]['libdirs'].append(lib_path)

                # add defines
                for imp_def in deps[imp_proj_name]['exports']['defines'] :
                    value = deps[imp_proj_name]['exports']['defines'][imp_def]
                    imported[imp_proj_name]['defines'][imp_def] = value
                    if imp_def in unique_defines :
                        if unique_defines[imp_def] != value :
                            # FIX: was 'unique_defines[define]' which raised
                            # NameError ('define' is undefined here)
                            log.warn("C define collision: '{}={}' in '{}' collides with '{}={}' in earlier import".format(
                                imp_def, value, imp_proj_name, imp_def, unique_defines[imp_def]))
                    # FIX: record unconditionally; was nested inside the
                    # 'in unique_defines' branch, so the map was never
                    # populated and collisions were never detected
                    unique_defines[imp_def] = value

                # for each imported module:
                for imp_mod in deps[imp_proj_name]['exports']['modules'] :
                    imp_mod_src = deps[imp_proj_name]['exports']['modules'][imp_mod]
                    # import module source directory (where module's CMakeLists.txt is)
                    src_dir = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_mod_src)
                    # cmake build subdirectory
                    build_dir = '{}_{}'.format(imp_proj_name, imp_mod)
                    imported[imp_proj_name]['modules'][src_dir] = build_dir
                    if imp_mod in unique_modules :
                        if unique_modules[imp_mod] != src_dir :
                            log.warn("Import module '{}=>{}' in '{}' collides with '{}=>{}' in earlier import".format(
                                imp_mod, src_dir, imp_proj_name, imp_mod, unique_modules[imp_mod]))
                    # FIX: record unconditionally (same dead-code bug as above)
                    unique_modules[imp_mod] = src_dir
        return imported

    else :
        log.warn("imports are incomplete, please run 'fips fetch'")
        return None