Example #1
def pytest() -> None:
    """Run pytest with project environment set up properly."""
    import os
    import sys
    import platform
    import subprocess
    from efrotools import get_config, PYTHON_BIN
    from efro.error import CleanError

    # Grab our python paths for the project and stuff them in PYTHONPATH.
    pypaths = get_config(PROJROOT).get('python_paths')
    if pypaths is None:
        raise CleanError('python_paths not found in project config.')

    separator = ';' if platform.system() == 'Windows' else ':'
    os.environ['PYTHONPATH'] = separator.join(pypaths)

    # Also tell Python interpreters not to write __pycache__ dirs everywhere
    # which can screw up our builds.
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'

    # Do the thing.
    results = subprocess.run([PYTHON_BIN, '-m', 'pytest'] + sys.argv[2:],
                             check=False)
    if results.returncode != 0:
        sys.exit(results.returncode)
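
A note on invocation: the wrapper above forwards sys.argv[2:] to the real pytest, so it assumes a CLI entry point that dispatches on sys.argv[1]. A minimal sketch of such a dispatcher (the 'mytool' name and command table are made up for illustration):

import sys

def main() -> None:
    # Hypothetical dispatcher: 'mytool pytest -x tests/' calls the pytest()
    # wrapper above, which forwards '-x tests/' to the real pytest with the
    # project environment applied.
    commands = {'pytest': pytest}  # pytest() as defined in the example above.
    if len(sys.argv) < 2 or sys.argv[1] not in commands:
        raise SystemExit(f'usage: {sys.argv[0]} <command> [args...]')
    commands[sys.argv[1]]()

if __name__ == '__main__':
    main()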
Example #2
def push_ipa(root: pathlib.Path, modename: str) -> None:
    """Construct ios IPA and push it to staging server for device testing.

    This takes some shortcuts to minimize turnaround time;
    it doesn't fully recreate the IPA each run, uses rsync
    for speedy pushes to the staging server, etc.
    The use case for this is quick build iteration on a device
    that is not physically near the build machine.
    """

    # Load both the local and project config data.
    cfg = Config(**get_config(root)['push_ipa_config'])
    lcfg = LocalConfig(**get_localconfig(root)['push_ipa_local_config'])

    if modename not in MODES:
        raise Exception(f'invalid mode: "{modename}"')
    mode = MODES[modename]

    xc_build_path = pathlib.Path(root, 'tools/xc_build_path')
    xcprojpath = pathlib.Path(root, cfg.projectpath)
    app_dir = subprocess.run(
        [xc_build_path, xcprojpath, mode['configuration']],
        check=True,
        capture_output=True).stdout.decode().strip()
    built_app_path = pathlib.Path(app_dir, cfg.app_bundle_name)

    workdir = pathlib.Path(root, 'build', 'push_ipa')
    workdir.mkdir(parents=True, exist_ok=True)

    pathlib.Path(root, 'build').mkdir(parents=True, exist_ok=True)
    exportoptionspath = pathlib.Path(root, workdir, 'exportoptions.plist')
    ipa_dir_path = pathlib.Path(root, workdir, 'ipa')
    ipa_dir_path.mkdir(parents=True, exist_ok=True)

    # Inject our latest build into an existing xcarchive (creating if needed).
    archivepath = _add_build_to_xcarchive(workdir, xcprojpath, built_app_path,
                                          cfg)

    # Export an IPA from said xcarchive.
    ipa_path = _export_ipa_from_xcarchive(archivepath, exportoptionspath,
                                          ipa_dir_path, cfg)

    # And lastly sync said IPA up to our staging server.
    print('Pushing to staging server...')
    sys.stdout.flush()
    subprocess.run(
        [
            'rsync', '--verbose', ipa_path, '-e',
            'ssh -oBatchMode=yes -oStrictHostKeyChecking=yes',
            f'{lcfg.sftp_host}:{lcfg.sftp_dir}'
        ],
        check=True,
    )

    print('iOS Package Updated Successfully!')
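
push_ipa relies on several module-level definitions that are not part of the excerpt (MODES, Config, LocalConfig, and the _add_build_to_xcarchive/_export_ipa_from_xcarchive helpers). A hedged sketch of what the first three might look like, inferred only from how they are used above; the real module may define more fields and modes:

from dataclasses import dataclass

# Hypothetical shapes inferred from usage above; purely illustrative.
MODES = {
    'debug': {'configuration': 'Debug'},
    'release': {'configuration': 'Release'},
}

@dataclass
class Config:
    projectpath: str      # Xcode project path relative to the repo root.
    app_bundle_name: str  # Name of the built .app bundle.

@dataclass
class LocalConfig:
    sftp_host: str  # Staging server, e.g. 'user@staging.example.com'.
    sftp_dir: str   # Remote directory the IPA gets rsynced into.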
Example #3
def sync() -> None:
    """Runs standard syncs between this project and others."""
    import sys
    from efrotools import get_config
    from efrotools.sync import Mode, SyncItem, run_standard_syncs
    mode = Mode(sys.argv[2]) if len(sys.argv) > 2 else Mode.PULL

    # Load sync-items from project config and run them
    sync_items = [
        SyncItem(**i) for i in get_config(PROJROOT).get('sync_items', [])
    ]
    run_standard_syncs(PROJROOT, mode, sync_items)
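
Mode and SyncItem come from efrotools.sync and are not shown; from the call sites above, Mode is constructed from a command-line string (defaulting to Mode.PULL) and each 'sync_items' config entry is a dict of SyncItem constructor kwargs. A rough, purely speculative sketch of the enum:

from enum import Enum

class Mode(Enum):
    # Hypothetical reconstruction based only on the call sites above; the
    # real efrotools.sync Mode surely defines additional members.
    PULL = 'pull'  # Default when no mode argument is passed.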
Example #4
def cpplint(projroot: Path, full: bool) -> None:
    """Run lint-checking on all code deemed lint-able."""
    from concurrent.futures import ThreadPoolExecutor
    from multiprocessing import cpu_count
    from efrotools import get_config
    from efro.terminal import Clr

    os.chdir(projroot)
    filenames = get_code_filenames(projroot)
    if any(' ' in name for name in filenames):
        raise Exception('found space in path; unexpected')

    # Check the config for a list of ones to ignore.
    code_blacklist: List[str] = get_config(projroot).get(
        'cpplint_blacklist', [])

    # Just pretend blacklisted ones don't exist.
    filenames = [f for f in filenames if f not in code_blacklist]
    filenames = [f for f in filenames if not f.endswith('.mm')]

    cachepath = Path(projroot, 'config/.cache-lintcode')
    if full and cachepath.exists():
        cachepath.unlink()

    cache = FileCache(cachepath)

    # Clear out entries and hashes for files that have changed/etc.
    cache.update(filenames, '')
    dirtyfiles = cache.get_dirty_files()

    if dirtyfiles:
        print(f'{Clr.BLU}CppLint checking'
              f' {len(dirtyfiles)} file(s)...{Clr.RST}')

    def lint_file(filename: str) -> None:
        result = subprocess.call(['cpplint', '--root=src', filename])
        if result != 0:
            raise Exception(f'Linting failed for {filename}')

    with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
        # Converting this to a list will propagate any errors.
        list(executor.map(lint_file, dirtyfiles))

    if dirtyfiles:
        cache.mark_clean(filenames)
        cache.write()
    print(
        f'{Clr.GRN}CppLint: all {len(filenames)} files are passing.{Clr.RST}',
        flush=True)
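
FileCache is project-internal and not included in these examples. A minimal hedged sketch of the interface the call sites rely on (update / get_dirty_files / mark_clean / write, plus the entries and curhashes attributes used by the pylint cache code in Example #9), assuming a simple content-hash scheme; the real efrotools implementation may differ substantially:

import hashlib
import json
from pathlib import Path
from typing import Dict, List

class FileCache:
    """Hypothetical minimal file cache keyed on content hashes."""

    def __init__(self, path: Path) -> None:
        self.path = path
        self.entries: Dict[str, dict] = {}
        self.curhashes: Dict[str, str] = {}
        if path.exists():
            self.entries = json.loads(path.read_text())

    def update(self, filenames: List[str], extrahash: str) -> None:
        """Hash current file contents so dirty files can be detected."""
        for fname in filenames:
            digest = hashlib.sha256()
            digest.update(Path(fname).read_bytes())
            digest.update(extrahash.encode())
            self.curhashes[fname] = digest.hexdigest()
            self.entries.setdefault(fname, {})

    def get_dirty_files(self) -> List[str]:
        """Return files whose stored hash is missing or out of date."""
        return [
            fname for fname, hashval in self.curhashes.items()
            if self.entries.get(fname, {}).get('hash') != hashval
        ]

    def mark_clean(self, filenames: List[str]) -> None:
        """Record the current hashes for the given files."""
        for fname in filenames:
            self.entries[fname]['hash'] = self.curhashes[fname]

    def write(self) -> None:
        """Write cache state back to disk."""
        self.path.write_text(json.dumps(self.entries))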
Example #5
def _filter_tool_config(cfg: str) -> str:
    """Filter a tool config string, expanding __EFRO_ project placeholders."""
    import textwrap
    from efrotools import get_config

    # Stick project-root wherever they want.
    cfg = cfg.replace('__EFRO_PROJECT_ROOT__', str(PROJROOT))

    # Short project name.
    short_names = {'ballistica-internal': 'ba-int', 'ballistica': 'ba'}
    shortname = short_names.get(PROJROOT.name, PROJROOT.name)
    cfg = cfg.replace('__EFRO_PROJECT_SHORTNAME__', shortname)

    mypy_standard_settings = textwrap.dedent("""
    # We don't want all of our plain scripts complaining
    # about __main__ being redefined.
    scripts_are_modules = True

    # Try to be as strict as we can about using types everywhere.
    warn_unused_ignores = True
    warn_return_any = True
    warn_redundant_casts = True
    warn_unreachable = True
    warn_unused_configs = True
    disallow_incomplete_defs = True
    disallow_untyped_defs = True
    disallow_untyped_decorators = True
    disallow_untyped_calls = True
    disallow_any_unimported = True
    disallow_subclassing_any = True
    strict_equality = True
    local_partial_types = True
    no_implicit_reexport = True
    """).strip()

    cfg = cfg.replace('__EFRO_MYPY_STANDARD_SETTINGS__',
                      mypy_standard_settings)

    # Gen a pylint init to set up our python paths:
    pylint_init_tag = '__EFRO_PYLINT_INIT__'
    if pylint_init_tag in cfg:
        pypaths = get_config(PROJROOT).get('python_paths')
        if pypaths is None:
            raise RuntimeError('python_paths not set in project config')
        cstr = "init-hook='import sys;"
        for path in pypaths:
            cstr += f" sys.path.append('{PROJROOT}/{path}');"
        cstr += "'"
        cfg = cfg.replace(pylint_init_tag, cstr)
    return cfg
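
For reference, here is the __EFRO_PYLINT_INIT__ expansion logic run on made-up values (the project root and paths below are purely illustrative):

# Illustrative expansion only; real values come from the project config.
projroot = '/home/user/ballistica'
pypaths = ['tools', 'assets/src/ba_data/python']
cstr = "init-hook='import sys;"
for path in pypaths:
    cstr += f" sys.path.append('{projroot}/{path}');"
cstr += "'"
print(cstr)
# Prints (as one line):
# init-hook='import sys; sys.path.append('/home/user/ballistica/tools');
#   sys.path.append('/home/user/ballistica/assets/src/ba_data/python');'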
Example #6
def get_code_filenames(projroot: Path) -> List[str]:
    """Return the list of files to lint-check or auto-formatting."""
    from efrotools import get_config
    exts = ('.h', '.c', '.cc', '.cpp', '.cxx', '.m', '.mm')
    places = get_config(projroot).get('code_source_dirs', None)
    if places is None:
        raise RuntimeError('code_source_dirs not declared in config')
    codefilenames = []
    for place in places:
        for root, _dirs, files in os.walk(place):
            for fname in files:
                if any(fname.endswith(ext) for ext in exts):
                    codefilenames.append(os.path.join(root, fname))
    codefilenames.sort()
    return codefilenames
Example #7
def get_script_filenames(projroot: Path) -> List[str]:
    """Return the Python filenames to lint-check or auto-format."""
    from efrotools import get_config
    filenames = set()
    places = get_config(projroot).get('python_source_dirs', None)
    if places is None:
        raise RuntimeError('python_source_dirs not declared in config')
    for place in places:
        for root, _dirs, files in os.walk(place):
            for fname in files:
                fnamefull = os.path.join(root, fname)
                # Skip symlinks (we conceivably operate on the original too)
                if os.path.islink(fnamefull):
                    continue
                if _should_include_script(fnamefull):
                    filenames.add(fnamefull)
    return sorted(f for f in filenames if 'flycheck_' not in f)
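
The _should_include_script helper is not shown in the excerpt. A hedged guess at its shape, assuming it accepts .py files plus extensionless scripts with a Python shebang; the real project version may apply different rules:

import os

def _should_include_script(fnamefull: str) -> bool:
    # Hypothetical stand-in for the helper used above.
    if fnamefull.endswith('.py'):
        return True
    # Extensionless executables: peek at the first line for a Python shebang.
    if '.' not in os.path.basename(fnamefull):
        try:
            with open(fnamefull, encoding='utf-8') as infile:
                firstline = infile.readline()
        except (OSError, UnicodeDecodeError):
            return False
        return firstline.startswith('#!') and 'python' in firstline
    return False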
Example #8
def tool_config_install() -> None:
    """Install a tool config file (with some filtering)."""
    from efrotools import get_config
    import textwrap
    if len(sys.argv) != 4:
        raise Exception('expected 2 args')
    src = Path(sys.argv[2])
    dst = Path(sys.argv[3])
    with src.open() as infile:
        cfg = infile.read()

    # Do a bit of filtering.

    # Stick project-root wherever they want.
    cfg = cfg.replace('__EFRO_PROJECT_ROOT__', str(PROJROOT))

    stdsettings = textwrap.dedent("""
    # We don't want all of our plain scripts complaining
    # about __main__ being redefined.
    scripts_are_modules = True

    # Try to be as strict as we can about using types everywhere.
    warn_unused_ignores = True
    warn_return_any = True
    warn_redundant_casts = True
    warn_unreachable = True
    disallow_incomplete_defs = True
    disallow_untyped_defs = True
    disallow_untyped_decorators = True
    disallow_untyped_calls = True
    disallow_any_unimported = True
    strict_equality = True
    """).strip()

    cfg = cfg.replace('__EFRO_MYPY_STANDARD_SETTINGS__', stdsettings)

    # Gen a pylint init to set up our python paths:
    pylint_init_tag = '__EFRO_PYLINT_INIT__'
    if pylint_init_tag in cfg:
        pypaths = get_config(PROJROOT).get('python_paths')
        if pypaths is None:
            raise RuntimeError('python_paths not set in project config')
        cstr = "init-hook='import sys;"
        for path in pypaths:
            cstr += f" sys.path.append('{PROJROOT}/{path}');"
        cstr += "'"
        cfg = cfg.replace(pylint_init_tag, cstr)

    # Add an auto-generated notice.
    comment = None
    if dst.name in ['.dir-locals.el']:
        comment = ';;'
    elif dst.name in [
            '.mypy.ini', '.pycheckers', '.pylintrc', '.style.yapf',
            '.clang-format'
    ]:
        comment = '#'
    if comment is not None:
        cfg = (f'{comment} THIS FILE WAS AUTOGENERATED; DO NOT EDIT.\n'
               f'{comment} Source: {src}.\n\n' + cfg)

    with dst.open('w') as outfile:
        outfile.write(cfg)
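
The argv check above implies a command-line contract of the form '<tool> <command> <src> <dst>'. A hypothetical programmatic invocation (tool name, command name, and paths are all made up for illustration):

import sys

# Equivalent to running something like
# 'mytool tool-config-install config/toolconfigsrc/pylintrc .pylintrc'
# from a dispatcher that puts the command name in sys.argv[1].
sys.argv = ['mytool', 'tool-config-install',
            'config/toolconfigsrc/pylintrc', '.pylintrc']
tool_config_install()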
Example #9
def _apply_pylint_run_to_cache(projroot: Path, run: Any, dirtyfiles: List[str],
                               allfiles: List[str], cache: FileCache) -> int:
    # pylint: disable=too-many-locals
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements
    from astroid import modutils
    from efrotools import get_config

    # First off, build a map of dirtyfiles to module names
    # (and the corresponding reverse map).
    paths_to_names: Dict[str, str] = {}
    names_to_paths: Dict[str, str] = {}
    for fname in allfiles:
        try:
            mpath = modutils.modpath_from_file(fname)
            mpath = _filter_module_name('.'.join(mpath))
            paths_to_names[fname] = mpath
        except ImportError:
            # This probably means it's a tool or something not in our
            # standard path. In this case just use its base name
            # (which seems to be what pylint does).
            dummyname = os.path.splitext(os.path.basename(fname))[0]
            paths_to_names[fname] = dummyname
    for key, val in paths_to_names.items():
        names_to_paths[val] = key

    # If there's any cyclic-import errors, just mark all deps as dirty;
    # don't want to add the logic to figure out which ones the cycles cover
    # since they all seem to appear as errors for the last file in the list.
    cycles: int = run.linter.stats.get('by_msg', {}).get('cyclic-import', 0)
    have_dep_cycles: bool = cycles > 0
    if have_dep_cycles:
        print(f'Found {cycles} cycle-errors; keeping all dirty files dirty.')

    # Update dependencies for what we just ran.
    # A run leaves us with a map of modules to a list of the modules that
    # imports them.  We want the opposite though: for each of our modules
    # we want a list of the modules it imports.
    reversedeps = {}

    # Make sure these are all proper module names; no foo.bar.__init__ stuff.
    for key, val in run.linter.stats['dependencies'].items():
        sval = [_filter_module_name(m) for m in val]
        reversedeps[_filter_module_name(key)] = sval
    deps: Dict[str, Set[str]] = {}
    untracked_deps = set()
    for mname, mallimportedby in reversedeps.items():
        for mimportedby in mallimportedby:
            if mname in names_to_paths:
                deps.setdefault(mimportedby, set()).add(mname)
            else:
                untracked_deps.add(mname)

    ignored_untracked_deps: List[str] = get_config(projroot).get(
        'pylint_ignored_untracked_deps', [])

    # Add a few that this package itself triggers.
    ignored_untracked_deps += ['pylint.lint', 'astroid.modutils', 'astroid']

    # Ignore some specific untracked deps; complain about any others.
    untracked_deps = set(dep for dep in untracked_deps
                         if dep not in ignored_untracked_deps)
    if untracked_deps:
        raise Exception(
            f'Found untracked dependencies: {untracked_deps}.'
            ' If these are external to your project, add them to'
            ' "pylint_ignored_untracked_deps" in the project config.')

    # Finally add the dependency lists to our entries (operate on
    # everything in the run; it may not be mentioned in deps).
    no_deps_modules = set()
    for fname in dirtyfiles:
        fmod = paths_to_names[fname]
        if fmod not in deps:

            # Since this code is a bit flaky, let's always announce when
            # we come up empty and keep a whitelist of expected values to
            # ignore.
            no_deps_modules.add(fmod)
            depsval: List[str] = []
        else:
            # Our deps here are module names; store paths.
            depsval = [names_to_paths[dep] for dep in deps[fmod]]
        cache.entries[fname]['deps'] = depsval

    # Optionally print a list of modules with no detected deps so we can make
    # sure this is behaving (disabled by default).
    if no_deps_modules:
        if bool(False):
            print('NOTE: no dependencies found for:',
                  ', '.join(no_deps_modules))

    # Ok, now go through all dirtyfiles involved in this run.
    # Mark them as either errored or clean depending on whether there's
    # error info for them in the run stats.

    # Once again need to convert any foo.bar.__init__ to foo.bar.
    stats_by_module: Dict[str, Any] = {
        _filter_module_name(key): val
        for key, val in run.linter.stats['by_module'].items()
    }
    errcount = 0

    for fname in dirtyfiles:
        mname2 = paths_to_names.get(fname)
        if mname2 is None:
            raise Exception('unable to get module name for "' + fname + '"')
        counts = stats_by_module.get(mname2)

        # 'statement' count seems to be new and always non-zero; ignore it
        if counts is not None:
            counts = {c: v for c, v in counts.items() if c != 'statement'}
        if (counts is not None and any(counts.values())) or have_dep_cycles:
            # print('GOT FAIL FOR', fname, counts)
            if 'hash' in cache.entries[fname]:
                del cache.entries[fname]['hash']
            errcount += 1
        else:
            # print('MARKING FILE CLEAN', mname2, fname)
            cache.entries[fname]['hash'] = cache.curhashes[fname]

    return errcount
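
_filter_module_name is referenced throughout but not shown; the surrounding comments ('no foo.bar.__init__ stuff') suggest it simply normalizes package __init__ module names. A hedged sketch:

def _filter_module_name(mpath: str) -> str:
    # Hypothetical stand-in: strip a trailing '.__init__' so package modules
    # compare equal to their package name; the real version may do more.
    suffix = '.__init__'
    return mpath[:-len(suffix)] if mpath.endswith(suffix) else mpath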
Example #10
def cpplint(projroot: Path, full: bool) -> None:
    """Run lint-checking on all code deemed lint-able."""
    # pylint: disable=too-many-locals
    import tempfile
    from concurrent.futures import ThreadPoolExecutor
    from multiprocessing import cpu_count
    from efrotools import get_config
    from efro.terminal import Clr
    from efro.error import CleanError

    os.chdir(projroot)
    filenames = get_code_filenames(projroot)
    for fpath in filenames:
        if ' ' in fpath:
            raise Exception(f'Found space in path {fpath}; unexpected.')

    # Check the config for a list of ones to ignore.
    code_blacklist: List[str] = get_config(projroot).get(
        'cpplint_blacklist', [])

    # Just pretend blacklisted ones don't exist.
    filenames = [f for f in filenames if f not in code_blacklist]
    filenames = [f for f in filenames if not f.endswith('.mm')]

    cachepath = Path(projroot, 'config/.cache-lintcode')
    if full and cachepath.exists():
        cachepath.unlink()

    cache = FileCache(cachepath)

    # Clear out entries and hashes for files that have changed/etc.
    cache.update(filenames, '')
    dirtyfiles = cache.get_dirty_files()

    if dirtyfiles:
        print(f'{Clr.BLU}CppLint checking'
              f' {len(dirtyfiles)} file(s)...{Clr.RST}')

    # We want to do a few custom modifications to the cpplint module...
    try:
        import cpplint as cpplintmodule
    except Exception as exc:
        raise CleanError('Unable to import cpplint') from exc
    with open(cpplintmodule.__file__) as infile:
        codelines = infile.read().splitlines()
    cheadersline = codelines.index('_C_HEADERS = frozenset([')

    # Extra headers we consider as valid C system headers.
    c_headers = [
        'malloc.h', 'tchar.h', 'jni.h', 'android/log.h', 'EGL/egl.h',
        'libgen.h', 'linux/netlink.h', 'linux/rtnetlink.h', 'android/bitmap.h',
        'android/log.h', 'uuid/uuid.h', 'cxxabi.h', 'direct.h', 'shellapi.h',
        'rpc.h', 'io.h'
    ]
    codelines.insert(cheadersline + 1, ''.join(f"'{h}'," for h in c_headers))

    # Skip unapproved C++ headers check (it flags <mutex>, <thread>, etc.)
    headercheckline = codelines.index(
        "  if include and include.group(1) in ('cfenv',")
    codelines[headercheckline] = (
        "  if False and include and include.group(1) in ('cfenv',")

    def lint_file(filename: str) -> None:
        # Note: 'env' is assigned below, before the executor calls this.
        result = subprocess.call(['cpplint', '--root=src', filename], env=env)
        if result != 0:
            raise CleanError(
                f'{Clr.RED}Cpplint failed for {filename}.{Clr.RST}')

    with tempfile.TemporaryDirectory() as tmpdir:

        # Write our replacement module, make it discoverable, then run.
        with open(tmpdir + '/cpplint.py', 'w') as outfile:
            outfile.write('\n'.join(codelines))
        env = os.environ.copy()
        env['PYTHONPATH'] = tmpdir

        with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
            # Converting this to a list will propagate any errors.
            list(executor.map(lint_file, dirtyfiles))

    if dirtyfiles:
        cache.mark_clean(filenames)
        cache.write()
    print(
        f'{Clr.GRN}CppLint: all {len(filenames)} files are passing.{Clr.RST}',
        flush=True)
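
The temporary-directory trick above works because setting PYTHONPATH for the child process makes the patched cpplint.py shadow the installed module of the same name. A small self-contained demo of that shadowing technique (the module name 'patched_demo' is made up):

import os
import subprocess
import sys
import tempfile

with tempfile.TemporaryDirectory() as tmpdir:
    # Write a throwaway module into the temp dir.
    with open(os.path.join(tmpdir, 'patched_demo.py'), 'w') as outfile:
        outfile.write("VERSION = 'patched'\n")

    # Child processes launched with this env import the temp-dir copy.
    env = os.environ.copy()
    env['PYTHONPATH'] = tmpdir
    subprocess.run(
        [sys.executable, '-c',
         'import patched_demo; print(patched_demo.VERSION)'],
        env=env,
        check=True)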