Example 1
    def build_arch(self, arch):
        super(MobileInsightRecipe, self).build_arch(arch)

        env = self.get_recipe_env(arch)
        # self.build_cython_components(arch)

        with current_directory(self.get_build_dir(arch.arch)):
            hostpython = sh.Command(self.ctx.hostpython)
            app_mk = join(self.get_build_dir(arch.arch), 'Application.mk')
            app_setup = join(self.get_build_dir(arch.arch), 'setup.py')

            if not exists(app_mk):
                shprint(sh.cp, join(self.get_recipe_dir(), 'Application.mk'),
                        app_mk)
            if not exists(app_setup):
                shprint(sh.cp, join(self.get_recipe_dir(), 'setup.py'),
                        app_setup)

            shprint(hostpython,
                    'setup.py',
                    'build_ext',
                    '-v',
                    _env=env,
                    _tail=10,
                    _critical=True)
            shprint(hostpython,
                    'setup.py',
                    'install',
                    '-O2',
                    _env=env,
                    _tail=10,
                    _critical=True)

            build_lib = glob.glob('./build/lib*')
            assert len(build_lib) == 1
            warning('MobileInsight -- stripping mobileinsight')

            shprint(sh.find,
                    build_lib[0],
                    '-name',
                    '*.so',
                    '-exec',
                    env['STRIP'],
                    '{}',
                    ';',
                    _tail=20,
                    _critical=True)

        try:
            warning('Copying GNU STL shared lib to {libs_dir}/{arch}'.format(
                libs_dir=self.ctx.libs_dir, arch=arch))
            shprint(
                sh.cp,
                '{ndk_dir}/sources/cxx-stl/gnu-libstdc++/{toolchain_version}/libs/{arch}/libgnustl_shared.so'
                .format(ndk_dir=self.ctx.ndk_dir,
                        toolchain_version=self.toolchain_version,
                        arch=arch),
                '{libs_dir}/{arch}'.format(libs_dir=self.ctx.libs_dir,
                                           arch=arch))
        except Exception:
            warning('Failed to copy GNU STL shared lib!')
Example 2
def ledger_with_contents(contents):
    return sh.Command('/usr/bin/ledger').bake(no_color=True,
                                              file="-",
                                              _in=contents)
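
A quick usage sketch (the journal text and the balance subcommand are illustrative assumptions): bake() pre-binds the options and the _in stdin, so a later call only needs to supply the subcommand.

journal = "2021/03/15 Coffee\n    Expenses:Food    3 USD\n    Assets:Cash\n"
ledger = ledger_with_contents(journal)
# Runs: ledger --no-color --file=- balance, with `journal` on stdin
print(ledger("balance"))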
Example 3
    def prepare_build_environment(self, user_sdk_dir, user_ndk_dir,
                                  user_android_api, user_ndk_ver,
                                  user_ndk_api):
        '''Checks that build dependencies exist and sets internal variables
        for the Android SDK etc.

        .. warning:: This *must* be called before trying any build stuff

        '''

        self.ensure_dirs()

        if self._build_env_prepared:
            return

        ok = True

        # Work out where the Android SDK is
        sdk_dir = None
        if user_sdk_dir:
            sdk_dir = user_sdk_dir
        # This is the old P4A-specific var
        if sdk_dir is None:
            sdk_dir = environ.get('ANDROIDSDK', None)
        # This seems used more conventionally
        if sdk_dir is None:
            sdk_dir = environ.get('ANDROID_HOME', None)
        # Checks in the buildozer SDK dir, useful for debug tests of p4a
        if sdk_dir is None:
            possible_dirs = glob.glob(
                expanduser(
                    join('~', '.buildozer', 'android', 'platform',
                         'android-sdk-*')))
            possible_dirs = [
                d for d in possible_dirs
                if not (d.endswith('.bz2') or d.endswith('.gz'))
            ]
            if possible_dirs:
                info('Found possible SDK dirs in buildozer dir: {}'.format(
                    ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
                info('Will attempt to use SDK at {}'.format(possible_dirs[0]))
                warning('This SDK lookup is intended for debug only, if you '
                        'use python-for-android much you should probably '
                        'maintain your own SDK download.')
                sdk_dir = possible_dirs[0]
        if sdk_dir is None:
            raise BuildInterruptingException(
                'Android SDK dir was not specified, exiting.')
        self.sdk_dir = realpath(sdk_dir)

        # Check what Android API we're using
        android_api = None
        if user_android_api:
            android_api = user_android_api
            info('Getting Android API version from user argument: {}'.format(
                android_api))
        elif 'ANDROIDAPI' in environ:
            android_api = environ['ANDROIDAPI']
            info('Found Android API target in $ANDROIDAPI: {}'.format(
                android_api))
        else:
            info('Android API target was not set manually, using '
                 'the default of {}'.format(DEFAULT_ANDROID_API))
            android_api = DEFAULT_ANDROID_API
        android_api = int(android_api)
        self.android_api = android_api

        if self.android_api >= 21 and self.archs[0].arch == 'armeabi':
            raise BuildInterruptingException(
                'Asked to build for armeabi architecture with API '
                '{}, but API 21 or greater does not support armeabi'.format(
                    self.android_api),
                instructions=
                'You probably want to build with --arch=armeabi-v7a instead')

        if exists(join(sdk_dir, 'tools', 'bin', 'avdmanager')):
            avdmanager = sh.Command(join(sdk_dir, 'tools', 'bin',
                                         'avdmanager'))
            targets = avdmanager('list',
                                 'target').stdout.decode('utf-8').split('\n')
        elif exists(join(sdk_dir, 'tools', 'android')):
            android = sh.Command(join(sdk_dir, 'tools', 'android'))
            targets = android('list').stdout.decode('utf-8').split('\n')
        else:
            raise BuildInterruptingException(
                'Could not find `android` or `avdmanager` binaries in Android SDK',
                instructions='Make sure the path to the Android SDK is correct'
            )
        apis = [s for s in targets if re.match(r'^ *API level: ', s)]
        apis = [re.findall(r'[0-9]+', s) for s in apis]
        apis = [int(s[0]) for s in apis if s]
        info('Available Android APIs are ({})'.format(', '.join(map(str,
                                                                    apis))))
        if android_api in apis:
            info(('Requested API target {} is available, '
                  'continuing.').format(android_api))
        else:
            raise BuildInterruptingException(
                ('Requested API target {} is not available, install '
                 'it with the SDK android tool.').format(android_api))

        # Find the Android NDK
        # Could also use ANDROID_NDK, but doesn't look like many tools use this
        ndk_dir = None
        if user_ndk_dir:
            ndk_dir = user_ndk_dir
            info('Getting NDK dir from user argument')
        if ndk_dir is None:  # The old P4A-specific dir
            ndk_dir = environ.get('ANDROIDNDK', None)
            if ndk_dir is not None:
                info('Found NDK dir in $ANDROIDNDK: {}'.format(ndk_dir))
        if ndk_dir is None:  # Apparently the most common convention
            ndk_dir = environ.get('NDK_HOME', None)
            if ndk_dir is not None:
                info('Found NDK dir in $NDK_HOME: {}'.format(ndk_dir))
        if ndk_dir is None:  # Another convention (with maven?)
            ndk_dir = environ.get('ANDROID_NDK_HOME', None)
            if ndk_dir is not None:
                info('Found NDK dir in $ANDROID_NDK_HOME: {}'.format(ndk_dir))
        if ndk_dir is None:  # Checks in the buildozer NDK dir, useful
            #                # for debug tests of p4a
            possible_dirs = glob.glob(
                expanduser(
                    join('~', '.buildozer', 'android', 'platform',
                         'android-ndk-r*')))
            if possible_dirs:
                info('Found possible NDK dirs in buildozer dir: {}'.format(
                    ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
                info('Will attempt to use NDK at {}'.format(possible_dirs[0]))
                warning('This NDK lookup is intended for debug only, if you '
                        'use python-for-android much you should probably '
                        'maintain your own NDK download.')
                ndk_dir = possible_dirs[0]
        if ndk_dir is None:
            raise BuildInterruptingException(
                'Android NDK dir was not specified')
        self.ndk_dir = realpath(ndk_dir)

        # Find the NDK version, and check it against what the NDK dir
        # seems to report
        ndk_ver = None
        if user_ndk_ver:
            ndk_ver = user_ndk_ver
            if ndk_dir is not None:
                info('Got NDK version from user argument: {}'.format(
                    ndk_ver))
        if ndk_ver is None:
            ndk_ver = environ.get('ANDROIDNDKVER', None)
            if ndk_ver is not None:
                info('Got NDK version from $ANDROIDNDKVER: {}'.format(ndk_ver))

        self.ndk = 'google'

        try:
            with open(join(ndk_dir, 'RELEASE.TXT')) as fileh:
                reported_ndk_ver = fileh.read().split(' ')[0].strip()
        except IOError:
            pass
        else:
            if reported_ndk_ver.startswith('crystax-ndk-'):
                reported_ndk_ver = reported_ndk_ver[12:]
                self.ndk = 'crystax'
            if ndk_ver is None:
                ndk_ver = reported_ndk_ver
                info(('Got Android NDK version from the NDK dir: {}'
                      ).format(ndk_ver))
            else:
                if ndk_ver != reported_ndk_ver:
                    warning('NDK version was set as {}, but checking '
                            'the NDK dir claims it is {}.'.format(
                                ndk_ver, reported_ndk_ver))
                    warning('The build will try to continue, but it may '
                            'fail and you should check '
                            'that your setting is correct.')
                    warning('If the NDK dir result is correct, you don\'t '
                            'need to manually set the NDK ver.')
        if ndk_ver is None:
            warning('Android NDK version could not be found. This probably '
                    'won\'t cause any problems, but if necessary you can '
                    'set it with `--ndk-version=...`.')
        self.ndk_ver = ndk_ver

        ndk_api = None
        if user_ndk_api:
            ndk_api = user_ndk_api
            info(
                'Getting NDK API version (i.e. minimum supported API) from user argument'
            )
        elif 'NDKAPI' in environ:
            ndk_api = environ.get('NDKAPI', None)
            info('Found NDK API target in $NDKAPI')
        else:
            ndk_api = min(self.android_api, DEFAULT_NDK_API)
            warning(
                'NDK API target was not set manually, using '
                'the default of {} = min(android-api={}, default ndk-api={})'.
                format(ndk_api, self.android_api, DEFAULT_NDK_API))
        ndk_api = int(ndk_api)
        self.ndk_api = ndk_api

        if self.ndk_api > self.android_api:
            raise BuildInterruptingException(
                'Target NDK API is {}, higher than the target Android API {}.'.
                format(self.ndk_api, self.android_api),
                instructions=
                ('The NDK API is a minimum supported API number and must be lower '
                 'than the target Android API'))

        info('Using {} NDK {}'.format(self.ndk.capitalize(), self.ndk_ver))

        virtualenv = sh.which('virtualenv2')
        if virtualenv is None:
            virtualenv = sh.which('virtualenv-2.7')
        if virtualenv is None:
            virtualenv = sh.which('virtualenv')
        if virtualenv is None:
            raise IOError('Couldn\'t find a virtualenv executable, '
                          'you must install this to use p4a.')
        self.virtualenv = virtualenv
        info('Found virtualenv at {}'.format(virtualenv))

        # path to some tools
        self.ccache = sh.which("ccache")
        if not self.ccache:
            info('ccache is missing, the build will not be optimized in the '
                 'future.')
        for cython_fn in ("cython", "cython3", "cython2", "cython-2.7"):
            cython = sh.which(cython_fn)
            if cython:
                self.cython = cython
                break
        else:
            raise BuildInterruptingException('No cython binary found.')
        if not self.cython:
            ok = False
            warning("Missing requirement: cython is not installed")

        # This would need to be changed if supporting multiarch APKs
        arch = self.archs[0]
        platform_dir = arch.platform_dir
        toolchain_prefix = arch.toolchain_prefix
        toolchain_version = None
        self.ndk_platform = join(self.ndk_dir, 'platforms',
                                 'android-{}'.format(self.ndk_api),
                                 platform_dir)
        if not exists(self.ndk_platform):
            warning('ndk_platform doesn\'t exist: {}'.format(
                self.ndk_platform))
            ok = False

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_versions = []
        toolchain_path = join(self.ndk_dir, 'toolchains')
        if isdir(toolchain_path):
            toolchain_contents = glob.glob('{}/{}-*'.format(
                toolchain_path, toolchain_prefix))
            toolchain_versions = [
                split(path)[-1][len(toolchain_prefix) + 1:]
                for path in toolchain_contents
            ]
        else:
            warning('Could not find toolchain subdirectory!')
            ok = False
        toolchain_versions.sort()

        toolchain_versions_gcc = []
        for toolchain_version in toolchain_versions:
            if toolchain_version[0].isdigit():
                # GCC toolchains begin with a number
                toolchain_versions_gcc.append(toolchain_version)

        if toolchain_versions:
            info('Found the following toolchain versions: {}'.format(
                toolchain_versions))
            info('Picking the latest gcc toolchain, here {}'.format(
                toolchain_versions_gcc[-1]))
            toolchain_version = toolchain_versions_gcc[-1]
        else:
            warning('Could not find any toolchain for {}!'.format(
                toolchain_prefix))
            ok = False

        self.toolchain_prefix = toolchain_prefix
        self.toolchain_version = toolchain_version
        # Modify the path so that sh finds modules appropriately
        environ['PATH'] = (
            '{ndk_dir}/toolchains/{toolchain_prefix}-{toolchain_version}/'
            'prebuilt/{py_platform}-x86/bin/:{ndk_dir}/toolchains/'
            '{toolchain_prefix}-{toolchain_version}/prebuilt/'
            '{py_platform}-x86_64/bin/:{ndk_dir}:{sdk_dir}/'
            'tools:{path}').format(sdk_dir=self.sdk_dir,
                                   ndk_dir=self.ndk_dir,
                                   toolchain_prefix=toolchain_prefix,
                                   toolchain_version=toolchain_version,
                                   py_platform=py_platform,
                                   path=environ.get('PATH'))

        for executable in ("pkg-config", "autoconf", "automake", "libtoolize",
                           "tar", "bzip2", "unzip", "make", "gcc", "g++"):
            if not sh.which(executable):
                warning("Missing executable: {} is not installed".format(
                    executable))

        if not ok:
            raise BuildInterruptingException(
                'python-for-android cannot continue due to the missing executables above'
            )
Example 4
def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None):
    print('objs_paths are', objs_paths)

    re_needso = re.compile(
        r'^.*\(NEEDED\)\s+Shared library: \[lib(.*)\.so\]\s*$')
    blacklist_libs = (
        'c',
        'stdc++',
        'dl',
        'python2.7',
        'sdl',
        'sdl_image',
        'sdl_ttf',
        'z',
        'm',
        'GLESv2',
        'jpeg',
        'png',
        'log',

        # bootstrap takes care of sdl2 libs (if applicable)
        'SDL2',
        'SDL2_ttf',
        'SDL2_image',
        'SDL2_mixer',
    )
    found_libs = []
    sofiles = []
    if env and 'READELF' in env:
        readelf = env['READELF']
    elif 'READELF' in os.environ:
        readelf = os.environ['READELF']
    else:
        readelf = sh.which('readelf').strip()
    readelf = sh.Command(readelf).bake('-d')

    dest = dirname(soname)

    for directory in objs_paths:
        for fn in os.listdir(directory):
            fn = join(directory, fn)

            if not fn.endswith('.libs'):
                continue

            dirfn = fn[:-1] + 'dirs'
            if not exists(dirfn):
                continue

            with open(fn) as f:
                libs = f.read().strip().split(' ')
                needed_libs = [
                    lib for lib in libs if lib and lib not in blacklist_libs
                    and lib not in found_libs
                ]

            while needed_libs:
                print('need libs:\n\t' + '\n\t'.join(needed_libs))

                start_needed_libs = needed_libs[:]
                found_sofiles = []

                with open(dirfn) as f:
                    libdirs = f.read().split()
                    for libdir in libdirs:
                        if not needed_libs:
                            break

                        if libdir == dest:
                            # don't need to copy from dest to dest!
                            continue

                        libdir = libdir.strip()
                        print('scanning', libdir)
                        for lib in needed_libs[:]:
                            if lib in found_libs:
                                continue

                            if lib.endswith('.a'):
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                            lib_a = 'lib' + lib + '.a'
                            libpath_a = join(libdir, lib_a)
                            lib_so = 'lib' + lib + '.so'
                            libpath_so = join(libdir, lib_so)
                            plain_so = lib + '.so'
                            plainpath_so = join(libdir, plain_so)

                            sopath = None
                            if exists(libpath_so):
                                sopath = libpath_so
                            elif exists(plainpath_so):
                                sopath = plainpath_so

                            if sopath:
                                print('found', lib, 'in', libdir)
                                found_sofiles.append(sopath)
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                            if exists(libpath_a):
                                print('found', lib, '(static) in', libdir)
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                for sofile in found_sofiles:
                    print('scanning dependencies for', sofile)
                    out = readelf(sofile)
                    for line in out.splitlines():
                        needso = re_needso.match(line)
                        if needso:
                            lib = needso.group(1)
                            if (lib not in needed_libs
                                    and lib not in found_libs
                                    and lib not in blacklist_libs):
                                needed_libs.append(needso.group(1))

                sofiles += found_sofiles

                if needed_libs == start_needed_libs:
                    raise RuntimeError(
                        'Failed to locate needed libraries!\n\t' +
                        '\n\t'.join(needed_libs))

    print('Copying libraries')
    for lib in sofiles:
        shprint(sh.cp, lib, dest)
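
A hedged invocation sketch (the paths are made up for illustration): the destination directory is derived from dirname(soname), and each directory in objs_paths is scanned for the .libs/.libdirs metadata files.

copylibs_function('/dist/libs/armeabi-v7a/libmain.so',
                  ['/build/objs/app', '/build/objs/deps'])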
Example 5
def prepare_commands():
    notify_send_cmd = sh.Command("notify-send")
    notify_send_cmd = notify_send_cmd.bake("--icon=gtk-info")
    notmuch_cmd = sh.Command("notmuch")
    return notify_send_cmd, notmuch_cmd
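
A hedged usage sketch (the notification text and the notmuch query are assumptions, not from the original):

notify_send_cmd, notmuch_cmd = prepare_commands()
# notmuch count prints the number of matching messages
count = notmuch_cmd("count", "tag:unread").strip()
# The baked --icon option is prepended to every notify-send call
notify_send_cmd("New mail", "{} unread message(s)".format(count))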
Example 6
from __future__ import unicode_literals

from collections import namedtuple
from json import dumps

import sh

try:
    pip = sh.Command('pip')
    PIP = True
except sh.CommandNotFound:
    PIP = False


class PIPNotFound(Exception):
    pass


class PropertyNamespace(object):
    _registry = {}

    @classmethod
    def get(cls, name):
        return cls._registry[name]

    @classmethod
    def get_all(cls):
        return cls._registry.values()

    def __init__(self, name, label):
        self.name = name
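
A small guard, assumed rather than taken from the original module, shows how the PIP flag and the PIPNotFound exception are meant to combine:

def require_pip():
    # Hypothetical helper: fail loudly when the pip binary was not
    # found at import time, instead of hitting an unbound `pip` later.
    if not PIP:
        raise PIPNotFound('pip executable not found on PATH')
    return pip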
Example 7
File: app.py Project: zozs/wiki
app.config['GIT_EXECUTABLE'] = 'git'
try:
    app.config.from_pyfile(
        os.path.join(app.config.get('CONTENT_DIR'), 'config.py'))
except IOError:
    print(
        "Startup Failure: You need to place a "
        "config.py in your content directory.")

manager = Manager(app)

loginmanager = LoginManager()
loginmanager.init_app(app)
loginmanager.login_view = 'user_login'

git = sh.Command(
    app.config.get('GIT_EXECUTABLE')).bake(_cwd=app.config.get('CONTENT_DIR'))
"""
    Wiki classes
    ~~~~~~~~~~~~
"""


class Processors(object):
    """This class is collection of processors for various content items.
    """
    def __init__(self, content=""):
        """Initialization function.  Runs Processors().pre() on content.

        Args:
            None
Example 8
    def run_distribute(self):
        info_main(
            '# Creating Android project from build and {} bootstrap'.format(
                self.name))

        info(
            'This currently just copies the build stuff straight from the build dir.'
        )
        shprint(sh.rm, '-rf', self.dist_dir)
        shprint(sh.cp, '-r', self.build_dir, self.dist_dir)
        with current_directory(self.dist_dir):
            with open('local.properties', 'w') as fileh:
                fileh.write('sdk.dir={}\n'.format(self.ctx.sdk_dir))
                fileh.write('ndk.dir={}\n'.format(self.ctx.ndk_dir))

        arch = self.ctx.archs[0]
        if len(self.ctx.archs) > 1:
            raise ValueError(
                'built for more than one arch, but bootstrap cannot handle that yet'
            )
        info('Bootstrap running with arch {}'.format(arch))

        with current_directory(self.dist_dir):
            info('Copying python distribution')

            if not exists(
                    'private') and not self.ctx.python_recipe.from_crystax:
                shprint(sh.mkdir, 'private')
            if not exists(
                    'crystax_python') and self.ctx.python_recipe.from_crystax:
                shprint(sh.mkdir, 'crystax_python')
                shprint(sh.mkdir, 'crystax_python/crystax_python')
            if not exists('assets'):
                shprint(sh.mkdir, 'assets')

            hostpython = sh.Command(self.ctx.hostpython)
            if not self.ctx.python_recipe.from_crystax:
                try:
                    shprint(hostpython,
                            '-OO',
                            '-m',
                            'compileall',
                            self.ctx.get_python_install_dir(),
                            _tail=10,
                            _filterout="^Listing")
                except sh.ErrorReturnCode:
                    pass
                if not exists('python-install'):
                    shprint(sh.cp, '-a', self.ctx.get_python_install_dir(),
                            './python-install')

            self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)])
            self.distribute_aars(arch)
            self.distribute_javaclasses(self.ctx.javaclass_dir)

            if not self.ctx.python_recipe.from_crystax:
                info('Filling private directory')
                if not exists(join('private', 'lib')):
                    info('private/lib does not exist, making')
                    shprint(sh.cp, '-a', join('python-install', 'lib'),
                            'private')
                shprint(sh.mkdir, '-p', join('private', 'include',
                                             'python2.7'))

                if exists(join('libs', arch.arch, 'libpymodules.so')):
                    shprint(sh.mv, join('libs', arch.arch, 'libpymodules.so'),
                            'private/')
                shprint(
                    sh.cp,
                    join('python-install', 'include',
                         'python2.7', 'pyconfig.h'),
                    join('private', 'include', 'python2.7/'))

                info('Removing some unwanted files')
                shprint(sh.rm, '-f', join('private', 'lib', 'libpython2.7.so'))
                shprint(sh.rm, '-rf', join('private', 'lib', 'pkgconfig'))

                libdir = join(self.dist_dir, 'private', 'lib', 'python2.7')
                site_packages_dir = join(libdir, 'site-packages')
                with current_directory(libdir):
                    # shprint(sh.xargs, 'rm', sh.grep('-E', '*\.(py|pyx|so\.o|so\.a|so\.libs)$', sh.find('.')))
                    removes = []
                    for dirname, something, filens in walk('.'):
                        for filename in filens:
                            for suffix in ('py', 'pyc', 'so.o', 'so.a',
                                           'so.libs'):
                                if filename.endswith(suffix):
                                    removes.append(filename)
                    shprint(sh.rm, '-f', *removes)

                    info('Deleting some other stuff not used on android')
                    # To quote the original distribute.sh, 'well...'
                    # shprint(sh.rm, '-rf', 'ctypes')
                    shprint(sh.rm, '-rf', 'lib2to3')
                    shprint(sh.rm, '-rf', 'idlelib')
                    for filename in glob.glob('config/libpython*.a'):
                        shprint(sh.rm, '-f', filename)
                    shprint(sh.rm, '-rf', 'config/python.o')
                    # shprint(sh.rm, '-rf', 'lib-dynload/_ctypes_test.so')
                    # shprint(sh.rm, '-rf', 'lib-dynload/_testcapi.so')

            else:  # Python *is* loaded from crystax
                ndk_dir = self.ctx.ndk_dir
                py_recipe = self.ctx.python_recipe
                python_dir = join(ndk_dir, 'sources', 'python',
                                  py_recipe.version, 'libs', arch.arch)

                shprint(sh.cp, '-r', join(python_dir, 'stdlib.zip'),
                        'crystax_python/crystax_python')
                shprint(sh.cp, '-r', join(python_dir, 'modules'),
                        'crystax_python/crystax_python')
                shprint(sh.cp, '-r', self.ctx.get_python_install_dir(),
                        'crystax_python/crystax_python/site-packages')

                info('Renaming .so files to reflect cross-compile')
                site_packages_dir = 'crystax_python/crystax_python/site-packages'
                filens = shprint(
                    sh.find, site_packages_dir, '-iname',
                    '*.so').stdout.decode('utf-8').split('\n')[:-1]
                for filen in filens:
                    parts = filen.split('.')
                    if len(parts) <= 2:
                        continue
                    shprint(sh.mv, filen, filen.split('.')[0] + '.so')
                site_packages_dir = join(abspath(curdir), site_packages_dir)

        self.strip_libraries(arch)
        self.fry_eggs(site_packages_dir)
        super(ServiceOnlyBootstrap, self).run_distribute()
Example 9
'''
Copyright (c) 2021 ABLECLOUD Co. Ltd.

Script that queries the list of disks on the host

First written: 2021-03-15
'''
import argparse
import json
import logging

from ablestack import *
import os
import sh
import distro

lsblk_cmd = sh.Command('/usr/bin/lsblk')
if distro.linux_distribution() == ('CentOS Linux', '8', ''):
    # print('centos8')
    lspci_cmd = sh.Command('/usr/sbin/lspci')
else:
    # print('other')
    lspci_cmd = sh.Command('/usr/bin/lspci')

env = os.environ.copy()
env['LANG'] = "en_US.utf-8"
env['LANGUAGE'] = "en"
"""
Function that creates a parser to parse the given arguments and make them usable like a dictionary

:return: argparse.ArgumentParser
"""
Example 10
    def __init__(self, preprocess_task):
        super().__init__()

        self.preprocess_task = preprocess_task
        self.prodigal = sh.Command(self.config['prodigal']['bin']).bake(
            i=self.preprocess_task.output(), a=self.output())
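
With the command baked this way, a later run is a bare call; sh renders the single-letter keyword arguments as short flags. A hedged sketch, where `task` stands for an instance of the class above:

# Executes: prodigal -i <preprocess output> -a <task output>
task.prodigal()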
Example 11
    def build_arch(self, arch):
        with current_directory(self.get_build_dir(arch.arch)):
            env = arch.get_env()

            flags = ['--disable-everything']
            cflags = []
            ldflags = []

            if 'openssl' in self.ctx.recipe_build_order:
                flags += [
                    '--enable-openssl',
                    '--enable-nonfree',
                    '--enable-protocol=https,tls_openssl',
                ]
                build_dir = Recipe.get_recipe(
                    'openssl', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-L' + build_dir]

            if 'ffpyplayer_codecs' in self.ctx.recipe_build_order:
                # libx264
                flags += ['--enable-libx264']
                build_dir = Recipe.get_recipe(
                    'libx264', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lx264', '-L' + build_dir + '/lib/']

                # libshine
                flags += ['--enable-libshine']
                build_dir = Recipe.get_recipe(
                    'libshine', self.ctx).get_build_dir(arch.arch)
                cflags += ['-I' + build_dir + '/include/']
                ldflags += ['-lshine', '-L' + build_dir + '/lib/']

                # Enable all codecs:
                flags += [
                    '--enable-parsers',
                    '--enable-decoders',
                    '--enable-encoders',
                    '--enable-muxers',
                    '--enable-demuxers',
                ]
            else:
                # Enable codecs only for .mp4:
                flags += [
                    '--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1',
                    '--enable-decoder=aac,h264,mpeg4,mpegvideo',
                    '--enable-muxer=h264,mov,mp4,mpeg2video',
                    '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1',
                ]

            # needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52
            # /usr/bin/ld: failed to set dynamic section sizes: Bad value
            flags += [
                '--disable-symver',
            ]

            # disable binaries / doc
            flags += [
                '--disable-ffmpeg',
                '--disable-ffplay',
                '--disable-ffprobe',
                '--disable-ffserver',
                '--disable-doc',
            ]

            # other flags:
            flags += [
                '--enable-filter=aresample,resample,crop,adelay,volume,scale',
                '--enable-protocol=file,http',
                '--enable-small',
                '--enable-hwaccels',
                '--enable-gpl',
                '--enable-pic',
                '--disable-static',
                '--enable-shared',
            ]

            if 'arm64' in arch.arch:
                cross_prefix = 'aarch64-linux-android-'
                arch_flag = 'aarch64'
            else:
                cross_prefix = 'arm-linux-androideabi-'
                arch_flag = 'arm'

            # android:
            flags += [
                '--target-os=android',
                '--cross-prefix={}'.format(cross_prefix),
                '--arch={}'.format(arch_flag),
                '--sysroot=' + self.ctx.ndk_platform,
                '--enable-neon',
                '--prefix={}'.format(realpath('.')),
            ]

            if arch_flag == 'arm':
                cflags += [
                    '-mfpu=vfpv3-d16',
                    '-mfloat-abi=softfp',
                    '-fPIC',
                ]

            env['CFLAGS'] += ' ' + ' '.join(cflags)
            env['LDFLAGS'] += ' ' + ' '.join(ldflags)

            configure = sh.Command('./configure')
            shprint(configure, *flags, _env=env)
            shprint(sh.make, '-j4', _env=env)
            shprint(sh.make, 'install', _env=env)
            # copy libs:
            sh.cp('-a', sh.glob('./lib/lib*.so'),
                  self.ctx.get_libs_dir(arch.arch))
Example 12
def main():
    parser = argparse.ArgumentParser(
        description=
        "For the specified repository, assess its RAML and schema files using raml-cop."
    )
    parser.add_argument(
        "-i",
        "--input",
        default=".",
        help=
        "Directory of the repo git clone. (Default: current working directory)"
    )
    parser.add_argument(
        "-f",
        "--file",
        default="",
        help=
        "Limit to this particular pathname, e.g. ramls/item-storage.raml (Default: '' so all files)"
    )
    parser.add_argument(
        "-v",
        "--validate-only",
        action="store_true",
        help=
        "Just assess the RAML files. No schema assessment. (Default: False)")
    parser.add_argument(
        "-j",
        "--json-only",
        action="store_true",
        help=
        "Just assess the JSON schema files. No RAML assessment. (Default: False)"
    )
    parser.add_argument(
        "-l",
        "--loglevel",
        choices=["debug", "info", "warning", "error", "critical"],
        default="info",
        help="Logging level. (Default: warning)")
    parser.add_argument(
        "-d",
        "--dev",
        action="store_true",
        help="Development mode. Use local config file. (Default: False)")
    parser.add_argument(
        "-c",
        "--config",
        default="api.yml",
        help="Pathname to local configuration file. (Default: api.yml)")
    args = parser.parse_args()

    loglevel = LOGLEVELS.get(args.loglevel.lower(), logging.NOTSET)
    # Need stdout to enable Jenkins to redirect into an output file
    logging.basicConfig(stream=sys.stdout,
                        format="%(levelname)s: %(name)s: %(message)s",
                        level=loglevel)
    logger1 = logging.getLogger("lint-raml")
    logger2 = logging.getLogger("lint-raml-cop")
    logger3 = logging.getLogger("lint-raml-schema")
    logging.getLogger("sh").setLevel(logging.ERROR)
    logging.getLogger("requests").setLevel(logging.ERROR)

    # Display a version string
    logger1.info("Using lint-raml version: %s", SCRIPT_VERSION)

    # Process and validate the input parameters
    if args.input.startswith("~"):
        git_input_dir = os.path.expanduser(args.input)
    else:
        git_input_dir = args.input
    if not os.path.exists(git_input_dir):
        logger1.critical(
            "Specified input directory of git clone (-i) not found: %s",
            git_input_dir)
        return 2

    # Ensure that commands are available
    if sh.which("jq"):
        has_jq = True
    else:
        logger1.warning(
            "'jq' is not available. So will not do extra JSON assessment.")
        has_jq = False
    bin_raml_cop = os.path.join(sys.path[0], "node_modules", ".bin",
                                "raml-cop")
    if not os.path.exists(bin_raml_cop):
        logger1.critical("'raml-cop' is not available.")
        logger1.critical(
            "Do 'yarn install' in folio-tools/lint-raml directory.")
        return 2

    # Get the repository name
    try:
        repo_url = sh.git.config("--get",
                                 "remote.origin.url",
                                 _cwd=git_input_dir).stdout.decode().strip()
    except sh.ErrorReturnCode as err:
        logger1.critical("Trouble doing 'git config': %s", err.stderr.decode())
        logger1.critical(
            "Could not determine remote.origin.url of git clone in specified input directory: %s",
            git_input_dir)
        return 2
    else:
        repo_name = os.path.splitext(os.path.basename(repo_url))[0]

    if args.file:
        specific_raml_file_pn = os.path.join(git_input_dir, args.file)
        if not os.path.exists(specific_raml_file_pn):
            logger1.critical("Specific RAML file '%s' does not exist in '%s'",
                             specific_raml_file_pn, repo_name)
            logger1.critical(
                "Needs to be pathname relative to top-level, e.g. ramls/item-storage.raml"
            )
            return 2

    # Get the configuration metadata for all repositories that are known to have RAML
    if args.config.startswith("~"):
        config_local_pn = os.path.expanduser(args.config)
    else:
        config_local_pn = args.config
    if args.dev is False:
        try:
            http_response = requests.get(CONFIG_FILE)
            http_response.raise_for_status()
        except requests.exceptions.HTTPError as err:
            logger1.critical("HTTP error retrieving configuration file: %s",
                             err)
            return 2
        except Exception as err:
            logger1.critical("Error retrieving configuration file: %s", err)
            return 2
        else:
            try:
                config = yaml.safe_load(http_response.text)
            except yaml.YAMLError as err:
                logger1.critical(
                    "Trouble parsing YAML configuration file '%s': %s",
                    CONFIG_FILE, err)
                return 2
    else:
        if not os.path.exists(config_local_pn):
            logger1.critical(
                "Development mode specified (-d) but config file (-c) not found: %s",
                config_local_pn)
            return 2
        with open(config_local_pn) as input_fh:
            try:
                config = yaml.safe_load(input_fh)
            except yaml.YAMLError as err:
                logger1.critical(
                    "Trouble parsing YAML configuration file '%s': %s",
                    config_local_pn, err)
                return 2

    if config is None:
        logger1.critical("Configuration data was not loaded.")
        return 2
    if repo_name not in config:
        logger1.warning("No configuration found for repository '%s'",
                        repo_name)
        logger1.warning("See FOLIO-903. Add an entry to api.yml")
        logger1.warning("Attempting default configuration.")
        config[repo_name] = config["default"]
        config[repo_name][0]["files"].remove("dummy")

    # The yaml parser gags on the "!include".
    # http://stackoverflow.com/questions/13280978/pyyaml-errors-on-in-a-string
    yaml.add_constructor(u"!include",
                         construct_raml_include,
                         Loader=yaml.SafeLoader)

    # Detect any schema $ref
    schema_ref_re = re.compile(r'( +"\$ref"[ :]+")([^"]+)(".*)')

    # Handle issue messages of parser
    message_avoid_re = re.compile(r'^(\[[^]]+\]) ([^:]+):(.*)$')

    # Process each configured set of RAML files
    version_re = re.compile(r"^#%RAML ([0-9.]+)")
    exit_code = 0  # Continue processing to detect various issues, then return the result.
    input_dir = git_input_dir
    for docset in config[repo_name]:
        logger1.info("Investigating and determining configuration: %s",
                     os.path.join(repo_name, docset["directory"]))
        ramls_dir = os.path.join(input_dir, docset["directory"])
        logger1.debug("ramls_dir=%s", ramls_dir)
        version_ramlutil_v1 = True
        if not os.path.exists(ramls_dir):
            logger1.warning("The specified 'ramls' directory not found: %s",
                            os.path.join(repo_name, docset["directory"]))
            logger1.warning("See FOLIO-903. Update entry in api.yml")
            logger1.warning("Attempting default.")
            docset["directory"] = config["default"][0]["directory"]
            ramls_dir = os.path.join(input_dir, docset["directory"])
            if not os.path.exists(ramls_dir):
                logger1.critical(
                    "The default 'ramls' directory not found: %s/%s",
                    repo_name, docset["directory"])
                return 2
        if docset["ramlutil"] is not None:
            ramlutil_dir = os.path.join(input_dir, docset["ramlutil"])
            if not os.path.exists(ramlutil_dir):
                logger1.warning(
                    "The specified 'raml-util' directory not found: %s",
                    os.path.join(repo_name, docset["ramlutil"]))
                logger1.warning("See FOLIO-903. Update entry in api.yml")
            else:
                # Detect if new raml-util
                auth_trait_pn = os.path.join(input_dir, docset["ramlutil"],
                                             "traits/auth.raml")
                if os.path.exists(auth_trait_pn):
                    version_ramlutil_v1 = False
        # If is using RMB, then there are various peculiarities to assess.
        try:
            is_rmb = docset["rmb"]
        except KeyError:
            is_rmb = True
        # Some repos have no RAMLs.
        # Currently this script is also processing schemas FOLIO-1447, so this can be intentional.
        try:
            is_schemas_only = docset["schemasOnly"]
        except KeyError:
            is_schemas_only = False
        # Ensure configuration and find any RAML files not configured
        configured_raml_files = []
        try:
            docset["files"]
        except KeyError:
            pass
        else:
            if isinstance(docset["files"], Iterable):
                for raml_name in docset["files"]:
                    raml_fn = "{0}.raml".format(raml_name)
                    configured_raml_files.append(raml_fn)
        exclude_list = [
            "raml-util", "rtypes", "traits", "examples", "bindings",
            "node_modules", ".git"
        ]
        try:
            exclude_list.extend(docset["excludes"])
        except KeyError:
            pass
        excludes = set(exclude_list)
        found_raml_files = []
        raml_files = []
        found_schema_files = []
        if docset["label"] == "shared":
            # If this is the top-level of the shared space, then do not descend
            pattern = os.path.join(ramls_dir, "*.raml")
            for raml_fn in glob.glob(pattern):
                raml_pn = os.path.relpath(raml_fn, ramls_dir)
                found_raml_files.append(raml_pn)
        else:
            for root, dirs, files in os.walk(ramls_dir, topdown=True):
                dirs[:] = [d for d in dirs if d not in excludes]
                for raml_fn in fnmatch.filter(files, "*.raml"):
                    raml_pn = os.path.relpath(os.path.join(root, raml_fn),
                                              ramls_dir)
                    found_raml_files.append(raml_pn)
        # Also find the JSON Schemas to later scan them
        try:
            schemas_dir = os.path.join(input_dir, docset["schemasDirectory"])
        except KeyError:
            schemas_dir = os.path.join(input_dir, docset["directory"])
        else:
            if not os.path.exists(schemas_dir):
                logger1.warning(
                    "The specified 'schemasDirectory' not found: %s",
                    os.path.join(repo_name, docset["schemasDirectory"]))
                logger1.warning("See FOLIO-903. Update entry in api.yml")
                logger1.warning("Attempting default.")
                schemas_dir = os.path.join(input_dir, docset["directory"])
        if docset["label"] == "shared":
            # If this is the top-level of the shared space, then do not descend
            pattern = os.path.join(schemas_dir, "*.schema")
            for schema_fn in glob.glob(pattern):
                schema_pn = os.path.relpath(schema_fn, schemas_dir)
                found_schema_files.append(schema_pn)
        else:
            for root, dirs, files in os.walk(schemas_dir, topdown=True):
                dirs[:] = [d for d in dirs if d not in excludes]
                logger1.debug("Looking for JSON schema files: %s", root)
                for filename in files:
                    if filename.endswith((".json", ".schema")):
                        schema_pn = os.path.relpath(
                            os.path.join(root, filename), schemas_dir)
                        found_schema_files.append(schema_pn)
        logger1.debug("found_schema_files: %s", found_schema_files)
        for raml_fn in configured_raml_files:
            if raml_fn not in found_raml_files:
                logger1.warning("Configured file not found: %s", raml_fn)
                logger1.warning(
                    "Configuration needs to be updated (FOLIO-903).")
            else:
                raml_files.append(raml_fn)
        for raml_fn in found_raml_files:
            if raml_fn not in configured_raml_files:
                raml_files.append(raml_fn)
                logger1.warning("Missing from configuration: %s", raml_fn)
                logger1.warning(
                    "Configuration needs to be updated (FOLIO-903).")
        logger1.debug("configured_raml_files: %s", configured_raml_files)
        logger1.debug("found_raml_files: %s", found_raml_files)
        logger1.debug("raml_files: %s", raml_files)
        if found_schema_files:
            if args.validate_only:
                logger1.info(
                    "Not assessing schema descriptions, as per option '--validate-only'."
                )
            else:
                issues_flag = assess_schema_descriptions(
                    schemas_dir, found_schema_files, has_jq)
                if issues_flag:
                    exit_code = 1
        if args.json_only:
            logger1.info(
                "Not assessing RAML/Schema or examples against schema, as per option '--json-only'."
            )
            continue
        if not is_schemas_only:
            logger1.info(
                "Assessing RAML files (https://dev.folio.org/guides/raml-cop/):"
            )
            if not raml_files:
                logger1.error("No RAML files found in %s", ramls_dir)
                exit_code = 1
        for raml_fn in sorted(raml_files):
            if args.file:
                if os.path.join(docset["directory"], raml_fn) != args.file:
                    logger1.info("Skipping RAML file: %s", raml_fn)
                    continue
            input_pn = os.path.join(ramls_dir, raml_fn)
            if not os.path.exists(input_pn):
                logger1.warning("Missing configured input file '%s'",
                                os.path.join(repo_name, raml_fn))
                logger1.warning(
                    "Configuration needs to be updated (FOLIO-903).")
                continue
            # Determine raml version
            version_value = None
            with open(input_pn, "r") as input_fh:
                for num, line in enumerate(input_fh):
                    match = re.search(version_re, line)
                    if match:
                        version_value = match.group(1)
                        break
            if not version_value:
                logger1.error(
                    "Could not determine RAML version for file '%s' so skipping.",
                    raml_fn)
                exit_code = 1
                continue
            logger2.info("Processing RAML v%s file: %s", version_value,
                         raml_fn)
            if version_value != "0.8" and not version_ramlutil_v1:
                logger1.error(
                    "The raml-util is not RAML-1.0 version. Update git submodule."
                )
                exit_code = 2
                continue
            # Now process this RAML file
            # First load the content to extract some details.
            (schemas,
             issues_flag) = gather_declarations(input_pn, raml_fn,
                                                version_value, is_rmb,
                                                input_dir, docset["directory"])
            logger1.debug("Found %s declared schemas or types files.",
                          len(schemas))
            if issues_flag:
                exit_code = 1
            # Ensure each $ref referenced schema file exists, is useable, and is declared in the RAML
            for schema in schemas:
                schema_pn = os.path.normpath(
                    os.path.join(ramls_dir, schemas[schema]))
                if not os.path.exists(schema_pn):
                    # Missing file was already reported
                    continue
                schema_dir = os.path.dirname(schema_pn)
                with open(schema_pn) as input_fh:
                    lines = list(input_fh)
                for line in lines:
                    match = re.search(schema_ref_re, line)
                    if match:
                        ref_value = match.group(2)
                        logger1.debug(
                            "Found schema $ref '%s' in schema file '%s'",
                            ref_value, schemas[schema])
                        relative_schema_ref_fn = os.path.normpath(
                            os.path.join(os.path.dirname(schemas[schema]),
                                         ref_value))
                        logger1.debug("    relative_schema_ref_fn=%s",
                                      relative_schema_ref_fn)
                        relative_schema_ref_pn = os.path.normpath(
                            os.path.join(ramls_dir, relative_schema_ref_fn))
                        if not is_rmb:
                            logger1.debug(
                                "Not RMB type, so just report if file not found."
                            )
                            if not os.path.exists(relative_schema_ref_pn):
                                logger1.error("File not found: %s",
                                              relative_schema_ref_pn)
                                logger1.error(
                                    "  via schema $ref '%s' in schema file '%s'",
                                    ref_value, schemas[schema])
                                exit_code = 1
                        else:
                            if version_value != "0.8":
                                #logger1.debug("Is RMB >= v20 and 1.0, so report if file not found.")
                                if not os.path.exists(relative_schema_ref_pn):
                                    logger1.error("File not found: %s",
                                                  relative_schema_ref_pn)
                                    logger1.error(
                                        "  via schema $ref '%s' in schema file '%s'",
                                        ref_value, schemas[schema])
                                    exit_code = 1
                            else:
                                #logger1.debug("Is RMB < v20 and 0.8, so report if file not found, and ensure declaration.")
                                # RMB < v20 enables $ref in schema to be a pathname, if the position in the filesystem
                                # and its use in the RAML meets strict conditions.
                                if not os.path.exists(relative_schema_ref_pn):
                                    logger1.error("File not found: %s",
                                                  relative_schema_ref_pn)
                                    logger1.error(
                                        "  via schema $ref '%s' in schema file '%s'",
                                        ref_value, schemas[schema])
                                    exit_code = 1
                                else:
                                    # This RMB version has an extra bit of weirdness.
                                    # If the declaration of a schema key in the raml file needs to be a path,
                                    # (e.g. in raml-util mod-users-bl.raml) then if its included schema has $ref
                                    # to another schema using a relative path with dot-dots, then that schema's key
                                    # needs to be adjusted according to the depth of the path in the top-level
                                    # schema key (e.g. for $ref=../metadata.schema).
                                    if "../" in ref_value:
                                        rel_ref_value = ref_value
                                        for x in range(0, schema.count("/")):
                                            logger1.debug(
                                                "      dot-dot count x=%s",
                                                x + 1)
                                            rel_ref_value = re.sub(
                                                "\.\./",
                                                "",
                                                rel_ref_value,
                                                count=1)
                                        logger1.debug("      rel_ref_value=%s",
                                                      rel_ref_value)
                                        try:
                                            schemas[rel_ref_value]
                                        except KeyError:
                                            logger1.error(
                                                "The schema reference '%s' defined in '%s' needs to be declared as '%s' in RAML file.",
                                                ref_value, schemas[schema],
                                                rel_ref_value)
                                            exit_code = 1
                                    else:
                                        try:
                                            schemas[ref_value]
                                        except KeyError:
                                            logger1.error(
                                                "The schema reference '%s' defined in '%s' is not declared in RAML file.",
                                                ref_value, schemas[schema])
                                            exit_code = 1
            # Run raml-cop on it.
            cmd_raml_cop = sh.Command(bin_raml_cop)
            try:
                cmd_raml_cop(input_pn, no_color=True)
            except sh.ErrorReturnCode_1 as err:
                (issues_list, errors_remain) = avoid_specific_errors(
                    repo_name,
                    err.stdout.decode().split(os.linesep), message_avoid_re)
                if errors_remain:
                    logger2.error("  raml-cop detected errors with %s:\n%s",
                                  raml_fn, '\n'.join(issues_list))
                    exit_code = 1
                else:
                    logger2.warning(
                        "  raml-cop detected warnings with %s:\n%s", raml_fn,
                        '\n'.join(issues_list))
                    exit_code = 0
            else:
                logger2.info("  raml-cop did not detect any errors with %s",
                             raml_fn)
    # Report the outcome
    if exit_code in (1, 2):
        logger1.error("There were processing errors. See list above.")
    else:
        logger1.info("Did not detect any errors.")
    logging.shutdown()
    return exit_code
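
# A minimal sketch of the sh error-handling pattern used above: sh raises
# ErrorReturnCode_N (where N is the exit code) and keeps the captured output
# on the exception, so a linter's findings can be post-processed before
# deciding whether they are fatal. The command name and the ignorable()
# filter are hypothetical stand-ins.
import os
import sh

def lint_file(path, ignorable=lambda line: False):
    try:
        sh.Command("raml-cop")(path, no_color=True)
    except sh.ErrorReturnCode_1 as err:
        issues = err.stdout.decode().split(os.linesep)
        return [line for line in issues if not ignorable(line)]
    return []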
Esempio n. 13
0
    def _run(self,
             language,
             target_dir,
             browser,
             skip_screenshots=True,
             force=False,
             screenshot_build="source",
             target_version=None):

        sphinx_build = sh.Command("sphinx-build")

        # check if sources exist for this language
        section = "manual-%s" % language
        target_type = self.config.get(section, "target-type")

        source_dir = os.path.join(self.root_dir,
                                  self.config.get(section, "source"))
        if target_dir is None:
            target_dir = os.path.join(self.root_dir,
                                      self.config.get(section, "target"))
        else:
            target_dir = os.path.join(self.root_dir, target_dir)
        target_dir = target_dir.replace(
            "<version>",
            self._get_doc_target_path()
            if target_version is None else target_version)
        print("generating doc to %s" % target_dir)

        if not os.path.exists(source_dir):
            self.log.error(
                "no sources found for manual (%s) in language '%s'" %
                (source_dir, language))
            sys.exit(1)

        if force and os.path.exists(target_dir):
            # delete target dir
            print("deleting old content in '%s'" % target_dir)
            shutil.rmtree(target_dir)

        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

        # first run generates the widget-example configs
        print(
            '================================================================================'
        )
        print('sphinx_build: first run')
        print(
            '================================================================================'
        )
        sphinx_build("-b",
                     target_type,
                     source_dir,
                     target_dir,
                     _out=self.process_output,
                     _err=self.process_output)

        if not skip_screenshots:
            grunt = sh.Command("grunt")
            # generate the screenshots
            grunt("--force",
                  "screenshots",
                  "--subDir=manual",
                  "--browserName=%s" % browser,
                  "--target=%s" % screenshot_build,
                  _out=self.process_output,
                  _err=self.process_output)

            # 2nd run, with access to the generated screenshots
            print(
                '================================================================================'
            )
            print('sphinx_build: second run')
            print(
                '================================================================================'
            )
            sphinx_build("-b",
                         target_type,
                         source_dir,
                         target_dir,
                         _out=self.process_output,
                         _err=self.process_output)

        with open(os.path.join(target_dir, "..", "version"), "w+") as f:
            f.write(self._get_source_version())

        # create symlinks
        symlinkname = ''
        git = sh.Command("git")
        branch = git("rev-parse", "--abbrev-ref", "HEAD").strip() if os.environ.get('TRAVIS_BRANCH') is None \
            else os.environ.get('TRAVIS_BRANCH')

        if branch == "develop":
            # handle develop builds:
            print('detected development build')
            symlinkname = self.config.get("DEFAULT", "develop-version-mapping")
        elif branch == "master":
            # handle releases:
            print('detected build of most recent version of master branch')
            symlinkname = self.config.get("DEFAULT",
                                          "most-recent-version-mapping")

        if symlinkname:
            symlinktarget = os.path.join(target_dir, "..")
            print("setting symlink '%s' to '%s'" %
                  (symlinkname, symlinktarget))
            cwd = os.getcwd()
            os.chdir(os.path.join(symlinktarget, ".."))
            try:
                os.remove(symlinkname)
            except OSError as e:
                print(str(e))
            ls = sh.Command("ls")
            print(ls("-la"))
            os.symlink(os.path.relpath(symlinktarget), symlinkname)
            os.chdir(cwd)
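
# A minimal sketch of the _out/_err callback pattern that sphinx_build and
# grunt are invoked with above: sh calls the handler for each line the child
# process writes, so long-running builds stream to a logger instead of being
# buffered. The logger name and build directories are assumptions.
import logging
import sh

log = logging.getLogger("docs")

def process_output(line):
    # invoked by sh once per line of child output
    log.info(line.rstrip())

sh.Command("sphinx-build")("-b", "html", "source", "build",
                           _out=process_output, _err=process_output)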
Esempio n. 14
0
 def run(self):
     main = sh.Command(self.tmpdir + '/main')
     loop_factor = self.benchmark_definition['loop_factor']
     results = main(int(5000000 * loop_factor))
     return parse_results(results.splitlines())
Esempio n. 15
0
    def run_distribute(self):
        info_main("# Creating Android project ({})".format(self.name))

        arch = self.ctx.archs[0]
        python_install_dir = self.ctx.get_python_install_dir()
        from_crystax = self.ctx.python_recipe.from_crystax
        crystax_python_dir = join("crystax_python", "crystax_python")

        if len(self.ctx.archs) > 1:
            raise ValueError("LBRY/gradle supports only one arch")

        info("Copying LBRY/gradle build for {}".format(arch))
        shprint(sh.rm, "-rf", self.dist_dir)
        shprint(sh.cp, "-r", self.build_dir, self.dist_dir)

        # the build uses either the ANDROID_HOME environment variable
        # or local.properties if it exists
        with current_directory(self.dist_dir):
            with open('local.properties', 'w') as fileh:
                fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir))

        with current_directory(self.dist_dir):
            info("Copying Python distribution")

            if not exists("private") and not from_crystax:
                ensure_dir("private")
            if not exists("crystax_python") and from_crystax:
                ensure_dir(crystax_python_dir)

            hostpython = sh.Command(self.ctx.hostpython)
            if not from_crystax:
                try:
                    shprint(hostpython,
                            '-OO',
                            '-m',
                            'compileall',
                            python_install_dir,
                            _tail=10,
                            _filterout="^Listing")
                except sh.ErrorReturnCode:
                    pass
                if not exists('python-install'):
                    shprint(sh.cp, '-a', python_install_dir,
                            './python-install')

            self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)])
            self.distribute_javaclasses(self.ctx.javaclass_dir,
                                        dest_dir=join("src", "main", "java"))

            if not from_crystax:
                info("Filling private directory")
                if not exists(join("private", "lib")):
                    info("private/lib does not exist, making")
                    shprint(sh.cp, "-a", join("python-install", "lib"),
                            "private")
                shprint(sh.mkdir, "-p", join("private", "include",
                                             "python2.7"))

                libpymodules_fn = join("libs", arch.arch, "libpymodules.so")
                if exists(libpymodules_fn):
                    shprint(sh.mv, libpymodules_fn, 'private/')
                shprint(
                    sh.cp,
                    join('python-install', 'include',
                         'python2.7', 'pyconfig.h'),
                    join('private', 'include', 'python2.7/'))

                info('Removing some unwanted files')
                shprint(sh.rm, '-f', join('private', 'lib', 'libpython2.7.so'))
                shprint(sh.rm, '-rf', join('private', 'lib', 'pkgconfig'))

                libdir = join(self.dist_dir, 'private', 'lib', 'python2.7')
                site_packages_dir = join(libdir, 'site-packages')
                with current_directory(libdir):
                    removes = []
                    # os.walk yields (dirpath, dirnames, filenames)
                    for dirpath, dirnames, filenames in walk("."):
                        for filename in filenames:
                            for suffix in EXCLUDE_EXTS:
                                if filename.endswith(suffix):
                                    # keep the relative path so rm also
                                    # removes files in subdirectories
                                    removes.append(join(dirpath, filename))
                    shprint(sh.rm, '-f', *removes)

                    info('Deleting some other stuff not used on android')
                    # To quote the original distribute.sh, 'well...'
                    shprint(sh.rm, '-rf', 'lib2to3')
                    shprint(sh.rm, '-rf', 'idlelib')
                    for filename in glob.glob('config/libpython*.a'):
                        shprint(sh.rm, '-f', filename)
                    shprint(sh.rm, '-rf', 'config/python.o')

            else:  # Python *is* loaded from crystax
                ndk_dir = self.ctx.ndk_dir
                py_recipe = self.ctx.python_recipe
                python_dir = join(ndk_dir, 'sources', 'python',
                                  py_recipe.version, 'libs', arch.arch)
                shprint(sh.cp, '-r', join(python_dir, 'stdlib.zip'),
                        crystax_python_dir)
                shprint(sh.cp, '-r', join(python_dir, 'modules'),
                        crystax_python_dir)
                shprint(sh.cp, '-r', self.ctx.get_python_install_dir(),
                        join(crystax_python_dir, 'site-packages'))

                info('Renaming .so files to reflect cross-compile')
                site_packages_dir = join(crystax_python_dir, "site-packages")
                find_ret = shprint(sh.find, site_packages_dir, '-iname',
                                   '*.so')
                filenames = find_ret.stdout.decode('utf-8').split('\n')[:-1]
                for filename in filenames:
                    parts = filename.split('.')
                    if len(parts) <= 2:
                        continue
                    shprint(sh.mv, filename, filename.split('.')[0] + '.so')
                site_packages_dir = join(abspath(curdir), site_packages_dir)
            if 'sqlite3' not in self.ctx.recipe_build_order:
                with open('blacklist.txt', 'a') as fileh:
                    fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n')

        self.strip_libraries(arch)
        self.fry_eggs(site_packages_dir)
        super(LbryBootstrap, self).run_distribute()
Esempio n. 16
0
def run_hook(kind, vmname, diskname):
    if config['hooks'][kind] is not None:
        sh.Command(config['hooks'][kind])(kind, vmname, diskname)
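
# run_hook above assumes a config mapping shaped like the following; the hook
# names and script paths are illustrative assumptions, not part of the
# original snippet.
config = {
    'hooks': {
        'pre-backup': '/usr/local/bin/pre-backup.sh',
        'post-backup': None,  # unset hooks are skipped by the None check
    }
}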
Esempio n. 17
0
import sh

dpkgSig = sh.Command("dpkg-sig").bake(s="builder", _fg=True)
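
# With the bake above, every invocation is prefixed with "-s builder" and
# runs in the foreground. The .deb filename below is hypothetical.
dpkgSig("mypackage_1.0_amd64.deb")  # runs: dpkg-sig -s builder mypackage_1.0_amd64.deb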
Esempio n. 18
0
    def apk(self, args):
        """Create an APK using the given distribution."""

        ctx = self.ctx
        dist = self._dist

        # Manually fixing these arguments at the string stage is
        # unsatisfactory and should probably be changed somehow, but
        # we can't leave it until later as the build.py scripts assume
        # they are in the current directory.
        fix_args = ('--dir', '--private', '--add-jar', '--add-source',
                    '--whitelist', '--blacklist', '--presplash', '--icon')
        unknown_args = args.unknown_args
        for i, arg in enumerate(unknown_args):
            argx = arg.split('=')
            if argx[0] in fix_args:
                if len(argx) > 1:
                    unknown_args[i] = '='.join(
                        (argx[0], realpath(expanduser(argx[1]))))
                elif i + 1 < len(unknown_args):
                    unknown_args[i + 1] = realpath(
                        expanduser(unknown_args[i + 1]))

        env = os.environ.copy()
        if args.build_mode == 'release':
            if args.keystore:
                env['P4A_RELEASE_KEYSTORE'] = realpath(
                    expanduser(args.keystore))
            if args.signkey:
                env['P4A_RELEASE_KEYALIAS'] = args.signkey
            if args.keystorepw:
                env['P4A_RELEASE_KEYSTORE_PASSWD'] = args.keystorepw
            if args.signkeypw:
                env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.signkeypw
            elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env:
                env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw

        build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
        with current_directory(dist.dist_dir):
            self.hook("before_apk_build")
            os.environ["ANDROID_API"] = str(self.ctx.android_api)
            build_args = build.parse_args(args.unknown_args)
            self.hook("after_apk_build")
            self.hook("before_apk_assemble")

            build_type = ctx.java_build_tool
            if build_type == 'auto':
                info('Selecting java build tool:')

                build_tools_versions = os.listdir(
                    join(ctx.sdk_dir, 'build-tools'))
                build_tools_versions = sorted(build_tools_versions,
                                              key=LooseVersion)
                build_tools_version = build_tools_versions[-1]
                info(('Detected highest available build tools '
                      'version to be {}').format(build_tools_version))

                if (LooseVersion(build_tools_version) >= LooseVersion('25.0')
                        and exists('gradlew')):
                    build_type = 'gradle'
                    info('    Building with gradle, as gradle executable is '
                         'present')
                else:
                    build_type = 'ant'
                    if LooseVersion(build_tools_version) < LooseVersion('25.0'):
                        info(('    Building with ant, as the highest '
                              'build-tools-version is only {}'
                              ).format(build_tools_version))
                    else:
                        info('    Building with ant, as no gradle executable '
                             'detected')

            if build_type == 'gradle':
                # gradle-based build
                env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir
                env["ANDROID_HOME"] = self.ctx.sdk_dir

                gradlew = sh.Command('./gradlew')
                if exists('/usr/bin/dos2unix'):
                    # .../dists/bdisttest_python3/gradlew
                    # .../build/bootstrap_builds/sdl2-python3crystax/gradlew
                    # if docker on windows, gradle contains CRLF
                    output = shprint(sh.Command('dos2unix'),
                                     gradlew._path.decode('utf8'),
                                     _tail=20,
                                     _critical=True,
                                     _env=env)
                if args.build_mode == "debug":
                    gradle_task = "assembleDebug"
                elif args.build_mode == "release":
                    gradle_task = "assembleRelease"
                else:
                    raise BuildInterruptingException(
                        "Unknown build mode {} for apk()".format(
                            args.build_mode))
                output = shprint(gradlew,
                                 gradle_task,
                                 _tail=20,
                                 _critical=True,
                                 _env=env)

                # gradle outputs the APK somewhere else,
                # and the filename carries no version number
                apk_dir = join(dist.dist_dir, "build", "outputs", "apk",
                               args.build_mode)
                apk_glob = "*-{}.apk"
                apk_add_version = True

            else:
                # ant-based build
                try:
                    ant = sh.Command('ant')
                except sh.CommandNotFound:
                    raise BuildInterruptingException(
                        'Could not find ant binary, please install it '
                        'and make sure it is in your $PATH.')
                output = shprint(ant,
                                 args.build_mode,
                                 _tail=20,
                                 _critical=True,
                                 _env=env)
                apk_dir = join(dist.dist_dir, "bin")
                apk_glob = "*-*-{}.apk"
                apk_add_version = False

            self.hook("after_apk_assemble")

        info_main('# Copying APK to current directory')

        apk_re = re.compile(r'.*Package: (.*\.apk)$')
        apk_file = None
        for line in reversed(output.splitlines()):
            m = apk_re.match(line)
            if m:
                apk_file = m.groups()[0]
                break

        if not apk_file:
            info_main('# APK filename not found in build output. Guessing...')
            if args.build_mode == "release":
                suffixes = ("release", "release-unsigned")
            else:
                suffixes = ("debug", )
            for suffix in suffixes:
                apks = glob.glob(join(apk_dir, apk_glob.format(suffix)))
                if apks:
                    if len(apks) > 1:
                        info('More than one built APK found... guessing you '
                             'just built {}'.format(apks[-1]))
                    apk_file = apks[-1]
                    break
            else:
                raise BuildInterruptingException(
                    'Couldn\'t find the built APK')

        info_main('# Found APK file: {}'.format(apk_file))
        if apk_add_version:
            info('# Add version number to APK')
            apk_name, apk_suffix = basename(apk_file).split("-", 1)
            apk_file_dest = "{}-{}-{}".format(apk_name, build_args.version,
                                              apk_suffix)
            info('# APK renamed to {}'.format(apk_file_dest))
            shprint(sh.cp, apk_file, apk_file_dest)
        else:
            shprint(sh.cp, apk_file, './')
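
# A minimal sketch of why the build-tools directories above are sorted with
# LooseVersion rather than as plain strings: lexical comparison mis-ranks
# multi-digit version components.
from distutils.version import LooseVersion

versions = ['9.0.0', '25.0.3', '28.0.2']
print(sorted(versions))                    # ['25.0.3', '28.0.2', '9.0.0']
print(sorted(versions, key=LooseVersion))  # ['9.0.0', '25.0.3', '28.0.2']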
Esempio n. 19
0
    def do_python_build(self, arch):

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
        shprint(sh.cp, self.ctx.hostpython, self.get_build_dir(arch.arch))
        shprint(sh.cp, self.ctx.hostpgen, self.get_build_dir(arch.arch))
        hostpython = join(self.get_build_dir(arch.arch), 'hostpython')
        hostpgen = join(self.get_build_dir(arch.arch), 'hostpgen')

        with current_directory(self.get_build_dir(arch.arch)):


            shprint(sh.cp, join(hostpython_recipe.get_recipe_dir(), 'Setup'), 'Modules')

            env = arch.get_env()

            env['HOSTARCH'] = 'arm-eabi'
            env['BUILDARCH'] = shprint(sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
            env['CFLAGS'] = ' '.join([env['CFLAGS'], '-DNO_MALLINFO'])

            # TODO need to add a should_build that checks if optional
            # dependencies have changed (possibly in a generic way)
            if 'openssl' in self.ctx.recipe_build_order:
                r = Recipe.get_recipe('openssl', self.ctx)
                openssl_build_dir = r.get_build_dir(arch.arch)
                setuplocal = join('Modules', 'Setup.local')
                shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'), setuplocal)
                shprint(sh.sed, '-i.backup', 's#^SSL=.*#SSL={}#'.format(openssl_build_dir), setuplocal)
                env['OPENSSL_VERSION'] = r.version

            if 'sqlite3' in self.ctx.recipe_build_order:
                # Include sqlite3 in python2 build
                r = Recipe.get_recipe('sqlite3', self.ctx)
                inc = ' -I' + r.get_build_dir(arch.arch)
                lib = ' -L' + r.get_lib_dir(arch) + ' -lsqlite3'
                # Insert or append to env
                f = 'CPPFLAGS'
                env[f] = env[f] + inc if f in env else inc
                f = 'LDFLAGS'
                env[f] = env[f] + lib if f in env else lib

            # NDK has langinfo.h but doesn't define nl_langinfo()
            env['ac_cv_header_langinfo_h'] = 'no'
            configure = sh.Command('./configure')
            shprint(configure,
                    '--host={}'.format(env['HOSTARCH']),
                    '--build={}'.format(env['BUILDARCH']),
                    # 'OPT={}'.format(env['OFLAG']),
                    '--prefix={}'.format(realpath('./python-install')),
                    '--enable-shared',
                    '--disable-toolbox-glue',
                    '--disable-framework',
                    _env=env)

            # tito left this comment in the original source. It's still true!
            # FIXME: the first time, we got an error at:
            # python$EXE ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
            # /home/tito/code/python-for-android/build/python/Python-2.7.2/python: 1: Syntax error: word unexpected (expecting ")")
            # because at this point python is arm, not x86. Even so, why is /usr/include/netinet/in.h used?
            # Check if we can avoid this part.

            make = sh.Command(env['MAKE'].split(' ')[0])
            print('First install (expected to fail)...')
            try:
                shprint(make, '-j5', 'install', 'HOSTPYTHON={}'.format(hostpython),
                        'HOSTPGEN={}'.format(hostpgen),
                        'CROSS_COMPILE_TARGET=yes',
                        'INSTSONAME=libpython2.7.so',
                        _env=env)
            except sh.ErrorReturnCode_2:
                print('First python2 make failed. This is expected, trying again.')


            print('Second install (expected to work)')
            shprint(sh.touch, 'python.exe', 'python')
            shprint(make, '-j5', 'install', 'HOSTPYTHON={}'.format(hostpython),
                    'HOSTPGEN={}'.format(hostpgen),
                    'CROSS_COMPILE_TARGET=yes',
                    'INSTSONAME=libpython2.7.so',
                    _env=env)

            if is_darwin():
                shprint(sh.cp, join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'Lib'))
                shprint(sh.cp, join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'lib', 'python2.7'))

            # reduce python
            for dir_name in ('test', join('json', 'tests'), 'lib-tk',
                             join('sqlite3', 'test'), join('unittest', 'test'),
                             join('lib2to3', 'tests'), join('bsddb', 'tests'),
                             join('distutils', 'tests'), join('email', 'test'),
                             'curses'):
                shprint(sh.rm, '-rf', join('python-install',
                                           'lib', 'python2.7', dir_name))
Esempio n. 20
0
ALIGNMENT_TEMP_PDB = "aligned.pdb"
ALIGNMENT_OUTPUT = "alignment.txt"

BIOJAVA_RUNTIME_ARGS = [
    "-Xmx500M",
    "-cp",
    BIOJAVA_CLASSPATH,
]

BIOJAVA_COMMON_ARGS = [
    "-autoFetch", "false", "-pdbDirSplit", "false", "-printFatCat",
    "-outputPDB", "-outFile", ALIGNMENT_TEMP_PDB
]

_java_cmd = sh.Command(JAVA_RUNTIME)
_biojava_base_cmd = _java_cmd.bake(*BIOJAVA_RUNTIME_ARGS)


def run_biojava_alignment(cmd_class,
                          code1,
                          code2,
                          alignment,
                          transformed,
                          superposed,
                          pdb1=None,
                          pdb2=None,
                          log=None):
    try:
        exec_dir = tempfile.mkdtemp()
Esempio n. 21
0
    from StringIO import StringIO

from PIL import Image
from pdfminer.pdfpage import PDFPage
import sh

from django.utils.translation import ugettext_lazy as _

from common.utils import fs_cleanup, mkstemp

from ..classes import ConverterBase
from ..exceptions import PageCountError
from ..settings import setting_pdftoppm_path

try:
    pdftoppm = sh.Command(setting_pdftoppm_path.value)
except sh.CommandNotFound:
    pdftoppm = None
else:
    pdftoppm = pdftoppm.bake('-jpeg')

Image.init()
logger = logging.getLogger(__name__)


class IteratorIO(object):
    def __init__(self, iterator):
        self.file_buffer = StringIO()

        for chunk in iterator:
            self.file_buffer.write(chunk)
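
# Because pdftoppm can be None when the binary is absent (the CommandNotFound
# fallback above), callers are expected to guard before invoking it. A
# minimal, assumed usage; the filenames and the exception choice are
# illustrative.
if pdftoppm is None:
    raise PageCountError("pdftoppm not found; install poppler-utils")
pdftoppm("input.pdf", "page")  # runs: pdftoppm -jpeg input.pdf page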
Esempio n. 22
0
 def install(self):
     arch = self.filtered_archs[0]
     build_dir = self.get_build_dir(arch.arch)
     hostpython = sh.Command(self.ctx.hostpython)
     with cd(build_dir):
         shprint(hostpython, "setup.py", "install")
Esempio n. 23
0
# standard library imports
import sys
from pathlib import Path

# third-party imports
import pytest
import sh

# module imports
from . import INGEST_OUTPUTS
from . import find_homology_files
from . import help_check
from . import print_docstring

# global constants
azulejo = sh.Command("azulejo")
SUBCOMMAND = "homology"


def test_subcommand_help():
    """Test subcommand help message."""
    help_check(SUBCOMMAND)


@print_docstring()
def test_homology(datadir_mgr, capsys):
    """Test homology clustering, MSA, and tree building."""
    with capsys.disabled():
        with datadir_mgr.in_tmp_dir(
                inpathlist=INGEST_OUTPUTS,
                save_outputs=True,
Esempio n. 24
0
def run_pymodules_install(ctx, modules):
    modules = list(filter(ctx.not_has_package, modules))

    if not modules:
        info('There are no Python modules to install, skipping')
        return

    info('The requirements ({}) don\'t have recipes, attempting to install '
         'them with pip'.format(', '.join(modules)))
    info('If this fails, it may mean that the module has compiled '
         'components and needs a recipe.')

    venv = sh.Command(ctx.virtualenv)
    with current_directory(join(ctx.build_dir)):
        shprint(
            venv, '--python=python{}'.format(
                ctx.python_recipe.major_minor_version_string.partition(".")
                [0]), 'venv')

        info('Creating a requirements.txt file for the Python modules')
        with open('requirements.txt', 'w') as fileh:
            for module in modules:
                key = 'VERSION_' + module
                if key in environ:
                    line = '{}=={}\n'.format(module, environ[key])
                else:
                    line = '{}\n'.format(module)
                fileh.write(line)

        base_env = copy.copy(os.environ)
        base_env["PYTHONPATH"] = ctx.get_site_packages_dir()

        info('Upgrade pip to latest version')
        shprint(sh.bash,
                '-c', ("source venv/bin/activate && pip install -U pip"),
                _env=copy.copy(base_env))

        info('Install Cython in case one of the modules needs it to build')
        shprint(sh.bash,
                '-c', ("venv/bin/pip install Cython"),
                _env=copy.copy(base_env))

        # Get environment variables for build (with CC/compiler set):
        standard_recipe = CythonRecipe()
        standard_recipe.ctx = ctx
        # (note: following line enables explicit -lpython... linker options)
        standard_recipe.call_hostpython_via_targetpython = False
        recipe_env = standard_recipe.get_recipe_env(ctx.archs[0])
        env = copy.copy(base_env)
        env.update(recipe_env)

        info('Installing Python modules with pip')
        info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. '
             'A reason for this is often modules compiling '
             'native code that is unaware of Android cross-compilation '
             'and does not work without additional '
             'changes / workarounds.')

        # Make sure our build package dir is available, and the virtualenv
        # site packages come FIRST (for the proper pip version):
        env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir()
        env["PYTHONPATH"] = os.path.abspath(
            join(ctx.build_dir, "venv", "lib",
                 "python" + ctx.python_recipe.major_minor_version_string,
                 "site-packages")) + ":" + env["PYTHONPATH"]
        shprint(sh.bash,
                '-c',
                ("source venv/bin/activate && " +
                 "pip install -v --target '{0}' --no-deps -r requirements.txt"
                 ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")),
                _env=copy.copy(env))

        # Strip object files after potential Cython or native code builds:
        standard_recipe.strip_object_files(ctx.archs[0],
                                           env,
                                           build_dir=ctx.build_dir)
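
# The VERSION_<module> convention above pins a module via the environment:
# exporting VERSION_numpy=1.16.6 makes the generated requirements.txt contain
# "numpy==1.16.6". A minimal reproduction of that logic (the module name is
# illustrative):
import os

def requirement_line(module):
    key = 'VERSION_' + module
    if key in os.environ:
        return '{}=={}\n'.format(module, os.environ[key])
    return '{}\n'.format(module)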
Esempio n. 25
0
def main():
    logger = logging.getLogger('dobutton')

    s = None
    while not s:
        for serial_dev in set(itertools.chain.from_iterable(comports())):
            try:
                # print("Trying {0}".format(serial_dev))
                s = serial.Serial(serial_dev, baudrate=9600, timeout=0.25)
                connected = False
                tries = 0
                while tries < 3:
                    if "IM A BUTTON" in s.readline():
                        # print("FOUND A BUTTON")
                        connected = True
                        while True:
                            write(s, SUCCESS)
                            time.sleep(.1)
                            if s.readline().strip() == "SUCCESS":
                                print("DoButton found!")
                                break
                        break
                    tries += 1
                else:
                    s.close()
                    s = None  # keep searching; a closed port is not a button
                    continue

                if connected:
                    break
            except serial.serialutil.SerialException as exc:
                # Couldn't connect, skip it.
                # print("Couldn't connect to", serial_dev)
                # print(exc.message)
                continue
        else:
            # Tried all the devices, so sleep a bit before trying again.
            time.sleep(1)
            print(".", end='')
            sys.stdout.flush()

    # Get any button messages:
    while True:
        try:
            line = s.readline()
        except (OSError, serial.serialutil.SerialException, ValueError) as exc:
            # print(exc.message)
            # Arduino got unplugged, so start over.
            return main()

        if "PRESSED" in line:
            line = None
            # Send a message to the button that it's working.
            write(s, WORKING)

            # Call whatever script(s) are in ~/.dobutton/.
            for command in get_executable():
                try:
                    # Note: Don't background these, so they run in sequence
                    # and the "WORKING" message means something
                    print("Running", command)
                    run = sh.Command(command)
                    run()
                    print("Finished successfully.")
                except sh.ErrorReturnCode as exc:
                    # If any error in the executables...

                    # Log it
                    print(exc.message)
                    logger.error("ERROR: {0} exited with a nonzero exit status.".format(command))
                    logger.error(exc.stdout)
                    logger.error(exc.stderr)

                    # And inform the button.
                    write(s, ERROR)

                    # And give up running more commands.
                    break
            else:
                # When they're all done, and they all succeed, send the success message.
                write(s, SUCCESS)
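
# A minimal sketch of the ErrorReturnCode fields the handler above logs: sh
# attaches the failed command line, the exit code, and the captured output to
# the exception. The command here is illustrative.
import sh

try:
    sh.Command("false")()
except sh.ErrorReturnCode as exc:
    print(exc.full_cmd)   # the command line that failed
    print(exc.exit_code)  # 1
    print(exc.stdout)     # captured bytes (b'' here)
    print(exc.stderr)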
Esempio n. 26
0
    def run(self,
            i,
            restart_file=None,
            use_restart=True,
            multi_node=False,
            num_cores=8,
            overwrite_data=False,
            save_run=False,
            run_idb=False,
            nice_score=0,
             mpirun_opts=''):
        """Run the model.

            `num_cores`: Number of MPI cores to distribute over.
            `restart_file` (optional): A path to a valid restart archive.  If None and `use_restart=True`,
                                       the restart file from run (i-1) will be used.
            `save_run`:  If True, copy the entire working directory over to GFDL_DATA
                         so that the run can be rerun without the python script.
                         (This uses a lot of data storage!)

        """

        self.clear_rundir()

        indir = P(self.rundir, 'INPUT')
        outdir = P(self.datadir, self.runfmt % i)
        resdir = P(self.rundir, 'RESTART')

        if self.check_for_existing_output(i):
            if overwrite_data:
                self.log.warning(
                    'Data for run %d already exists and overwrite_data is True. Overwriting.'
                    % i)
                sh.rm('-r', outdir)
            else:
                self.log.warning(
                    'Data for run %d already exists but overwrite_data is False. Stopping.'
                    % i)
                return False

        # make the output run folder and copy over the input files
        mkdir([indir, resdir, self.restartdir])

        self.codebase.write_source_control_status(
            P(self.rundir, 'git_hash_used.txt'))
        self.write_namelist(self.rundir)
        self.write_field_table(self.rundir)
        self.write_diag_table(self.rundir)

        for filename in self.inputfiles:
            sh.cp([filename, P(indir, os.path.split(filename)[1])])

        if multi_node:
            mpirun_opts += ' -bootstrap pbsdsh -f $PBS_NODEFILE'

        if use_restart and not restart_file and i == 1:
            # no restart file specified, but we are at first run number
            self.log.warning(
                'use_restart=True, but restart_file not specified.  As this is run 1, assuming spin-up from namelist stated initial conditions so continuing.'
            )
            use_restart = False

        if use_restart:
            if not restart_file:
                # get the restart from previous iteration
                restart_file = self.get_restart_file(i - 1)
            if not os.path.isfile(restart_file):
                self.log.error('Restart file not found, expecting file %r' %
                               restart_file)
                raise IOError('Restart file not found, expecting file %r' %
                              restart_file)
            else:
                self.log.info('Using restart file %r' % restart_file)

            self.extract_restart_archive(restart_file, indir)
        else:
            self.log.info('Running without restart file')
            restart_file = None

        vars = {
            'rundir': self.rundir,
            'execdir': self.codebase.builddir,
            'executable': self.codebase.executable_name,
            'env_source': self.env_source,
            'mpirun_opts': mpirun_opts,
            'num_cores': num_cores,
            'run_idb': run_idb,
            'nice_score': nice_score
        }

        runscript = self.templates.get_template('run.sh')

        # employ the template to create a runscript
        t = runscript.stream(**vars).dump(P(self.rundir, 'run.sh'))

        def _outhandler(line):
            handled = self.emit('run:output', self, line)
            if not handled:  # only log the output when no event handler is used
                self.log_output(line)

        self.emit('run:ready', self, i)
        self.log.info("Beginning run %d" % i)
        try:
            #for line in sh.bash(P(self.rundir, 'run.sh'), _iter=True, _err_to_out=True):
            proc = sh.bash(P(self.rundir, 'run.sh'),
                           _bg=True,
                           _out=_outhandler,
                           _err_to_out=True)
            self.log.info('process running as {}'.format(proc.process.pid))
            proc.wait()
            completed = True
        except KeyboardInterrupt as e:
            self.log.error("Manual interrupt, killing process.")
            proc.process.terminate()
            proc.wait()
            #log.info("Cleaning run directory.")
            #self.clear_rundir()
            raise e
        except sh.ErrorReturnCode as e:
            completed = False
            self.log.error("Run %d failed. See log for details." % i)
            self.log.error("Error: %r" % e)
            self.emit('run:failed', self)
            raise FailedRunError()

        self.emit('run:complete', self, i)
        self.log.info('Run %d complete' % i)
        mkdir(outdir)

        if num_cores > 1:
            # use postprocessing tool to combine the output from several cores
            codebase_combine_script = P(self.codebase.builddir,
                                        'mppnccombine_run.sh')
            if not os.path.exists(codebase_combine_script):
                self.log.warning(
                    'combine script does not exist in the commit you are running Isca from.  Falling back to using $GFDL_BASE mppnccombine_run.sh script'
                )
                sh.ln('-s',
                      P(GFDL_BASE, 'postprocessing', 'mppnccombine_run.sh'),
                      codebase_combine_script)
            combinetool = sh.Command(codebase_combine_script)
            for file in self.diag_table.files:
                netcdf_file = '%s.nc' % file
                filebase = P(self.rundir, netcdf_file)
                combinetool(self.codebase.builddir, filebase)
                # copy the combined netcdf file into the data archive directory
                sh.cp(filebase, P(outdir, netcdf_file))
                # remove all netcdf fragments from the run directory
                sh.rm(glob.glob(filebase + '*'))
                self.log.debug('%s combined and copied to data directory' %
                               netcdf_file)

            for restart in glob.glob(P(resdir, '*.res.nc.0000')):
                restartfile = restart.replace('.0000', '')
                combinetool(self.codebase.builddir, restartfile)
                sh.rm(glob.glob(restartfile + '.????'))
                self.log.debug("Restart file %s combined" % restartfile)
            self.emit('run:combined', self, i)
        else:
            for file in self.diag_table.files:
                netcdf_file = '%s.nc' % file
                filebase = P(self.rundir, netcdf_file)
                sh.cp(filebase, P(outdir, netcdf_file))
                sh.rm(glob.glob(filebase + '*'))
                self.log.debug('%s copied to data directory' % netcdf_file)

        # make the restart archive and delete the restart files
        self.make_restart_archive(self.get_restart_file(i), resdir)
        sh.rm('-r', resdir)

        if save_run:
            # copy the complete run directory to GFDL_DATA so that the run can
            # be recreated without the python script if required
            mkdir(resdir)
            sh.cp(['-a', self.rundir, outdir])
        else:
            # just save some useful diagnostic information
            self.write_namelist(outdir)
            self.write_field_table(outdir)
            self.write_diag_table(outdir)
            self.codebase.write_source_control_status(
                P(outdir, 'git_hash_used.txt'))

        self.clear_rundir()
        self.emit('run:finished', self, i)
        return True
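
# A minimal sketch of the background-run pattern used above: _bg=True returns
# immediately with a handle whose .wait() blocks until exit, while _out
# streams each output line to a handler, so the parent can still react to
# interrupts. The script run here is illustrative.
import sh

def handler(line):
    print(line, end='')

proc = sh.bash("-c", "echo step 1; sleep 1; echo step 2",
               _bg=True, _out=handler, _err_to_out=True)
proc.wait()  # raises sh.ErrorReturnCode on a nonzero exit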
Esempio n. 27
0
def regenerate_files(generate_tests_bin, test_config):
    print("Regenerating files...")
    sh.Command(generate_tests_bin)(
        yaml_config_path=test_config,
        _fg=True)
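
# Note: sh converts keyword arguments into long options (underscores become
# dashes), so the call above executes, in the foreground:
#   <generate_tests_bin> --yaml-config-path=<test_config>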