Example #1
def helper(tree=None, source='src', options=lambda _: []):
    """Run main in clean environments, with various '--source' options.

    Each invocation is done against a separate, temporary directory.
    main is currently run 3 times:
    - from the current working directory, with `--source tmpdir/src`;
    - from `tmpdir`, with `--source src`;
    - from `tmpdir/src`, without --source argument.
    """
    homedir = Path(tempfile.mkdtemp())
    with home(homedir):
        with chdir(homedir):
            with directory_tree(tree) as tmpdir:
                main('--source', tmpdir / source, *options(tmpdir))
                yield tmpdir

            with directory_tree(tree) as tmpdir:
                with chdir(tmpdir):
                    main('--source', source, *options(tmpdir))
                    yield tmpdir

            with directory_tree(tree) as tmpdir:
                with chdir(tmpdir / source):
                    main(*options(tmpdir))
                    yield tmpdir

    # Implicitly asserts that `homedir` (== `cwd`) is empty
    homedir.rmdir()
Example #2
def helper(tree=None, source='src', options=lambda _: []):
    """Run main in clean environments, with various '--source' options.

    Each invocation is done against a separate, temporary directory.
    main is currently run 3 times:
    - from the current working directory, with `--source tmpdir/src`;
    - from `tmpdir`, with `--source src`;
    - from `tmpdir/src`, without --source argument.
    """
    homedir = Path(tempfile.mkdtemp())
    with home(homedir):
        with chdir(homedir):
            with directory_tree(tree) as tmpdir:
                main('--source', tmpdir / source, *options(tmpdir))
                yield tmpdir

            with directory_tree(tree) as tmpdir:
                with chdir(tmpdir):
                    main('--source', source, *options(tmpdir))
                    yield tmpdir

            with directory_tree(tree) as tmpdir:
                with chdir(tmpdir / source):
                    main(*options(tmpdir))
                    yield tmpdir

    # Implicitly asserts that `homedir` (== `cwd`) is empty
    homedir.rmdir()
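A hypothetical consumer of this generator, purely for illustration (the tree contents and file name below are assumptions, not taken from the project): iterating over helper() yields the temporary directory used for each of the three main() invocations.

def test_runs_main_in_three_environments():
    # Illustrative fixture tree; 'foo.txt' is a made-up file name.
    tree = {'src': {'foo.txt': 'contents'}}
    for tmpdir in helper(tree=tree):
        # The source file should still exist after each main() run.
        assert (tmpdir / 'src' / 'foo.txt').is_file()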
Example #3
def test_json_resolution():
    with directory_tree({
            'src': {
                'emanate.json':
                json.dumps({
                    "source": ".",
                    "destination": "../dest"
                }, ),
            },
    }) as tmpdir:

        config_cwd = Config.from_json(tmpdir / 'src' / 'emanate.json')
        assert config_cwd.resolved

        with chdir(tmpdir):
            config_tmp = Config.from_json(Path('src') / 'emanate.json')
            assert config_tmp.resolved

        with chdir(tmpdir / 'src'):
            config_src = Config.from_json(Path('emanate.json'))
            assert config_src.resolved

        assert config_cwd == config_tmp == config_src
        assert config_cwd.destination.is_absolute()
        assert config_cwd.source.is_absolute()
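The test above builds its fixture with directory_tree(), passing a nested dict in which sub-dicts become directories and strings become file contents. A minimal sketch of such a helper, assuming a contextlib/tempfile-based implementation (the project's actual helper may differ):

import tempfile
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def directory_tree(tree):
    """Materialise a nested dict as files and directories in a temp dir."""
    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)

        def build(node, prefix):
            for name, value in (node or {}).items():
                if isinstance(value, dict):
                    (prefix / name).mkdir()
                    build(value, prefix / name)
                else:
                    (prefix / name).write_text(value)

        build(tree, root)
        yield root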
Example #4
 def test_utils_chdir(self):
     oldcwd = os.getcwd()
     with utils.chdir('/tmp'):
         self.assertEqual(os.getcwd(), '/tmp')
     self.assertEqual(os.getcwd(), oldcwd)
     with self.assertRaises(ValueError):
         with utils.chdir(os.devnull):
             pass # pragma: no cover
Example #5
 def test_utils_chdir(self):
     oldcwd = os.getcwd()
     with utils.chdir('/tmp'):
         self.assertEqual(os.getcwd(), '/tmp')
     self.assertEqual(os.getcwd(), oldcwd)
     with self.assertRaises(ValueError):
         with utils.chdir(os.devnull):
             pass  # pragma: no cover
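Both tests exercise a chdir() context manager that restores the previous working directory on exit and rejects paths that are not directories (hence the ValueError for os.devnull). A minimal sketch of such a helper, assuming a contextlib-based implementation rather than the actual utils.chdir:

import os
from contextlib import contextmanager

@contextmanager
def chdir(path):
    """Temporarily change the working directory, restoring it on exit."""
    if not os.path.isdir(path):
        # Mirrors the ValueError the test expects for os.devnull.
        raise ValueError('not a directory: %s' % path)
    oldcwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(oldcwd)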
Example #6
    def update(self, rev = None):
        assert rev == None

        with chdir(self.path()):
            Run(['git', 'pull', 'origin', 'master'])

        env = os.environ.copy()
        with chdir(self.path()):
            Run(['gclient', 'sync'], self.make_env())
Example #7
def install(should_identify=True):
    # Based on ideas from https://github.com/harvimt/quamash/blob/master/.travis.yml
    if should_identify:
        system_identify()
    td = Travis_Dispatcher()
    xqt(
      # Cached Downloads
      'sudo mkdir -p /downloads',
      'sudo chmod a+rw /downloads')
    sip_ver = 'sip-4.17'
    if not isfile('/downloads/sip.tar.gz'):
        wget('http://downloads.sourceforge.net/project/pyqt/sip/{}/{}'.
             format(sip_ver, _gz(sip_ver)), '/downloads/sip.tar.gz')
    # _`pyqt_ver`: Select a PyQt version. See also qt5_Linux_ and qt5_OS_X_.
    pyqt_ver = '5.5.1'
    pyqt_gpl_ver = 'PyQt-gpl-' + pyqt_ver
    if not isfile('/downloads/pyqt5.tar.gz'):
        wget('http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-{}/{}'.
             format(pyqt_ver, _gz(pyqt_gpl_ver)), '/downloads/pyqt5.tar.gz')
    # Builds
    xqt('sudo mkdir -p /builds',
      'sudo chmod a+rw /builds')

    # Qt5
    td.qt5()

    # SIP. With Linux or OS_X, don't use the package manager to install these,
    # since they're installed for the system python, not the pyenv version
    # we're testing with.
    with pushd('/builds'):
        xqt('tar xzf /downloads/sip.tar.gz --keep-newer-files')
        chdir(sip_ver)
        xqt('python configure.py',
          'make',
          'sudo make install')

    # PyQt5
    with pushd('/builds'):
        xqt('tar xzf /downloads/pyqt5.tar.gz --keep-newer-files')
        chdir(pyqt_gpl_ver)
        td.pyqt5_configure()
        xqt('make',
          'sudo make install')

    # PCRE
    td.pcre()

    # Qutepart
    if build_os == 'Linux':
        set_display()
        xqt('sh -e /etc/init.d/xvfb start')
    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py install')
Example #8
def get_jobs(repo):
  """Obtain the list of jobs from the given repo."""
  # Maintain a copy of the repo in the temp dir.
  if not os.path.isdir(TMP_DIR):
    os.mkdir(TMP_DIR)
  with utils.chdir(TMP_DIR):
    dirname = repo.split('/')[-1]
    if not os.path.isdir(dirname):
      subprocess.check_call([
          utils.GIT, 'clone', '--mirror', repo, dirname])
    with utils.chdir(dirname):
      subprocess.check_call([utils.GIT, 'remote', 'update'])
      jobs = json.loads(subprocess.check_output([
          utils.GIT, 'show', 'master:infra/bots/jobs.json']))
      return (BUCKET_SKIA_INTERNAL, jobs)
Example #9
def get_jobs(repo):
  """Obtain the list of jobs from the given repo."""
  # Maintain a copy of the repo in the temp dir.
  if not os.path.isdir(TMP_DIR):
    os.mkdir(TMP_DIR)
  with utils.chdir(TMP_DIR):
    dirname = repo.split('/')[-1]
    if not os.path.isdir(dirname):
      subprocess.check_call([
          utils.GIT, 'clone', '--mirror', repo, dirname])
    with utils.chdir(dirname):
      subprocess.check_call([utils.GIT, 'remote', 'update'])
      jobs = json.loads(subprocess.check_output([
          utils.GIT, 'show', 'master:infra/bots/jobs.json']))
      return (BUCKET_SKIA_INTERNAL, jobs)
Example #10
def test_push_latest(loo):
    sys.argv = [
        'vindaloo', '--noninteractive', 'push', '--latest', 'dev', 'test/foo'
    ]

    loo.cmd.return_value.stdout.decode.return_value.split.return_value = [
        'foo-registry.com/test/foo:1.0.0',
        'foo-registry.com/test/bar:2.0.0',
    ]

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the arguments docker was called with
    assert len(loo.cmd.call_args_list) == 3
    push_cmd = loo.cmd.call_args_list[1][0][0]
    push2_cmd = loo.cmd.call_args_list[2][0][0]

    assert push_cmd == [
        'docker',
        'push',
        'foo-registry.com/test/foo:1.0.0',
    ]
    assert push2_cmd == [
        'docker',
        'push',
        'foo-registry.com/test/foo:latest',
    ]
Example #11
    def upload_new_version(self, target_dir, commit=False):
        """Upload a new version and update the version file for the asset."""
        version = self.get_next_version()
        target_dir = os.path.abspath(target_dir)
        with utils.tmp_dir():
            zip_file = os.path.join(os.getcwd(), '%d.zip' % version)
            zip_utils.zip(target_dir, zip_file, blacklist=ZIP_BLACKLIST)
            gs_path = GS_PATH_TMPL % (self._gs_subdir, str(version))
            self._gs.copy(zip_file, gs_path)

        def _write_version():
            with open(self.version_file, 'w') as f:
                f.write(str(version))
            subprocess.check_call([utils.GIT, 'add', self.version_file])

        with utils.chdir(SKIA_DIR):
            if commit:
                with utils.git_branch():
                    _write_version()
                    subprocess.check_call([
                        utils.GIT, 'commit', '-m',
                        'Update %s version' % self._name
                    ])
                    subprocess.check_call(
                        [utils.GIT, 'cl', 'upload', '--bypass-hooks'])
            else:
                _write_version()
Example #12
    def patch(self):
        with utils.chdir(self.folder):
            # Hack 1: Stop treating warnings that are currently present as errors.
            Run([
                "sed", "-i.bac",
                "s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/",
                "Source/JavaScriptCore/Configurations/Base.xcconfig"
            ])
            Run([
                "sed", "-i.bac",
                "s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/",
                "Source/bmalloc/Configurations/Base.xcconfig"
            ])
            Run([
                "sed", "-i.bac",
                "s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/",
                "Source/WTF/Configurations/Base.xcconfig"
            ])
            Run([
                "sed", "-i.bac",
                "s/std::numeric_limits<unsigned char>::max()/255/",
                "Source/bmalloc/bmalloc/SmallLine.h"
            ])
            #Run(["sed","-i.bac","s/std::numeric_limits<unsigned char>::max()/255/","Source/bmalloc/bmalloc/SmallRun.h"])

            # Hack 2: This check currently fails; disable it so the build still succeeds.
            os.remove("Tools/Scripts/check-for-weak-vtables-and-externals")
Example #13
def test_deploy_to_outdir(loo, test_temp_dir):
    # fake arguments

    sys.argv = [
        'vindaloo', '--noninteractive', 'deploy-dir',
        '--apply-output-dir={}'.format(test_temp_dir), 'dev', 'cluster1'
    ]

    loo.cmd.return_value.stdout = b'{}'

    with chdir('tests/test_roots/configmaps'):
        loo.main()

    # check the arguments docker and kubectl were called with
    assert len(loo.cmd.call_args_list) == 1
    config_map_create_cmd = loo.cmd.call_args_list[0][0][0]

    assert config_map_create_cmd[:4] == [
        'kubectl',
        'create',
        'configmap',
        'test-config-map',
    ]

    assert os.path.isfile(
        os.path.join(test_temp_dir, "test-config-map_configmap.yaml"))
Example #14
def test_build_latest(loo):
    sys.argv = ['vindaloo', '--noninteractive', 'build', '--latest', 'dev', 'test/foo']

    rev_parse_mock = mock.Mock()
    rev_parse_mock.stdout = b'd6ee34ae'
    build_mock = mock.Mock()
    build_mock.returncode = 0
    loo.cmd.side_effect = [rev_parse_mock, build_mock]

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the parameters docker was called with
    rev_parse_cmd = loo.cmd.call_args_list[0][0][0]
    build_cmd = loo.cmd.call_args_list[1][0][0]

    assert rev_parse_cmd == [
        'git',
        'rev-parse',
        '--short=8',
        'HEAD'
    ]
    assert build_cmd == [
        'docker',
        'build',
        '-t', 'foo-registry.com/test/foo:d6ee34ae-dev',
        '--no-cache',
        '-t', 'foo-registry.com/test/foo:latest',
        '-f', 'Dockerfile',
        '.'
    ]

    # check generated Dockerfile
    with open('tests/test_roots/simple/Dockerfile', 'r') as fp:
        assert fp.read() == """FROM debian
Example #15
 def identify(self):
     with chdir(self.path()):
         output = Run(['git', 'log', '-1'])
         m = re.match(r"commit ([0-9a-z]+)\s*", output)
         if m == None:
             raise Exception('unknown output from git: ' + output)
         return m.group(1)
Example #16
def build_valgrind():
  if os.path.isfile(os.path.join(INSTALL_DIR, 'bin', 'valgrind')):
    return
  with utils.chdir(os.path.join(TEMP_DIR, VALGRIND)):
    subprocess.check_call(['./configure', '--prefix=%s' % INSTALL_DIR])
    subprocess.check_call(['make'])
    subprocess.check_call(['make', 'install'])
Example #17
def test_deploy_to_outdir(loo, test_temp_dir):
    # fake arguments

    sys.argv = [
        'vindaloo', '--noninteractive', 'deploy-dir',
        '--apply-output-dir={}'.format(test_temp_dir), 'dev', 'cluster1'
    ]

    loo.cmd.return_value.stdout = b'{}'

    with chdir('tests/test_roots/configmap'):
        loo.main()

    assert os.path.isfile(
        os.path.join(test_temp_dir, "test-config-map_configmap.json"))
    with open(os.path.join(test_temp_dir, "test-config-map_configmap.json"),
              'r') as file:
        configmap = json.loads(file.read())
        assert configmap['metadata']['name'] == 'test-config-map'
        assert configmap['data']['file_config_key'] == (
            'some_config_value=123\n'
            'another_config=one,two,three\n'
            'template_config=This value depends on the selected environment.\n'
        )
        assert base64.decodebytes(
            configmap['binaryData']
            ['simple_binary_key'].encode()) == b'\x76\x69\x6b\x79'
        with open(
                'tests/test_roots/configmap/k8s/templates/binary_config.conf',
                'br') as binary_file:
            base64_content = configmap['binaryData']['binary_file_config_key']
            assert base64.decodebytes(
                base64_content.encode()) == binary_file.read()
Example #18
    def make(self):
        if self.config == "android":
            target_cpu = "arm"
        elif self.config == "32bit":
            target_cpu = "x86"
        elif self.config == "64bit":
            target_cpu = "x64"
        else:
            raise Exception("Unknown config in V8Builder.make!")

        objdir = os.path.realpath(self.objdir())
        if not os.path.isdir(objdir):
            out_dir = os.path.join(self.folder, 'v8', 'out')
            if not os.path.isdir(out_dir):
                os.mkdir(out_dir)
            os.mkdir(objdir)

        with utils.chdir(os.path.join(self.folder, 'v8')):
            config = [
                'is_debug = false', 'target_cpu = "{}"'.format(target_cpu)
            ]

            if self.config == "arm":
                config += [
                    'symbol_level = 1', 'v8_android_log_stdout = true',
                    'target_os = "android"'
                ]

            args = 'gn gen ' + objdir + ' --args=\'' + " ".join(config) + '\''
            Run(args, self.env.get(), shell=True)

            Run(["ninja", "-C", objdir, "d8"], self.env.get())
Example #19
def build_valgrind():
  if os.path.isfile(os.path.join(INSTALL_DIR, 'bin', 'valgrind')):
    return
  with utils.chdir(os.path.join(TEMP_DIR, VALGRIND)):
    subprocess.check_call(['./configure', '--prefix=%s' % INSTALL_DIR])
    subprocess.check_call(['make'])
    subprocess.check_call(['make', 'install'])
Example #20
def test_deploy_configmap(loo):
    # fake arguments
    sys.argv = ['vindaloo', '--noninteractive', 'deploy', 'dev', 'cluster1']

    loo.cmd.return_value.stdout = b'{}'

    with chdir('tests/test_roots/configmaps'):
        loo.main()

    # check the arguments docker and kubectl were called with
    assert len(loo.cmd.call_args_list) == 4
    auth_cmd = loo.cmd.call_args_list[0][0][0]
    use_context_cmd = loo.cmd.call_args_list[1][0][0]
    config_map_create_cmd = loo.cmd.call_args_list[2][0][0]
    apply_cmd = loo.cmd.call_args_list[3][0][0][0:3]

    assert auth_cmd == ['kubectl', 'auth', 'can-i', 'get', 'deployment']
    assert use_context_cmd == [
        'kubectl',
        'config',
        'use-context',
        'foo-dev:cluster1',
    ]
    assert apply_cmd == [
        'kubectl',
        'apply',
        '-f',
    ]

    assert config_map_create_cmd[:4] == [
        'kubectl',
        'create',
        'configmap',
        'test-config-map',
    ]
Example #21
    def make(self):
        if self.config == "android":
            target_cpu = "arm"
        elif self.config == "32bit":
            target_cpu = "x86"
        elif self.config == "64bit":
            target_cpu = "x64"
        else:
            raise Exception("Unknown config in V8Builder.make!")

        objdir = os.path.realpath(self.objdir())
        if not os.path.isdir(objdir):
            out_dir = os.path.join(self.folder, 'v8', 'out')
            if not os.path.isdir(out_dir):
                os.mkdir(out_dir)
            os.mkdir(objdir)

        with utils.chdir(os.path.join(self.folder, 'v8')):
            config = [
                'is_debug = false',
                'target_cpu = "{}"'.format(target_cpu)
            ]

            if self.config == "arm":
                config += [
                    'symbol_level = 1',
                    'v8_android_log_stdout = true',
                    'target_os = "android"'
                ]

            args = 'gn gen ' + objdir + ' --args=\'' + " ".join(config) + '\''
            Run(args, self.env.get(), shell=True)

            Run(["ninja", "-C", objdir, "d8"], self.env.get())
Example #22
def test_push_all(loo):
    # fake arguments
    sys.argv = ['vindaloo', '--noninteractive', 'push', 'dev']

    rev_parse_mock = mock.Mock()
    rev_parse_mock.stdout = b'd6ee34ae'

    images_mock = mock.Mock()
    images_mock.stdout.decode.return_value.split.return_value = [
        'foo-registry.com/test/foo:d6ee34ae-dev',
        'foo-registry.com/test/bar:2.0.0',
    ]

    push_mock = mock.Mock()
    push_mock.returncode = 0

    loo.cmd.side_effect = [rev_parse_mock, images_mock, push_mock, push_mock]

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the arguments docker was called with
    assert len(loo.cmd.call_args_list) == 4
    push_cmd = loo.cmd.call_args_list[2][0][0]
    push2_cmd = loo.cmd.call_args_list[3][0][0]
    assert push_cmd == [
        'docker',
        'push',
        'foo-registry.com/test/foo:d6ee34ae-dev',
    ]
    assert push2_cmd == [
        'docker',
        'push',
        'foo-registry.com/test/bar:2.0.0',
    ]
Example #23
def gen_toolchain(chrome_path, msvs_version, isolate_file):
  """Update the VS toolchain, isolate it, and return the isolated hash."""
  with utils.chdir(chrome_path):
    subprocess.check_call([utils.GCLIENT, 'sync'])
    depot_tools = subprocess.check_output([
        'python', os.path.join('build', 'find_depot_tools.py')]).rstrip()
    with utils.git_branch():
      vs_toolchain_py = os.path.join('build', 'vs_toolchain.py')
      env = os.environ.copy()
      env['GYP_MSVS_VERSION'] = msvs_version
      subprocess.check_call(['python', vs_toolchain_py, 'update'], env=env)
      output = subprocess.check_output(['python', vs_toolchain_py,
                                        'get_toolchain_dir'], env=env).rstrip()
      src_dir = get_toolchain_dir(output)
      # Mock out absolute paths in win_toolchain.json.
      win_toolchain_utils.abstract(os.path.join('build', 'win_toolchain.json'),
                                   os.path.dirname(depot_tools))

    # Isolate the toolchain. Assumes we're running on Windows, since the above
    # would fail otherwise.
    isolate_file_dirname = os.path.dirname(isolate_file)
    toolchain_relpath = os.path.relpath(src_dir, isolate_file_dirname)
    chrome_relpath = os.path.relpath(os.getcwd(), isolate_file_dirname)
    depot_tools_relpath = os.path.relpath(depot_tools, isolate_file_dirname)
    isolate = os.path.join(
        os.curdir, 'tools', 'luci-go', 'win64', 'isolate.exe')
    isolate_cmd = [isolate, 'archive', '--quiet',
        '--isolate-server', 'https://isolateserver.appspot.com',
        '-i', isolate_file,
        '-s', 'win_toolchain_%s.isolated' % msvs_version,
        '--extra-variable', 'WIN_TOOLCHAIN_DIR=%s' % toolchain_relpath,
        '--extra-variable', 'DEPOT_TOOLS_DIR=%s' % depot_tools_relpath,
        '--extra-variable', 'CHROME_DIR=%s' % chrome_relpath]
    isolate_out = subprocess.check_output(isolate_cmd).rstrip()
    return shlex.split(isolate_out)[0]
Example #24
def test_push_not_built_image(loo):
    sys.argv = ['vindaloo', '--noninteractive', 'push', 'dev', 'test/foo']

    rev_parse_mock = mock.Mock()
    rev_parse_mock.stdout = b'd6ee34ae'

    images_mock = mock.Mock()
    images_mock.stdout.decode.return_value.split.return_value = [
        'foo-registry.com/test/foo:0.0.9',  # a different version was built
        'foo-registry.com/test/bar:2.0.0',
    ]

    loo.cmd.side_effect = [rev_parse_mock, images_mock]

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the arguments docker was called with
    assert len(loo.cmd.call_args_list) == 2
    assert loo.cmd.call_args_list[0][0][0] == [
        'git', 'rev-parse', '--short=8', 'HEAD'
    ]
    assert loo.cmd.call_args_list[1][0][0] == [
        'docker', 'images', '--format', '{{.Repository}}:{{.Tag}}'
    ]
Example #25
def create_asset(target_dir):
  """Create the asset."""
  with utils.chdir(target_dir):
    tarball = 'clang.tgz'
    subprocess.check_call(['wget', '-O', tarball, GS_URL])
    subprocess.check_call(['tar', 'zxvf', tarball])
    os.remove(tarball)
Example #26
def create_asset(target_dir):
  """Create the asset."""
  with utils.chdir(target_dir):
    tarball = 'clang.tgz'
    subprocess.check_call(['wget', '-O', tarball, GS_URL])
    subprocess.check_call(['tar', 'zxvf', tarball])
    os.remove(tarball)
Example #27
def test_versions_not_match(capsys):
    # fake arguments
    sys.argv = ['vindaloo', 'versions']

    def x(*args, **kwargs):
        z = mock.Mock()
        z.returncode = 0
        return z

    calls = [mock.Mock() for _ in range(6)]
    for call in calls:
        call.returncode = 0
    calls[0].stdout = b'd6ee34ae'
    calls[3].stdout = b'foo-registry.com/test/foo:d6ee34ae-dev foo-registry.com/test/bar:2.0.0'  # cluster1
    calls[5].stdout = b'foo-registry.com/test/foo:0.0.9 foo-registry.com/test/bar:2.0.0'  # cluster2 DIFFERS

    loo = Vindaloo()
    loo.cmd = mock.Mock()
    loo.cmd.side_effect = calls

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the arguments kubectl was called with
    assert len(loo.cmd.call_args_list) == 6

    assert loo.cmd.call_args_list[0][0][0] == [
        'git', 'rev-parse', '--short=8', 'HEAD'
    ]
    assert loo.cmd.call_args_list[1][0][0] == [
        'kubectl', 'auth', 'can-i', 'get', 'deployment'
    ]
    assert loo.cmd.call_args_list[2][0][0][:3] == [
        'kubectl',
        'config',
        'use-context',
    ]
    assert loo.cmd.call_args_list[2][0][0][3] == 'foo-dev:cluster1'
    assert loo.cmd.call_args_list[3][0][0] == [
        'kubectl', 'get', 'deployment', 'foobar',
        '-o=jsonpath=\'{$.spec.template.spec.containers[*].image}\''
    ]
    assert loo.cmd.call_args_list[4][0][0][:3] == [
        'kubectl',
        'config',
        'use-context',
    ]
    assert loo.cmd.call_args_list[4][0][0][3] == 'foo-dev:cluster2'
    assert loo.cmd.call_args_list[5][0][0] == [
        'kubectl', 'get', 'deployment', 'foobar',
        '-o=jsonpath=\'{$.spec.template.spec.containers[*].image}\''
    ]

    output = capsys.readouterr().out.strip()

    assert '[DIFFERS]' in output
    assert 'test/foo' in output
    assert 'test/bar' in output
Example #28
def test_convert_deployment():
    with chdir('tests/test_roots/convert'):
        with open('deployment.yaml', 'r') as fp:
            manifest_data = yaml.load(fp, Loader=yaml.Loader)
        res = get_obj_repr_from_dict(manifest_data)

    assert res == EXPECTED
Example #29
    def upload_new_version(self, target_dir, commit=False, extra_tags=None):
        """Upload a new version and update the version file for the asset."""
        version = self.get_next_version()
        self._store.upload(self._name,
                           version,
                           target_dir,
                           extra_tags=extra_tags)

        def _write_version():
            with open(self.version_file, 'w') as f:
                f.write(str(version))
            subprocess.check_call([utils.GIT, 'add', self.version_file])

        with utils.chdir(SKIA_DIR):
            if commit:
                with utils.git_branch():
                    _write_version()
                    subprocess.check_call([
                        utils.GIT, 'commit', '-m',
                        'Update %s version' % self._name
                    ])
                    subprocess.check_call(
                        [utils.GIT, 'cl', 'upload', '--bypass-hooks'])
            else:
                _write_version()
Example #30
    def benchmark(self, shell, env, args):
        with utils.chdir("/tmp/"):
            full_args = [shell]
            if args:
                full_args.extend(args)
            full_args.append('build/ts/shell.js')
            if "WebKit" in shell:
                full_args.append('--')
            if "v8" in shell:
                full_args.append('--')
            full_args.append('-x')

            tests = []
            totalscore = 0
            bench_path = os.path.join(utils.config.BenchmarkPath, self.folder)
            for name in ["crypto", "deltablue", "raytrace", "richards", "splay"]:
                output = utils.RunTimedCheckOutput(full_args +
                             [os.path.join(bench_path, name+".swf")], env=env)

                lines = output.splitlines()

                for x in lines:
                    m = re.search("NotifyScore (\d+)", x)
                    if not m:
                        continue
                    score = m.group(1)
                    totalscore += int(score)
                    tests.append({ 'name': name, 'time': score})
                    print(score + '    - ' + name)

            if len(tests) > 0:
                tests.append({ 'name': '__total__', 'time': totalscore / len(tests)})
            return tests
Example #31
 def identify(self):
     with chdir(self.path()):
         output = Run(['svn', 'info'])
         m = re.search("Revision: ([0-9]+)", output)
         if m == None:
             raise Exception('unknown output from svn: ' + output)
         return m.group(1)
Example #32
def test_deploy(loo):
    # fake arguments
    sys.argv = ['vindaloo', '--noninteractive', 'deploy', 'dev', 'cluster1']

    loo.cmd.return_value.stdout.decode.return_value.split.return_value = [
        'foo-registry.com/test/foo:1.0.0',
        'foo-registry.com/test/bar:2.0.0',
    ]

    with chdir('tests/test_roots/simple'):
        loo.main()

    assert vindaloo.app.args.cluster == 'cluster1'

    # check the arguments docker and kubectl were called with
    assert len(loo.cmd.call_args_list) == 3
    auth_cmd = loo.cmd.call_args_list[0][0][0]
    use_context_cmd = loo.cmd.call_args_list[1][0][0]
    apply_cmd = loo.cmd.call_args_list[2][0][0][0:3]

    assert auth_cmd == ['kubectl', 'auth', 'can-i', 'get', 'deployment']
    assert use_context_cmd == [
        'kubectl',
        'config',
        'use-context',
        'foo-dev:cluster1',
    ]
    assert apply_cmd == [
        'kubectl',
        'apply',
        '-f',
    ]
Example #33
def test_versions_match(capsys):
    # fake arguments
    sys.argv = ['vindaloo', 'versions']

    calls = [mock.Mock() for _ in range(5)]
    for call in calls:
        call.returncode = 0
    calls[2].stdout = b'foo-registry.com/test/foo:1.0.0 foo-registry.com/test/bar:2.0.0'  # cluster1
    calls[4].stdout = b'foo-registry.com/test/foo:1.0.0 foo-registry.com/test/bar:2.0.0'  # cluster2

    loo = Vindaloo()
    loo.cmd = mock.Mock()
    loo.cmd.side_effect = calls

    with chdir('tests/test_roots/simple'):
        loo.main()

    # check the arguments kubectl was called with
    assert len(loo.cmd.call_args_list) == 5

    assert loo.cmd.call_args_list[0][0][0] == [
        'kubectl',
        'auth',
        'can-i',
        'get',
        'deployment'
    ]
    assert loo.cmd.call_args_list[1][0][0][:3] == [
        'kubectl',
        'config',
        'use-context',
    ]
    assert loo.cmd.call_args_list[1][0][0][3] in ('foo-dev:cluster1', 'foo-dev:cluster2')  # order is not guaranteed
    assert loo.cmd.call_args_list[2][0][0] == [
        'kubectl',
        'get',
        'deployment',
        'foobar',
        '-o=jsonpath=\'{$.spec.template.spec.containers[*].image}\''
    ]
    assert loo.cmd.call_args_list[3][0][0][:3] == [
        'kubectl',
        'config',
        'use-context',
    ]
    assert loo.cmd.call_args_list[3][0][0][3] in ('foo-dev:cluster1', 'foo-dev:cluster2')  # order is not guaranteed
    assert loo.cmd.call_args_list[4][0][0] == [
        'kubectl',
        'get',
        'deployment',
        'foobar',
        '-o=jsonpath=\'{$.spec.template.spec.containers[*].image}\''
    ]

    output = capsys.readouterr().out.strip()

    assert '[DIFFERS]' not in output
    assert 'test/foo' in output
    assert 'test/bar' in output
Example #34
    def run(self, benchmark, config):
        env = os.environ.copy()
        env.update(config.env())
        env.update(self.engineInfo["env"])
        args = config.args() + self.engineInfo["args"]

        with utils.chdir(os.path.join(utils.config.BenchmarkPath, benchmark.folder)):
            return benchmark.benchmark(self.engineInfo["binary"], env, args)
Example #35
    def clean(self):
        with utils.chdir(self.folder):
            Run(["svn","revert","Tools/Scripts/check-for-weak-vtables-and-externals"])

            Run(["svn","revert","Source/JavaScriptCore/Configurations/Base.xcconfig"])
            Run(["svn","revert","Source/bmalloc/Configurations/Base.xcconfig"])
            Run(["svn","revert","Source/WTF/Configurations/Base.xcconfig"])
            Run(["svn","revert","Source/bmalloc/bmalloc/SmallLine.h"])
Example #36
def test_select_dev_cluster1(loo):
    # fake arguments
    sys.argv = ['vindaloo', 'kubeenv', 'dev', 'c1']

    with chdir('tests/test_roots/simple'):
        loo.main()

    assert loo.cmd.call_args[0][0] == ['kubectl', 'config', 'use-context', 'foo-dev:cluster1']
Example #37
    def retrieveInfo(self):
        with utils.chdir(os.path.join(self.folder)):
            objdir = os.path.abspath(os.path.join('WebKitBuild', 'Release'))

        info = {}
        info["engine_type"] = "webkit"
        info["env"] = {'DYLD_FRAMEWORK_PATH': objdir}
        return info
Example #38
 def extract(self, filename):
     if "tar.bz2" in filename:
         tar = tarfile.open(self.folder + filename)
         tar.extractall(self.folder)
         tar.close()
     elif "zip" in filename:
         with utils.chdir(self.folder):
             utils.Run(["unzip", filename], silent=True)
Example #39
    def retrieveInfo(self):
        with utils.chdir(os.path.join(self.folder)):
            objdir = os.path.abspath(os.path.join('WebKitBuild', 'Release'))

        info = {}
        info["engine_type"] = "webkit"
        info["env"] = {'DYLD_FRAMEWORK_PATH': objdir}
        return info
Example #40
    def clone(self):
        os.mkdir(self.folder)
        with chdir(self.folder):
            # get depot_tools
            Run(['git', 'clone', 'https://chromium.googlesource.com/chromium/tools/depot_tools.git'])

            # get actual v8 source
            Run(['fetch', 'v8'], env=self.make_env())
Example #41
def install(should_identify=True):
    if should_identify:
        system_identify()

    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)

    # Download and install PyQt5. Only download if we don't have a cached copy
    # available.
    install_PyQt5 = os.path.join(DOWNLOADS, 'install-PyQt5.exe')
    if not isfile(install_PyQt5):
        wget(
            'http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.5.1/'
            'PyQt5-5.5.1-gpl-Py3.4-Qt5.5.1-x32.exe', install_PyQt5)
    # See https://github.com/appveyor/ci/issues/363#issuecomment-148915001.
    xqt(
        'REG ADD HKCU\\Software\\Python\\PythonCore\\3.4\\InstallPath /f /ve '
        '/t REG_SZ /d C:\\Python34', install_PyQt5 + ' /S')

    # Download and compile PCRE.
    pcre_ver = 'pcre-8.38'
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget(
            'http://downloads.sourceforge.net/project/pcre/pcre/8.38/' +
            pcre_zip, pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        xqt(
            'cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 10 2010"',
            'cmake --build . --config Release')

    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py install --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
Example #42
def install(should_identify=True):
    if should_identify:
        system_identify()

    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)

    # Download and install PyQt5. Only download if we don't have a cached copy
    # available.
    install_PyQt5 = os.path.join(DOWNLOADS, 'install-PyQt5.exe')
    if not isfile(install_PyQt5):
        wget('http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.5.1/'
             'PyQt5-5.5.1-gpl-Py3.4-Qt5.5.1-x32.exe',
              install_PyQt5)
    # See https://github.com/appveyor/ci/issues/363#issuecomment-148915001.
    xqt('REG ADD HKCU\\Software\\Python\\PythonCore\\3.4\\InstallPath /f /ve '
        '/t REG_SZ /d C:\\Python34',
      install_PyQt5 + ' /S')

    # Download and compile PCRE.
    pcre_ver = 'pcre-8.38'
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/8.38/' +
             pcre_zip, pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 10 2010"',
          'cmake --build . --config Release')

    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py install --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
Example #43
    def reconf(self):
        # Step 0. Install the NDK if needed.
        if self.config.startswith("android"):
            self.env.remove("CC")
            self.env.remove("CXX")
            self.env.remove("LINK")
            self.installNdk()

        # Step 1. autoconf.
        with utils.chdir(os.path.join(self.folder, 'js', 'src')):
            if platform.system() == "Darwin":
                utils.run_realtime("autoconf213", shell=True)
            elif platform.system() == "Linux":
                utils.run_realtime("autoconf2.13", shell=True)
            elif platform.system() == "Windows":
                utils.run_realtime("autoconf-2.13", shell=True)

        # Step 2. configure
        if os.path.exists(os.path.join(self.folder, 'js', 'src', 'Opt')):
            shutil.rmtree(os.path.join(self.folder, 'js', 'src', 'Opt'))
        os.mkdir(os.path.join(self.folder, 'js', 'src', 'Opt'))
        args = ['--enable-optimize', '--disable-debug']
        if self.config == "android":
            args.append("--target=arm-linux-androideabi")
            args.append("--with-android-ndk="+os.path.abspath(self.folder)+"/android-ndk-r12/")
            args.append("--with-android-version=24")
            args.append("--enable-pie")
        if self.config == "android64":
            args.append("--target=aarch64-linux-androideabi")
            args.append("--with-android-ndk="+os.path.abspath(self.folder)+"/android-ndk-r12/")
            args.append("--with-android-version=24")
            args.append("--enable-pie")
        if platform.architecture()[0] == "64bit" and self.config == "32bit":
            if platform.system() == "Darwin":
                args.append("--target=i686-apple-darwin10.0.0")
            elif platform.system() == "Linux":
                args.append("--target=i686-pc-linux-gnu")
            else:
                assert False

        with utils.chdir(os.path.join(self.folder, 'js', 'src', 'Opt')):
            Run(['../configure'] + args, self.env.get())
        return True
Example #44
    def update(self, rev = None):
        with chdir(self.path()):
            if not rev:
                output = Run(['svn', 'update'])
                return

            output = Run(['svn', 'update', '-r', rev])
            if re.search("No such revision", output) != None:
                raise Exception('unknown revision: ' + output)
            return
Example #45
def update_sdk_file(skia_path, isolated_hash):
  """Edit the android_sdk_hash file, upload a CL."""
  with utils.chdir(skia_path):
    with utils.git_branch():
      hash_file = os.path.join('infra', 'bots', 'android_sdk_hash')
      with open(hash_file, 'w') as f:
        f.write(isolated_hash)
      subprocess.check_call([utils.GIT, 'add', hash_file])
      subprocess.check_call([utils.GIT, 'commit', '-m', 'Update Android SDK'])
      subprocess.check_call([utils.GIT, 'cl', 'upload', '--bypass-hooks'])
Example #46
    def __init__(self, folder, suite=None):
        if folder.endswith("/"):
            folder = folder[:-1]

        self.suite = suite if suite is not None else self.name()
        self.folder_ = folder

        with utils.chdir(os.path.join(utils.config.BenchmarkPath, self.folder_)):
            fp = open("VERSION", 'r')
            self.version = self.suite + " " + fp.read().strip("\r\n\r\n \t")
            fp.close()
Example #47
def install (package_name):
    # Load the module
    exec ("import %s"%(package_name))
    pkg = sys.modules[package_name]

    # Check package preconditions
    pkg.check_preconditions()

    # Change the current directory
    prev_dir = chdir (config.COMPILATION_BASE_DIR)

    # Perform
    try:
        pkg.perform()
    except:
        chdir (prev_dir)
        raise

    # Done and dusted
    chdir (prev_dir)
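Unlike the context-manager chdir() seen in the other examples, the chdir() in this snippet is evidently a plain function that returns the previous working directory. A hypothetical sketch under that assumption:

import os

def chdir(path):
    """Change the working directory and return the previous one."""
    prev_dir = os.getcwd()
    os.chdir(path)
    return prev_dir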
Example #48
    def patch(self):
        with utils.chdir(self.folder):
            # Hack 1: Stop treating warnings that are currently present as errors.
            Run(["sed","-i.bac","s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/","Source/JavaScriptCore/Configurations/Base.xcconfig"])
            Run(["sed","-i.bac","s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/","Source/bmalloc/Configurations/Base.xcconfig"])
            Run(["sed","-i.bac","s/GCC_TREAT_WARNINGS_AS_ERRORS = YES;/GCC_TREAT_WARNINGS_AS_ERRORS=NO;/","Source/WTF/Configurations/Base.xcconfig"])
            Run(["sed","-i.bac","s/std::numeric_limits<unsigned char>::max()/255/","Source/bmalloc/bmalloc/SmallLine.h"])
            #Run(["sed","-i.bac","s/std::numeric_limits<unsigned char>::max()/255/","Source/bmalloc/bmalloc/SmallRun.h"])

            # Hack 2: This check currently fails; disable it so the build still succeeds.
            os.remove("Tools/Scripts/check-for-weak-vtables-and-externals")
Example #49
 def make(self):
     try:
         self.patch()
         with utils.chdir(os.path.join(self.folder, 'Tools', 'Scripts')):
             args = ['/usr/bin/perl', 'build-jsc']
             if self.config == '32bit':
                 args += ['--32-bit']
             Run(args, self.env.get())
     finally:
         self.clean()
     Run(["install_name_tool", "-change", "/System/Library/Frameworks/JavaScriptCore.framework/Versions/A/JavaScriptCore", self.objdir()+"/JavaScriptCore.framework/JavaScriptCore", self.objdir() + "/jsc"])
Example #50
 def createApp(self):
     appdef = self.createAppDefinition(False)
     appName = appdef["Settings"]["Title"].replace(" ", "")
     appdefFile = utils.tempFilenameInTempFolder(appName + ".json")
     folder = os.path.dirname(appdefFile)
      print(folder)
     exportLayers(appdef["Layers"], folder, self.progress, 3)
     saveAppdef(appdef, appdefFile)
     from ftplib import FTP
     url = self.g3mUrlBox.text()
     ftp = FTP(url, 'qgis', 'qgis')
     utils.chdir("ftp-box/g3m-qgis/%s" % appName, ftp)
     #ftp.cwd("ftp-box/g3m-qgis/%s" % appName)
     with open(appdefFile, 'rb') as f:
         ftp.storbinary('STOR ' + appName + ".json", f)
     utils.chdir("layers", ftp)
     for layer in appdef["Layers"]:
         layerName = utils.safeName(layer.layer)
         with open(folder + "/layers/lyr_%s.js" % layerName, 'rb') as f:
             ftp.storbinary('STOR ' + layerName + ".json", f)
Example #51
def main():
    """
    The command entry point.
    """

    _dir = os.getcwd()
    options, config_options = get_options()
    utils.chdir(config_options['working_dir'])

    # Log in
    spacewalk = xmlrpclib.Server("https://%s/rpc/api" % config_options['server'], verbose=0)
    spacekey = spacewalk.auth.login(config_options['user'], config_options['password'])

    # Check if channel exists
    try:
        channel_details=spacewalk.configchannel.getDetails(spacekey, options.channel)
    except xmlrpclib.Fault as err:
        print("Error getting channel details (Code %s, %s)" % (err.faultCode, err.faultString))
        spacewalk.auth.logout(spacekey)
        sys.exit(1)
Example #52
def test_defaults():
    assert config.defaults().destination == Path.home()
    assert config.defaults().source == Path.cwd()

    with directory_tree({}) as tmpdir:
        with chdir(tmpdir):
            assert config.defaults().source.samefile(tmpdir)
            assert Path.cwd().samefile(tmpdir)

        with home(tmpdir):
            assert config.defaults().destination.samefile(tmpdir)
            assert Path.home().samefile(tmpdir)
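The home() helper used here is assumed to repoint Path.home() at the temporary directory. A minimal, POSIX-only sketch that overrides the HOME environment variable (Windows would also need USERPROFILE); the project's real helper may work differently:

import os
from contextlib import contextmanager

@contextmanager
def home(path):
    """Temporarily override $HOME so Path.home() resolves to `path`."""
    old = os.environ.get('HOME')
    os.environ['HOME'] = str(path)
    try:
        yield path
    finally:
        if old is None:
            del os.environ['HOME']
        else:
            os.environ['HOME'] = old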
Example #53
def install(should_identify=True):
    if should_identify:
        system_identify()

    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)

    # Download and compile PCRE.
    pcre_raw_ver = '8.39'
    pcre_ver = 'pcre-' + pcre_raw_ver
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/{}/{}'.
            format(pcre_raw_ver, pcre_zip), pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        # Per https://cmake.org/cmake/help/latest/generator/Visual%20Studio%2014%202015.html,
        # add the Win64 string for 64-bit Python.
        use_Win64 = ' Win64' if is_64bits else ''
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 14 2015{}"'.
            format(use_Win64),
          'cmake --build . --config Release')

    # First, build Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py build_ext --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
    # Next, install it along with its dependencies. See comments at
    # ``install_requires`` on why this is necessary.
    xqt('python -m pip install -e .')
Example #54
def update_toolchain_file(skia_path, msvs_version, isolated_hash):
  """Edit the win_toolchain_hash file, upload a CL."""
  with utils.chdir(skia_path):
    with utils.git_branch():
      hash_file = os.path.join('infra', 'bots', 'win_toolchain_hash.json')
      with open(hash_file) as f:
        hashes = json.load(f)
      hashes[msvs_version] = isolated_hash
      with open(hash_file, 'w') as f:
        json.dump(hashes, f, indent=4, sort_keys=True)
      subprocess.check_call([utils.GIT, 'add', hash_file])
      subprocess.check_call([utils.GIT, 'commit', '-m', 'Update Win toolchain'])
      subprocess.check_call([utils.GIT, 'cl', 'upload', '--bypass-hooks'])