Example #1
def test_pex_builder():
  # test with a zipped (zipfile) dist
  with nested(temporary_dir(), make_bdist('p1', zipped=True)) as (td, p1):
    write_pex(td, exe_main, dists=[p1])

    success_txt = os.path.join(td, 'success.txt')
    PEX(td).run(args=[success_txt])
    assert os.path.exists(success_txt)
    with open(success_txt) as fp:
      assert fp.read() == 'success'

  # test with the same dist unzipped into a directory
  with nested(temporary_dir(), temporary_dir(), make_bdist('p1', zipped=True)) as (
      td1, td2, p1):
    target_egg_dir = os.path.join(td2, os.path.basename(p1.location))
    safe_mkdir(target_egg_dir)
    with closing(zipfile.ZipFile(p1.location, 'r')) as zf:
      zf.extractall(target_egg_dir)
    p1 = DistributionHelper.distribution_from_path(target_egg_dir)

    write_pex(td1, exe_main, dists=[p1])

    success_txt = os.path.join(td1, 'success.txt')
    PEX(td1).run(args=[success_txt])
    assert os.path.exists(success_txt)
    with open(success_txt) as fp:
      assert fp.read() == 'success'
Example #2
def test_nested_requirements():
  with temporary_dir() as td1:
    with temporary_dir() as td2:
      with open(os.path.join(td1, 'requirements.txt'), 'w') as fp:
        fp.write(dedent('''
            requirement1
            requirement2
            -r %s
            -r %s
        ''' % (
            os.path.join(td2, 'requirements_nonrelative.txt'),
            os.path.join('relative', 'requirements_relative.txt'))
        ))

      with open(os.path.join(td2, 'requirements_nonrelative.txt'), 'w') as fp:
        fp.write(dedent('''
        requirement3
        requirement4
        '''))

      os.mkdir(os.path.join(td1, 'relative'))
      with open(os.path.join(td1, 'relative', 'requirements_relative.txt'), 'w') as fp:
        fp.write(dedent('''
        requirement5
        requirement6
        '''))

      def rr(req):
        return ResolvableRequirement.from_string(req, ResolverOptionsBuilder())

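      # Parsing the top-level requirements file should flatten both the absolute and the relative -r includes, in order.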
      reqs = requirements_from_file(os.path.join(td1, 'requirements.txt'))
      assert reqs == [rr('requirement%d' % k) for k in (1, 2, 3, 4, 5, 6)]
Example #3
def test_resolve_prereleases_cached():
  stable_dep = make_sdist(name='dep', version='2.0.0')
  prerelease_dep = make_sdist(name='dep', version='3.0.0rc3')

  with temporary_dir() as td:
    for sdist in (stable_dep, prerelease_dep):
      safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
    fetchers = [Fetcher([td])]

    with temporary_dir() as cd:
      def assert_resolve(dep, expected_version, **resolve_kwargs):
        dists = list(
          resolve_multi([dep], cache=cd, cache_ttl=1000, **resolve_kwargs)
        )
        assert 1 == len(dists)
        dist = dists[0]
        assert expected_version == dist.version

      Crawler.reset_cache()

      # First do a run to load it into the cache.
      assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=fetchers)

      # This simulates running from another pex command. The Crawler would otherwise cache an
      # empty result set, causing the resolve below to fail within the same "process".
      Crawler.reset_cache()

      # Now assert that we can get it from the cache by removing the source.
      assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=[])

      # It should also be able to resolve without allow_prereleases, if explicitly requested.
      Crawler.reset_cache()
      assert_resolve('dep>=1.rc1,<4', '3.0.0rc3', fetchers=[])
Example #4
def test_pex_builder_compilation():
  with nested(temporary_dir(), temporary_dir(), temporary_dir()) as (td1, td2, td3):
    src = os.path.join(td1, 'src.py')
    with open(src, 'w') as fp:
      fp.write(exe_main)

    exe = os.path.join(td1, 'exe.py')
    with open(exe, 'w') as fp:
      fp.write(exe_main)

    def build_and_check(path, precompile):
      pb = PEXBuilder(path)
      pb.add_source(src, 'lib/src.py')
      pb.set_executable(exe, 'exe.py')
      pb.freeze(bytecode_compile=precompile)
      for pyc_file in ('exe.pyc', 'lib/src.pyc', '__main__.pyc'):
        pyc_exists = os.path.exists(os.path.join(path, pyc_file))
        if precompile:
          assert pyc_exists
        else:
          assert not pyc_exists
      bootstrap_dir = os.path.join(path, PEXBuilder.BOOTSTRAP_DIR)
      bootstrap_pycs = []
      for _, _, files in os.walk(bootstrap_dir):
        bootstrap_pycs.extend(f for f in files if f.endswith('.pyc'))
      if precompile:
        assert len(bootstrap_pycs) > 0
      else:
        assert 0 == len(bootstrap_pycs)

    build_and_check(td2, False)
    build_and_check(td3, True)
Example #5
def test_access_zipped_assets_integration():
  test_executable = dedent('''
      import os
      from _pex.util import DistributionHelper
      temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
      with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
        for line in fp:
          print(line)
  ''')
  with nested(temporary_dir(), temporary_dir()) as (td1, td2):
    pb = PEXBuilder(path=td1)
    with open(os.path.join(td1, 'exe.py'), 'w') as fp:
      fp.write(test_executable)
      pb.set_executable(fp.name)

    submodule = os.path.join(td1, 'my_package', 'submodule')
    safe_mkdir(submodule)
    mod_path = os.path.join(submodule, 'mod.py')
    with open(mod_path, 'w') as fp:
      fp.write('accessed')
      pb.add_source(fp.name, 'my_package/submodule/mod.py')

    pex = os.path.join(td2, 'app.pex')
    pb.build(pex)

    output, returncode = run_simple_pex(pex)
    try:
      output = output.decode('UTF-8')
    except ValueError:
      pass
    assert output == 'accessed\n'
    assert returncode == 0
Example #6
def test_source_packages():
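  # Name/version parsing and requirement matching should behave the same for every supported sdist extension.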
  for ext in ('.tar.gz', '.tar', '.tgz', '.zip', '.tar.bz2'):
    sl = SourcePackage('a_p_r-3.1.3' + ext)
    assert sl._name == 'a_p_r'
    assert sl.name == 'a-p-r'
    assert sl.raw_version == '3.1.3'
    assert sl.version == parse_version(sl.raw_version)
    for req in ('a_p_r', 'a_p_r>2', 'a_p_r>3', 'a_p_r>=3.1.3', 'a_p_r==3.1.3', 'a_p_r>3,<3.5'):
      assert sl.satisfies(req)
      assert sl.satisfies(Requirement.parse(req))
    for req in ('foo', 'a_p_r==4.0.0', 'a_p_r>4.0.0', 'a_p_r>3.0.0,<3.0.3', 'a==3.1.3'):
      assert not sl.satisfies(req)
  sl = SourcePackage('python-dateutil-1.5.tar.gz')
  assert sl.name == 'python-dateutil'
  assert sl.raw_version == '1.5'

  with temporary_dir() as td:
    dateutil_base = 'python-dateutil-1.5'
    dateutil = '%s.zip' % dateutil_base
    with contextlib.closing(ZipFile(os.path.join(td, dateutil), 'w')) as zf:
      zf.writestr(os.path.join(dateutil_base, 'file1.txt'), 'junk1')
      zf.writestr(os.path.join(dateutil_base, 'file2.txt'), 'junk2')
    sl = SourcePackage('file://' + os.path.join(td, dateutil), opener=Web())
    with temporary_dir() as td2:
      sl.fetch(location=td2)
      print(os.listdir(td2))
      assert set(os.listdir(os.path.join(td2, dateutil_base))) == set(['file1.txt', 'file2.txt'])
Example #7
    def test_import_export(self):
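        # Export the state of one devpi server, import it into a fresh serverdir, and verify both instances serve HTTP 200.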
        with temporary_dir() as state_dir:
            with temporary_dir() as server_dir1:
                with TestServer(config=dict(serverdir=server_dir1)) as devpi:
                    self.assertEqual(200, requests.get(devpi.url).status_code)
                export_state(server_dir1, state_dir)

            with temporary_dir() as server_dir2:
                import_state(server_dir2, state_dir)
                with TestServer(config=dict(serverdir=server_dir2)) as devpi:
                    self.assertEqual(200, requests.get(devpi.url).status_code)
Example #8
def test_diamond_local_resolve_cached():
  # This exercises the issue described here: https://github.com/pantsbuild/pex/issues/120
  project1_sdist = make_sdist(name='project1', install_reqs=['project2<1.0.0'])
  project2_sdist = make_sdist(name='project2')

  with temporary_dir() as dd:
    for sdist in (project1_sdist, project2_sdist):
      safe_copy(sdist, os.path.join(dd, os.path.basename(sdist)))
    fetchers = [Fetcher([dd])]
    with temporary_dir() as cd:
      dists = resolve(['project1', 'project2'], fetchers=fetchers, cache=cd, cache_ttl=1000)
      assert len(dists) == 2
Example #9
def test_nested_pushd():
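  # pushd should restore the previous working directory on exit, even when nested.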
  pre_cwd = os.getcwd()
  with temporary_dir() as tempdir1:
    with pushd(tempdir1) as path1:
      assert os.getcwd() == os.path.realpath(tempdir1)
      with temporary_dir(root_dir=tempdir1) as tempdir2:
        with pushd(tempdir2) as path2:
          assert os.getcwd() == os.path.realpath(tempdir2)
        assert os.getcwd() == os.path.realpath(tempdir1)
      assert os.getcwd() == os.path.realpath(tempdir1)
    assert os.getcwd() == pre_cwd
  assert os.getcwd() == pre_cwd
Example #10
  def test_find_root_thrifts(self):
    with temporary_dir() as dir:
      root_1 = self.write(os.path.join(dir, 'root_1.thrift'), '# noop')
      root_2 = self.write(os.path.join(dir, 'root_2.thrift'), '# noop')
      self.assertEquals(set([root_1, root_2]),
                        find_root_thrifts(basedirs=[], sources=[root_1, root_2]))

    with temporary_dir() as dir:
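      # root_1 is (transitively) included by root_2, so only root_2 counts as a root thrift.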
      root_1 = self.write(os.path.join(dir, 'root_1.thrift'), 'include "mid_1.thrift"')
      self.write(os.path.join(dir, 'mid_1.thrift'), 'include "leaf_1.thrift"')
      self.write(os.path.join(dir, 'leaf_1.thrift'), '# noop')
      root_2 = self.write(os.path.join(dir, 'root_2.thrift'), 'include "root_1.thrift"')
      self.assertEquals(set([root_2]), find_root_thrifts(basedirs=[], sources=[root_1, root_2]))
Example #11
def test_round_trip(prefix=None, empty_dirs=False):
  with temporary_dir() as fromdir:
    safe_mkdir(os.path.join(fromdir, 'a/b/c'))
    touch(os.path.join(fromdir, 'a/b/d/e.txt'))
    with temporary_dir() as archivedir:
      archive = archiver.create(fromdir, archivedir, 'archive', prefix=prefix)
      with temporary_dir() as todir:
        archiver.extract(archive, todir)
        fromlisting = listtree(fromdir)
        if prefix:
          fromlisting = set(os.path.join(prefix, x) for x in fromlisting)
          if empty_dirs:
            fromlisting.add(prefix)
        assert fromlisting == listtree(todir)
Example #12
def test_pex_root():
  with temporary_dir() as tmp_home:
    with environment_as(HOME=tmp_home):
      with temporary_dir() as td:
        with temporary_dir() as output_dir:
          env = os.environ.copy()
          env['PEX_INTERPRETER'] = '1'

          output_path = os.path.join(output_dir, 'pex.pex')
          args = ['pex', '-o', output_path, '--not-zip-safe', '--pex-root={0}'.format(td)]
          results = run_pex_command(args=args, env=env)
          results.assert_success()
          assert ['pex.pex'] == os.listdir(output_dir), 'Expected built pex file.'
          assert [] == os.listdir(tmp_home), 'Expected empty temp home dir.'
          assert 'build' in os.listdir(td), 'Expected build directory in tmp pex root.'
Example #13
def test_create_ioerror(chown):
  chown.side_effect = IOError('Disk is borked')

  with temporary_dir() as d:
    ds = DirectorySandbox(d)
    with pytest.raises(DirectorySandbox.CreationError):
      ds.create()
Example #14
  def test_killTask_during_runner_initialize(self):  # noqa
    proxy_driver = ProxyDriver()

    task = make_task(HELLO_WORLD_MTI)

    with temporary_dir() as td:
      te = FastThermosExecutor(
          runner_provider=make_provider(td),
          sandbox_provider=SlowSandboxProvider())
      te.launchTask(proxy_driver, task)
      te.sandbox_initialized.wait()
      te.killTask(proxy_driver, mesos_pb2.TaskID(value=task.task_id.value))
      assert te.runner_aborted.is_set()
      assert not te.sandbox_created.is_set()

      # we've simulated a "slow" initialization by blocking it until the killTask was sent - so now,
      # trigger the initialization to complete
      te._sandbox._init_start.set()

      # however, wait on the runner to definitely finish its initialization before continuing
      # (otherwise, this function races ahead too fast)
      te._sandbox._init_done.wait()
      te.sandbox_created.wait()
      assert te.sandbox_initialized.is_set()
      assert te.sandbox_created.is_set()

      proxy_driver.wait_stopped()

      updates = proxy_driver.method_calls['sendStatusUpdate']
      assert len(updates) == 2
      assert updates[-1][0][0].state == mesos_pb2.TASK_KILLED
Example #15
  def test_filesystem_image_containerizer_not_executable(self):
    proxy_driver = ProxyDriver()

    with temporary_dir() as tempdir:

      tempfile = os.path.join(tempdir, 'fake-containierizer')
      with open(tempfile, 'a'):
        os.utime(tempfile, None)

      te = FastThermosExecutor(
        runner_provider=make_provider(tempdir, mesos_containerizer_path=tempfile),
        sandbox_provider=FileSystemImageTestSandboxProvider())

      te.SANDBOX_INITIALIZATION_TIMEOUT = Amount(1, Time.MILLISECONDS)
      te.START_TIMEOUT = Amount(10, Time.MILLISECONDS)
      te.STOP_TIMEOUT = Amount(10, Time.MILLISECONDS)

      te.launchTask(proxy_driver, make_task(HELLO_WORLD_MTI))

      proxy_driver.wait_stopped()

      updates = proxy_driver.method_calls['sendStatusUpdate']
      assert len(updates) == 2
      assert updates[0][0][0].state == mesos_pb2.TASK_STARTING
      assert updates[1][0][0].state == mesos_pb2.TASK_FAILED
Example #16
  def test_sibling_references(self):
    with temporary_dir() as root_dir:
      buildfile = create_buildfile(root_dir, 'a', name='BUILD',
        content=dedent("""
          dependencies(name='util',
            dependencies=[
              jar(org='com.twitter', name='util', rev='0.0.1')
            ]
          )
        """).strip()
      )
      sibling = create_buildfile(root_dir, 'a', name='BUILD.sibling',
        content=dedent("""
          dependencies(name='util-ex',
            dependencies=[
              pants(':util'),
              jar(org='com.twitter', name='util-ex', rev='0.0.1')
            ]
          )
        """).strip()
      )
      ParseContext(buildfile).parse()

      utilex = Target.get(Address.parse(root_dir, 'a:util-ex', is_relative=False))
      utilex_deps = set(utilex.resolve())

      util = Target.get(Address.parse(root_dir, 'a:util', is_relative=False))
      util_deps = set(util.resolve())

      self.assertEquals(util_deps, util_deps.intersection(utilex_deps))
Example #17
 def workspace(self, *buildfiles):
   with temporary_dir() as root_dir:
     with BuildRoot().temporary(root_dir):
       with pushd(root_dir):
         for buildfile in buildfiles:
           touch(os.path.join(root_dir, buildfile))
         yield os.path.realpath(root_dir)
Example #18
def test_verify_group_match(mock_check_output):
  with temporary_dir() as d:
    sandbox = FileSystemImageSandbox(d, user='******', sandbox_mount_point='/some/path')

    mock_check_output.return_value = 'test-group:x:2:'

    # valid case
    sandbox._verify_group_match_in_taskfs(2, 'test-group')
    mock_check_output.assert_called_with(
        ['chroot', sandbox._task_fs_root, 'getent', 'group', 'test-group'])

    # invalid group id
    with pytest.raises(FileSystemImageSandbox.CreationError):
      sandbox._verify_group_match_in_taskfs(3, 'test-group')

    # invalid group name
    with pytest.raises(FileSystemImageSandbox.CreationError):
      sandbox._verify_group_match_in_taskfs(2, 'invalid-group')

    # exception case
    exception = subprocess.CalledProcessError(
        returncode=1,
        cmd='some command',
        output=None)
    mock_check_output.side_effect = exception
    with pytest.raises(FileSystemImageSandbox.CreationError):
      sandbox._verify_group_match_in_taskfs(2, 'test-group')
Example #19
def TestServer(users={}, indices={}, config={}, fail_on_output=['Traceback']):
    """
    Starts a devpi server to be used within tests.
    """
    with temporary_dir() as server_dir:

        server_options = {
            'port': 2414,
            'serverdir': server_dir}
        server_options.update(config)

        if 'serverdir' not in config:
            prefill_serverdir(server_options)

        with DevpiServer(server_options) as url:
            with DevpiClient(url, 'root', '') as client:

                for user, kwargs in iteritems(users):
                    client.create_user(user, **kwargs)

                for index, kwargs in iteritems(indices):
                    client.create_index(index, **kwargs)

                yield client

        _assert_no_logged_errors(fail_on_output, server_options['serverdir'] + '/.xproc/devpi-server/xprocess.log')
Example #20
def test_empty_resolve():
  empty_resolve = resolve([])
  assert empty_resolve == []

  with temporary_dir() as td:
    empty_resolve = resolve([], cache=td)
    assert empty_resolve == []
Example #21
def test_empty_resolve():
  empty_resolve_multi = list(resolve_multi([]))
  assert empty_resolve_multi == []

  with temporary_dir() as td:
    empty_resolve_multi = list(resolve_multi([], cache=td))
    assert empty_resolve_multi == []
Example #22
def test_resolve_prereleases_and_no_version():
  prerelease_dep = make_sdist(name='dep', version='3.0.0rc3')

  with temporary_dir() as td:
    safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
    fetchers = [Fetcher([td])]

    def assert_resolve(deps, expected_version, **resolve_kwargs):
      dists = list(
        resolve_multi(deps, fetchers=fetchers, **resolve_kwargs)
      )
      assert 1 == len(dists)
      dist = dists[0]
      assert expected_version == dist.version

    # When allow_prereleases is specified, the requirement (from two dependencies)
    # for a specific pre-release version and no version specified, accepts the pre-release
    # version correctly.
    assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3', allow_prereleases=True)

    # Without allow_prereleases set, the pre-release version is rejected.
    # This used to be an issue when a command-line use did not pass the `--pre` option
    # correctly into the API call for resolve_multi() from build_pex() in pex.py.
    with pytest.raises(Unsatisfiable):
      assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3')
Example #23
def test_log_tee():
    with temporary_dir() as td:
        taskpath = make_taskpath(td)
        sandbox = setup_sandbox(td, taskpath)

        # Create real files to capture stdout/stderr. We can't use a StringIO mock
        # because TestProcess runs in a forked child process.
        with open(os.path.join(td, "sys_stdout"), "w+") as stdout:
            with open(os.path.join(td, "sys_stderr"), "w+") as stderr:
                with mutable_sys():
                    sys.stdout, sys.stderr = stdout, stderr

                    p = TestProcess(
                        "process",
                        "echo hello world; echo >&2 hello stderr",
                        0,
                        taskpath,
                        sandbox,
                        logger_destination=LoggerDestination.BOTH,
                    )
                    p.start()
                    rc = wait_for_rc(taskpath.getpath("process_checkpoint"))

                    assert rc == 0
                    # Check log files were created in std path with correct content
                    assert_log_content(taskpath, "stdout", "hello world\n")
                    assert_log_content(taskpath, "stderr", "hello stderr\n")

                    # Check mock stdout
                    stdout.seek(0)
                    assert stdout.read() == "hello world\n"

                    # Check mock stderr
                    stderr.seek(0)
                    assert stderr.read() == "hello stderr\n"
Example #24
def test_simple_process_filesystem_isolator():
    with temporary_dir() as td:
        taskpath = make_taskpath(td)
        sandbox = setup_sandbox(td, taskpath)

        test_isolator_path = os.path.join(td, "fake-mesos-containerier")
        with open(test_isolator_path, "w") as fd:
            # We use a fake version of the mesos-containerizer binary that just echoes out its args so
            # we can assert on them in the process's output.
            fd.write("\n".join(["#!/bin/sh", 'echo "$@"']))

            fd.close()

            chmod_plus_x(test_isolator_path)

            p = TestProcess(
                "process", "echo hello world", 0, taskpath, sandbox, mesos_containerizer_path=test_isolator_path
            )
            p.start()

        rc = wait_for_rc(taskpath.getpath("process_checkpoint"))
        assert rc == 0
        assert_log_content(
            taskpath,
            "stdout",
            "launch --unshare_namespace_mnt --rootfs=/some/path/taskfs --user=None "
            '--command={"shell":true,"value":"echo hello world"}\n',
        )
Example #25
def test_verify_user_match(mock_check_output):
    with temporary_dir() as d:
        sandbox = FileSystemImageSandbox(os.path.join(d, "sandbox"), user="******", sandbox_mount_point="/some/path")

        mock_check_output.return_value = "uid=1(test-user) gid=2(test-group) groups=2(test-group)"
        # valid case
        sandbox._verify_user_match_in_taskfs(1, "test-user", 2, "test-group")
        mock_check_output.assert_called_with(["chroot", sandbox._task_fs_root, "id", "test-user"])

        # invalid user id
        with pytest.raises(FileSystemImageSandbox.CreationError):
            sandbox._verify_user_match_in_taskfs(0, "test-user", 2, "test-group")

        # invalid user name
        with pytest.raises(FileSystemImageSandbox.CreationError):
            sandbox._verify_user_match_in_taskfs(1, "invalid-user", 2, "test-group")

        # invalid group id
        with pytest.raises(FileSystemImageSandbox.CreationError):
            sandbox._verify_user_match_in_taskfs(1, "test-user", 0, "test-group")

        # invalid group name
        with pytest.raises(FileSystemImageSandbox.CreationError):
            sandbox._verify_user_match_in_taskfs(1, "test-user", 2, "invalid-group")

        # exception case
        exception = subprocess.CalledProcessError(returncode=1, cmd="some command", output=None)
        mock_check_output.side_effect = exception
        with pytest.raises(FileSystemImageSandbox.CreationError):
            sandbox._verify_user_match_in_taskfs(1, "test-user", 2, "test-group")
Example #26
def test_load_invalid_syntax():
  with temporary_dir() as td:
    # bad json
    clusters_json = os.path.join(td, 'clusters.json')
    with open(clusters_json, 'w') as fp:
      fp.write('This is not json')
    with pytest.raises(Clusters.ParseError):
      Clusters.from_file(clusters_json)

    # bad yaml
    clusters_yml = os.path.join(td, 'clusters.yml')
    with open(clusters_yml, 'w') as fp:
      fp.write('L{}L')
    with pytest.raises(Clusters.ParseError):
      Clusters.from_file(clusters_yml)

    # bad layout
    clusters_yml = os.path.join(td, 'clusters.yml')
    with open(clusters_yml, 'w') as fp:
      fp.write('just a string')
    with pytest.raises(Clusters.ParseError):
      Clusters.from_file(clusters_yml)

    # not a dict
    clusters_json = os.path.join(td, 'clusters.json')
    with open(clusters_json, 'w') as fp:
      fp.write(json.dumps({'cluster1': ['not', 'cluster', 'values']}))
    with pytest.raises(Clusters.ParseError):
      Clusters.from_file(clusters_json)
Example #27
def test_crawler_local():
  FL = ('a.txt', 'b.txt', 'c.txt')
  with temporary_dir() as td:
    for fn in FL:
      with open(os.path.join(td, fn), 'w') as fp:
        pass
    for dn in (1, 2):
      os.mkdir(os.path.join(td, 'dir%d' % dn))
      for fn in FL:
        with open(os.path.join(td, 'dir%d' % dn, fn), 'w') as fp:
          pass

    # basic file / dir rel splitting
    links, rels = Crawler(enable_cache=False).execute(td)
    assert set(links) == set(os.path.join(td, fn) for fn in FL)
    assert set(rels) == set(os.path.join(td, 'dir%d' % n) for n in (1, 2))

    # recursive crawling, single vs multi-threaded
    for caching in (False, True):
      for threads in (1, 2, 3):
        links = Crawler(enable_cache=caching, threads=threads).crawl([td])
        expect_links = (set(os.path.join(td, fn) for fn in FL) |
                        set(os.path.join(td, 'dir1', fn) for fn in FL) |
                        set(os.path.join(td, 'dir2', fn) for fn in FL))
        assert set(links) == expect_links
Example #28
  def relativize_analysis_file(self, src, dst):
    # Make an analysis cache portable. Work on a tmpfile, for safety.
    #
    # NOTE: We can't port references to deps on the Java home. This is because different JVM
    # implementations on different systems have different structures, and there's not
    # necessarily a 1-1 mapping between Java jars on different systems. Instead we simply
    # drop those references from the analysis file.
    #
    # In practice the JVM changes rarely, and it should be fine to require a full rebuild
    # in those rare cases.
    with temporary_dir() as tmp_analysis_dir:
      stripped_src = os.path.join(tmp_analysis_dir, 'analysis.nojava')
      tmp_analysis_file = os.path.join(tmp_analysis_dir, 'analysis.relativized')

      # Strip all lines containing self._java_home.
      with open(src, 'r') as infile:
        with open(stripped_src, 'w') as outfile:
          for line in infile:
            if self._java_home not in line:
              outfile.write(line)

      rebasings = [
        (self._ivy_home, ZincUtils.IVY_HOME_PLACEHOLDER),
        (self._pants_home, ZincUtils.PANTS_HOME_PLACEHOLDER),
      ]
      Analysis.rebase(stripped_src, tmp_analysis_file, rebasings)
      shutil.move(tmp_analysis_file, dst)
Example #29
    def yield_runner(self, runner_class, **bindings):
        with contextlib.nested(temporary_dir(), temporary_dir()) as (td1, td2):
            sandbox = DirectorySandbox(td1)
            checkpoint_root = td2

            task_runner = runner_class(
                runner_pex=os.path.join("dist", "thermos_runner.pex"),
                task_id="hello_world",
                task=TASK.bind(**bindings).task(),
                role=getpass.getuser(),
                portmap={},
                sandbox=sandbox,
                checkpoint_root=checkpoint_root,
            )

            yield task_runner
Example #30
def test_log_console():
  with temporary_dir() as td:
    taskpath = make_taskpath(td)
    sandbox = setup_sandbox(td, taskpath)

    # Create real files to capture stdout/stderr. We can't use a StringIO mock
    # because TestProcess runs in a forked child process.
    with open(os.path.join(td, 'sys_stdout'), 'w+') as stdout:
      with open(os.path.join(td, 'sys_stderr'), 'w+') as stderr:
        with mutable_sys():
          sys.stdout, sys.stderr = stdout, stderr

          p = TestProcess('process', 'echo hello world; echo >&2 hello stderr', 0,
                          taskpath, sandbox, logger_destination=LoggerDestination.CONSOLE)
          p.start()
          rc = wait_for_rc(taskpath.getpath('process_checkpoint'))

          assert rc == 0
          # Check no log files were created in std path
          assert_log_dne(taskpath, 'stdout')
          assert_log_dne(taskpath, 'stderr')

          # Check mock stdout
          stdout.seek(0)
          assert stdout.read() == 'hello world\n'

          # Check mock stderr
          stderr.seek(0)
          assert stderr.read() == 'hello stderr\n'
Example #31
def inherit_path(inherit_path):
  with temporary_dir() as output_dir:
    exe = os.path.join(output_dir, 'exe.py')
    body = "import sys ; print('\\n'.join(sys.path))"
    with open(exe, 'w') as f:
      f.write(body)

    pex_path = os.path.join(output_dir, 'pex.pex')
    results = run_pex_command([
      '--disable-cache',
      'msgpack_python',
      '--inherit-path{}'.format(inherit_path),
      '-o',
      pex_path,
    ])

    results.assert_success()

    env = os.environ.copy()
    env["PYTHONPATH"] = "/doesnotexist"
    stdout, rc = run_simple_pex(
      pex_path,
      args=(exe,),
      env=env,
    )
    assert rc == 0

    stdout_lines = stdout.decode().split('\n')
    requests_paths = tuple(i for i, l in enumerate(stdout_lines) if 'msgpack_python' in l)
    sys_paths = tuple(i for i, l in enumerate(stdout_lines) if 'doesnotexist' in l)
    assert len(requests_paths) == 1
    assert len(sys_paths) == 1

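    # With inherit_path == "=fallback" the inherited PYTHONPATH entry lands after the pex's own dists; otherwise it takes precedence.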
    if inherit_path == "=fallback":
      assert requests_paths[0] < sys_paths[0]
    else:
      assert requests_paths[0] > sys_paths[0]
Example #32
def test_simple_process_filesystem_isolator_launch_info():
    with temporary_dir() as td:
        taskpath = make_taskpath(td)
        sandbox = setup_sandbox(td, taskpath)

        test_isolator_path = os.path.join(td, 'fake-mesos-containerier')
        with open(test_isolator_path, 'w') as fd:
            # We use a fake version of the mesos-containerizer binary that just echoes out its args so
            # we can assert on them in the process's output. Also imitates a failure when there are not
            # enough arguments, this is used to find the version of the binary (by checking the failure
            # message)
            fd.write('\n'.join([
                '#!/bin/sh', 'if [ "$#" -eq 1 ]; then',
                '  { echo "launch_info" >&2; };', 'else', '  echo "$@";', 'fi'
            ]))

            fd.close()

            chmod_plus_x(test_isolator_path)

            p = FakeProcess('process',
                            'echo hello world',
                            0,
                            taskpath,
                            sandbox,
                            mesos_containerizer_path=test_isolator_path,
                            container_sandbox=sandbox)
            p.start()

        rc = wait_for_rc(taskpath.getpath('process_checkpoint'))
        assert rc == 0
        assert_log_content(
            taskpath, 'stdout',
            'launch --unshare_namespace_mnt --launch_info={"command": {"shell": false, "arguments": '
            '["/bin/bash", "-c", "echo hello world"], "value": "/bin/bash"}, '
            '"working_directory": "%s", "user": "******", "rootfs": "/some/path/taskfs"}\n'
            % (sandbox))
Example #33
  def _render_jar_tool_args(self):
    args = []

    if self._main:
      args.append('-main=%s' % self._main)

    if self._classpath:
      args.append('-classpath=%s' % ','.join(self._classpath))

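    # The manifest and file entries are materialized into a staging dir that must outlive the yielded args.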
    with temporary_dir() as stage_dir:
      if self._manifest:
        args.append('-manifest=%s' % self._manifest.materialize(stage_dir))

      if self._entries:
        def as_cli_entry(entry):
          src = entry.materialize(stage_dir)
          return '%s=%s' % (src, entry.dest) if entry.dest else src

        args.append('-files=%s' % ','.join(map(as_cli_entry, self._entries)))

      if self._jars:
        args.append('-jars=%s' % ','.join(self._jars))

      yield args
Example #34
def test_pex_exec_with_pex_python_path_only():
    with temporary_dir() as td:
        pexrc_path = os.path.join(td, '.pexrc')
        with open(pexrc_path, 'w') as pexrc:
            # set pex python path
            pex_python_path = ':'.join([
                ensure_python_interpreter('2.7.10'),
                ensure_python_interpreter('3.6.3')
            ])
            pexrc.write("PEX_PYTHON_PATH=%s" % pex_python_path)

        pex_out_path = os.path.join(td, 'pex.pex')
        res = run_pex_command([
            '--disable-cache',
            '--rcfile=%s' % pexrc_path, '-o', pex_out_path
        ])
        res.assert_success()

        # test that pex bootstrapper selects lowest version interpreter
        # in pex python path (python2.7)
        stdin_payload = b'import sys; print(sys.executable); sys.exit(0)'
        stdout, rc = run_simple_pex(pex_out_path, stdin=stdin_payload)
        assert rc == 0
        assert str(pex_python_path.split(':')[0]).encode() in stdout
Example #35
  def test_filesystem_image_containerizer_not_executable(self):
    proxy_driver = ProxyDriver()

    with temporary_dir() as tempdir:

      tempfile = os.path.join(tempdir, 'fake-containierizer')
      with open(tempfile, 'a'):
        os.utime(tempfile, None)

      te = FastThermosExecutor(
        runner_provider=make_provider(tempdir, mesos_containerizer_path=tempfile),
        sandbox_provider=FileSystemImageTestSandboxProvider(), stop_timeout_in_secs=1)

      te.SANDBOX_INITIALIZATION_TIMEOUT = Amount(1, Time.MILLISECONDS)
      te.START_TIMEOUT = Amount(10, Time.MILLISECONDS)

      te.launchTask(proxy_driver, make_task(HELLO_WORLD_MTI))

      proxy_driver.wait_stopped()

      updates = proxy_driver.method_calls['sendStatusUpdate']
      assert len(updates) == 2
      assert updates[0][0][0].state == mesos_pb2.TASK_STARTING
      assert updates[1][0][0].state == mesos_pb2.TASK_FAILED
Example #36
  def test_shutdown_order(self):
    proxy_driver = ProxyDriver()

    with contextlib.nested(
        temporary_dir(),
        mock.patch.object(ChainedStatusChecker, 'stop'),
        mock.patch.object(ThermosTaskRunner, 'stop')) as (
            checkpoint_root,
            status_check_stop,
            runner_stop):

      parent = mock.MagicMock()
      parent.attach_mock(status_check_stop, 'status_check_stop')
      parent.attach_mock(runner_stop, 'runner_stop')

      _, executor = make_executor(proxy_driver,
          checkpoint_root,
          SLEEP60_MTI)
      executor.shutdown(proxy_driver)
      executor.terminated.wait()

      parent.assert_has_calls(
          [mock.call.status_check_stop(), mock.call.runner_stop()],
          any_order=False)
Example #37
def test_resolve_prereleases_multiple_set():
  stable_dep = make_sdist(name='dep', version='2.0.0')
  prerelease_dep1 = make_sdist(name='dep', version='3.0.0rc3')
  prerelease_dep2 = make_sdist(name='dep', version='3.0.0rc4')
  prerelease_dep3 = make_sdist(name='dep', version='3.0.0rc5')

  with temporary_dir() as td:
    for sdist in (stable_dep, prerelease_dep1, prerelease_dep2, prerelease_dep3):
      safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
    fetchers = [Fetcher([td])]

    def assert_resolve(expected_version, **resolve_kwargs):
      dists = list(
        resolve_multi(['dep>=3.0.0rc1', 'dep==3.0.0rc4'],
                      fetchers=fetchers,
                      **resolve_kwargs)
      )
      assert 1 == len(dists)
      dist = dists[0]
      assert expected_version == dist.version

    # This should resolve with explicit prerelease being set or implicitly.
    assert_resolve('3.0.0rc4', allow_prereleases=True)
    assert_resolve('3.0.0rc4')
Example #38
def assert_create_user_and_group(mock_check_call, gid_exists, uid_exists):
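    # Simulate groupadd/useradd either succeeding or failing with "already exists" and verify both calls are attempted.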
    mock_pwent = pwd.struct_passwd((
        'someuser',  # login name
        'hunter2',  # password
        834,  # uid
        835,  # gid
        'Some User',  # user name
        '/home/someuser',  # home directory
        '/bin/sh'))  # login shell

    mock_grent = grp.struct_group((
        'users',  # group name
        '*',  # password
        835,  # gid
        ['someuser']))  # members

    exception = subprocess.CalledProcessError(
        returncode=FileSystemImageSandbox._USER_OR_GROUP_ID_EXISTS,
        cmd='some command',
        output=None)

    mock_check_call.side_effect = [
        None if gid_exists else exception, None if uid_exists else exception
    ]

    with temporary_dir() as d:
        with mock.patch.object(FileSystemImageSandbox,
                               'get_user_and_group',
                               return_value=(mock_pwent, mock_grent)):

            sandbox = FileSystemImageSandbox(os.path.join(d, 'sandbox'),
                                             user='******',
                                             sandbox_mount_point='/some/path')
            sandbox._create_user_and_group_in_taskfs()

    assert len(mock_check_call.mock_calls) == 2
Example #39
    def _run_pants(self, pants_repo: pathlib.PosixPath, pants_target: str,
                   extension: str) -> pathlib.PosixPath:
        """Runs pants with widget UI display."""

        # Version check for pants v1 vs v2 flags/behavior.
        is_pants_v1 = pants_repo.joinpath('pants.ini').exists()
        if is_pants_v1:
            goal_name = 'binary'
            tmp_root = None
        else:
            goal_name = 'package'
            # N.B. pants v2 doesn't support `--pants-distdir` outside of the build root.
            tmp_root = pants_repo.joinpath('dist')
            # N.B. The dist dir must exist for temporary_dir.
            tmp_root.mkdir(exist_ok=True)

        with temporary_dir(root_dir=tmp_root, cleanup=False) as tmp_dir:
            tmp_path = pathlib.PosixPath(tmp_dir)
            title = f'[Build] ./pants {goal_name} {pants_target}'
            cmd = f'cd {pants_repo} && ./pants --pants-distdir="{tmp_path}" {goal_name} {pants_target}'
            return self._stream_binary_build_with_output(cmd,
                                                         title,
                                                         tmp_path,
                                                         extension=extension)
Example #40
def find_java_home():
    # A kind-of-insane hack to find the effective java home. On some platforms there are so
    # many hard and symbolic links into the JRE dirs that it's actually quite hard to
    # establish what path to use as the java home, e.g., for the purpose of rebasing.
    # In practice, this seems to work fine.
    #
    # TODO: In the future we should probably hermeticize the Java environment rather than relying
    # on whatever's on the shell's PATH. E.g., you either specify a path to the Java home via a
    # cmd-line flag or .pantsrc, or we infer one with this method but verify that it's of a
    # supported version.
    with temporary_dir() as tmpdir:
        with open(os.path.join(tmpdir, 'X.java'), 'w') as srcfile:
            srcfile.write('''
        class X {
          public static void main(String[] argv) {
            System.out.println(System.getProperty("java.home"));
          }
        }''')
        subprocess.Popen(['javac', '-d', tmpdir, srcfile.name],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE).communicate()
        return subprocess.Popen(['java', '-cp', tmpdir, 'X'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE).communicate()[0]
Example #41
def test_simple_process_filesystem_isolator():
  with temporary_dir() as td:
    taskpath = make_taskpath(td)
    sandbox = setup_sandbox(td, taskpath)

    test_isolator_path = os.path.join(td, 'fake-mesos-containerier')
    with open(test_isolator_path, 'w') as fd:
      # We use a fake version of the mesos-containerizer binary that just echoes out its args so
      # we can assert on them in the process's output.
      fd.write('\n'.join([
        '#!/bin/sh',
        'echo "$@"'
      ]))

      fd.close()

      chmod_plus_x(test_isolator_path)

      p = TestProcess(
          'process',
          'echo hello world',
          0,
          taskpath,
          sandbox,
          mesos_containerizer_path=test_isolator_path,
          container_sandbox=sandbox)
      p.start()

    rc = wait_for_rc(taskpath.getpath('process_checkpoint'))
    assert rc == 0
    assert_log_content(
        taskpath,
        'stdout',
        'launch --unshare_namespace_mnt --working_directory=%s --rootfs=/some/path/taskfs --user=None '
        '--command={"shell":true,"value":"/bin/bash -c \'echo hello world\'"}\n' % (
            sandbox))
Example #42
        def post_process_cached_vts(cached_vts):
            # Merge the localized analysis with the global one (if any).
            analyses_to_merge = []
            for vt in cached_vts:
                for target in vt.targets:
                    analysis_file = JvmCompile._analysis_for_target(
                        self._analysis_tmpdir, target)
                    portable_analysis_file = JvmCompile._portable_analysis_for_target(
                        self._analysis_tmpdir, target)
                    if os.path.exists(portable_analysis_file):
                        self._analysis_tools.localize(portable_analysis_file,
                                                      analysis_file)
                    if os.path.exists(analysis_file):
                        analyses_to_merge.append(analysis_file)

            if len(analyses_to_merge) > 0:
                if os.path.exists(self._analysis_file):
                    analyses_to_merge.append(self._analysis_file)
                with contextutil.temporary_dir() as tmpdir:
                    tmp_analysis = os.path.join(tmpdir, 'analysis')
                    with self.context.new_workunit(name='merge_analysis'):
                        self._analysis_tools.merge_from_paths(
                            analyses_to_merge, tmp_analysis)
                    self.move(tmp_analysis, self._analysis_file)
Example #43
    def test_basic_as_job(self):
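        # Launch a hello-world MESOS_JOB and verify the STARTING -> RUNNING -> FINISHED status update sequence.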
        proxy_driver = ProxyDriver()

        with temporary_dir() as tempdir:
            te = AuroraExecutor(runner_provider=make_provider(tempdir),
                                sandbox_provider=DefaultTestSandboxProvider())
            te.launchTask(proxy_driver,
                          make_task(MESOS_JOB(task=HELLO_WORLD), instanceId=0))
            te.runner_started.wait()
            while te._status_manager is None:
                time.sleep(0.1)
            te.terminated.wait()
            tm = TaskMonitor(tempdir, task_id=HELLO_WORLD_TASK_ID)
            runner_state = tm.get_state()

        assert 'hello_world_hello_world-001' in runner_state.processes, (
            'Could not find processes, got: %s' %
            ' '.join(runner_state.processes))
        updates = proxy_driver.method_calls['sendStatusUpdate']
        assert len(updates) == 3
        status_updates = [arg_tuple[0][0] for arg_tuple in updates]
        assert status_updates[0].state == mesos_pb2.TASK_STARTING
        assert status_updates[1].state == mesos_pb2.TASK_RUNNING
        assert status_updates[2].state == mesos_pb2.TASK_FINISHED
Example #44
def test_env(content=TEST_CONTENT):
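    # Fixture: yields a temp content file plus a CacheKeyGenerator and a BuildInvalidator rooted in a temp dir.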
    with temporary_dir() as d:
        with tempfile.NamedTemporaryFile() as f:
            f.write(content)
            f.flush()
            yield f, CacheKeyGenerator(), BuildInvalidator(d)
Example #45
 def test_gc_ignore_retained_task(self):
     with temporary_dir() as root:
         task_id = self.setup_task(self.HELLO_WORLD, root, finished=True)
         gcs = self.run_gc(root, task_id, retain=True)
         assert len(gcs) == 0
Example #46
def test_task_detector():
  with temporary_dir() as root:
    active_log_dir = os.path.join(root, 'active_log')
    finished_log_dir = os.path.join(root, 'finished_log')

    path = TaskPath(root=root)
    detector = TaskDetector(root)

    # test empty paths

    assert list(detector.get_task_ids(state='active')) == []
    assert list(detector.get_task_ids(state='finished')) == []
    assert set(detector.get_task_ids()) == set()

    assert detector.get_checkpoint(task_id='active_task') == path.given(
        task_id='active_task').getpath('runner_checkpoint')

    assert detector.get_checkpoint(task_id='finished_task') == path.given(
        task_id='finished_task').getpath('runner_checkpoint')

    assert set(detector.get_process_checkpoints('active_task')) == set()
    assert set(detector.get_process_checkpoints('finished_task')) == set()
    assert set(detector.get_process_runs('active_task', active_log_dir)) == set()
    assert set(detector.get_process_runs('finished_task', finished_log_dir)) == set()
    assert set(detector.get_process_logs('active_task', active_log_dir)) == set()
    assert set(detector.get_process_logs('finished_task', finished_log_dir)) == set()

    # create paths

    paths = [
        path.given(state='active', task_id='active_task').getpath('task_path'),
        path.given(state='finished', task_id='finished_task').getpath('task_path'),
        path.given(task_id='active_task').getpath('runner_checkpoint'),
        path.given(task_id='finished_task').getpath('runner_checkpoint'),
        path.given(
            task_id='active_task',
            process='hello_world',
            run='0',
            log_dir=active_log_dir
        ).with_filename('stdout').getpath('process_logdir'),
        path.given(
            task_id='finished_task',
            process='goodbye_world',
            run='1',
            log_dir=finished_log_dir
        ).with_filename('stderr').getpath('process_logdir'),
        path.given(task_id='active_task', process='hello_world').getpath('process_checkpoint'),
        path.given(task_id='finished_task', process='goodbye_world').getpath('process_checkpoint'),
    ]

    for p in paths:
      touch(p)

    detector = TaskDetector(root)

    assert list(detector.get_task_ids(state='active')) == list([('active', 'active_task')])
    assert list(detector.get_task_ids(state='finished')) == list([('finished', 'finished_task')])
    assert set(detector.get_task_ids()) == set(
        [('active', 'active_task'), ('finished', 'finished_task')])

    assert list(detector.get_process_checkpoints('active_task')) == [
        path.given(task_id='active_task', process='hello_world').getpath('process_checkpoint')]

    assert list(detector.get_process_checkpoints('finished_task')) == [
        path.given(task_id='finished_task', process='goodbye_world').getpath('process_checkpoint')]

    assert list(detector.get_process_runs('active_task', active_log_dir)) == [
        ('hello_world', 0)]
    assert list(detector.get_process_runs('finished_task', finished_log_dir)) == [
        ('goodbye_world', 1)]

    assert list(detector.get_process_logs('active_task', active_log_dir)) == [
        path.given(
            task_id='active_task',
            process='hello_world',
            run='0',
            log_dir=active_log_dir
        ).with_filename('stdout').getpath('process_logdir')]

    assert list(detector.get_process_logs('finished_task', finished_log_dir)) == [
        path.given(
            task_id='finished_task',
            process='goodbye_world',
            run='1',
            log_dir=finished_log_dir
        ).with_filename('stderr').getpath('process_logdir')]
Example #47
def test_local_cache():
    with temporary_dir() as artifact_root:
        with temporary_dir() as cache_root:
            artifact_cache = FileBasedArtifactCache(None, artifact_root,
                                                    cache_root)
            do_test_artifact_cache(artifact_cache)
Example #48
def test_temporary_dir_with_root_dir():
    with temporary_dir() as path1:
        with temporary_dir(root_dir=path1) as path2:
            assert os.path.realpath(path2).startswith(os.path.realpath(path1)), \
              'nested temporary dir should be created within outer dir.'
Example #49
def test_env(content=TEST_CONTENT):
    with temporary_dir() as d:
        with tempfile.NamedTemporaryFile() as f:
            f.write(content)
            f.flush()
            yield f, ArtifactCache(d)
Example #50
def test_load_yaml():
    with temporary_dir() as td:
        clusters_yml = os.path.join(td, 'clusters.yml')
        with open(clusters_yml, 'w') as fp:
            fp.write(TEST_YAML)
        validate_loaded_clusters(Clusters.from_file(clusters_yml))
Example #51
def test_load():
    with temporary_dir() as td:
        clusters_json = os.path.join(td, 'clusters.json')
        with open(clusters_json, 'w') as fp:
            fp.write(CLUSTERS)
        validate_loaded_clusters(Clusters.from_file(clusters_json))
Example #52
def test_clp_prereleases_resolver():
    prerelease_dep = make_sdist(name='dep', version='1.2.3b1')
    with temporary_dir() as td:
        safe_copy(prerelease_dep,
                  os.path.join(td, os.path.basename(prerelease_dep)))
        fetcher = Fetcher([td])

        # When no specific options are specified, allow_prereleases is None
        parser, resolver_options_builder = configure_clp()
        assert resolver_options_builder._allow_prereleases is None

        # When we specify `--pre`, allow_prereleases is True
        options, reqs = parser.parse_args(
            args=['--pre', 'dep==1.2.3b1', 'dep'])
        assert resolver_options_builder._allow_prereleases
        # We need to use our own fetcher instead of PyPI
        resolver_options_builder._fetchers.insert(0, fetcher)

        #####
        # The resolver created during processing of command line options (configure_clp)
        # is not actually passed into the API call (resolve_multi) from build_pex().
        # Instead, resolve_multi() calls resolve() where a new ResolverOptionsBuilder instance
        # is created. The only way to supply our own fetcher to that new instance is to patch it
        # here in the test so that it can fetch our test package (dep-1.2.3b1). Hence, this class
        # below and the change in the `pex.resolver` module where the patched object resides.
        #
        import pex.resolver

        class BuilderWithFetcher(ResolverOptionsBuilder):
            def __init__(self,
                         fetchers=None,
                         allow_all_external=False,
                         allow_external=None,
                         allow_unverified=None,
                         allow_prereleases=None,
                         use_manylinux=None,
                         precedence=None,
                         context=None):
                super(BuilderWithFetcher,
                      self).__init__(fetchers=fetchers,
                                     allow_all_external=allow_all_external,
                                     allow_external=allow_external,
                                     allow_unverified=allow_unverified,
                                     allow_prereleases=allow_prereleases,
                                     use_manylinux=None,
                                     precedence=precedence,
                                     context=context)
                self._fetchers.insert(0, fetcher)

        # end stub
        #####

        # Without a corresponding fix in pex.py, this test failed for a dependency requirement of
        # dep==1.2.3b1 from one package and just dep (any version accepted) from another package.
        # The failure was an exit from build_pex() with the message:
        #
        # Could not satisfy all requirements for dep==1.2.3b1:
        #     dep==1.2.3b1, dep
        #
        # With correct behavior, the assert line is reached and the pex_builder object is created.
        with mock.patch.object(pex.resolver, 'ResolverOptionsBuilder',
                               BuilderWithFetcher):
            pex_builder = build_pex(reqs, options, resolver_options_builder)
            assert pex_builder is not None
Example #53
 def test_local_cache(self):
   with temporary_dir() as artifact_root:
     with temporary_dir() as cache_root:
       artifact_cache = LocalArtifactCache(None, artifact_root, cache_root)
       self.do_test_artifact_cache(artifact_cache)
Example #54
  def test_analysis_files(self):
    classes_dir = '/Users/kermit/src/acme.web/.pants.d/scalac/classes/'
    parser = ZincAnalysisParser(classes_dir)

    with temporary_dir() as tmpdir:
      # Extract analysis files from tarball.
      analysis_tarball = os.path.join(os.path.dirname(__file__), 'testdata', 'analysis.tar.bz2')
      analysis_dir = os.path.join(tmpdir, 'orig')
      print('Extracting %s to %s' % (analysis_tarball, analysis_dir))
      os.mkdir(analysis_dir)
      with contextlib.closing(tarfile.open(analysis_tarball, 'r:bz2')) as tar:
        tar.extractall(analysis_dir)

      # Parse them.
      analysis_files = [os.path.join(analysis_dir, f)
                        for f in os.listdir(analysis_dir) if f.endswith('.analysis')]
      num_analyses = len(analysis_files)

      def parse(f):
        return parser.parse_from_path(f)

      analyses = self._time(lambda: [parse(f) for f in analysis_files],
                            'Parsed %d files' % num_analyses)

      # Write them back out individually.
      writeout_dir = os.path.join(tmpdir, 'write')
      os.mkdir(writeout_dir)
      def write(file_name, analysis):
        outpath = os.path.join(writeout_dir, file_name)
        analysis.write_to_path(outpath)

      def _write_all():
        for analysis_file, analysis in zip(analysis_files, analyses):
          write(os.path.basename(analysis_file), analysis)

      self._time(_write_all, 'Wrote %d files' % num_analyses)

      # Merge them.
      merged_analysis = self._time(lambda: ZincAnalysis.merge(analyses),
                                   'Merged %d files' % num_analyses)

      # Write merged analysis to file.
      merged_analysis_path = os.path.join(tmpdir, 'analysis.merged')
      self._time(lambda: merged_analysis.write_to_path(merged_analysis_path),
                 'Wrote merged analysis to %s' % merged_analysis_path)

      # Read merged analysis from file.
      merged_analysis2 = self._time(lambda: parser.parse_from_path(merged_analysis_path),
                                    'Read merged analysis from %s' % merged_analysis_path)

      # Split the merged analysis back to individual analyses.
      sources_per_analysis = [a.stamps.sources.keys() for a in analyses]
      split_analyses = self._time(lambda: merged_analysis2.split(sources_per_analysis, catchall=True),
                                  'Split back into %d analyses' % num_analyses)

      self.assertEquals(num_analyses + 1, len(split_analyses))  # +1 for the catchall.
      catchall_analysis = split_analyses[-1]

      # We expect an empty catchall.
      self.assertEquals(0, len(catchall_analysis.stamps.sources))

      # Diff the original analyses and the split ones.

      # Write the split to the tmpdir, for ease of debugging on failure.
      splits_dir = os.path.join(tmpdir, 'splits')
      os.mkdir(splits_dir)
      for analysis_file, analysis, split_analysis in zip(analysis_files, analyses, split_analyses):
        outfile_path = os.path.join(splits_dir, os.path.basename(analysis_file))
        split_analysis.write_to_path(outfile_path)
        diffs = analysis.diff(split_analysis)
        self.assertEquals(analysis, split_analysis, ''.join(diffs))

    print('Total time: %f seconds' % self.total_time)
Example #55
def yield_pex_builder(zip_safe=True):
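  # Yields a PEXBuilder pre-populated with a freshly built egg in a temporary directory.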
  with nested(temporary_dir(), make_distribution('p1', zipped=True, zip_safe=zip_safe)) as (td, p1):
    pb = PEXBuilder(path=td)
    pb.add_egg(p1.location)
    yield pb
Example #56
def pex_with_no_entrypoints():
  with temporary_dir() as out:
    pex = os.path.join(out, 'pex.pex')
    run_pex_command(['setuptools==36.2.7', '-o', pex])
    test_script = b'from setuptools.sandbox import run_setup; print(str(run_setup))'
    yield pex, test_script, out
Example #57
    def test(self):
        self.assertEqual(set(), self.git.changed_files())
        self.assertEqual(set(['README']),
                         self.git.changed_files(from_commit='HEAD^'))

        tip_sha = self.git.commit_id
        self.assertTrue(tip_sha)

        self.assertTrue(tip_sha in self.git.changelog())

        self.assertTrue(self.git.tag_name.startswith('first-'),
                        msg='un-annotated tags should be found')
        self.assertEqual('master', self.git.branch_name)

        def edit_readme():
            with open(self.readme_file, 'a') as readme:
                readme.write('More data.')

        edit_readme()
        with open(os.path.join(self.worktree, 'INSTALL'), 'w') as untracked:
            untracked.write('make install')
        self.assertEqual(set(['README']), self.git.changed_files())
        self.assertEqual(set(['README', 'INSTALL']),
                         self.git.changed_files(include_untracked=True))

        try:
            # These changes should be rejected because our branch point from origin is 1 commit behind
            # the changes pushed there in clone 2.
            self.git.commit('API Changes.')
        except Scm.RemoteException:
            with environment_as(GIT_DIR=self.gitdir,
                                GIT_WORK_TREE=self.worktree):
                subprocess.check_call(
                    ['git', 'reset', '--hard', 'depot/master'])
            self.git.refresh()
            edit_readme()

        self.git.commit('''API '"' " Changes.''')
        self.git.tag('second', message='''Tagged ' " Changes''')

        with temporary_dir() as clone:
            with pushd(clone):
                self.init_repo('origin', self.origin)
                subprocess.check_call(
                    ['git', 'pull', '--tags', 'origin', 'master:master'])

                with open(os.path.realpath('README')) as readme:
                    self.assertEqual('--More data.', readme.read())

                git = Git()

                # Check that we can pick up committed and uncommitted changes.
                with safe_open(os.path.realpath('CHANGES'), 'w') as changes:
                    changes.write('none')
                subprocess.check_call(['git', 'add', 'CHANGES'])
                self.assertEqual(set(['README', 'CHANGES']),
                                 git.changed_files(from_commit='first'))

                self.assertEqual('master', git.branch_name)
                self.assertEqual('second',
                                 git.tag_name,
                                 msg='annotated tags should be found')
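
# A stand-alone sketch of the same idea using plain git commands (an illustration of
# what changed_files(include_untracked=...) computes, not the Scm wrapper's actual
# implementation):
import subprocess

def changed_files(worktree, from_commit=None, include_untracked=False):
    # Tracked files that differ from the given commit (or from the index if omitted).
    cmd = ['git', '-C', worktree, 'diff', '--name-only']
    if from_commit:
        cmd.append(from_commit)
    files = set(subprocess.check_output(cmd).decode().split())
    if include_untracked:
        # Untracked-but-not-ignored files, mirroring include_untracked=True above.
        untracked = subprocess.check_output(
            ['git', '-C', worktree, 'ls-files', '--others', '--exclude-standard'])
        files.update(untracked.decode().split())
    return files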
Beispiel #58
0
def test_pex_python():
    py2_path_interpreter = ensure_python_interpreter('2.7.10')
    py3_path_interpreter = ensure_python_interpreter('3.6.3')
    path = ':'.join([
        os.path.dirname(py2_path_interpreter),
        os.path.dirname(py3_path_interpreter)
    ])
    with environment_as(PATH=path):
        with temporary_dir() as td:
            pexrc_path = os.path.join(td, '.pexrc')
            with open(pexrc_path, 'w') as pexrc:
                pex_python = ensure_python_interpreter('3.6.3')
                pexrc.write("PEX_PYTHON=%s" % pex_python)

            # test PEX_PYTHON with valid constraints
            pex_out_path = os.path.join(td, 'pex.pex')
            res = run_pex_command([
                '--disable-cache',
                '--rcfile=%s' % pexrc_path, '--interpreter-constraint=>3',
                '--interpreter-constraint=<3.8', '-o', pex_out_path
            ])
            res.assert_success()

            stdin_payload = b'import sys; print(sys.executable); sys.exit(0)'
            stdout, rc = run_simple_pex(pex_out_path, stdin=stdin_payload)
            assert rc == 0
            correct_interpreter_path = pex_python.encode()
            assert correct_interpreter_path in stdout

            # test PEX_PYTHON with incompatible constraints
            pexrc_path = os.path.join(td, '.pexrc')
            with open(pexrc_path, 'w') as pexrc:
                pex_python = ensure_python_interpreter('2.7.10')
                pexrc.write("PEX_PYTHON=%s" % pex_python)

            pex_out_path = os.path.join(td, 'pex2.pex')
            res = run_pex_command([
                '--disable-cache',
                '--rcfile=%s' % pexrc_path, '--interpreter-constraint=>3',
                '--interpreter-constraint=<3.8', '-o', pex_out_path
            ])
            res.assert_success()

            stdin_payload = b'import sys; print(sys.executable); sys.exit(0)'
            stdout, rc = run_simple_pex(pex_out_path, stdin=stdin_payload)
            assert rc == 1
            fail_str = b'not compatible with specified interpreter constraints'
            assert fail_str in stdout

            # test PEX_PYTHON with no constraints
            pex_out_path = os.path.join(td, 'pex3.pex')
            res = run_pex_command([
                '--disable-cache',
                '--rcfile=%s' % pexrc_path, '-o', pex_out_path
            ])
            res.assert_success()

            stdin_payload = b'import sys; print(sys.executable); sys.exit(0)'
            stdout, rc = run_simple_pex(pex_out_path, stdin=stdin_payload)
            assert rc == 0
            correct_interpreter_path = pex_python.encode()
            assert correct_interpreter_path in stdout
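
# A schematic sketch of the check the three cases above exercise (hypothetical helper,
# not pex's bootstrap code; relies on the third-party 'packaging' library): the
# interpreter named by PEX_PYTHON must satisfy every --interpreter-constraint, so a
# 2.7.10 PEX_PYTHON fails ['>3', '<3.8'] while a 3.6.3 one passes.
import subprocess
from packaging.specifiers import SpecifierSet
from packaging.version import Version

def pex_python_satisfies(python_binary, constraints):
    # Ask the named interpreter for its version, then test it against each constraint.
    out = subprocess.check_output(
        [python_binary, '-c', 'import platform; print(platform.python_version())'])
    version = Version(out.decode().strip())
    return all(version in SpecifierSet(constraint) for constraint in constraints)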
Beispiel #59
0
def test_interpreter_selection_using_os_environ_for_bootstrap_reexec():
  """
  This is a test for verifying the proper function of the
  pex bootstrapper's interpreter selection logic and validate a corresponding
  bugfix. More details on the nature of the bug can be found at:
  https://github.com/pantsbuild/pex/pull/441
  """
  with temporary_dir() as td:
    pexrc_path = os.path.join(td, '.pexrc')

    # Select pexrc interpreter versions based on the test environment.
    # The parent interpreter is the interpreter we expect the parent pex to
    # execute with; the child interpreter is the interpreter we expect the
    # child pex to execute with.
    if (sys.version_info[0], sys.version_info[1]) == (3, 6):
      child_pex_interpreter_version = '3.6.3'
    else:
      child_pex_interpreter_version = '2.7.10'

    # Write parent pex's pexrc.
    with open(pexrc_path, 'w') as pexrc:
      pexrc.write("PEX_PYTHON=%s" % sys.executable)

    test_setup_path = os.path.join(td, 'setup.py')
    with open(test_setup_path, 'w') as fh:
      fh.write(dedent('''
        from setuptools import setup

        setup(
          name='tester',
          version='1.0',
          description='tests',
          author='tester',
          author_email='*****@*****.**',
          packages=['testing']
        )
        '''))

    os.mkdir(os.path.join(td, 'testing'))
    test_init_path = os.path.join(td, 'testing/__init__.py')
    with open(test_init_path, 'w') as fh:
      fh.write(dedent('''
        def tester():     
          from pex.testing import (
            run_pex_command,
            run_simple_pex
          )
          import os
          import tempfile
          import shutil
          from textwrap import dedent
          td = tempfile.mkdtemp()
          try:
            pexrc_path = os.path.join(td, '.pexrc')
            with open(pexrc_path, 'w') as pexrc:
              pexrc.write("PEX_PYTHON={}")          
            test_file_path = os.path.join(td, 'build_and_run_child_pex.py')
            with open(test_file_path, 'w') as fh:
              fh.write(dedent("""
                import sys
                print(sys.executable)
                """))
            pex_out_path = os.path.join(td, 'child.pex')
            res = run_pex_command(['--disable-cache',
              '-o', pex_out_path])
            stdin_payload = b'import sys; print(sys.executable); sys.exit(0)'
            stdout, rc = run_simple_pex(pex_out_path, stdin=stdin_payload)
            print(stdout)
          finally:
            shutil.rmtree(td)
        '''.format(ensure_python_interpreter(child_pex_interpreter_version))))

    pex_out_path = os.path.join(td, 'parent.pex')
    res = run_pex_command(['--disable-cache',
      'pex',
      '{}'.format(td),
      '-e', 'testing:tester',
      '-o', pex_out_path])
    res.assert_success()

    stdout, rc = run_simple_pex(pex_out_path)
    assert rc == 0
    # Ensure that child pex used the proper interpreter as specified by its pexrc.
    correct_interpreter_path = ensure_python_interpreter(child_pex_interpreter_version)
    correct_interpreter_path = correct_interpreter_path.encode()  # Py 2/3 compatibility 
    assert correct_interpreter_path in stdout
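
# A schematic sketch of the re-exec pattern this test validates (an illustration of the
# general technique, not pex's actual bootstrapper): if the interpreter selected by
# PEX_PYTHON is not the one currently running, replace the process with it so the
# child pex sees the selected interpreter in sys.executable.
import os
import sys

def maybe_reexec(argv):
  target = os.environ.get('PEX_PYTHON')
  if target and os.path.realpath(target) != os.path.realpath(sys.executable):
    os.execv(target, [target] + argv)  # never returns; the new interpreter takes over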
Beispiel #60
0
    def merge_artifact(self, versioned_target_set):
        if len(versioned_target_set.targets) <= 1:
            return

        with temporary_dir() as tmpdir:
            dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(
                versioned_target_set.targets)
            safe_rmtree(dst_output_dir)
            safe_mkdir(dst_output_dir)
            src_analysis_caches = []

            analysis_args = []
            analysis_args.extend(self._zinc_jar_args)
            analysis_args.extend([
                '-log-level',
                self.context.options.log_level or 'info',
                '-analysis',
            ])

            # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
            dst_deps = Dependencies(dst_output_dir)

            for target in versioned_target_set.targets:
                src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths(
                    [target])
                if os.path.exists(src_depfile):
                    src_deps = Dependencies(src_output_dir)
                    src_deps.load(src_depfile)
                    dst_deps.merge(src_deps)

                    classes_by_source = src_deps.findclasses([target]).get(
                        target, {})
                    for source, classes in classes_by_source.items():
                        for cls in classes:
                            src = os.path.join(src_output_dir, cls)
                            dst = os.path.join(dst_output_dir, cls)
                            # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
                            # it's missing and rebuild it.
                            # dst may already exist if we have overlapping targets. It's not a good idea
                            # to have those, but until we enforce it, we must allow it here.
                            if os.path.exists(src) and not os.path.exists(dst):
                                # Copy the class file.
                                safe_mkdir(os.path.dirname(dst))
                                os.link(src, dst)

                    # Use zinc to rebase a copy of the per-target analysis files prior to merging.
                    if os.path.exists(src_analysis_cache):
                        src_analysis_cache_tmp = \
                          os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
                        shutil.copyfile(src_analysis_cache,
                                        src_analysis_cache_tmp)
                        src_analysis_caches.append(src_analysis_cache_tmp)
                        rebase_args = analysis_args + [
                            '-cache',
                            src_analysis_cache_tmp,
                            '-rebase',
                            '%s:%s' % (src_output_dir, dst_output_dir),
                        ]
                        if self.runjava(self._main,
                                        classpath=self._zinc_classpath,
                                        args=rebase_args,
                                        jvmargs=self._jvm_args):
                            self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. ' \
                            'Target may require a full rebuild.' % src_analysis_cache_tmp)

            dst_deps.save(dst_depfile)

            # Use zinc to merge the analysis files.
            merge_args = analysis_args + [
                '-cache',
                dst_analysis_cache,
                '-merge',
                ':'.join(src_analysis_caches),
            ]
            if self.runjava(self._main,
                            classpath=self._zinc_classpath,
                            args=merge_args,
                            jvmargs=self._jvm_args):
                raise TaskError('zinc failed to merge analysis files %s to %s' %
                                (':'.join(src_analysis_caches), dst_analysis_cache))
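
# A stand-alone sketch of the class-file relocation step in merge_artifact (an
# illustrative helper, not the task's actual code): hard-link each class file into the
# merged output dir, tolerating missing sources (aborted builds) and already-present
# destinations (overlapping targets), as the comments above describe.
import os

def link_classes(src_output_dir, dst_output_dir, class_paths):
    for cls in class_paths:
        src = os.path.join(src_output_dir, cls)
        dst = os.path.join(dst_output_dir, cls)
        if os.path.exists(src) and not os.path.exists(dst):
            dst_dir = os.path.dirname(dst)
            if not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
            os.link(src, dst)  # hard link to avoid copying bytes already on disk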