Example #1
  def setUp(self):
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
      subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()

    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
      self.init_repo('depot', self.origin)

      touch(self.readme_file)
      subprocess.check_call(['git', 'add', 'README'])
      subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
      subprocess.check_call(['git', 'tag', 'first'])
      subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
      subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master'])

      with safe_open(self.readme_file, 'w') as readme:
        readme.write('Hello World.')
      subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
      self.init_repo('origin', self.origin)
      subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

      with safe_open(os.path.realpath('README'), 'a') as readme:
        readme.write('--')
      subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
      subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
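
Every example in this collection writes files through safe_open (from pants.util.dirutil) rather than the builtin open. For reference, here is a minimal sketch of the assumed behavior (create any missing parent directories, then open the file); it is an illustration, not the library's actual implementation.

import os

def safe_open(filename, *args, **kwargs):
  """Sketch: open filename, creating its parent directory first if necessary."""
  parent = os.path.dirname(filename)
  if parent:
    os.makedirs(parent, exist_ok=True)
  return open(filename, *args, **kwargs)
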
Example #2
  def _gen_options_reference(self):
    """Generate the options reference rst doc."""
    goals = gen_tasks_options_reference_data()
    filtered_goals = []
    omit_impl_regexps = [re.compile(r) for r in self.get_options().omit_impl_re]
    for g in goals:
      if any(r.match(t['impl']) for r in omit_impl_regexps for t in g.tasks):
        continue
      filtered_goals.append(g)
    glopts = gen_glopts_reference_data()

    # generate the .rst file
    template = resource_string(__name__,
                               os.path.join(self._templates_dir, 'options_reference.mustache'))
    filename = os.path.join(self._outdir, 'options_reference.rst')
    self.context.log.info('Generating {}'.format(filename))
    with safe_open(filename, 'wb') as outfile:
      generator = Generator(template, goals=filtered_goals, glopts=glopts)
      generator.write(outfile)

    # generate the .html file
    template = resource_string(__name__,
                               os.path.join(self._templates_dir, 'oref_html.mustache'))
    filename = os.path.join(self._outdir, 'options_reference.html')
    self.context.log.info('Generating {}'.format(filename))
    with safe_open(filename, 'wb') as outfile:
      generator = Generator(template, goals=filtered_goals, glopts=glopts)
      generator.write(outfile)
Example #3
  def symlink_cachepath(ivy_home, inpath, symlink_dir, outpath):
    """Symlinks all paths listed in inpath that are under ivy_home into symlink_dir.

    Preserves all other paths. Writes the resulting paths to outpath.
    Returns a map of path -> symlink to that path.
    """
    safe_mkdir(symlink_dir)
    with safe_open(inpath, 'r') as infile:
      paths = filter(None, infile.read().strip().split(os.pathsep))
    new_paths = []
    for path in paths:
      if not path.startswith(ivy_home):
        new_paths.append(path)
        continue
      symlink = os.path.join(symlink_dir, os.path.relpath(path, ivy_home))
      try:
        os.makedirs(os.path.dirname(symlink))
      except OSError as e:
        if e.errno != errno.EEXIST:
          raise
      # Note: The try blocks cannot be combined. It may be that the dir exists but the link doesn't.
      try:
        os.symlink(path, symlink)
      except OSError as e:
        # We don't delete and recreate the symlink, as this may break concurrently executing code.
        if e.errno != errno.EEXIST:
          raise
      new_paths.append(symlink)
    with safe_open(outpath, 'w') as outfile:
      outfile.write(':'.join(new_paths))
    symlink_map = dict(zip(paths, new_paths))
    return symlink_map
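
symlink_cachepath above rewrites an Ivy cachepath file so that entries under ivy_home are reached through stable symlinks, while entries outside the cache pass through untouched. Below is a standalone sketch of the same pattern using only the standard library (the pants helpers safe_mkdir/safe_open are replaced by os.makedirs/open here, and all paths are made up):

import os
import tempfile

ivy_home = tempfile.mkdtemp()
symlink_dir = tempfile.mkdtemp()

# One fake resolved jar inside the cache and one path outside it.
jar = os.path.join(ivy_home, 'org', 'lib', 'jars', 'lib-1.0.jar')
os.makedirs(os.path.dirname(jar))
open(jar, 'w').close()
outside = '/usr/share/java/other.jar'

symlink_map = {}
for path in (jar, outside):
  if not path.startswith(ivy_home):
    symlink_map[path] = path  # Preserve paths outside the cache as-is.
    continue
  link = os.path.join(symlink_dir, os.path.relpath(path, ivy_home))
  os.makedirs(os.path.dirname(link), exist_ok=True)
  if not os.path.lexists(link):
    os.symlink(path, link)
  symlink_map[path] = link

# The rewritten classpath now points at the symlinks instead of the cache.
print(':'.join(symlink_map.values()))
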
  def test_go_test_unstyle(self):
    with self.temporary_sourcedir() as srcdir:
      lib_unstyle_relpath = 'src/go/libUnstyle'
      lib_unstyle_dir = os.path.join(srcdir, lib_unstyle_relpath)
      with safe_open(os.path.join(lib_unstyle_dir, 'unstyle.go'), 'w') as fp:
        # NB: Go format violating indents below.
        fp.write(dedent("""
            package libUnstyle

            func Speak() {
              println("Hello from libUnstyle!")
              println("Bye from libUnstyle!")
            }

            func Add(a int, b int) int {
            return a + b
            }
            """).strip())
      with safe_open(os.path.join(lib_unstyle_dir, 'BUILD'), 'w') as fp:
        fp.write('go_library()')

      args = ['compile', 'lint', lib_unstyle_dir]
      pants_run = self.run_pants(args)
      self.assert_failure(pants_run)

      args = ['compile', 'lint', '--lint-go-skip', lib_unstyle_dir]
      pants_run = self.run_pants(args)
      self.assert_success(pants_run)
 def _create_invalid_jax_ws_project(self):
   with self.temporary_sourcedir() as srcdir:
     with safe_open(os.path.join(srcdir, 'src/jax_ws/test/InvalidService.wsdl'), 'w') as fp:
       fp.write(dedent("""
           <definitions xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"
               xmlns:wsp="http://www.w3.org/ns/ws-policy"
               xmlns:wsp1_2="http://schemas.xmlsoap.org/ws/2004/09/policy"
               xmlns:wsam="http://www.w3.org/2007/05/addressing/metadata"
               xmlns:soap="http://schemas.xmlsoap.org/wsdl/soap/"
               xmlns:tns="http://example.com/"
               xmlns:xsd="http://www.w3.org/2001/XMLSchema"
               xmlns="http://schemas.xmlsoap.org/wsdl/"
               targetNamespace="http://example.com/"
               name="InvalidServerImplService">
             <types/>
             <service name="InvalidServerImplService">
               <port name="InvalidServerImplPort" binding="tns:InvalidServerImplPortBinding">
                 <soap:address location="http://127.0.0.1:9876/invalid"/>
               </port>
             </service>
           </definitions>
           """).strip())
     with safe_open(os.path.join(srcdir, 'src/jax_ws/test/BUILD'), 'w') as fp:
       fp.write(dedent("""
           jax_ws_library(name='invalid-service',
             sources = [
               'InvalidService.wsdl',
             ],
             dependencies = [],
           )
           """).strip())
     yield srcdir
  def known_commits(self):
    with temporary_dir(root_dir=get_buildroot()) as worktree:
      with safe_open(os.path.join(worktree, 'README'), 'w') as fp:
        fp.write('Just a test tree.')

      with initialize_repo(worktree=worktree, gitdir=os.path.join(worktree, '.git')) as git:
        src_file = os.path.join(worktree, 'src/java/org/pantsbuild/Class.java')
        with safe_open(src_file, 'w') as fp:
          fp.write(dedent("""
          package org.pantsbuild;

          class Class {
            static final int MEANING_OF_LIFE = 42;
          }
          """))

        src_build_file = os.path.join(worktree, 'src/java/org/pantsbuild/BUILD')
        with safe_open(src_build_file, 'w') as fp:
          fp.write("java_library(name='pantsbuild', sources=['Class.java'])")

        git.add(src_file, src_build_file)
        git.commit('Introduce Class.')

        test_file = os.path.join(worktree, 'tests/java/org/pantsbuild/ClassTest.java')
        with safe_open(test_file, 'w') as fp:
          fp.write(dedent("""
          package org.pantsbuild;

          import org.junit.Assert;
          import org.junit.Test;

          public class ClassTest {
            @Test public void test() {
              Assert.assertEquals(42, Class.MEANING_OF_LIFE);
            }
          }
          """))

        test_build_file = os.path.join(worktree, 'tests/java/org/pantsbuild/BUILD')
        with safe_open(test_build_file, 'w') as fp:
          fp.write(dedent("""
          jar_library(name='junit', jars=[jar('junit', 'junit', '4.12')])

          junit_tests(
            name='pantsbuild',
            sources=['ClassTest.java'],
            dependencies=[
              ':junit',
              '{}'
            ]
          )
          """).format(os.path.relpath(os.path.dirname(src_build_file), get_buildroot())))

        git.add(test_file, test_build_file)
        git.commit('Introduce ClassTest.')

        yield
  def _create_thrift_project(self):
    with self.temporary_sourcedir() as srcdir:
      with safe_open(os.path.join(srcdir, 'src/thrift/thrifttest/duck.thrift'), 'w') as fp:
        fp.write(dedent("""
            namespace go thrifttest.duck

            struct Duck {
              1: optional string quack,
            }

            service Feeder {
              void feed(1:Duck duck),
            }
            """).strip())
      with safe_open(os.path.join(srcdir, 'src/thrift/thrifttest/BUILD'), 'w') as fp:
        fp.write(dedent("""
            go_thrift_library(
              name='fleem',
              sources=['duck.thrift']
            )
            """).strip())

      with safe_open(os.path.join(srcdir, 'src/go/usethrift/example.go'), 'w') as fp:
        fp.write(dedent("""
            package usethrift

            import "thrifttest/duck"

            func whatevs(f duck.Feeder) string {
              d := duck.NewDuck()
              f.Feed(d)
              return d.GetQuack()
            }
            """).strip())
      with safe_open(os.path.join(srcdir, 'src/go/usethrift/BUILD'), 'w') as fp:
        fp.write(dedent("""
            go_library(
              dependencies=[
                '{srcdir}/src/thrift/thrifttest:fleem'
              ]
            )
            """.format(srcdir=os.path.relpath(srcdir, get_buildroot()))).strip())

      with safe_open(os.path.join(srcdir, '3rdparty/go/github.com/apache/thrift/BUILD'), 'w') as fp:
        fp.write("go_remote_library(rev='0.9.3', pkg='lib/go/thrift')")

      config = {
        'gen.go-thrift': {
          'thrift_import_target':
              os.path.join(os.path.relpath(srcdir, get_buildroot()),
                           '3rdparty/go/github.com/apache/thrift:lib/go/thrift'),
          'thrift_import': 'github.com/apache/thrift/lib/go/thrift'
        }
      }
      yield srcdir, config
Example #8
  def setUp(self):
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
      subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()

    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
      self.init_repo('depot', self.origin)

      touch(self.readme_file)
      subprocess.check_call(['git', 'add', 'README'])
      safe_mkdir(os.path.join(self.worktree, 'dir'))
      with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
        f.write("file in subdir")

      # Make some symlinks
      os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
      os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
      os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
      os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
      os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
      os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
      os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
      os.symlink('loop2', os.path.join(self.worktree, 'loop1'))

      subprocess.check_call(['git', 'add', 'README', 'dir', 'loop1', 'loop2',
                             'link-to-dir', 'not-a-dir'])
      subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
      self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
      subprocess.check_call(['git', 'tag', 'first'])
      subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
      subprocess.check_call(['git', 'branch', '--set-upstream-to', 'depot/master'])

      with safe_open(self.readme_file, 'wb') as readme:
        readme.write('Hello World.\u2764'.encode('utf-8'))
      subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

      self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
      self.init_repo('origin', self.origin)
      subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

      with safe_open(os.path.realpath('README'), 'a') as readme:
        readme.write('--')
      subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
      subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
Example #9
File: ivy_utils.py Project: sebzz/pants
  def symlink_cachepath(cls, ivy_cache_dir, inpath, symlink_dir, outpath, existing_symlink_map):
    """Symlinks all paths listed in inpath that are under ivy_cache_dir into symlink_dir.

    If there is an existing symlink for a file under inpath, it is used rather than creating
    a new symlink. Preserves all other paths. Writes the resulting paths to outpath.
    Returns a map of path -> symlink to that path.
    """
    safe_mkdir(symlink_dir)
    # The ivy_cache_dir might itself be a symlink. In this case, ivy may return paths that
    # reference the realpath of the .jar file after it is resolved in the cache dir. To handle
    # this case, add both the symlink'ed path and the realpath to the jar to the symlink map.
    real_ivy_cache_dir = os.path.realpath(ivy_cache_dir)
    updated_symlink_map = OrderedDict()
    with safe_open(inpath, 'r') as infile:
      inpaths = filter(None, infile.read().strip().split(os.pathsep))
      paths = OrderedSet()
      for path in inpaths:
        paths.add(path)
        realpath = os.path.realpath(path)
        if path != realpath:
          paths.add(realpath)
          if realpath.startswith(real_ivy_cache_dir):
            paths.add(os.path.join(ivy_cache_dir, realpath[len(real_ivy_cache_dir)+1:]))

    for path in paths:
      if path.startswith(ivy_cache_dir):
        updated_symlink_map[path] = os.path.join(symlink_dir, os.path.relpath(path, ivy_cache_dir))
      elif path.startswith(real_ivy_cache_dir):
        updated_symlink_map[path] = os.path.join(symlink_dir, os.path.relpath(path, real_ivy_cache_dir))
      else:
        # This path is outside the cache. We won't symlink it.
        updated_symlink_map[path] = path

    # Create symlinks for paths in the ivy cache dir that we haven't seen before.
    new_symlinks = cls._find_new_symlinks(existing_symlink_map, updated_symlink_map)

    for path, symlink in new_symlinks.iteritems():
      if path == symlink:
        # Skip paths that aren't going to be symlinked.
        continue
      safe_mkdir(os.path.dirname(symlink))
      try:
        os.symlink(path, symlink)
      except OSError as e:
        # We don't delete and recreate the symlink, as this may break concurrently executing code.
        if e.errno != errno.EEXIST:
          raise

    # (re)create the classpath with all of the paths
    with safe_open(outpath, 'w') as outfile:
      outfile.write(':'.join(OrderedSet(updated_symlink_map.values())))

    return dict(updated_symlink_map)
Example #10
  def post_fork_child(self, fingerprint, jvm_options, classpath, stdout, stderr):
    """Post-fork() child callback for ProcessManager.daemon_spawn()."""
    java = SubprocessExecutor(self._distribution)

    subproc = java.spawn(classpath=classpath,
                         main='com.martiansoftware.nailgun.NGServer',
                         jvm_options=jvm_options,
                         args=[':0'],
                         stdin=safe_open('/dev/null', 'r'),
                         stdout=safe_open(self._ng_stdout, 'w'),
                         stderr=safe_open(self._ng_stderr, 'w'),
                         close_fds=True)

    self.write_pid(subproc.pid)
Example #11
  def assert_checker(self, relpath, contents, expected_code=0, expected_message=''):
    with temporary_dir() as td:
      with safe_open(os.path.join(td, relpath), 'w') as fp:
        fp.write(contents)

      args=['--root-dir={}'.format(td)]
      for plugin_type in checker.plugins():
        opts = {'skip': False, 'max_length': self._MAX_LENGTH, 'ignore': ['E111']}
        args.append('--{}-options={}'.format(plugin_type.name(), json.dumps(opts)))
      args.append(relpath)

      with open(os.path.join(td, 'stdout'), 'w+') as stdout:
        with open(os.path.join(td, 'stderr'), 'w+') as stderr:
          with stdio_as(stdout_fd=stdout.fileno(), stderr_fd=stderr.fileno(), stdin_fd=-1):
            with self.assertRaises(SystemExit) as error:
              checker.main(args=args)

          def read_stdio(fp):
            fp.flush()
            fp.seek(0)
            return fp.read()

          self.assertEqual(expected_code, error.exception.code,
                           'STDOUT:\n{}\nSTDERR:\n{}'.format(read_stdio(stdout),
                                                             read_stdio(stderr)))

          self.assertEqual(expected_message, read_stdio(stdout).strip())
          self.assertEqual('', read_stdio(stderr))
Example #12
  def test_support_url_multi(self):
    """Tests to make sure existing base urls function as expected."""

    bootstrap_dir = '/tmp'

    with temporary_dir() as invalid_local_files, temporary_dir() as valid_local_files:
      binary_util = self._gen_binary_util(
        baseurls=[
          'BLATANTLY INVALID URL',
          'https://dl.bintray.com/pantsbuild/bin/reasonably-invalid-url',
          invalid_local_files,
          valid_local_files,
          'https://dl.bintray.com/pantsbuild/bin/another-invalid-url',
        ],
        bootstrap_dir=bootstrap_dir)

      binary_request = binary_util._make_deprecated_binary_request(
        supportdir='bin/protobuf',
        version='2.4.1',
        name='protoc')

      binary_path = binary_request.get_download_path(binary_util._host_platform())
      contents = b'proof'
      with safe_open(os.path.join(valid_local_files, binary_path), 'wb') as fp:
        fp.write(contents)

      binary_path_abs = os.path.join(bootstrap_dir, binary_path)

      self.assertEqual(os.path.realpath(binary_path_abs),
                       os.path.realpath(binary_util.select(binary_request)))

      self.assertEqual(contents, self._read_file(binary_path_abs))
Example #13
  def execute(self):
    pages = []
    targets = self.context.targets()
    for target in targets:
      if isinstance(target, Page):
        for wiki_artifact in target.payload.provides:
          pages.append((target, wiki_artifact))

    urls = list()

    genmap = self.context.products.get('wiki_html')
    for page, wiki_artifact in pages:
      html_info = genmap.get((wiki_artifact, page))
      if len(html_info) > 1:
        raise TaskError('Unexpected resources for %s: %s' % (page, html_info))
      basedir, htmls = html_info.items()[0]
      if len(htmls) != 1:
        raise TaskError('Unexpected resources for %s: %s' % (page, htmls))
      with safe_open(os.path.join(basedir, htmls[0])) as contents:
        url = self.publish_page(
          page.address,
          wiki_artifact.config['space'],
          wiki_artifact.config['title'],
          contents.read(),
          # Default to none if not present in the hash.
          parent=wiki_artifact.config.get('parent')
        )
        if url:
          urls.append(url)
          self.context.log.info('Published %s to %s' % (page, url))

    if self.open and urls:
      binary_util.ui_open(*urls)
Example #14
  def _await_nailgun_server(self, stdout, stderr):
    nailgun_timeout_seconds = 5
    max_socket_connect_attempts = 10
    nailgun = None
    port_parse_start = time.time()
    with safe_open(self._ng_out, 'r') as ng_out:
      while not nailgun:
        started = ng_out.readline()
        if started:
          port = self._parse_nailgun_port(started)
          nailgun = self._create_ngclient(port, stdout, stderr)
          log.debug('Detected ng server up on port %d' % port)
        elif time.time() - port_parse_start > nailgun_timeout_seconds:
          raise NailgunClient.NailgunError('Failed to read ng output after'
                                           ' %s seconds' % nailgun_timeout_seconds)

    attempt = 0
    while nailgun:
      sock = nailgun.try_connect()
      if sock:
        sock.close()
        endpoint = self._get_nailgun_endpoint()
        if endpoint:
          log.debug('Connected to ng server with fingerprint %s pid: %d @ port: %d' % endpoint)
        else:
          raise NailgunClient.NailgunError('Failed to connect to ng server.')
        return nailgun
      elif attempt > max_socket_connect_attempts:
        raise nailgun.NailgunError('Failed to connect to ng output after %d connect attempts'
                                   % max_socket_connect_attempts)
      attempt += 1
      log.debug('Failed to connect on attempt %d' % attempt)
      time.sleep(0.1)
Example #15
 def commit_contents_to_files(content, *files):
   for path in files:
     with safe_open(os.path.join(self.worktree, path), 'w') as fp:
       fp.write(content)
   subprocess.check_call(['git', 'add', '.'])
   subprocess.check_call(['git', 'commit', '-m', 'change '+path])
   return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
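
A hedged usage sketch: inside the enclosing test method (where self.worktree and a configured git repository are already set up, and the helper closes over self), the call would look roughly like this; the file names and content are made up.

# Hypothetical call: write the same content into two files, commit them, and
# capture the resulting commit sha for later assertions.
sha = commit_contents_to_files('Hello World.', 'README', 'dir/f')
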
Example #16
 def test_code_syntax_error(self):
   with safe_open(self.fullpath('BUILD.badsyntax'), 'w') as fp:
     fp.write('java_library(name=if)')
   build_file = self.create_buildfile('BUILD.badsyntax')
   with self.assertRaises(SyntaxError) as e:
     build_file.code()
   self.assertEqual(build_file.full_path, e.exception.filename)
Example #17
  def checkstyle(self, sources, targets):
    egroups = self.context.products.get_data('exclusives_groups')
    etag = egroups.get_group_key_for_target(targets[0])
    classpath = self.tool_classpath(self._checkstyle_bootstrap_key)
    cp = egroups.get_classpath_for_group(etag)
    classpath.extend(jar for conf, jar in cp if conf in self._confs)

    args = [
      '-c', self._configuration_file,
      '-f', 'plain'
    ]

    if self._properties:
      properties_file = os.path.join(self.workdir, 'checkstyle.properties')
      with safe_open(properties_file, 'w') as pf:
        for k, v in self._properties.items():
          pf.write('%s=%s\n' % (k, v))
      args.extend(['-p', properties_file])

    # We've hit known cases of checkstyle command lines being too long for the system so we guard
    # with Xargs since checkstyle does not accept, for example, @argfile style arguments.
    def call(xargs):
      return self.runjava(classpath=classpath, main=CHECKSTYLE_MAIN,
                          args=args + xargs, workunit_name='checkstyle')
    checks = Xargs(call)

    return checks.execute(sources)
Example #18
  def checkstyle(self, targets, sources):
    runtime_classpaths = self.context.products.get_data('runtime_classpath')
    union_classpath = OrderedSet(self.tool_classpath('checkstyle'))
    for target in targets:
      runtime_classpath = runtime_classpaths.get_for_targets(target.closure(bfs=True))
      union_classpath.update(jar for conf, jar in runtime_classpath
                             if conf in self.get_options().confs)

    args = [
      '-c', self.get_options().configuration,
      '-f', 'plain'
    ]

    if self.get_options().properties:
      properties_file = os.path.join(self.workdir, 'checkstyle.properties')
      with safe_open(properties_file, 'w') as pf:
        for k, v in self.get_options().properties.items():
          pf.write('{key}={value}\n'.format(key=k, value=v))
      args.extend(['-p', properties_file])

    # We've hit known cases of checkstyle command lines being too long for the system so we guard
    # with Xargs since checkstyle does not accept, for example, @argfile style arguments.
    def call(xargs):
      return self.runjava(classpath=union_classpath, main=self._CHECKSTYLE_MAIN,
                          jvm_options=self.get_options().jvm_options,
                          args=args + xargs, workunit_name='checkstyle')
    checks = Xargs(call)

    return checks.execute(sources)
Example #19
  def _await_socket(self, timeout):
    """Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout."""
    with safe_open(self._ng_stdout, 'r') as ng_stdout:
      start_time = time.time()
      accumulated_stdout = ''
      while 1:
        readable, _, _ = select.select([ng_stdout], [], [], self._SELECT_WAIT)
        if readable:
          line = ng_stdout.readline()                          # TODO: address deadlock risk here.
          try:
            return self._NG_PORT_REGEX.match(line).group(1)
          except AttributeError:
            pass
          accumulated_stdout += line

        if (time.time() - start_time) > timeout:
          stderr = read_file(self._ng_stderr)
          raise NailgunClient.NailgunError(
            'Failed to read nailgun output after {sec} seconds!\n'
            'Stdout:\n{stdout}\nStderr:\n{stderr}'.format(
              sec=timeout,
              stdout=accumulated_stdout,
              stderr=stderr,
            )
          )
Example #20
  def test_parse_test_info_invalid_dir(self):
    with temporary_dir() as xml_dir:
      with safe_open(os.path.join(xml_dir, 'subdir', 'TEST-c.xml'), 'w') as fp:
        fp.write('<invalid></xml>')

      tests_info = self.parse_test_info(xml_dir, self._raise_handler)
      self.assertEqual({}, tests_info)
  def _emit_targets(cls, workdir):
    with safe_open(os.path.join(workdir, 'BUILD'), 'w') as f:
      f.write(dedent("""
      remote_sources(name='proto-7',
        dest=java_protobuf_library,
        sources_target=':external-source',
        args=dict(
          platform='java7',
        ),
      )

      remote_sources(name='proto-8',
        dest=java_protobuf_library,
        sources_target=':external-source',
        args=dict(
          platform='java8',
        ),
      )

      unpacked_jars(name='external-source',
        libraries=[':external-source-jars'],
        include_patterns=[
          'com/squareup/testing/**/*.proto',
        ],
      )

      jar_library(name='external-source-jars',
        jars=[
          jar(org='com.squareup.testing.protolib', name='protolib-external-test', rev='0.0.2'),
        ],
      )
      """))
    return ['{}:proto-{}'.format(os.path.relpath(workdir, get_buildroot()), num) for num in (7, 8)]
Example #22
  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
                             **kwargs):

    args = ['--no-pantsrc',
            '--pants-workdir=' + workdir,
            '--kill-nailguns',
            '--print-exception-stacktrace']

    if config:
      config_data = config.copy()
      ini = ConfigParser.ConfigParser(defaults=config_data.pop('DEFAULT', None))
      for section, section_config in config_data.items():
        ini.add_section(section)
        for key, value in section_config.items():
          ini.set(section, key, value)
      ini_file_name = os.path.join(workdir, 'pants.ini')
      with safe_open(ini_file_name, mode='w') as fp:
        ini.write(fp)
      args.append('--config-override=' + ini_file_name)

    pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
    pants_command = [pants_script] + args + command

    env = os.environ.copy()
    env.update(extra_env or {})

    proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    (stdout_data, stderr_data) = proc.communicate(stdin_data)

    return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), workdir)
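
The config argument above is a nested dict mapping ini section names to option dicts, which is serialized to a pants.ini inside workdir and passed via --config-override. A hypothetical invocation (the target spec, option name, and returncode field name are assumptions, not verified against any particular pants version):

# Hypothetical call from a test mixing in this helper.
result = self.run_pants_with_workdir(
  command=['list', 'src/java::'],
  workdir='/tmp/pants-test-workdir',
  config={
    'DEFAULT': {'print_exception_stacktrace': 'True'},
  },
)
self.assertEqual(0, result.returncode)
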
Example #23
  def java_home_exe(self):
    with distribution(executables=EXE('bin/java', version='1')) as jdk1_home:
      with distribution(executables=EXE('bin/java', version='2')) as jdk2_home:
        with temporary_dir() as tmpdir:
          osx_java_home_exe = os.path.join(tmpdir, 'java_home')
          with safe_open(osx_java_home_exe, 'w') as fp:
            fp.write(textwrap.dedent("""
                #!/bin/sh
                echo '<?xml version="1.0" encoding="UTF-8"?>
                <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
                                       "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
                <plist version="1.0">
                <array>
                  <dict>
                    <key>JVMHomePath</key>
                    <string>{jdk1_home}</string>
                  </dict>
                  <dict>
                    <key>JVMHomePath</key>
                    <string>{jdk2_home}</string>
                  </dict>
                </array>
                </plist>
                '
              """.format(jdk1_home=jdk1_home, jdk2_home=jdk2_home)).strip())
          chmod_plus_x(osx_java_home_exe)

          original_osx_java_home_exe = DistributionLocator._OSX_JAVA_HOME_EXE
          DistributionLocator._OSX_JAVA_HOME_EXE = osx_java_home_exe
          try:
            yield jdk1_home, jdk2_home
          finally:
            DistributionLocator._OSX_JAVA_HOME_EXE = original_osx_java_home_exe
Example #24
  def select_binary(self, name):
    """Selects a binary matching the current os and architecture.

    :param name: the name of the binary to fetch.
    :raises: :class:`pants.binary_util.BinaryUtil.BinaryNotFound` if no binary of the given version
      and name could be found.
    """
    # TODO(John Sirois): finish doc of the path structure expected under base_path
    binary_path = BinaryUtil.select_binary_base_path(self._supportdir, self._version, name)
    bootstrap_dir = os.path.realpath(os.path.expanduser(self._pants_bootstrapdir))
    bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)
    if not os.path.exists(bootstrapped_binary_path):
      downloadpath = bootstrapped_binary_path + '~'
      try:
        with self.select_binary_stream(name) as stream:
          with safe_open(downloadpath, 'wb') as bootstrapped_binary:
            bootstrapped_binary.write(stream())
          os.rename(downloadpath, bootstrapped_binary_path)
          chmod_plus_x(bootstrapped_binary_path)
      finally:
        safe_delete(downloadpath)

    logger.debug('Selected {binary} binary bootstrapped to: {path}'
                 .format(binary=name, path=bootstrapped_binary_path))
    return bootstrapped_binary_path
Example #25
  def _await_socket(self, timeout):
    """Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout."""
    with safe_open(self._ng_stdout, 'r') as ng_stdout:
      start_time = time.time()
      accumulated_stdout = ''
      while 1:
        # TODO: share the decreasing timeout logic here with NailgunProtocol.iter_chunks() by adding
        # a method to pants.util.contextutil!
        remaining_time = time.time() - (start_time + timeout)
        if remaining_time > 0:
          stderr = read_file(self._ng_stderr, binary_mode=True)
          raise self.InitialNailgunConnectTimedOut(
            timeout=timeout,
            stdout=accumulated_stdout,
            stderr=stderr,
          )

        readable, _, _ = select.select([ng_stdout], [], [], (-1 * remaining_time))
        if readable:
          line = ng_stdout.readline()                          # TODO: address deadlock risk here.
          try:
            return self._NG_PORT_REGEX.match(line).group(1)
          except AttributeError:
            pass
          accumulated_stdout += line
Example #26
  def _download_zip(self, zip_url, dest_dir):
    """Downloads a zip file at the given URL into the given directory.

    :param str zip_url: Full URL pointing to zip file.
    :param str dest_dir: Absolute path of directory into which the unzipped contents
                         will be placed into, not including the zip directory itself.
    """
    # TODO(jsirois): Wrap with workunits, progress meters, checksums.
    self.context.log.info('Downloading {}...'.format(zip_url))
    sess = requests.session()
    sess.mount('file://', self.LocalFileAdapter())
    res = sess.get(zip_url)
    if not res.status_code == requests.codes.ok:
      raise TaskError('Failed to download {} ({} error)'.format(zip_url, res.status_code))

    with open_zip(BytesIO(res.content)) as zfile:
      safe_mkdir(dest_dir)
      for info in zfile.infolist():
        if info.filename.endswith('/'):
          # Skip directories.
          continue
        # Strip zip directory name from files.
        filename = os.path.relpath(info.filename, get_basedir(info.filename))
        # ZipFile.read() returns bytes, so write through a binary-mode handle.
        with safe_open(os.path.join(dest_dir, filename), 'wb') as f:
          f.write(zfile.read(info))
Example #27
  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None, **kwargs):
    config = config.copy() if config else {}

    # We add workdir to the DEFAULT section, and also ensure that it's emitted first.
    default_section = config.pop('DEFAULT', {})
    default_section['pants_workdir'] = '%s' % workdir

    ini = ''
    for section, section_config in [('DEFAULT', default_section)] + config.items():
      ini += '\n[%s]\n' % section
      for key, val in section_config.items():
        ini += '%s: %s\n' % (key, val)

    ini_file_name = os.path.join(workdir, 'pants.ini')
    with safe_open(ini_file_name, mode='w') as fp:
      fp.write(ini)
    env = os.environ.copy()
    env['PANTS_CONFIG_OVERRIDE'] = ini_file_name
    env.update(extra_env or {})

    pants_command = ([os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)] + command +
                     ['--no-lock', '--kill-nailguns'])

    proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    (stdout_data, stderr_data) = proc.communicate(stdin_data)
    return PantsResult(pants_command, proc.returncode, stdout_data, stderr_data)
Example #28
  def write(self, target, path, confs=None, extra_confs=None):
    # TODO(John Sirois): a dict is used here to de-dup codegen targets which have both the original
    # codegen target - say java_thrift_library - and the synthetic generated target (java_library)
    # Consider reworking codegen tasks to add removal of the original codegen targets when rewriting
    # the graph
    dependencies = OrderedDict()
    internal_codegen = {}
    configurations = set(confs or [])
    for dep in target_internal_dependencies(target):
      jar = self._as_versioned_jar(dep)
      dependencies[(jar.org, jar.name)] = self.internaldep(jar, dep)
      if dep.is_codegen:
        internal_codegen[jar.name] = jar.name
    for jar in target.jar_dependencies:
      if jar.rev:
        dependencies[(jar.org, jar.name)] = self.jardep(jar)
        configurations |= set(jar._configurations)

    target_jar = self.internaldep(self._as_versioned_jar(target),
                                  configurations=list(configurations))
    target_jar = target_jar.extend(dependencies=dependencies.values())

    template_kwargs = self.templateargs(target_jar, confs, extra_confs)
    with safe_open(path, 'w') as output:
      template = pkgutil.get_data(self.template_package_name, self.template_relpath)
      Generator(template, **template_kwargs).write(output)
  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
                             **kwargs):
    config = config.copy() if config else {}

    # We add workdir to the DEFAULT section, and also ensure that it's emitted first.
    default_section = config.pop('DEFAULT', {})
    default_section['pants_workdir'] = '%s' % workdir

    ini = ''
    for section, section_config in [('DEFAULT', default_section)] + config.items():
      ini += '\n[%s]\n' % section
      for key, val in section_config.items():
        ini += '%s: %s\n' % (key, val)

    ini_file_name = os.path.join(workdir, 'pants.ini')
    with safe_open(ini_file_name, mode='w') as fp:
      fp.write(ini)
    env = os.environ.copy()
    env.update(extra_env or {})

    pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
    pants_command = [pants_script,
                     '--kill-nailguns',
                     '--no-pantsrc',
                     '--config-override={0}'.format(ini_file_name),
                     '--print-exception-stacktrace'] + command

    proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    (stdout_data, stderr_data) = proc.communicate(stdin_data)
    return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"))
Example #30
File: go_buildgen.py Project: ttim/pants
    def generate_build_files(self, targets):
        goal_name = self.options_scope
        flags = "--materialize"
        if self.get_options().remote:
            flags += " --remote"
        template_header = dedent(
            """\
      # Auto-generated by pants!
      # To re-generate run: `pants {goal_name} {flags}`

      """
        ).format(goal_name=goal_name, flags=flags)
        template_text = template_header + self.get_options().template
        build_file_basename = "BUILD" + self.get_options().extension

        targets_by_spec_path = defaultdict(set)
        for target in targets:
            targets_by_spec_path[target.address.spec_path].add(target)

        for spec_path, targets in targets_by_spec_path.items():
            rel_path = os.path.join(spec_path, build_file_basename)
            result = self._create_template_data(rel_path, list(targets))
            if result:
                generator = Generator(template_text, target=result.data)
                build_file_path = os.path.join(get_buildroot(), rel_path)
                with safe_open(build_file_path, mode="w") as fp:
                    generator.write(stream=fp)
                yield result
Example #31
    def test_pantsd_invalidation_pants_toml_file(self):
        # Test tmp_pants_toml (--pants-config-files=$tmp_pants_toml)'s removal
        tmp_pants_toml = os.path.abspath("testprojects/test_pants.toml")

        # Create tmp_pants_toml file
        with safe_open(tmp_pants_toml, "w") as f:
            f.write("[DEFAULT]\n")

        with self.pantsd_successful_run_context() as ctx:
            ctx.runner([f"--pants-config-files={tmp_pants_toml}", "help"])
            ctx.checker.assert_started()
            time.sleep(5)

            # Delete tmp_pants_toml
            os.unlink(tmp_pants_toml)
            time.sleep(10)
            ctx.checker.assert_stopped()
Example #32
  def test_pantsd_invalidation_pants_ini_file(self):
    # Test tmp_pants_ini (--pants-config-files=$tmp_pants_ini)'s removal
    tmp_pants_ini = os.path.abspath("testprojects/test_pants.ini")

    # Create tmp_pants_ini file
    with safe_open(tmp_pants_ini, 'w') as f:
      f.write("[DEFAULT]\n")

    with self.pantsd_successful_run_context() as (pantsd_run, checker, _, _):
      pantsd_run(['--pants-config-files={}'.format(tmp_pants_ini), 'help'])
      checker.assert_started()
      time.sleep(5)

      # Delete tmp_pants_ini
      os.unlink(tmp_pants_ini)
      time.sleep(10)
      checker.assert_stopped()
Example #33
  def generate_ivy(self, jar, version, publications):
    template_relpath = os.path.join(_TEMPLATES_RELPATH, 'ivy.mustache')
    template_text = pkgutil.get_data(__name__, template_relpath)

    pubs = [TemplateData(name=None if p.name == jar.name else p.name,
                         classifier=p.classifier,
                         ext=None if p.ext == 'jar' else p.ext) for p in publications]

    generator = Generator(template_text,
                          org=jar.org,
                          name=jar.name,
                          rev=version,
                          publications=pubs)

    with safe_open(os.path.join(self.workdir, 'ivy.xml'), 'w') as ivyxml:
      generator.write(ivyxml)
      return ivyxml.name
Example #34
    def stage_artifact(tgt, jar, version, tag, changelog, confs=None, artifact_ext='',
                       extra_confs=None, classifier=''):
      def path(name=None, suffix='', extension='jar'):
        return self.artifact_path(jar, version, name=name, suffix=suffix, extension=extension,
                                  artifact_ext=artifact_ext)

      if self.publish_changelog:
        with safe_open(path(suffix='-CHANGELOG', extension='txt'), 'wb') as changelog_file:
          changelog_file.write(changelog.encode('utf-8'))
      ivyxml = path(name='ivy', extension='xml')

      IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs, extra_confs=extra_confs,
                                  classifier=classifier)
      PomWriter(get_pushdb, tag).write(tgt, path(extension='pom'), extra_confs=extra_confs,
                                       classifier=classifier)

      return ivyxml
Example #35
  def test_no_address_no_family(self):
    with self.assertRaises(ResolveError):
      self.address_mapper.resolve(Address.parse('a/c'))

    # Errors are not cached.
    with self.assertRaises(ResolveError):
      self.address_mapper.resolve(Address.parse('a/c'))

    build_file = os.path.join(self.build_root, 'a/c/c.BUILD.json')
    with safe_open(build_file, 'w') as fp:
      fp.write('{"type_alias": "configuration", "name": "c"}')

    resolved = self.address_mapper.resolve(Address.parse('a/c'))
    self.assertEqual(Configuration(name='c'), resolved)

    # But success is cached.
    self.assertIs(resolved, self.address_mapper.resolve(Address.parse('a/c')))
Example #36
    def create_file(self,
                    relpath: str,
                    contents: str = "",
                    mode: str = "w") -> str:
        """Writes to a file under the buildroot.

        :API: public

        relpath:  The relative path to the file from the build root.
        contents: A string containing the contents of the file - '' by default.
        mode:     The mode to write to the file in - over-write by default.
        """
        path = os.path.join(self.build_root, relpath)
        with safe_open(path, mode=mode) as fp:
            fp.write(contents)
        self._invalidate_for(relpath)
        return path
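
A short usage sketch of the helper above (the relative path and BUILD content are made up for illustration):

# Hypothetical call from a test body: materialize a BUILD file under the
# buildroot and get back its absolute path.
build_path = self.create_file('src/python/example/BUILD', 'python_library()\n')
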
Example #37
  def _fetch_binary(self, name, binary_path):
    bootstrap_dir = os.path.realpath(os.path.expanduser(self._pants_bootstrapdir))
    bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)
    if not os.path.exists(bootstrapped_binary_path):
      downloadpath = bootstrapped_binary_path + '~'
      try:
        with self._select_binary_stream(name, binary_path) as stream:
          with safe_open(downloadpath, 'wb') as bootstrapped_binary:
            bootstrapped_binary.write(stream())
          os.rename(downloadpath, bootstrapped_binary_path)
          chmod_plus_x(bootstrapped_binary_path)
      finally:
        safe_delete(downloadpath)

    logger.debug('Selected {binary} binary bootstrapped to: {path}'
                 .format(binary=name, path=bootstrapped_binary_path))
    return bootstrapped_binary_path
Example #38
 def tree(self) -> Iterator[Tuple[str, str]]:
     # root/
     #   a/
     #     b/
     #       1
     #       2
     #     2 -> root/a/b/2
     #   b -> root/a/b
     with temporary_dir() as root:
         with safe_open(os.path.join(root, "a", "b", "1"), "wb") as fp:
             fp.write(b"1")
         touch(os.path.join(root, "a", "b", "2"))
         os.symlink(os.path.join(root, "a", "b", "2"),
                    os.path.join(root, "a", "2"))
         os.symlink(os.path.join(root, "a", "b"), os.path.join(root, "b"))
         with temporary_dir() as dst:
             yield root, dst
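
Since the fixture yields a (root, dst) pair, it is presumably wrapped in contextlib.contextmanager by a decorator the excerpt omits. A hedged usage sketch:

import os
import shutil

# Hypothetical use of the fixture: copy the symlink-laden tree into dst and
# check that the symlinks survive the copy.
with self.tree() as (root, dst):
  shutil.copytree(os.path.join(root, 'a'), os.path.join(dst, 'a'), symlinks=True)
  assert os.path.islink(os.path.join(dst, 'a', '2'))
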
Example #39
  def report(self, targets, tests, tests_failed_exception=None):
    if tests_failed_exception:
      self._context.log.warn('Test failed: {0}'.format(str(tests_failed_exception)))
      if self._coverage_force:
        self._context.log.warn('Generating report even though tests failed')
      else:
        return
    args = [
      'report',
      '-in', self._coverage_metadata_file,
      '-in', self._coverage_file,
      '-exit'
      ]
    source_bases = set()

    def collect_source_base(target):
      if self.is_coverage_target(target):
        source_bases.add(target.target_base)
    for target in self._test_target_candidates(targets):
      target.walk(collect_source_base)
    for source_base in source_bases:
      args.extend(['-sp', source_base])

    sorting = ['-Dreport.sort', '+name,+class,+method,+block']
    args.extend(['-r', 'txt',
                 '-Dreport.txt.out.file={0}'.format(self._coverage_console_file)] + sorting)
    args.extend(['-r', 'xml', '-Dreport.xml.out.file={0}'.format(self._coverage_xml_file)])
    args.extend(['-r', 'html',
                 '-Dreport.html.out.file={0}'.format(self._coverage_html_file),
                 '-Dreport.out.encoding=UTF-8'] + sorting)

    main = 'emma'
    result = execute_java(classpath=self._emma_classpath,
                          main=main,
                          jvm_options=self._coverage_jvm_options,
                          args=args,
                          workunit_factory=self._context.new_workunit,
                          workunit_name='emma-report')
    if result != 0:
      raise TaskError("java {0} ... exited non-zero ({1})"
                      " 'failed to generate code coverage reports'".format(main, result))

    with safe_open(self._coverage_console_file) as console_report:
      sys.stdout.write(console_report.read())
    if self._coverage_open:
      binary_util.ui_open(self._coverage_html_file)
Example #40
    def test_no_address_no_family(self):
        spec = SingleAddress('a/c', None)
        # Should fail: does not exist.
        with self.assertRaises(ResolveError):
            self.resolve(spec)

        # Exists on disk, but not yet in memory.
        # NB: Graph invalidation not yet implemented.
        build_file = os.path.join(self.build_root, 'a/c/c.BUILD.json')
        with safe_open(build_file, 'w') as fp:
            fp.write('{"type_alias": "struct", "name": "c"}')
        with self.assertRaises(ResolveError):
            self.resolve(spec)

        # Success.
        self.scheduler.product_graph.clear()
        self.assertEqual(Struct(name='c'), self.resolve(spec).struct)
Example #41
  def test_invalidate_build_file_changed(self):
    with self.assertRaises(ResolveError):
      self.address_mapper.resolve(Address.parse('a/b:c'))

    build_file = os.path.join(self.build_root, 'a/b/b.BUILD.json')
    with safe_open(build_file, 'w+') as fp:
      fp.write('{"type_alias": "configuration", "name": "c"}')

    with self.assertRaises(ResolveError):
      self.address_mapper.resolve(Address.parse('a/b:c'))

    self.address_mapper.invalidate_build_file('a/b/b.BUILD.json')
    resolved = self.address_mapper.resolve(Address.parse('a/b:c'))
    self.assertEqual(Configuration(name='c'), resolved)

    # But success is cached.
    self.assertIs(resolved, self.address_mapper.resolve(Address.parse('a/b:c')))
Example #42
    def prepare_resources(self, target, chroot):
        for service, impls in target.services.items():
            if impls:
                service_provider_configuration_file = os.path.join(
                    chroot, self.service_info_path(service))
                # NB: provider configuration files must be UTF-8 encoded, see the mini-spec:
                # https://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html
                with safe_open(service_provider_configuration_file,
                               'wb') as fp:

                    def write_line(line):
                        fp.write((line + '\n').encode('utf-8'))

                    write_line('# Generated from pants target {}'.format(
                        target.address.spec))
                    for impl in impls:
                        write_line(impl)
Example #43
 def _log_exception(self, msg):
     if self._workdir:
         try:
             output_path = os.path.join(self._workdir, 'logs',
                                        'exceptions.log')
             with safe_open(output_path, 'a') as exception_log:
                 exception_log.write('timestamp: {}\n'.format(
                     datetime.datetime.now().isoformat()))
                 exception_log.write('args: {}\n'.format(sys.argv))
                 exception_log.write('pid: {}\n'.format(os.getpid()))
                 exception_log.write(msg)
                 exception_log.write('\n')
         except Exception as e:
             # This is all error recovery logic so we catch all exceptions from the logic above because
             # we don't want to hide the original error.
             logger.error(
                 'Problem logging original exception: {}'.format(e))
Example #44
 def tree(self) -> Iterator[Tuple[str, str]]:
     # root/
     #   a/
     #     b/
     #       1
     #       2
     #     2 -> root/a/b/2
     #   b -> root/a/b
     with temporary_dir() as root:
         with safe_open(os.path.join(root, 'a', 'b', '1'), 'wb') as fp:
             fp.write(b'1')
         touch(os.path.join(root, 'a', 'b', '2'))
         os.symlink(os.path.join(root, 'a', 'b', '2'),
                    os.path.join(root, 'a', '2'))
         os.symlink(os.path.join(root, 'a', 'b'), os.path.join(root, 'b'))
         with temporary_dir() as dst:
             yield root, dst
Example #45
    def _await_nailgun_server(self, stdout, stderr):
        nailgun_timeout_seconds = 5
        max_socket_connect_attempts = 10
        nailgun = None
        port_parse_start = time.time()
        with safe_open(self._ng_out, 'r') as ng_out:
            while not nailgun:
                started = ng_out.readline()
                if started.find(
                        'Listening for transport dt_socket at address:') >= 0:
                    nailgun_timeout_seconds = 60
                    log.warn(
                        'Timeout extended to {timeout} seconds for debugger to attach to ng server.'
                        .format(timeout=nailgun_timeout_seconds))
                    started = ng_out.readline()
                if started:
                    port = self._parse_nailgun_port(started)
                    nailgun = self._create_ngclient(port, stdout, stderr)
                    log.debug('Detected ng server up on port %d' % port)
                elif time.time() - port_parse_start > nailgun_timeout_seconds:
                    raise NailgunClient.NailgunError(
                        'Failed to read ng output after'
                        ' %s seconds' % nailgun_timeout_seconds)

        attempt = 0
        while nailgun:
            sock = nailgun.try_connect()
            if sock:
                sock.close()
                endpoint = self._get_nailgun_endpoint()
                if endpoint:
                    log.debug(
                        'Connected to ng server launched with %s fingerprint %s pid: %d @ port: %d'
                        % endpoint)
                else:
                    raise NailgunClient.NailgunError(
                        'Failed to connect to ng server.')
                return nailgun
            elif attempt > max_socket_connect_attempts:
                raise nailgun.NailgunError(
                    'Failed to connect to ng output after %d connect attempts'
                    % max_socket_connect_attempts)
            attempt += 1
            log.debug('Failed to connect on attempt %d' % attempt)
            time.sleep(0.1)
Example #46
def test_vcs_versioning(tag_regex, tag, expected_version, tmp_path: Path,
                        rule_runner: RuleRunner) -> None:
    worktree = tmp_path / "worktree"
    gitdir = worktree / ".git"
    with safe_open(worktree / "README", "w") as f:
        f.write("dummy content")
    tag_regex_field = "" if tag_regex is None else f"tag_regex='{tag_regex}',"
    rule_runner.write_files({
        "src/python/BUILD":
        textwrap.dedent(f"""
        vcs_version(
            name="version",
            {tag_regex_field}
            generate_to="src/python/_version.py",
            template ='version = "{{version}}"'
        )
        """)
    })
    address = Address("src/python", target_name="version")
    tgt = rule_runner.get_target(address)

    with environment_as(
            GIT_DIR=str(gitdir),
            GIT_WORK_TREE=str(worktree),
            GIT_CONFIG_GLOBAL="/dev/null",
    ):
        subprocess.check_call(["git", "init"])
        subprocess.check_call(
            ["git", "config", "user.email", "*****@*****.**"])
        subprocess.check_call(["git", "config", "user.name", "Your Name"])
        subprocess.check_call(["git", "add", "."])
        subprocess.check_call(["git", "commit", "-am", "Add project files."])
        subprocess.check_call(["git", "tag", tag])

        generated_sources = rule_runner.request(
            GeneratedSources,
            [GeneratePythonFromSetuptoolsSCMRequest(EMPTY_SNAPSHOT, tgt)])
        assert generated_sources.snapshot.files == ("src/python/_version.py", )
        dc = rule_runner.request(DigestContents,
                                 [generated_sources.snapshot.digest])
        assert len(dc) == 1
        fc = dc[0]
        assert fc.path == "src/python/_version.py"
        assert fc.content.decode() == f'version = "{expected_version}"'
        assert fc.is_executable is False
    def _emit_targets(cls, workdir):
        with safe_open(os.path.join(workdir, 'BUILD'), 'w') as f:
            f.write(
                dedent("""
      remote_sources(name='proto-7',
        dest=java_protobuf_library,
        sources_target=':external-source',
        args=dict(
          platform='java7',
        ),
      )

      remote_sources(name='proto-8',
        dest=java_protobuf_library,
        sources_target=':external-source',
        args=dict(
          platform='java8',
        ),
        dependencies=[
          ':proto-sources',
        ],
      )

      remote_sources(name='proto-sources',
        dest=resources,
        sources_target=':external-source',
      )

      unpacked_jars(name='external-source',
        libraries=[':external-source-jars'],
        include_patterns=[
          'com/squareup/testing/**/*.proto',
        ],
      )

      jar_library(name='external-source-jars',
        jars=[
          jar(org='com.squareup.testing.protolib', name='protolib-external-test', rev='0.0.2'),
        ],
      )
      """))
        return [
            '{}:proto-{}'.format(os.path.relpath(workdir, get_buildroot()),
                                 num) for num in (7, 8)
        ]
Example #48
  def test_parse_failed_targets_nominal(self):
    registry = RegistryOfTests({JUnitTest('org.pantsbuild.Failure'): 'Bob',
                                JUnitTest('org.pantsbuild.Error'): 'Jane',
                                JUnitTest('org.pantsbuild.AnotherError'): 'Bob',
                                JUnitTest('org.pantsbuild.subpackage.AnotherFailure'): 'Mary'})

    with temporary_dir() as junit_xml_dir:
      with open(os.path.join(junit_xml_dir, 'TEST-a.xml'), 'w') as fp:
        fp.write("""
        <testsuite failures="1" errors="1">
          <testcase classname="org.pantsbuild.Green" name="testOK"/>
          <testcase classname="org.pantsbuild.Failure" name="testFailure">
            <failure/>
          </testcase>
          <testcase classname="org.pantsbuild.Error" name="testError">
            <error/>
          </testcase>
        </testsuite>
        """)
      with open(os.path.join(junit_xml_dir, 'TEST-b.xml'), 'w') as fp:
        fp.write("""
        <testsuite failures="0" errors="1">
          <testcase classname="org.pantsbuild.AnotherError" name="testAnotherError">
            <error/>
          </testcase>
        </testsuite>
        """)
      with open(os.path.join(junit_xml_dir, 'random.xml'), 'w') as fp:
        fp.write('<invalid></xml>')
      with safe_open(os.path.join(junit_xml_dir, 'subdir', 'TEST-c.xml'), 'w') as fp:
        fp.write("""
        <testsuite failures="1" errors="0">
          <testcase classname="org.pantsbuild.subpackage.AnotherFailure" name="testAnotherFailue">
            <failure/>
          </testcase>
        </testsuite>
        """)

      failed_targets = parse_failed_targets(registry, junit_xml_dir, self._raise_handler)
      self.assertEqual({'Bob': {JUnitTest('org.pantsbuild.Failure', 'testFailure'),
                                JUnitTest('org.pantsbuild.AnotherError', 'testAnotherError')},
                        'Jane': {JUnitTest('org.pantsbuild.Error', 'testError')},
                        'Mary': {JUnitTest('org.pantsbuild.subpackage.AnotherFailure',
                                           'testAnotherFailure')}},
                       failed_targets)
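One detail worth noting in the example above: the first three report files are written with the builtin open() into the already-existing junit_xml_dir, while TEST-c.xml is written with safe_open() because its 'subdir' parent does not exist yet. A minimal sketch of what a safe_open-style helper has to do for that call to succeed is shown below; this is assumed behavior for illustration (Python 3), not the actual pants.util.dirutil implementation.

import os


def safe_open_sketch(path, *args, **kwargs):
    """Create any missing parent directories, then delegate to open()."""
    parent = os.path.dirname(path)
    if parent:
        # Assumed behavior: ensure the directory tree exists before opening.
        os.makedirs(parent, exist_ok=True)
    return open(path, *args, **kwargs)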
Example #49
0
    def run_pants_with_workdir(self,
                               command,
                               workdir,
                               config=None,
                               stdin_data=None,
                               extra_env=None,
                               **kwargs):
        config = config.copy() if config else {}

        # We add workdir to the DEFAULT section, and also ensure that it's emitted first.
        default_section = config.pop('DEFAULT', {})
        default_section['pants_workdir'] = '%s' % workdir

        ini = ''
        for section, section_config in [('DEFAULT', default_section)
                                        ] + list(config.items()):
            ini += '\n[%s]\n' % section
            for key, val in section_config.items():
                ini += '%s: %s\n' % (key, val)

        ini_file_name = os.path.join(workdir, 'pants.ini')
        with safe_open(ini_file_name, mode='w') as fp:
            fp.write(ini)
        env = os.environ.copy()
        env.update(extra_env or {})

        # TODO: We can replace the env var with a '--config-override={0}'.format(ini_file_name) arg,
        # once we're rid of the special-case handling of this env var in pants_exe.
        env['PANTS_CONFIG_OVERRIDE'] = ini_file_name

        pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
        pants_command = [
            pants_script, '--no-lock', '--kill-nailguns', '--no-pantsrc',
            '--print-exception-stacktrace'
        ] + command

        proc = subprocess.Popen(pants_command,
                                env=env,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                **kwargs)
        (stdout_data, stderr_data) = proc.communicate(stdin_data)
        return PantsResult(pants_command, proc.returncode, stdout_data,
                           stderr_data)
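To make the ini construction above concrete: a hypothetical call such as run_pants_with_workdir(['list', '::'], '/tmp/pants.d', config={'GLOBAL': {'level': 'debug'}}) would write a pants.ini roughly like the following (the section and key names here are illustrative, not taken from a real run; the loop also emits a blank line before each section header).

[DEFAULT]
pants_workdir: /tmp/pants.d

[GLOBAL]
level: debug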
Example #50
0
 def unpacked_aar_library(self,
                          location,
                          manifest=True,
                          classes_jar=True,
                          resources=True,
                          filenames=None):
     """Mock the contents of an aar file, with optional components and additional files."""
     if manifest:
         manifest_file = os.path.join(location, 'AndroidManifest.xml')
         touch(manifest_file)
         with safe_open(manifest_file, 'w') as fp:
             fp.write(self.android_manifest())
     if classes_jar:
         self.create_jarfile(location, filenames=filenames)
     if resources:
         safe_mkdir(os.path.join(location, 'res'))
     return location
Example #51
0
 def _emit_targets(cls, workdir):
     prep_command_path = os.path.join(
         workdir, 'src/java/org/pantsbuild/prepcommand')
     with safe_open(os.path.join(prep_command_path, 'BUILD'), 'w') as fp:
         for name, touch_target in cls._SENTINELS.items():
             fp.write(
                 dedent("""
       prep_command(
         name='{name}',
         goals=['{goal}'],
         prep_executable='touch',
         prep_args=['{tmpdir}/{touch_target}'],
       )
     """.format(name=name,
                goal=name,
                tmpdir=workdir,
                touch_target=touch_target)))
     return [f'{prep_command_path}:{name}' for name in cls._SENTINELS]
Example #52
0
def distribution(files=None, executables=None, java_home=None, dist_dir=None):
    # NB attempt to include the java version in the tmp dir name for better test failure messages.
    executables_as_list = ensure_list(executables or (), expected_type=EXE)
    if executables_as_list:
        dist_prefix = "jvm_{}_".format(executables_as_list[0]._version)
    else:
        dist_prefix = "jvm_na_"
    with temporary_dir(root_dir=dist_dir, prefix=dist_prefix) as dist_root:
        for f in ensure_str_list(files or ()):
            touch(os.path.join(dist_root, f))
        for executable in executables_as_list:
            path = os.path.join(dist_root, executable.relpath)
            with safe_open(path, "w") as fp:
                java_home = os.path.join(dist_root,
                                         java_home) if java_home else dist_root
                fp.write(executable.contents(java_home))
            chmod_plus_x(path)
        yield dist_root
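distribution() above yields dist_root from inside temporary_dir(), the standard contextlib.contextmanager idiom; in the original helper it is presumably decorated accordingly so callers can use it in a with statement. A reduced, self-contained sketch of that pattern (illustrative only, not the original helper):

from contextlib import contextmanager
import shutil
import tempfile


@contextmanager
def temporary_dist_root(prefix='jvm_na_'):
    # Reduced sketch of the pattern used by distribution(): create a temp
    # directory, yield it to the caller, and clean it up afterwards.
    dist_root = tempfile.mkdtemp(prefix=prefix)
    try:
        yield dist_root
    finally:
        shutil.rmtree(dist_root, ignore_errors=True)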
Example #53
0
    def generate_ivysettings(self, ivy, publishedjars, publish_local=None):
        template_relpath = os.path.join(_TEMPLATES_RELPATH, "ivysettings.xml.mustache")
        template_text = pkgutil.get_data(__name__, template_relpath).decode()

        published = [TemplateData(org=jar.org, name=jar.name) for jar in publishedjars]

        generator = Generator(
            template_text,
            ivysettings=self.fetch_ivysettings(ivy),
            dir=self.workdir,
            cachedir=self.cachedir,
            published=published,
            publish_local=publish_local,
        )

        with safe_open(os.path.join(self.workdir, "ivysettings.xml"), "w") as wrapper:
            generator.write(wrapper)
            return wrapper.name
Example #54
0
    def create_plugin(distribution_repo_dir,
                      plugin,
                      version=None,
                      packager_cls=None):
        with safe_open(os.path.join(distribution_repo_dir, plugin, 'setup.py'),
                       'w') as fp:
            fp.write(
                dedent(f"""
        from setuptools import setup


        setup(name="{plugin}", version="{version or '0.0.0'}")
      """))
        packager_cls = packager_cls or Packager
        packager = packager_cls(source_dir=os.path.join(
            distribution_repo_dir, plugin),
                                install_dir=distribution_repo_dir)
        packager.run()
Example #55
0
 def _get_remote_import_paths(self, pkg, gopath=None):
     """Returns the remote import paths declared by the given Go `pkg`."""
     out = self.go_dist.create_go_cmd('list',
                                      args=['-json', pkg],
                                      gopath=gopath).check_output()
     try:
         data = json.loads(out)
         imports = data.get('Imports', [])
         imports.extend(data.get('TestImports', []))
         return [imp for imp in imports if imp not in self.go_stdlib]
     except ValueError as e:
         save_file = os.path.join(gopath, '.errors', pkg, 'list.json')
         with safe_open(save_file, 'w') as fp:
             fp.write(out)
         self.context.log.error(
             'Problem determining imports for {}, saved json response to {}'
             .format(pkg, save_file))
         raise TaskError(e)
Example #56
0
    def test_no_address_no_family(self):
        spec = SingleAddress('a/c', 'c')
        # Does not exist.
        self.assertEqual(0, len(self.resolve(spec)))

        # Exists on disk, but not yet in memory.
        directory = 'a/c'
        build_file = os.path.join(self.build_root, directory, 'c.BUILD.json')
        with safe_open(build_file, 'w') as fp:
            fp.write('{"type_alias": "struct", "name": "c"}')
        self.assertEqual(0, len(self.resolve(spec)))

        # Success.
        self.scheduler.invalidate_files([directory])
        resolved = self.resolve(spec)
        self.assertEqual(1, len(resolved))
        self.assertEqual(Struct(name='c', type_alias='struct'),
                         resolved[0].struct)
Example #57
0
    def _resolve_exact_plugin_locations(self):
        hasher = hashlib.sha1()
        for req in sorted(self._plugin_requirements):
            hasher.update(req)
        resolve_hash = hasher.hexdigest()
        resolved_plugins_list = os.path.join(
            self.plugin_cache_dir, 'plugins-{}.txt'.format(resolve_hash))

        if not os.path.exists(resolved_plugins_list):
            tmp_plugins_list = resolved_plugins_list + '~'
            with safe_open(tmp_plugins_list, 'w') as fp:
                for plugin in self._resolve_plugins():
                    fp.write(plugin.location)
                    fp.write('\n')
            os.rename(tmp_plugins_list, resolved_plugins_list)
        with open(resolved_plugins_list) as fp:
            for plugin_location in fp:
                yield plugin_location.strip()
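The caching step above uses a write-then-rename pattern: the resolved plugin list is first written to a '~'-suffixed temporary file and only renamed into place once complete, so concurrent readers never observe a partially written plugins-<hash>.txt. The same pattern in isolation looks like this (illustrative sketch, not the original helper):

import os


def atomic_write_text(path, text):
    # Write to a sibling temp file first, then rename it into place.
    # os.rename is atomic on POSIX when source and destination live on the
    # same filesystem, so readers see either the old file or the new one.
    tmp_path = path + '~'
    with open(tmp_path, 'w') as fp:
        fp.write(text)
    os.rename(tmp_path, path)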
Example #58
0
 def __init__(self, *args, **kwargs):
     super(ConsoleTask, self).__init__(*args, **kwargs)
     separator_option = "console_%s_separator" % self.__class__.__name__
     self._console_separator = getattr(
         self.context.options, separator_option).decode('string-escape')
     if self.context.options.console_outstream:
         try:
             self._outstream = safe_open(
                 os.path.abspath(self.context.options.console_outstream),
                 'w')
         except IOError as e:
             raise TaskError(
                 'Error opening stream {out_file} due to'
                 ' {error_str}'.format(
                     out_file=self.context.options.console_outstream,
                     error_str=e))
     else:
         self._outstream = self.context.console_outstream
Example #59
0
    def test_no_address_no_family(self) -> None:
        spec = SingleAddress("a/c", "c")

        # Does not exist.
        with self.assertRaises(Exception):
            self.resolve(spec)

        build_file = os.path.join(self.build_root, "a/c", "c.BUILD.json")
        with safe_open(build_file, "w") as fp:
            fp.write('{"type_alias": "struct", "name": "c"}')
        self.scheduler.invalidate_files(["a/c"])

        # Success.
        resolved = self.resolve(spec)
        self.assertEqual(1, len(resolved))
        self.assertEqual(
            [Struct(address=Address.parse("a/c"), type_alias="struct")],
            resolved)
Example #60
0
  def execute(self):
    if not self.url:
      raise TaskError('Unable to proceed publishing to confluence. Please set the url option.')
    deprecated_conditional(
      lambda: True,
      '1.6.0.dev0',
      'pants.backend.docgen.tasks.confluence_publish.py',
      'Use contrib.confluence.tasks.confluence_publish.py instead'
    )
    pages = []
    targets = self.context.targets()
    for target in targets:
      if isinstance(target, Page):
        for wiki_artifact in target.payload.provides:
          pages.append((target, wiki_artifact))

    urls = list()

    genmap = self.context.products.get('wiki_html')
    for page, wiki_artifact in pages:
      html_info = genmap.get((wiki_artifact, page))
      if len(html_info) > 1:
        raise TaskError('Unexpected resources for {}: {}'.format(page, html_info))
      basedir, htmls = html_info.items()[0]
      if len(htmls) != 1:
        raise TaskError('Unexpected resources for {}: {}'.format(page, htmls))
      with safe_open(os.path.join(basedir, htmls[0])) as contents:
        url = self.publish_page(
          page.address,
          wiki_artifact.config['space'],
          wiki_artifact.config['title'],
          contents.read(),
          # Default to none if not present in the hash.
          parent=wiki_artifact.config.get('parent')
        )
        if url:
          urls.append(url)
          self.context.log.info('Published {} to {}'.format(page, url))

    if self.open and urls:
      try:
        desktop.ui_open(*urls)
      except desktop.OpenError as e:
        raise TaskError(e)