Example #1
  def create_target(category, target_name, target_index, targets):
    def name(name):
      return "%s-%s-%d" % (target_name, name, target_index)

    # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
    # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
    # so that tests can house test IDL in tests/
    target_type, base = category
    with ParseContext.temp(base):
      if target_type == JavaProtobufLibrary:
        return _aggregate(JavaProtobufLibrary, name('protobuf'), targets, buildflags=buildflags)
      elif target_type == JavaThriftLibrary:
        return _aggregate(JavaThriftLibrary, name('thrift'), targets, buildflags=buildflags)
      elif target_type == AnnotationProcessor:
        return _aggregate(AnnotationProcessor, name('apt'), targets)
      elif target_type == JavaLibrary:
        return _aggregate(JavaLibrary, name('java'), targets, deployjar, buildflags)
      elif target_type == ScalaLibrary:
        return _aggregate(ScalaLibrary, name('scala'), targets, deployjar, buildflags)
      elif target_type == JavaTests:
        return _aggregate(JavaTests, name('java-tests'), targets, buildflags=buildflags)
      elif target_type == ScalaTests:
        return _aggregate(ScalaTests, name('scala-tests'), targets, buildflags=buildflags)
      else:
        raise Exception("Cannot aggregate targets of type: %s" % target_type)
Example #2
    def __init__(self, name, sources=None, resources=None):
        TargetWithSources.__init__(self, name, sources=sources)

        if resources is not None:

            def is_resources(item):
                return (isinstance(item, Target) and all(
                    map(lambda tgt: isinstance(tgt, Resources),
                        filter(lambda tgt: is_concrete(tgt), item.resolve()))))

            if is_resources(resources):
                self.resources = list(self.resolve_all(resources, Resources))
            elif isinstance(resources, Sequence) and all(
                    map(is_resources, resources)):
                self.resources = list(self.resolve_all(resources, Resources))
            else:
                # Handle parallel resource dir globs.
                # For example, for a java_library target base of src/main/java:
                #   src/main/java/com/twitter/base/BUILD
                # We get:
                #   sibling_resources_base = src/main/resources
                #   base_relpath = com/twitter/base
                #   resources_dir = src/main/resources/com/twitter/base
                #
                # TODO(John Sirois): migrate projects to Resources and remove support for old style assumed
                # parallel resources dirs
                sibling_resources_base = os.path.join(
                    os.path.dirname(self.target_base), 'resources')
                base_relpath = os.path.relpath(self.address.buildfile.relpath,
                                               self.target_base)
                resources_dir = os.path.join(sibling_resources_base,
                                             base_relpath)
                with ParseContext.temp(basedir=resources_dir):
                    self.resources = [Resources(name, resources)]
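A minimal standalone sketch of the path arithmetic described in the comment above, using the same example values; target_base and buildfile_relpath are hypothetical stand-ins for self.target_base and self.address.buildfile.relpath.

import os

# Hypothetical stand-ins for self.target_base and self.address.buildfile.relpath,
# using the values from the comment above.
target_base = 'src/main/java'
buildfile_relpath = 'src/main/java/com/twitter/base'

sibling_resources_base = os.path.join(os.path.dirname(target_base), 'resources')
base_relpath = os.path.relpath(buildfile_relpath, target_base)
resources_dir = os.path.join(sibling_resources_base, base_relpath)

assert sibling_resources_base == 'src/main/resources'
assert base_relpath == 'com/twitter/base'
assert resources_dir == 'src/main/resources/com/twitter/base'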
Example #3
    def compiled_idl(cls,
                     idl_dep,
                     generated_deps=None,
                     compiler=None,
                     language=None,
                     namespace_map=None):
        """Marks a jar as containing IDL files that should be fetched and processed locally.

        idl_dep:        A dependency resolvable to a single jar library.
        generated_deps: Dependencies for the code that will be generated from "idl_dep".
        compiler:       The thrift compiler to apply to the fetched thrift IDL files.
        language:       The language to generate code for - supported by some compilers.
        namespace_map:  A mapping from IDL declared namespaces to custom namespaces - supported by some
                        compilers.
        """
        deps = list(filter(is_concrete, idl_dep.resolve()))
        if not len(deps) == 1:
            raise TaskError(
                'Can only arrange for compiled idl for a single dependency at a time, '
                'given:\n\t%s' % '\n\t'.join(map(str, deps)))
        jar = deps.pop()
        if not isinstance(jar, JarDependency):
            raise TaskError(
                'Can only arrange for compiled idl from a jar dependency, given: %s'
                % jar)

        request = (jar, compiler, language)
        namespace_signature = None
        if namespace_map:
            sha = hashlib.sha1()
            for ns_from, ns_to in sorted(namespace_map.items()):
                sha.update(ns_from)
                sha.update(ns_to)
            namespace_signature = sha.hexdigest()
        request += (namespace_signature, )

        if request not in cls._PLACEHOLDER_BY_REQUEST:
            if not cls._EXTRACT_BASE:
                config = Config.load()
                cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
                safe_mkdir(cls._EXTRACT_BASE)
                SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)

            with ParseContext.temp(cls._EXTRACT_BASE):
                # TODO(John Sirois): abstract ivy specific configurations notion away
                jar._configurations.append('idl')
                jar.with_artifact(configuration='idl', classifier='idl')
                target_name = '-'.join(
                    filter(None,
                           (jar.id, compiler, language, namespace_signature)))
                placeholder = JavaThriftLibrary(target_name,
                                                sources=None,
                                                dependencies=[jar] +
                                                (generated_deps or []),
                                                compiler=compiler,
                                                language=language,
                                                namespace_map=namespace_map)
                cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
                cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
        return cls._PLACEHOLDER_BY_REQUEST[request]
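As a reference for the caching above, a minimal standalone sketch of how the request key is built: the (jar, compiler, language) tuple is extended with a sha1 over the sorted namespace_map items. The jar id, compiler, language, and namespace values are placeholder assumptions, and .encode() is added so the sketch runs on Python 3 (the original updates the sha with str values directly).

import hashlib

def namespace_signature(namespace_map):
    # Mirrors the signature computation above; None when no namespace_map is given.
    if not namespace_map:
        return None
    sha = hashlib.sha1()
    for ns_from, ns_to in sorted(namespace_map.items()):
        sha.update(ns_from.encode('utf-8'))
        sha.update(ns_to.encode('utf-8'))
    return sha.hexdigest()

# Placeholder values standing in for jar.id, compiler, and language.
request = ('org.example#service-idl;1.2.3', 'scrooge', 'java')
request += (namespace_signature({'thrift.example': 'com.example.generated'}),)
print(request)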
Example #4
 def test_python_binary_with_source_no_entry_point(self):
     with ParseContext.temp('src'):
         assert PythonBinary(name='binary1',
                             source='blork.py').entry_point == 'blork'
         assert PythonBinary(
             name='binary2',
             source='bin/blork.py').entry_point == 'bin.blork'
Example #5
  def __init__(self, name, sources=None, resources=None, exclusives=None):
    TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives)

    if resources is not None:
      def is_resources(item):
        if not isinstance(item, Target):
          return False
        concrete_targets = [t for t in item.resolve() if t.is_concrete]
        return all(isinstance(t, Resources) for t in concrete_targets)

      if is_resources(resources):
        self.resources = list(self.resolve_all(resources, Resources))
      elif isinstance(resources, Sequence) and all(map(is_resources, resources)):
        self.resources = list(self.resolve_all(resources, Resources))
      else:
        # Handle parallel resource dir globs.
        # For example, for a java_library target base of src/main/java:
        #   src/main/java/com/twitter/base/BUILD
        # We get:
        #   sibling_resources_base = src/main/resources
        #   base_relpath = com/twitter/base
        #   resources_dir = src/main/resources/com/twitter/base
        #
        # TODO(John Sirois): migrate projects to Resources and remove support for old style assumed
        # parallel resources dirs
        sibling_resources_base = os.path.join(os.path.dirname(self.target_base), 'resources')
        base_relpath = os.path.relpath(self.address.buildfile.relpath, self.target_base)
        resources_dir = os.path.join(sibling_resources_base, base_relpath)
        with ParseContext.temp(basedir=resources_dir):
          self.resources = [Resources(name, resources)]
Example #6
  def test_binary_target_injected_into_minified_dependencies(self):
    with ParseContext.temp():
      foo = python_library(
        name = 'foo',
        provides = setup_py(
          name = 'foo',
          version = '0.0.0',
        ).with_binaries(
          foo_binary = pants(':foo_bin')
        )
      )

      foo_bin = python_binary(
        name = 'foo_bin',
        entry_point = 'foo.bin.foo',
        dependencies = [ pants(':foo_bin_dep') ]
      )

      foo_bin_dep = python_library(
        name = 'foo_bin_dep'
      )

    assert SetupPy.minified_dependencies(foo) == OrderedSet([foo_bin, foo_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(foo))
    assert entry_points == {'foo_binary': 'foo.bin.foo'}

    with self.run_execute(foo, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)

    with self.run_execute(foo, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
Example #7
 def test_validation(self):
     with ParseContext.temp():
         repo = Repository(name="myRepo", url="myUrl", push_db="myPushDb")
         Artifact(org="testOrg",
                  name="testName",
                  repo=repo,
                  description="Test")
         self.assertRaises(ValueError,
                           Artifact,
                           org=1,
                           name="testName",
                           repo=repo,
                           description="Test")
         self.assertRaises(ValueError,
                           Artifact,
                           org="testOrg",
                           name=1,
                           repo=repo,
                           description="Test")
         self.assertRaises(ValueError,
                           Artifact,
                           org="testOrg",
                           name="testName",
                           repo=1,
                           description="Test")
         self.assertRaises(ValueError,
                           Artifact,
                           org="testOrg",
                           name="testName",
                           repo=repo,
                           description=1)
Example #8
    def test_binary_target_injected_into_minified_dependencies_with_provider(
            self):
        with ParseContext.temp():
            bar = python_library(name='bar',
                                 provides=setup_py(
                                     name='bar',
                                     version='0.0.0',
                                 ).with_binaries(bar_binary=pants(':bar_bin')))

            bar_bin = python_binary(name='bar_bin',
                                    entry_point='bar.bin.bar',
                                    dependencies=[pants(':bar_bin_dep')])

            bar_bin_dep = python_library(name='bar_bin_dep',
                                         provides=setup_py(
                                             name='bar_bin_dep',
                                             version='0.0.0',
                                         ))

        assert SetupPy.minified_dependencies(bar) == OrderedSet(
            [bar_bin, bar_bin_dep])
        entry_points = dict(SetupPy.iter_entry_points(bar))
        assert entry_points == {'bar_binary': 'bar.bin.bar'}

        with self.run_execute(bar, recursive=False) as setup_py_command:
            setup_py_command.run_one.assert_called_with(bar)

        with self.run_execute(bar, recursive=True) as setup_py_command:
            setup_py_command.run_one.assert_has_calls(
                [call(bar), call(bar_bin_dep)], any_order=True)
Example #9
 def test_validation(self):
     with ParseContext.temp('JarLibraryTest/test_validation'):
         target = Target(name='mybird')
         JarLibrary(name="test", dependencies=target)
         self.assertRaises(TargetDefinitionException,
                           JarLibrary,
                           name="test1",
                           dependencies=None)
 def test_python_binary_with_entry_point_and_source(self):
   with ParseContext.temp('src'):
     assert 'blork' == PythonBinary(
         name = 'binary1', entry_point = 'blork', source='blork.py').entry_point
     assert 'blork:main' == PythonBinary(
         name = 'binary2', entry_point = 'blork:main', source='blork.py').entry_point
     assert 'bin.blork:main' == PythonBinary(
         name = 'binary3', entry_point = 'bin.blork:main', source='bin/blork.py').entry_point
Example #11
 def generate_test_targets():
   if PythonTestBuilder.TESTING_TARGETS is None:
     with ParseContext.temp():
       PythonTestBuilder.TESTING_TARGETS = [
         PythonRequirement('pytest'),
          PythonRequirement('unittest2', version_filter=lambda: sys.version_info[0] == 2),
          PythonRequirement('unittest2py3k', version_filter=lambda: sys.version_info[0] == 3)
       ]
   return PythonTestBuilder.TESTING_TARGETS
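The zero-argument version_filter callables above are evaluated elsewhere (compare should_build in the dump() examples below) to decide whether a requirement applies to the running interpreter. A minimal sketch of that selection; FakeRequirement is a hypothetical stand-in for PythonRequirement.

import sys

class FakeRequirement(object):
    # Hypothetical stand-in for PythonRequirement: just a name plus a filter callable.
    def __init__(self, name, version_filter=lambda: True):
        self.name = name
        self.version_filter = version_filter

reqs = [
    FakeRequirement('pytest'),
    FakeRequirement('unittest2', version_filter=lambda: sys.version_info[0] == 2),
    FakeRequirement('unittest2py3k', version_filter=lambda: sys.version_info[0] == 3),
]
print([req.name for req in reqs if req.version_filter()])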
Example #12
def create_dependencies(depmap):
    target_map = {}
    with ParseContext.temp():
        for name, deps in depmap.items():
            target_map[name] = python_library(
                name=name,
                provides=setup_py(name=name, version='0.0.0'),
                dependencies=[pants(':%s' % dep) for dep in deps])
    return target_map
Example #13
  def test_validation(self):
    with ParseContext.temp('InternalTargetTest/test_validation'):
      InternalTarget(name="valid", dependencies=None)
      self.assertRaises(TargetDefinitionException, InternalTarget,
                        name=1, dependencies=None)

      InternalTarget(name="valid2", dependencies=Target(name='mybird'))
      self.assertRaises(TargetDefinitionException, InternalTarget,
                        name='valid3', dependencies=1)
Example #14
File: py.py Project: alfss/commons
  def execute(self):
    if self.options.pex and self.options.ipython:
      self.error('Cannot specify both --pex and --ipython!')

    if self.options.entry_point and self.options.ipython:
      self.error('Cannot specify both --entry_point and --ipython!')

    if self.options.verbose:
      print('Build operating on target: %s %s' % (self.target,
        'Extra targets: %s' % ' '.join(map(str, self.extra_targets)) if self.extra_targets else ''))

    builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter,
        pex_info=self.target.pexinfo if isinstance(self.target, PythonBinary) else None)

    if self.options.entry_point:
      builder.set_entry_point(self.options.entry_point)

    if self.options.ipython:
      if not self.config.has_section('python-ipython'):
        self.error('No python-ipython sections defined in your pants.ini!')

      builder.info.entry_point = self.config.get('python-ipython', 'entry_point')
      if builder.info.entry_point is None:
        self.error('Must specify entry_point for IPython in the python-ipython section '
                   'of your pants.ini!')

      requirements = self.config.getlist('python-ipython', 'requirements', default=[])

      with ParseContext.temp():
        for requirement in requirements:
          self.extra_targets.append(PythonRequirement(requirement))

    executor = PythonChroot(
        self.target,
        self.root_dir,
        builder=builder,
        interpreter=self.interpreter,
        extra_targets=self.extra_targets,
        conn_timeout=self.options.conn_timeout)

    executor.dump()

    if self.options.pex:
      pex_name = os.path.join(self.root_dir, 'dist', '%s.pex' % self.target.name)
      builder.build(pex_name)
      print('Wrote %s' % pex_name)
      return 0
    else:
      builder.freeze()
      pex = PEX(builder.path(), interpreter=self.interpreter)
      po = pex.run(args=list(self.args), blocking=False)
      try:
        return po.wait()
      except KeyboardInterrupt:
        po.send_signal(signal.SIGINT)
        raise
Example #15
def create_dependencies(depmap):
  target_map = {}
  with ParseContext.temp():
    for name, deps in depmap.items():
      target_map[name] = python_library(
        name=name,
        provides=setup_py(name=name, version='0.0.0'),
        dependencies=[pants(':%s' % dep) for dep in deps]
      )
  return target_map
 def test_python_binary_with_entry_point_and_source_mismatch(self):
   with ParseContext.temp('src'):
     with pytest.raises(TargetDefinitionException):
       PythonBinary(name = 'binary1', entry_point = 'blork', source='hork.py')
     with pytest.raises(TargetDefinitionException):
       PythonBinary(name = 'binary2', entry_point = 'blork:main', source='hork.py')
     with pytest.raises(TargetDefinitionException):
       PythonBinary(name = 'binary3', entry_point = 'bin.blork', source='blork.py')
     with pytest.raises(TargetDefinitionException):
       PythonBinary(name = 'binary4', entry_point = 'bin.blork', source='bin.py')
Example #17
 def __init__(self, name, dependencies=None, num_sources=0, exclusives=None):
   with ParseContext.temp():
     InternalTarget.__init__(self, name, dependencies, exclusives=exclusives)
     TargetWithSources.__init__(self, name, exclusives=exclusives)
   self.num_sources = num_sources
   self.declared_exclusives = defaultdict(set)
   if exclusives is not None:
     for k in exclusives:
       self.declared_exclusives[k] = set([exclusives[k]])
   self.exclusives = None
Example #18
 def generate_test_targets():
     if PythonTestBuilder.TESTING_TARGETS is None:
         with ParseContext.temp():
             PythonTestBuilder.TESTING_TARGETS = [
                 PythonRequirement("pytest"),
                 PythonRequirement("pytest-cov"),
                 PythonRequirement("coverage"),
                 PythonRequirement("unittest2", version_filter=lambda: sys.version_info[0] == 2),
                 PythonRequirement("unittest2py3k", version_filter=lambda: sys.version_info[0] == 3),
             ]
     return PythonTestBuilder.TESTING_TARGETS
Example #19
 def generate_test_targets(cls):
   if cls.TESTING_TARGETS is None:
     with ParseContext.temp():
       cls.TESTING_TARGETS = [
         PythonRequirement('pytest'),
         PythonRequirement('pytest-cov'),
         PythonRequirement('coverage==3.6b1'),
         PythonRequirement('unittest2', version_filter=lambda py, pl: py.startswith('2')),
         PythonRequirement('unittest2py3k', version_filter=lambda py, pl: py.startswith('3'))
       ]
   return cls.TESTING_TARGETS
Example #20
 def generate_test_targets():
   if PythonTestBuilder.TESTING_TARGETS is None:
     with ParseContext.temp():
       PythonTestBuilder.TESTING_TARGETS = [
         PythonRequirement('pytest'),
         PythonRequirement('pytest-cov'),
         PythonRequirement('coverage'),
          PythonRequirement('unittest2', version_filter=lambda: sys.version_info[0] == 2),
          PythonRequirement('unittest2py3k', version_filter=lambda: sys.version_info[0] == 3)
       ]
   return PythonTestBuilder.TESTING_TARGETS
Example #21
 def test_validation(self):
   with ParseContext.temp():
     repo = Repository(name="myRepo", url="myUrl", push_db="myPushDb")
     Artifact(org="testOrg", name="testName", repo=repo, description="Test")
     self.assertRaises(ValueError, Artifact,
                       org=1, name="testName", repo=repo, description="Test")
     self.assertRaises(ValueError, Artifact,
                       org="testOrg", name=1, repo=repo, description="Test")
     self.assertRaises(ValueError, Artifact,
                       org="testOrg", name="testName", repo=1, description="Test")
     self.assertRaises(ValueError, Artifact,
                       org="testOrg", name="testName", repo=repo, description=1)
Example #22
    def dump(self):
        self.debug('Building PythonBinary %s:' % self._target)

        targets = self.resolve([self._target] + self._extra_targets)

        for lib in targets['libraries'] | targets['binaries']:
            self._dump_library(lib)

        generated_reqs = OrderedSet()
        if targets['thrifts']:
            for thr in set(targets['thrifts']):
                if thr not in self.MEMOIZED_THRIFTS:
                    self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
                generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
            with ParseContext.temp():
                # trick pants into letting us add this python requirement, otherwise we get
                # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
                # PythonRequirement(thrift)
                #
                # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
                # should first detect if any explicit thrift requirements have been added and use
                # those.  Only if they have not been supplied should we auto-inject it.
                generated_reqs.add(PythonRequirement(
                    'thrift',
                    use_2to3=True,
                    name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))

        for antlr in targets['antlrs']:
            generated_reqs.add(self._generate_antlr_requirement(antlr))

        targets['reqs'] |= generated_reqs
        for req in targets['reqs']:
            if not req.should_build(self._interpreter.python,
                                    Platform.current()):
                self.debug('Skipping %s based upon version filter' % req)
                continue
            self._dump_requirement(req._requirement, False, req._repository)

        reqs_to_build = (
            req for req in targets['reqs']
            if req.should_build(self._interpreter.python, Platform.current()))
        for dist in self._resolver.resolve(reqs_to_build,
                                           interpreter=self._interpreter):
            self._dump_distribution(dist)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        return self._builder
Example #23
    def test_validation(self):
        with ParseContext.temp('InternalTargetTest/test_validation'):
            InternalTarget(name="valid", dependencies=None)
            self.assertRaises(TargetDefinitionException,
                              InternalTarget,
                              name=1,
                              dependencies=None)

            InternalTarget(name="valid2", dependencies=Target(name='mybird'))
            self.assertRaises(TargetDefinitionException,
                              InternalTarget,
                              name='valid3',
                              dependencies=1)
Example #24
  def _generate_requirement(self, library, builder_cls):
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8])

    cache_dir = os.path.join(self._egg_cache_root, library_key.id)
    if self._build_invalidator.needs_update(library_key):
      sdist = builder.build(interpreter=self._interpreter)
      safe_mkdir(cache_dir)
      shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
      self._build_invalidator.update(library_key)

    with ParseContext.temp():
      return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)
Example #25
    def test_binary_cycle(self):
        with ParseContext.temp():
            foo = python_library(name='foo',
                                 provides=setup_py(
                                     name='foo',
                                     version='0.0.0',
                                 ).with_binaries(foo_binary=pants(':foo_bin')))

            foo_bin = python_binary(name='foo_bin',
                                    entry_point='foo.bin.foo',
                                    dependencies=[pants(':foo')])

        with pytest.raises(TargetDefinitionException):
            SetupPy.minified_dependencies(foo)
Example #26
  def compiled_idl(cls, idl_dep, generated_deps=None, compiler=None, language=None, namespace_map=None):
    """Marks a jar as containing IDL files that should be fetched and processed locally.

    idl_dep:        A dependency resolvable to a single jar library.
    generated_deps: Dependencies for the code that will be generated from "idl_dep".
    compiler:       The thrift compiler to apply to the fetched thrift IDL files.
    language:       The language to generate code for - supported by some compilers.
    namespace_map:  A mapping from IDL declared namespaces to custom namespaces - supported by some
                    compilers.
    """
    deps = [t for t in idl_dep.resolve() if t.is_concrete]
    if not len(deps) == 1:
      raise TaskError('Can only arrange for compiled idl for a single dependency at a time, '
                      'given:\n\t%s' % '\n\t'.join(map(str, deps)))
    jar = deps.pop()
    if not isinstance(jar, JarDependency):
      raise TaskError('Can only arrange for compiled idl from a jar dependency, given: %s' % jar)

    request = (jar, compiler, language)
    namespace_signature = None
    if namespace_map:
      sha = hashlib.sha1()
      for ns_from, ns_to in sorted(namespace_map.items()):
        sha.update(ns_from)
        sha.update(ns_to)
      namespace_signature = sha.hexdigest()
    request += (namespace_signature,)

    if request not in cls._PLACEHOLDER_BY_REQUEST:
      if not cls._EXTRACT_BASE:
        config = Config.load()
        cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
        safe_mkdir(cls._EXTRACT_BASE)
        SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)

      with ParseContext.temp(cls._EXTRACT_BASE):
        # TODO(John Sirois): abstract ivy specific configurations notion away
        jar._configurations.append('idl')
        jar.with_artifact(configuration='idl', classifier='idl')
        target_name = '-'.join(filter(None, (jar.id, compiler, language, namespace_signature)))
        placeholder = JavaThriftLibrary(target_name,
                                        sources=None,
                                        dependencies=[jar] + (generated_deps or []),
                                        compiler=compiler,
                                        language=language,
                                        namespace_map=namespace_map)
        cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
        cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
    return cls._PLACEHOLDER_BY_REQUEST[request]
Example #27
 def generate_test_targets(cls):
     if cls.TESTING_TARGETS is None:
         with ParseContext.temp():
             cls.TESTING_TARGETS = [
                 PythonRequirement('pytest'),
                 PythonRequirement('pytest-cov'),
                 PythonRequirement('coverage==3.6b1'),
                 PythonRequirement(
                     'unittest2',
                     version_filter=lambda py, pl: py.startswith('2')),
                 PythonRequirement(
                     'unittest2py3k',
                     version_filter=lambda py, pl: py.startswith('3'))
             ]
     return cls.TESTING_TARGETS
Example #28
    def _generate_requirement(self, library, builder_cls):
        library_key = self._key_generator.key_for_target(library)
        builder = builder_cls(library, self._root, self._config,
                              '-' + library_key.hash[:8])

        cache_dir = os.path.join(self._egg_cache_root, library_key.id)
        if self._build_invalidator.needs_update(library_key):
            sdist = builder.build(interpreter=self._interpreter)
            safe_mkdir(cache_dir)
            shutil.copy(sdist, os.path.join(cache_dir,
                                            os.path.basename(sdist)))
            self._build_invalidator.update(library_key)

        with ParseContext.temp():
            return PythonRequirement(builder.requirement_string(),
                                     repository=cache_dir,
                                     use_2to3=True)
Example #29
  def dump(self):
    self.debug('Building PythonBinary %s:' % self._target)

    targets = self.resolve([self._target] + self._extra_targets)

    for lib in targets['libraries'] | targets['binaries']:
      self._dump_library(lib)

    generated_reqs = OrderedSet()
    if targets['thrifts']:
      for thr in set(targets['thrifts']):
        if thr not in self.MEMOIZED_THRIFTS:
          self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
        generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
      with ParseContext.temp():
        # trick pants into letting us add this python requirement, otherwise we get
        # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
        # PythonRequirement(thrift)
        #
        # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
        # should first detect if any explicit thrift requirements have been added and use
        # those.  Only if they have not been supplied should we auto-inject it.
        generated_reqs.add(PythonRequirement('thrift', use_2to3=True,
            name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))

    for antlr in targets['antlrs']:
      generated_reqs.add(self._generate_antlr_requirement(antlr))

    targets['reqs'] |= generated_reqs
    for req in targets['reqs']:
      if not req.should_build(self._interpreter.python, Platform.current()):
        self.debug('Skipping %s based upon version filter' % req)
        continue
      self._dump_requirement(req._requirement, False, req._repository)

    reqs_to_build = (req for req in targets['reqs']
        if req.should_build(self._interpreter.python, Platform.current()))
    for dist in self._resolver.resolve(reqs_to_build, interpreter=self._interpreter):
      self._dump_distribution(dist)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    return self._builder
Example #30
  def test_binary_cycle(self):
    with ParseContext.temp():
      foo = python_library(
        name = 'foo',
        provides = setup_py(
          name = 'foo',
          version = '0.0.0',
        ).with_binaries(
          foo_binary = pants(':foo_bin')
        )
      )

      foo_bin = python_binary(
        name = 'foo_bin',
        entry_point = 'foo.bin.foo',
        dependencies = [ pants(':foo') ]
      )

    with pytest.raises(TargetDefinitionException):
      SetupPy.minified_dependencies(foo)
Example #31
  def test_jar_dependency(self):
    with ParseContext.temp():
      org, name = "org", "name"
      # thing to override
      nay = JarDependency(org, name, "0.0.1")
      yea = JarDependency(org, name, "0.0.8")
      # define targets depend on different 'org:c's
      JarLibrary("c", [nay])
      JarLibrary("b", [yea])
      # then depend on those targets transitively, and override to the correct version
      l = JarLibrary(
        "a",
        dependencies=[Pants(":c")],
        overrides=[":b"])

      # confirm that resolving includes the correct version
      resolved = set(l.resolve())
      self.assertTrue(yea in resolved)
      # and attaches an exclude directly to the JarDependency
      self.assertTrue(Exclude(org, name) in nay.excludes)
Example #32
    def create_target(category, target_name, target_index, targets):
        def name(name):
            return "%s-%s-%d" % (target_name, name, target_index)

        # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
        # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
        # so that tests can house test IDL in tests/
        target_type, base = category
        with ParseContext.temp(base):
            if target_type == JavaProtobufLibrary:
                return _aggregate(JavaProtobufLibrary,
                                  name('protobuf'),
                                  targets,
                                  buildflags=buildflags)
            elif target_type == JavaThriftLibrary:
                return _aggregate(JavaThriftLibrary,
                                  name('thrift'),
                                  targets,
                                  buildflags=buildflags)
            elif target_type == AnnotationProcessor:
                return _aggregate(AnnotationProcessor, name('apt'), targets)
            elif target_type == JavaLibrary:
                return _aggregate(JavaLibrary, name('java'), targets,
                                  deployjar, buildflags)
            elif target_type == ScalaLibrary:
                return _aggregate(ScalaLibrary, name('scala'), targets,
                                  deployjar, buildflags)
            elif target_type == JavaTests:
                return _aggregate(JavaTests,
                                  name('java-tests'),
                                  targets,
                                  buildflags=buildflags)
            elif target_type == ScalaTests:
                return _aggregate(ScalaTests,
                                  name('scala-tests'),
                                  targets,
                                  buildflags=buildflags)
            else:
                raise Exception("Cannot aggregate targets of type: %s" %
                                target_type)
Example #33
  def test_binary_target_injected_into_minified_dependencies_with_provider(self):
    with ParseContext.temp():
      bar = python_library(
        name = 'bar',
        provides = setup_py(
          name = 'bar',
          version = '0.0.0',
        ).with_binaries(
          bar_binary = pants(':bar_bin')
        )
      )

      bar_bin = python_binary(
        name = 'bar_bin',
        entry_point = 'bar.bin.bar',
        dependencies = [ pants(':bar_bin_dep') ]
      )

      bar_bin_dep = python_library(
        name = 'bar_bin_dep',
        provides = setup_py(
          name = 'bar_bin_dep',
          version = '0.0.0',
        )
      )

    assert SetupPy.minified_dependencies(bar) == OrderedSet([bar_bin, bar_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(bar))
    assert entry_points == {'bar_binary': 'bar.bin.bar'}

    with self.run_execute(bar, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(bar)

    with self.run_execute(bar, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_has_calls([
          call(bar),
          call(bar_bin_dep)
      ], any_order=True)
Example #34
    def test_binary_target_injected_into_minified_dependencies(self):
        with ParseContext.temp():
            foo = python_library(name='foo',
                                 provides=setup_py(
                                     name='foo',
                                     version='0.0.0',
                                 ).with_binaries(foo_binary=pants(':foo_bin')))

            foo_bin = python_binary(name='foo_bin',
                                    entry_point='foo.bin.foo',
                                    dependencies=[pants(':foo_bin_dep')])

            foo_bin_dep = python_library(name='foo_bin_dep')

        assert SetupPy.minified_dependencies(foo) == OrderedSet(
            [foo_bin, foo_bin_dep])
        entry_points = dict(SetupPy.iter_entry_points(foo))
        assert entry_points == {'foo_binary': 'foo.bin.foo'}

        with self.run_execute(foo, recursive=False) as setup_py_command:
            setup_py_command.run_one.assert_called_with(foo)

        with self.run_execute(foo, recursive=True) as setup_py_command:
            setup_py_command.run_one.assert_called_with(foo)
Example #35
 def resolve(self, spec):
     """Returns an iterator over the target(s) the given address points to."""
     with ParseContext.temp():
         return Pants(spec).resolve()
Example #36
 def _create_new_target(self, target_base, target_type, *args, **kwargs):
     if not os.path.exists(target_base):
         os.makedirs(target_base)
     SourceRoot.register(target_base, target_type)
     with ParseContext.temp(target_base):
         return target_type(*args, **kwargs)
 def test_python_binary_must_have_some_entry_point(self):
   with ParseContext.temp('src'):
     with pytest.raises(TargetDefinitionException):
       PythonBinary(name = 'binary')
 def test_python_binary_with_entry_point_no_source(self):
   with ParseContext.temp('src'):
     assert PythonBinary(name = 'binary', entry_point = 'blork').entry_point == 'blork'
Example #39
 def test_legacy(self):
   self.create_dir('main')
   with ParseContext.temp('main'):
     target = WithLegacyResources('test', resources=['a/b.js'])
     self.assertEquals([self.resources], target.resources)
     self.assertEquals([self.resource_path], self.resources.sources)
Example #40
 def _create_new_target(self, target_base, target_type, *args, **kwargs):
   if not os.path.exists(target_base):
     os.makedirs(target_base)
   SourceRoot.register(target_base, target_type)
   with ParseContext.temp(target_base):
     return target_type(*args, **kwargs)
Example #41
  def execute(self, targets):
    self.check_clean_master()

    pushdbs = {}
    def get_db(target):
      if target.provides is None:
        raise TaskError('trying to publish target %r which does not provide an artifact' % target)
      dbfile = target.provides.repo.push_db
      result = pushdbs.get(dbfile)
      if not result:
        db = PushDb.load(dbfile)
        repo = self.repos[target.provides.repo.name]
        result = (db, dbfile, repo)
        pushdbs[dbfile] = result
      return result

    def fingerprint_internal(target):
      if not is_internal(target):
        raise ValueError('Expected an internal target for fingerprinting, got %s' % target)
      pushdb, _, _ = get_db(target)
      _, _, _, fingerprint = pushdb.as_jar_with_version(target)
      return fingerprint or '0.0.0'

    def stage_artifacts(target, jar, version, changelog, confs=None):
      def artifact_path(name=None, suffix='', extension='jar'):
        return os.path.join(self.outdir, jar.org, jar.name,
                            '%s-%s%s.%s' % ((name or jar.name), version, suffix, extension))

      with safe_open(artifact_path(suffix='-CHANGELOG', extension='txt'), 'w') as changelog_file:
        changelog_file.write(changelog)

      def get_pushdb(target):
        return get_db(target)[0]

      PomWriter(get_pushdb).write(target, artifact_path(extension='pom'))

      ivyxml = artifact_path(name='ivy', extension='xml')
      IvyWriter(get_pushdb).write(target, ivyxml, confs)

      def copy(typename, suffix=''):
        genmap = self.context.products.get(typename)
        for basedir, jars in genmap.get(target).items():
          for artifact in jars:
            shutil.copy(os.path.join(basedir, artifact), artifact_path(suffix=suffix))

      copy('jars')
      if is_java(target):
        copy('javadoc_jars', '-javadoc')
      copy('source_jars', '-sources')

      return ivyxml

    if self.overrides:
      print('Publishing with revision overrides:\n  %s' % '\n  '.join(
        '%s=%s' % (coordinate(org, name), rev) for (org, name), rev in self.overrides.items()
      ))

    head_sha = self.check_output(['git', 'rev-parse', 'HEAD']).strip()

    safe_rmtree(self.outdir)
    published = []
    skip = (self.restart_at is not None)
    for target in self.exported_targets():
      pushdb, dbfile, repo = get_db(target)
      jar, semver, sha, fingerprint = pushdb.as_jar_with_version(target)

      published.append(jar)

      if skip and (jar.org, jar.name) == self.restart_at:
        skip = False

      newver = self.overrides.get((jar.org, jar.name)) or semver.bump()
      if self.snapshot:
        newver = newver.make_snapshot()

      if newver <= semver:
        raise TaskError('Requested version %s must be greater than the current version %s' % (
          newver.version(), semver.version()
        ))

      newfingerprint = self.fingerprint(target, fingerprint_internal)
      no_changes = newfingerprint == fingerprint

      if no_changes:
        changelog = 'No changes for %s - forced push.\n' % jar_coordinate(jar, semver.version())
      else:
        changelog = self.changelog(target, sha) or 'Direct dependencies changed.\n'

      if no_changes and not self.force:
        print('No changes for %s' % jar_coordinate(jar, semver.version()))
        stage_artifacts(target, jar, (newver if self.force else semver).version(), changelog)
      elif skip:
        print('Skipping %s to resume at %s' % (
          jar_coordinate(jar, (newver if self.force else semver).version()),
          coordinate(self.restart_at[0], self.restart_at[1])
        ))
        stage_artifacts(target, jar, semver.version(), changelog)
      else:
        if not self.dryrun:
          # Confirm push looks good
          if no_changes:
            print(changelog)
          else:
            print('\nChanges for %s since %s @ %s:\n\n%s' % (
              coordinate(jar.org, jar.name), semver.version(), sha, changelog
            ))
          push = raw_input('Publish %s with revision %s ? [y|N] ' % (
            coordinate(jar.org, jar.name), newver.version()
          ))
          print('\n')
          if push.strip().lower() != 'y':
            raise TaskError('User aborted push')

        pushdb.set_version(target, newver, head_sha, newfingerprint)
        ivyxml = stage_artifacts(target, jar, newver.version(), changelog, confs=repo['confs'])
        if self.dryrun:
          print('Skipping publish of %s in test mode.' % jar_coordinate(jar, newver.version()))
        else:
          resolver = repo['resolver']
          path = repo.get('path')

          # Get authentication for the publish repo if needed
          jvmargs = []
          auth = repo['auth']
          if auth:
            with ParseContext.temp():
              credentials = pants(auth).resolve().next()
              jvmargs.append(credentials.username())
              jvmargs.append(credentials.password())

          # Do the publish
          ivysettings = self.generate_ivysettings(published, publish_local=path)
          args = [
            '-settings', ivysettings,
            '-ivy', ivyxml,
            '-deliverto', '%s/[organisation]/[module]/ivy-[revision].xml' % self.outdir,
            '-publish', resolver,
            '-publishpattern',
              '%s/[organisation]/[module]/[artifact]-[revision](-[classifier]).[ext]' % self.outdir,
            '-revision', newver.version(),
            '-m2compatible',
          ]
          if self.snapshot:
            args.append('-overwrite')

          result = binary_utils.runjava(jvmargs=jvmargs, classpath=self.ivycp, args=args)
          if result != 0:
            raise TaskError('Failed to push %s - ivy failed with %d' % (
              jar_coordinate(jar, newver.version()), result)
            )

          if self.commit:
            pushdb.dump(dbfile)
            self.commit_push(jar.org, jar.name, newver.version(), head_sha)
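A standalone sketch of the directory layout produced by artifact_path inside stage_artifacts above, with the enclosing-scope values passed explicitly; the outdir, org, name, and version values are hypothetical placeholders.

import os

def artifact_path(outdir, org, name, version, artifact_name=None, suffix='', extension='jar'):
    # Same join as artifact_path above, parameterized instead of closing over self/jar.
    return os.path.join(outdir, org, name,
                        '%s-%s%s.%s' % ((artifact_name or name), version, suffix, extension))

print(artifact_path('dist', 'com.example', 'service', '1.2.3'))
# dist/com.example/service/service-1.2.3.jar
print(artifact_path('dist', 'com.example', 'service', '1.2.3', suffix='-sources'))
# dist/com.example/service/service-1.2.3-sources.jar
print(artifact_path('dist', 'com.example', 'service', '1.2.3', suffix='-CHANGELOG', extension='txt'))
# dist/com.example/service/service-1.2.3-CHANGELOG.txt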
Example #42
 def union(cls, targets, name=None):
     name = name or (cls.synthetic_name(targets) + '-union')
     with ParseContext.temp():
         return cls(name, dependencies=targets)
Example #43
 def test_python_binary_with_source_no_entry_point(self):
   with ParseContext.temp('src'):
     assert PythonBinary(name = 'binary1', source = 'blork.py').entry_point == 'blork'
     assert PythonBinary(name = 'binary2', source = 'bin/blork.py').entry_point == 'bin.blork'
Example #44
 def union(cls, targets, name=None):
   name = name or (cls.synthetic_name(targets) + '-union')
   with ParseContext.temp():
     return cls(name, dependencies=targets)
Example #45
 def test_validation(self):
   with ParseContext.temp('JarLibraryTest/test_validation'):
     target = Target(name='mybird')
     JarLibrary(name="test", dependencies=target)
     self.assertRaises(TargetDefinitionException, JarLibrary,
                       name="test1", dependencies=None)
 def test_empty_dependencies(self):
   with ParseContext.temp():
     JarLibrary("test-jar-library-with-empty-dependencies", [])
Example #47
 def resolve(self, spec):
   """Returns an iterator over the target(s) the given address points to."""
   with ParseContext.temp():
     return Pants(spec).resolve()
 def test_no_dependencies(self):
   with pytest.raises(TargetDefinitionException):
     with ParseContext.temp():
       JarLibrary("test-jar-library-with-empty-dependencies", None)
Example #49
 def of(cls, target):
   with ParseContext.temp():
     return cls(target.name, dependencies=[target])
Example #50
 def test_validation(self):
   with ParseContext.temp('TargetTest/test_validation'):
     self.assertRaises(TargetDefinitionException, Target, name=None)
     name = "test"
     self.assertEquals(Target(name=name).name, name)
Example #51
File: py.py Project: alfss/commons
  def __init__(self, run_tracker, root_dir, parser, argv):
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    self.target = None
    self.extra_targets = []
    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches([self.options.interpreter]
            if self.options.interpreter else [''])))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]

    for req in self.options.extra_requirements:
      with ParseContext.temp():
        self.extra_targets.append(PythonRequirement(req, use_2to3=True))

    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    binaries = []
    for k in range(len(self.args)):
      arg = self.args.pop(0)
      if arg == '--':
        break

      def not_a_target(debug_msg):
        self.debug('Not a target, assuming option: %s.' % debug_msg)
        # We failed to parse the arg as a target or else it was in valid address format but did not
        # correspond to a real target.  Assume this is the 1st of the build args and terminate
        # processing args for target addresses.
        self.args.insert(0, arg)

      target = None
      try:
        address = Address.parse(root_dir, arg)
        target = Target.get(address)
        if target is None:
          not_a_target(debug_msg='Unrecognized target')
          break
      except Exception as e:
        not_a_target(debug_msg=e)
        break

      for resolved in filter(lambda t: t.is_concrete, target.resolve()):
        if isinstance(resolved, PythonBinary):
          binaries.append(resolved)
        else:
          self.extra_targets.append(resolved)

    if len(binaries) == 0:
      # treat as a chroot
      pass
    elif len(binaries) == 1:
      # We found a binary and are done, the rest of the args get passed to it
      self.target = binaries[0]
    else:
      self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
        arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)
      ))

    if self.target is None:
      if not self.extra_targets:
        self.error('No valid target specified!')
      self.target = self.extra_targets.pop(0)
Example #52
    def execute(self, targets):
        self.check_clean_master()

        pushdbs = {}

        def get_db(target):
            if target.provides is None:
                raise TaskError(
                    'trying to publish target %r which does not provide an artifact'
                    % target)
            dbfile = target.provides.repo.push_db
            result = pushdbs.get(dbfile)
            if not result:
                db = PushDb.load(dbfile)
                repo = self.repos[target.provides.repo.name]
                result = (db, dbfile, repo)
                pushdbs[dbfile] = result
            return result

        def fingerprint_internal(target):
            if not is_internal(target):
                raise ValueError(
                    'Expected an internal target for fingerprinting, got %s' %
                    target)
            pushdb, _, _ = get_db(target)
            _, _, _, fingerprint = pushdb.as_jar_with_version(target)
            return fingerprint or '0.0.0'

        def stage_artifacts(target, jar, version, changelog, confs=None):
            def artifact_path(name=None, suffix='', extension='jar'):
                return os.path.join(
                    self.outdir, jar.org, jar.name, '%s-%s%s.%s' %
                    ((name or jar.name), version, suffix, extension))

            with safe_open(artifact_path(suffix='-CHANGELOG', extension='txt'),
                           'w') as changelog_file:
                changelog_file.write(changelog)

            def get_pushdb(target):
                return get_db(target)[0]

            PomWriter(get_pushdb).write(target, artifact_path(extension='pom'))

            ivyxml = artifact_path(name='ivy', extension='xml')
            IvyWriter(get_pushdb).write(target, ivyxml, confs)

            def copy(typename, suffix=''):
                genmap = self.context.products.get(typename)
                for basedir, jars in genmap.get(target).items():
                    for artifact in jars:
                        shutil.copy(os.path.join(basedir, artifact),
                                    artifact_path(suffix=suffix))

            copy('jars')
            if is_java(target):
                copy('javadoc_jars', '-javadoc')
            copy('source_jars', '-sources')

            return ivyxml

        if self.overrides:
            print('Publishing with revision overrides:\n  %s' %
                  '\n  '.join('%s=%s' % (coordinate(org, name), rev)
                              for (org, name), rev in self.overrides.items()))

        head_sha = self.check_output(['git', 'rev-parse', 'HEAD']).strip()

        safe_rmtree(self.outdir)
        published = []
        skip = (self.restart_at is not None)
        for target in self.exported_targets():
            pushdb, dbfile, repo = get_db(target)
            jar, semver, sha, fingerprint = pushdb.as_jar_with_version(target)

            published.append(jar)

            if skip and (jar.org, jar.name) == self.restart_at:
                skip = False

            newver = self.overrides.get((jar.org, jar.name)) or semver.bump()
            if self.snapshot:
                newver = newver.make_snapshot()

            if newver <= semver:
                raise TaskError(
                    'Requested version %s must be greater than the current version %s'
                    % (newver.version(), semver.version()))

            newfingerprint = self.fingerprint(target, fingerprint_internal)
            no_changes = newfingerprint == fingerprint

            if no_changes:
                changelog = 'No changes for %s - forced push.\n' % jar_coordinate(
                    jar, semver.version())
            else:
                changelog = self.changelog(
                    target, sha) or 'Direct dependencies changed.\n'

            if no_changes and not self.force:
                print('No changes for %s' %
                      jar_coordinate(jar, semver.version()))
                stage_artifacts(target, jar,
                                (newver if self.force else semver).version(),
                                changelog)
            elif skip:
                print('Skipping %s to resume at %s' %
                      (jar_coordinate(
                          jar, (newver if self.force else semver).version()),
                       coordinate(self.restart_at[0], self.restart_at[1])))
                stage_artifacts(target, jar, semver.version(), changelog)
            else:
                if not self.dryrun:
                    # Confirm push looks good
                    if no_changes:
                        print(changelog)
                    else:
                        print('\nChanges for %s since %s @ %s:\n\n%s' %
                              (coordinate(jar.org, jar.name), semver.version(),
                               sha, changelog))
                    push = raw_input(
                        'Publish %s with revision %s ? [y|N] ' %
                        (coordinate(jar.org, jar.name), newver.version()))
                    print('\n')
                    if push.strip().lower() != 'y':
                        raise TaskError('User aborted push')

                pushdb.set_version(target, newver, head_sha, newfingerprint)
                ivyxml = stage_artifacts(target,
                                         jar,
                                         newver.version(),
                                         changelog,
                                         confs=repo['confs'])
                if self.dryrun:
                    print('Skipping publish of %s in test mode.' %
                          jar_coordinate(jar, newver.version()))
                else:
                    resolver = repo['resolver']
                    path = repo.get('path')

                    # Get authentication for the publish repo if needed
                    jvmargs = []
                    auth = repo['auth']
                    if auth:
                        with ParseContext.temp():
                            credentials = pants(auth).resolve().next()
                            jvmargs.append(credentials.username())
                            jvmargs.append(credentials.password())

                    # Do the publish
                    ivysettings = self.generate_ivysettings(published,
                                                            publish_local=path)
                    args = [
                        '-settings',
                        ivysettings,
                        '-ivy',
                        ivyxml,
                        '-deliverto',
                        '%s/[organisation]/[module]/ivy-[revision].xml' %
                        self.outdir,
                        '-publish',
                        resolver,
                        '-publishpattern',
                        '%s/[organisation]/[module]/[artifact]-[revision](-[classifier]).[ext]'
                        % self.outdir,
                        '-revision',
                        newver.version(),
                        '-m2compatible',
                    ]
                    if self.snapshot:
                        args.append('-overwrite')

                    result = binary_utils.runjava(jvmargs=jvmargs,
                                                  classpath=self.ivycp,
                                                  args=args)
                    if result != 0:
                        raise TaskError(
                            'Failed to push %s - ivy failed with %d' %
                            (jar_coordinate(jar, newver.version()), result))

                    if self.commit:
                        pushdb.dump(dbfile)
                        self.commit_push(jar.org, jar.name, newver.version(),
                                         head_sha)
Example #53
 def of(cls, target):
     with ParseContext.temp():
         return cls(target.name, dependencies=[target])
Example #54
 def test_legacy(self):
     self.create_dir('main')
     with ParseContext.temp('main'):
         target = WithLegacyResources('test', resources=['a/b.js'])
         self.assertEquals([self.resources], target.resources)
         self.assertEquals([self.resource_path], self.resources.sources)