Beispiel #1
0
    def _coerce_to_targets(cls, from_str, to_str):
        """Resolve a (from, to) pair of path endpoints into Target objects.

        Both endpoints must be of the same kind: either both string specs
        (which are parsed and resolved) or both already-resolved targets.
        Mixing kinds, or a spec that resolves to no target, raises TaskError.
        """
        from_is_str = isinstance(from_str, Compatibility.string)
        to_is_str = isinstance(to_str, Compatibility.string)

        if not from_is_str:
            if to_is_str:
                raise TaskError('Finding paths from string %s to non-string %s' %
                                (to_str, str(from_str)))
            # Both already targets - nothing to resolve.
            return from_str, to_str

        if not to_is_str:
            raise TaskError(
                'Finding paths from string %s to non-string %s' %
                (from_str, str(to_str)))

        resolved = []
        for spec in (from_str, to_str):
            address = Address.parse(get_buildroot(), spec)
            target = Target.get(address)
            if not target:
                raise TaskError('Target %s doesn\'t exist' % address.reference())
            resolved.append(target)
        return tuple(resolved)
Beispiel #2
0
 def parse_jarcoordinate(coordinate):
     """Resolve a jar coordinate string into an (org, name) pair.

     The coordinate is either an explicit 'org#name' pair, or a target
     address whose exported target supplies the org and name.

     Raises TaskError when the address has no BUILD file, the BUILD file
     fails to evaluate, the target is missing, or it is not exported.
     """
     components = coordinate.split('#', 1)
     if len(components) == 2:
         # Explicit 'org#name' form - nothing to resolve.
         org, name = components
         return org, name
     else:
         try:
             address = Address.parse(get_buildroot(), coordinate)
             try:
                 target = Target.get(address)
                 if not target:
                     # Unknown target: suggest sibling targets from the same BUILD file.
                     siblings = Target.get_all_addresses(
                         address.buildfile)
                     prompt = 'did you mean' if len(
                         siblings
                     ) == 1 else 'maybe you meant one of these'
                     raise TaskError('%s => %s?:\n    %s' %
                                     (address, prompt, '\n    '.join(
                                         str(a) for a in siblings)))
                 if not is_exported(target):
                     raise TaskError('%s is not an exported target' %
                                     coordinate)
                 return target.provides.org, target.provides.name
             except (ImportError, SyntaxError, TypeError):
                 # BUILD file evaluation failures surface as these exception types.
                 raise TaskError('Failed to parse %s' %
                                 address.buildfile.relpath)
         except IOError:
             # Address.parse found no BUILD file at the given path.
             raise TaskError('No BUILD file could be found at %s' %
                             coordinate)
Beispiel #3
0
  def find_plugins(self, plugin_names):
    """Returns a map from plugin name to plugin jar.

    Scans the plugin jar universe for scalac plugin descriptors, selects the
    requested plugins, and raises TaskError on a malformed descriptor, a
    duplicate plugin definition, or any requested plugin left unresolved.
    """
    plugin_names = set(plugin_names)
    plugins = {}
    buildroot = get_buildroot()
    # plugin_jars is the universe of all possible plugins and their transitive deps.
    # Here we select the ones to actually use.
    for jar in self.plugin_jars():
      with open_jar(jar, 'r') as jarfile:
        try:
          # ZipFile.open raises KeyError when the jar has no descriptor entry.
          # Keep the try narrow so we don't mask unrelated KeyErrors below.
          with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
            plugin_info = ElementTree.parse(plugin_info_file).getroot()
        except KeyError:
          continue  # Not a plugin jar; skip it.
        if plugin_info.tag != 'plugin':
          raise TaskError(
            'File %s in %s is not a valid scalac plugin descriptor' % (_PLUGIN_INFO_FILE, jar))
        name = plugin_info.find('name').text
        if name in plugin_names:
          if name in plugins:
            raise TaskError('Plugin %s defined in %s and in %s' % (name, plugins[name], jar))
          # It's important to use relative paths, as the compiler flags get embedded in the zinc
          # analysis file, and we port those between systems via the artifact cache.
          plugins[name] = os.path.relpath(jar, buildroot)

    unresolved_plugins = plugin_names - set(plugins.keys())
    if unresolved_plugins:
      raise TaskError('Could not find requested plugins: %s' % list(unresolved_plugins))
    return plugins
Beispiel #4
0
    def genlang(self, lang, targets):
        """Generate java sources from the ANTLR grammars in each target.

        Raises TaskError on an unsupported language, an unknown ANTLR
        compiler, or a non-zero exit from the ANTLR tool.
        """
        if lang != 'java':
            raise TaskError('Unrecognized antlr gen lang: %s' % lang)

        # TODO: Instead of running the compiler for each target, collect the targets
        # by type and invoke it twice, once for antlr3 and once for antlr4.

        for target in targets:
            java_out = self._java_out(target)
            safe_mkdir(java_out)

            antlr_profile = self._antlr_profile(target)
            antlr_classpath = self.profile_classpath(antlr_profile)
            antlr_opts = ["-o", java_out]

            if target.compiler == 'antlr3':
                java_main = 'org.antlr.Tool'
            elif target.compiler == 'antlr4':
                antlr_opts.append(
                    "-visitor")  # Generate the parse tree visitor as well.
                java_main = 'org.antlr.v4.Tool'
            else:
                raise TaskError("Unknown ANTLR compiler: {}".format(
                    target.compiler))

            sources = self._calculate_sources([target])
            result = self.runjava_indivisible(java_main,
                                              classpath=antlr_classpath,
                                              opts=antlr_opts,
                                              args=sources,
                                              workunit_name='antlr')
            if result != 0:
                # Include the tool and exit code instead of a bare TaskError,
                # matching the error style of the other gen tasks.
                raise TaskError('java %s ... exited non-zero (%i)' %
                                (java_main, result))
Beispiel #5
0
def setup_virtualenv_py(context):
  """Ensure a virtualenv bootstrap is available in the configured cache.

  Returns True early if virtualenv.py is already present in the target dir;
  otherwise downloads a virtualenv tarball from the first reachable
  configured URL and extracts it into the bootstrap cache (returns None).

  Raises TaskError if no URL could be fetched or extraction fails.
  """
  virtualenv_cache = context.config.get('python-setup', 'bootstrap_cache')
  virtualenv_target = context.config.get('python-setup', 'virtualenv_target')
  if not os.path.exists(virtualenv_cache):
    safe_mkdir(virtualenv_cache)
  if os.path.exists(os.path.join(virtualenv_target, 'virtualenv.py')):
    return True
  else:
    safe_mkdir(virtualenv_target)

  virtualenv_urls = context.config.getlist('python-setup', 'virtualenv_urls')
  tf = None
  for url in virtualenv_urls:
    try:
      # Buffer the whole download in memory so tarfile can seek within it.
      ve_tgz = urlopen(url, timeout=5)
      ve_tgz_fp = StringIO(ve_tgz.read())
      ve_tgz_fp.seek(0)
      tf = tarfile.open(fileobj=ve_tgz_fp, mode='r:gz')
      break
    except Exception as e:
      # Best effort: any failure just moves on to the next mirror.
      context.log.warn('Failed to pull virtualenv from %s' % url)
      continue
  if not tf:
    raise TaskError('Could not download virtualenv!')
  try:
    tf.extractall(path=virtualenv_cache)
  except Exception as e:
    raise TaskError('Could not install virtualenv: %s' % e)
  # NOTE(review): 'url' is the loop variable left over from the download
  # loop above - the URL the tarball was actually fetched from.
  context.log.info('Extracted %s' % url)
Beispiel #6
0
  def map_internal_jars(self, targets):
    """Copy each target's built jar (and source jar, if present) into the
    project layout and register them as internal classpath entries.

    Raises TaskError if any product mapping yields more than one jar.
    """
    internal_jar_dir = os.path.join(self.work_dir, 'internal-libs')
    safe_mkdir(internal_jar_dir, clean=True)

    internal_source_jar_dir = os.path.join(self.work_dir, 'internal-libsources')
    safe_mkdir(internal_source_jar_dir, clean=True)

    internal_jars = self.context.products.get('jars')
    internal_source_jars = self.context.products.get('source_jars')
    for target in targets:
      mappings = internal_jars.get(target)
      if mappings:
        for base, jars in mappings.items():
          if len(jars) != 1:
            raise TaskError('Unexpected mapping, multiple jars for %s: %s' % (target, jars))

          jar = jars[0]
          cp_jar = os.path.join(internal_jar_dir, jar)
          shutil.copy(os.path.join(base, jar), cp_jar)

          # Use distinct names for the source-jar mapping: the original code
          # shadowed 'mappings'/'base'/'jars'/'jar' from the binary-jar loop.
          cp_source_jar = None
          source_mappings = internal_source_jars.get(target)
          if source_mappings:
            for source_base, source_jars in source_mappings.items():
              if len(source_jars) != 1:
                raise TaskError(
                  'Unexpected mapping, multiple source jars for %s: %s' % (target, source_jars)
                )
              source_jar = source_jars[0]
              cp_source_jar = os.path.join(internal_source_jar_dir, source_jar)
              shutil.copy(os.path.join(source_base, source_jar), cp_source_jar)

          self._project.internal_jars.add(ClasspathEntry(cp_jar, cp_source_jar))
  def execute(self, targets):
    """Publish the generated wiki html for each Page target to the wiki.

    Collects Page targets configured for this wiki, publishes each page's
    rendered html, and optionally opens the resulting URLs in a browser.

    Raises TaskError when the generated html products are not exactly one
    file per page.
    """
    pages = []
    for target in targets:
      if isinstance(target, Page):
        wikiconfig = target.wiki_config(self.wiki())
        if wikiconfig:
          pages.append((target, wikiconfig))

    urls = list()

    genmap = self.context.products.get('wiki_html')
    for page, wikiconfig in pages:
      html_info = genmap.get((self.wiki(), page))
      if len(html_info) > 1:
        raise TaskError('Unexpected resources for %s: %s' % (page, html_info))
      # Python 2 idiom: items() returns a list, so [0] takes the sole mapping.
      basedir, htmls = html_info.items()[0]
      if len(htmls) != 1:
        raise TaskError('Unexpected resources for %s: %s' % (page, htmls))
      with safe_open(os.path.join(basedir, htmls[0])) as contents:
        url = self.publish_page(
          page.address,
          wikiconfig['space'],
          wikiconfig['title'],
          contents.read(),
          parent=wikiconfig.get('parent')
        )
        if url:
          urls.append(url)
          self.context.log.info('Published %s to %s' % (page, url))

    if self.open and urls:
      binary_util.ui_open(*urls)
Beispiel #8
0
    def compiled_idl(cls,
                     idl_dep,
                     generated_deps=None,
                     compiler=None,
                     language=None,
                     namespace_map=None):
        """Marks a jar as containing IDL files that should be fetched and processed locally.

    idl_dep:        A dependency resolvable to a single jar library.
    generated_deps: Dependencies for the code that will be generated from "idl_dep"
    compiler:       The thrift compiler to apply to the fetched thrift IDL files.
    language:       The language to generate code for - supported by some compilers
    namespace_map:  A mapping from IDL declared namespaces to custom namespaces - supported by some
                    compilers.
    """
        deps = list(filter(is_concrete, idl_dep.resolve()))
        if not len(deps) == 1:
            raise TaskError(
                'Can only arrange for compiled idl for a single dependency at a time, '
                'given:\n\t%s' % '\n\t'.join(map(str, deps)))
        jar = deps.pop()
        if not isinstance(jar, JarDependency):
            raise TaskError(
                'Can only arrange for compiled idl from a jar dependency, given: %s'
                % jar)

        # The placeholder cache is keyed on everything that affects the generated code.
        request = (jar, compiler, language)
        namespace_signature = None
        if namespace_map:
            # Hash the sorted mapping so equivalent maps yield the same signature.
            sha = hashlib.sha1()
            for ns_from, ns_to in sorted(namespace_map.items()):
                sha.update(ns_from)
                sha.update(ns_to)
            namespace_signature = sha.hexdigest()
        request += (namespace_signature, )

        if request not in cls._PLACEHOLDER_BY_REQUEST:
            # Lazily set up the extraction workdir and register it as a source root.
            if not cls._EXTRACT_BASE:
                config = Config.load()
                cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
                safe_mkdir(cls._EXTRACT_BASE)
                SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)

            with ParseContext.temp(cls._EXTRACT_BASE):
                # TODO(John Sirois): abstract ivy specific configurations notion away
                jar._configurations.append('idl')
                jar.with_artifact(configuration='idl', classifier='idl')
                target_name = '-'.join(
                    filter(None,
                           (jar.id, compiler, language, namespace_signature)))
                # Synthetic thrift library standing in for the IDL sources in the jar.
                placeholder = JavaThriftLibrary(target_name,
                                                sources=None,
                                                dependencies=[jar] +
                                                (generated_deps or []),
                                                compiler=compiler,
                                                language=language,
                                                namespace_map=namespace_map)
                cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
                cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
        return cls._PLACEHOLDER_BY_REQUEST[request]
Beispiel #9
0
    def genlang(self, lang, targets):
        """Run the appropriate ANTLR tool to generate java sources per target."""
        if lang != 'java':
            raise TaskError('Unrecognized antlr gen lang: %s' % lang)

        # TODO: Instead of running the compiler for each target, collect the targets
        # by type and invoke it twice, once for antlr3 and once for antlr4.

        for target in targets:
            output_dir = self._java_out(target)
            safe_mkdir(output_dir)

            compiler = target.compiler
            tool_classpath = self._classpath_by_compiler[compiler]
            tool_args = ["-o", output_dir]

            if compiler == 'antlr3':
                main_class = 'org.antlr.Tool'
            elif compiler == 'antlr4':
                # Also emit the parse tree visitor classes.
                tool_args.append("-visitor")
                main_class = 'org.antlr.v4.Tool'
            else:
                raise TaskError("Unknown ANTLR compiler: {}".format(compiler))

            tool_args.extend(self._calculate_sources([target]))
            exit_code = self.runjava(classpath=tool_classpath,
                                     main=main_class,
                                     args=tool_args,
                                     workunit_name='antlr')
            if exit_code != 0:
                raise TaskError('java %s ... exited non-zero (%i)' %
                                (main_class, exit_code))
Beispiel #10
0
 def _resolve_conflict(self, existing, proposed):
     """Resolve two competing jar dependency specs for the same artifact.

     Forced revs win over unforced ones; two conflicting forced revs are an
     error; otherwise the higher (leniently parsed) revision wins.

     Raises TaskError on conflicting forces or an unparseable revision.
     """
     if proposed == existing:
         return existing
     elif existing.force and proposed.force:
         raise TaskError(
             'Cannot force %s#%s to both rev %s and %s' %
             (proposed.org, proposed.name, existing.rev, proposed.rev))
     elif existing.force:
         self.context.log.debug(
             'Ignoring rev %s for %s#%s already forced to %s' %
             (proposed.rev, proposed.org, proposed.name, existing.rev))
         return existing
     elif proposed.force:
         self.context.log.debug(
             'Forcing %s#%s from %s to %s' %
             (proposed.org, proposed.name, existing.rev, proposed.rev))
         return proposed
     else:
         try:
             if Revision.lenient(proposed.rev) > Revision.lenient(
                     existing.rev):
                 self.context.log.debug(
                     'Upgrading %s#%s from rev %s  to %s' % (
                         proposed.org,
                         proposed.name,
                         existing.rev,
                         proposed.rev,
                     ))
                 return proposed
             else:
                 return existing
         except Revision.BadRevision as e:
             # Fold the underlying parse error into the message instead of
             # passing it as a second positional arg (which rendered as a tuple).
             raise TaskError('Failed to parse jar revision: %s' % e)
Beispiel #11
0
 def analysis_cache_full_path(analysis_cache_product):
   """Return the path of the single analysis cache file in the product.

   Expects a mapping of the shape { analysis_cache_dir: [analysis_cache_file] }
   and raises TaskError for any other cardinality.
   """
   error = 'There can only be one analysis cache file per output directory'
   if len(analysis_cache_product) != 1:
     raise TaskError(error)
   cache_dir, cache_files = analysis_cache_product.iteritems().next()
   if len(cache_files) != 1:
     raise TaskError(error)
   return os.path.join(cache_dir, cache_files[0])
Beispiel #12
0
    def genlang(self, lang, targets):
        """Run the thrift compiler over all gen targets for the given language.

        Spawns one thrift process per thrift root source, each writing into
        its own session dir, then merges the successful outputs into the
        combined dir.  Raises TaskError on an unsupported language or the
        first failed thrift run.
        """
        bases, sources = calculate_compile_roots(targets, self.is_gentarget)

        if lang == 'java':
            gen = self.gen_java.gen
        elif lang == 'python':
            gen = self.gen_python.gen
        else:
            raise TaskError('Unrecognized thrift gen lang: %s' % lang)

        # Common command-line prefix shared by every thrift invocation.
        args = [
            self.thrift_binary,
            '--gen',
            gen,
            '-recurse',
        ]

        if self.strict:
            args.append('-strict')
        if self.verbose:
            args.append('-verbose')
        for base in bases:
            args.extend(('-I', base))

        sessions = []
        for source in sources:
            self.context.log.info('Generating thrift for %s\n' % source)
            # Create a unique session dir for this thrift root.  Sources may be full paths but we only
            # need the path relative to the build root to ensure uniqueness.
            # TODO(John Sirois): file paths should be normalized early on and uniformly, fix the need to
            # relpath here at all.
            relsource = os.path.relpath(source, get_buildroot())
            outdir = os.path.join(self.session_dir,
                                  '.'.join(relsource.split(os.path.sep)))
            safe_mkdir(outdir)

            cmd = args[:]
            cmd.extend(('-o', outdir))
            cmd.append(source)
            log.debug('Executing: %s' % ' '.join(cmd))
            # Launch without waiting so all thrift roots compile concurrently.
            sessions.append(
                self.ThriftSession(outdir, cmd, subprocess.Popen(cmd)))

        # Reap the compiles; after the first failure, kill the stragglers.
        result = 0
        for session in sessions:
            if result != 0:
                session.process.kill()
            else:
                result = session.process.wait()
                if result != 0:
                    self.context.log.error('Failed: %s' %
                                           ' '.join(session.cmd))
                else:
                    _copytree(session.outdir, self.combined_dir)
        if result != 0:
            raise TaskError('%s ... exited non-zero (%i)' %
                            (self.thrift_binary, result))
Beispiel #13
0
def _get_target(address):
    """Resolve a target address string to its Target, raising TaskError otherwise."""
    try:
        address = Address.parse(get_buildroot(), address, is_relative=False)
    except IOError as e:
        raise TaskError('Failed to parse address: %s: %s' % (address, e))
    target = Target.get(address)
    if target:
        return target
    raise TaskError('Invalid target address: %s' % address)
Beispiel #14
0
 def parse_override(override):
     """Parse an override of the form 'coordinate=rev' into ((org, name), Semver)."""
     try:
         coordinate, rev = override.split('=', 1)
         try:
             parsed_rev = Semver.parse(rev)
         except ValueError as e:
             raise TaskError('Invalid version %s: %s' % (rev, e))
         return parse_jarcoordinate(coordinate), parsed_rev
     except ValueError:
         raise TaskError('Invalid override: %s' % override)
Beispiel #15
0
    def check(self, srcs, actual_deps):
        """Check for missing deps.

    See docstring for _compute_missing_deps for details.
    """
        # Skip all work unless at least one of the three checks is enabled.
        if self._check_missing_deps or self._check_missing_direct_deps or self._check_unnecessary_deps:
            missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps = \
              self._compute_missing_deps(srcs, actual_deps)

            buildroot = get_buildroot()

            def shorten(path):  # Make the output easier to read.
                for prefix in [buildroot, self._context.ivy_home]:
                    if path.startswith(prefix):
                        return os.path.relpath(path, prefix)
                return path

            # Report transitive deps missing from the BUILD graph; fatal if configured.
            if self._check_missing_deps and (missing_file_deps
                                             or missing_tgt_deps):
                for (tgt_pair, evidence) in missing_tgt_deps:
                    evidence_str = '\n'.join([
                        '    %s uses %s' % (shorten(e[0]), shorten(e[1]))
                        for e in evidence
                    ])
                    self._context.log.error(
                        'Missing BUILD dependency %s -> %s because:\n%s' %
                        (tgt_pair[0].address.reference(),
                         tgt_pair[1].address.reference(), evidence_str))
                for (src_tgt, dep) in missing_file_deps:
                    self._context.log.error(
                        'Missing BUILD dependency %s -> %s' %
                        (src_tgt.address.reference(), shorten(dep)))
                if self._check_missing_deps == 'fatal':
                    raise TaskError('Missing deps.')

            # Direct-dependency violations are only warnings unless configured fatal.
            if self._check_missing_direct_deps:
                for (tgt_pair, evidence) in missing_direct_tgt_deps:
                    evidence_str = '\n'.join([
                        '    %s uses %s' % (shorten(e[0]), shorten(e[1]))
                        for e in evidence
                    ])
                    self._context.log.warn(
                        'Missing direct BUILD dependency %s -> %s because:\n%s'
                        % (tgt_pair[0].address, tgt_pair[1].address,
                           evidence_str))
                if self._check_missing_direct_deps == 'fatal':
                    raise TaskError('Missing direct deps.')

            if self._check_unnecessary_deps:
                raise TaskError(
                    'Unnecessary dep warnings not implemented yet.')
Beispiel #16
0
  def check_clean_master(self):
    """Verify we are pushing from an allowed, clean branch (no-op in test mode)."""
    if self.dryrun or not self.commit:
      print('Skipping check for a clean master in test mode.')
      return

    if self.restrict_push_branches:
      branch = self.scm.branch_name
      if branch not in self.restrict_push_branches:
        raise TaskError('Can only push from %s, currently on branch: %s' % (
          ' '.join(sorted(self.restrict_push_branches)), branch
        ))

    changed_files = self.scm.changed_files()
    if changed_files:
      raise TaskError('Can only push from a clean branch, found : %s' % ' '.join(changed_files))
Beispiel #17
0
 def filter_for_type(name):
     """Build a predicate matching targets of the type named by 'name'.

     Attempts a fully qualified import first (for custom types) and falls
     back to the types exposed by twitter.pants.
     """
     try:
         # Try to do a fully qualified import 1st for filtering on custom types.
         pkg, mod, type_name = name.rsplit('.', 2)
         module = __import__('%s.%s' % (pkg, mod), fromlist=[pkg])
         target_type = getattr(module, type_name)
     except (ImportError, ValueError):
         # Fall back on pants provided target types.
         if not hasattr(twitter.pants, name):
             raise TaskError('Invalid type name: %s' % name)
         target_type = getattr(twitter.pants, name)
     if not issubclass(target_type, Target):
         raise TaskError('Not a Target type: %s' % name)

     def _is_match(target):
         return isinstance(target, target_type)
     return _is_match
Beispiel #18
0
def calculate_basedir(filepath):
    """Return the source root of 'filepath' by stripping its declared package path.

    Scans the file for a package declaration, verifies the file lives in a
    directory ending with that package path, and returns the directory
    prefix above it.

    Raises TaskError if the package doesn't match the directory layout or
    no package declaration is found.
    """
    with open(filepath, 'r') as source:
        for line in source:
            match = PACKAGE_PARSER.match(line)
            if match:
                package = match.group(1)
                packagedir = package.replace('.', '/')
                dirname = os.path.dirname(filepath)
                if not dirname.endswith(packagedir):
                    # Bug fix: the original interpolated the 'file' builtin
                    # here instead of the path being inspected.
                    raise TaskError(
                        'File %s declares a mismatching package %s' %
                        (filepath, package))
                return dirname[:-len(packagedir)]

    raise TaskError('Could not calculate a base dir for: %s' % filepath)
Beispiel #19
0
                def run_tests(classpath, main, jvmargs=None):
                    """Run 'main' over the partitioned test batches, raising TaskError on failure."""
                    def test_workunit_factory(name, labels=None, cmd=''):
                        # Avoid a mutable default argument; build the label list per call.
                        return self.context.new_workunit(
                            name=name,
                            labels=[WorkUnit.TEST] + (labels or []),
                            cmd=cmd)

                    # TODO(John Sirois): Integrated batching with the test runner.  As things stand we get
                    # results summaries for example for each batch but no overall summary.
                    # http://jira.local.twitter.com/browse/AWESOME-1114
                    result = 0
                    for batch in self._partition(tests):
                        with binary_util.safe_args(batch) as batch_tests:
                            result += abs(
                                binary_util.runjava_indivisible(
                                    jvmargs=(jvmargs or []) + self.java_args,
                                    classpath=classpath,
                                    main=main,
                                    opts=self.opts,
                                    args=batch_tests,
                                    workunit_factory=test_workunit_factory,
                                    workunit_name='run'))
                            if result != 0 and self.fail_fast:
                                break
                    if result != 0:
                        raise TaskError()
Beispiel #20
0
    def compile(self, args, classpath, sources, classes_output_dir,
                analysis_file):
        """Invoke JMake to compile 'sources'; raises TaskError on a non-zero exit.

        NOTE(review): the incoming 'args' parameter is immediately rebound to
        a fresh list below, and 'classes_output_dir' is never read
        (self._classes_dir is used instead) - confirm whether callers expect
        these parameters to be honored.
        """
        jmake_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(
            self._jmake_bootstrap_key)
        # Base JMake arguments: classpath, output dir, and the text-format
        # project database used for incremental compilation state.
        args = [
            '-classpath',
            ':'.join(classpath + [self._classes_dir]),
            '-d',
            self._classes_dir,
            '-pdb',
            analysis_file,
            '-pdb-text-format',
        ]

        # Point JMake at the javac wrapper it should invoke.
        compiler_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(
            self._compiler_bootstrap_key)
        args.extend([
            '-jcpath',
            ':'.join(compiler_classpath),
            '-jcmainclass',
            'com.twitter.common.tools.Compiler',
        ])
        # Raw javac options are passed through with JMake's -C prefix.
        args.extend(map(lambda arg: '-C%s' % arg, self._javac_opts))

        args.extend(self._args)
        args.extend(sources)
        result = self.runjava(classpath=jmake_classpath,
                              main=JavaCompile._JMAKE_MAIN,
                              jvm_options=self._jvm_options,
                              args=args,
                              workunit_name='jmake',
                              workunit_labels=[WorkUnit.COMPILER])
        if result:
            # Map known JMake exit codes to friendlier messages.
            default_message = 'Unexpected error - JMake returned %d' % result
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
 def login(self):
   """Lazily authenticate against confluence, caching the session."""
   if self._wiki:
     return self._wiki
   try:
     self._wiki = Confluence.login(self.url, self.user, self.api())
   except ConfluenceError as e:
     raise TaskError('Failed to login to confluence: %s' % e)
   return self._wiki
  def publish_page(self, address, space, title, content, parent=None):
    """Create or update a confluence page, returning its URL.

    Returns None without publishing when the existing page content is
    unchanged and force is not set.  Raises TaskError on confluence errors.
    """
    body = textwrap.dedent('''

    <!-- DO NOT EDIT - generated by pants from %s -->

    %s
    ''').strip() % (address, content)

    pageopts = dict(
      versionComment = 'updated by pants!'
    )
    wiki = self.login()
    existing = wiki.getpage(space, title)
    if existing:
      # Avoid churning page versions when nothing changed.
      if not self.force and existing['content'].strip() == body.strip():
        self.context.log.warn("Skipping publish of '%s' - no changes" % title)
        return

      # Updating in place requires the existing page's id and version.
      pageopts['id'] = existing['id']
      pageopts['version'] = existing['version']

    try:
      page = wiki.create_html_page(space, title, body, parent, **pageopts)
      return page['url']
    except ConfluenceError as e:
      raise TaskError('Failed to update confluence: %s' % e)
Beispiel #23
0
 def test_execute_code(self):
     """The engine should surface a TaskError's exit_code as its result."""
     action = self._throw(TaskError(exit_code=42))
     engine = self.RecordingEngine(action=action)
     phases = self.as_phases('four', 'five', 'six')
     self.assertEqual(42, engine.execute(self.context, phases))
     self.assert_attempt(engine, 'four', 'five', 'six')
Beispiel #24
0
  def _generate_individual(self, classpath, targets, create_jvmdoc_command):
    """Generate jvmdoc for each target into its own gendir, in parallel.

    Builds one command per target, fans the commands out over a process
    pool, and raises TaskError on the first failure - unless ignore_failure
    is set, in which case failures are only logged.
    """
    jobs = {}
    for target in targets:
      gendir = self._gendir(target)
      command = create_jvmdoc_command(classpath, gendir, target)
      if command:
        jobs[gendir] = (target, command)

    if jobs:
      with contextlib.closing(
            multiprocessing.Pool(processes=min(len(jobs), multiprocessing.cpu_count()))) as pool:
        # map would be a preferable api here but fails after the 1st batch with an internal:
        # ...
        #  File "...src/python/twitter/pants/tasks/jar_create.py", line 170, in javadocjar
        #      pool.map(createjar, jobs)
        #    File "...lib/python2.6/multiprocessing/pool.py", line 148, in map
        #      return self.map_async(func, iterable, chunksize).get()
        #    File "...lib/python2.6/multiprocessing/pool.py", line 422, in get
        #      raise self._value
        #  NameError: global name 'self' is not defined
        futures = []
        for gendir, (target, command) in jobs.items():
          futures.append(pool.apply_async(create_jvmdoc, args=(command, gendir)))

        # Collect results; gendir keys the future back to its originating job.
        for future in futures:
          result, gendir = future.get()
          target, command = jobs[gendir]
          if result != 0:
            message = 'Failed to process %s for %s [%d]: %s' % (
                      self._jvmdoc.tool_name, target, result, command)
            if self.ignore_failure:
              self.context.log.warn(message)
            else:
              raise TaskError(message)
Beispiel #25
0
  def execute_single_compilation(self, vt, cp):
    """Compile one versioned-target unit and update dep/artifact caches.

    Merges prior depfile state, compiles the unit's sources against the
    classpath 'cp', splits the depfile back out per target, and records
    class/depfile artifacts in the artifact cache when enabled.

    Raises TaskError if the compiler exits non-zero.
    """
    depfile = self.create_depfile_path(vt.targets)

    self.merge_depfile(vt)  # Get what we can from previous builds.
    sources_by_target, fingerprint = self.calculate_sources(vt.targets)
    if sources_by_target:
      # Union all targets' sources into one compile invocation.
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' %
                              '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        # Restrict the classpath to the configured ivy configurations.
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
        self.split_depfile(vt)

      all_artifact_files = [depfile]

      if self._artifact_cache and self.context.options.write_to_artifact_cache:
        # Cache per-target artifacts (depfile + classfiles) as well as the
        # combined artifact for the whole compilation unit.
        deps = Dependencies(self._classes_dir)
        deps.load(depfile)
        vts_artifactfile_pairs = []
        for single_vt in vt.versioned_targets:
          per_target_depfile = self.create_depfile_path([single_vt.target])
          per_target_artifact_files = [per_target_depfile]
          for _, classes_by_source in deps.findclasses([single_vt.target]).items():
            for _, classes in classes_by_source.items():
              classfile_paths = [os.path.join(self._classes_dir, cls) for cls in classes]
              per_target_artifact_files.extend(classfile_paths)
              all_artifact_files.extend(classfile_paths)
            vts_artifactfile_pairs.append((single_vt, per_target_artifact_files))
        vts_artifactfile_pairs.append((vt, all_artifact_files))
        self.update_artifact_cache(vts_artifactfile_pairs)
Beispiel #26
0
 def execute(self, _):
     """Select and attach compatible python interpreters to each python root target."""
     options = self.context.options
     ifilters = options.python_interpreter
     self._cache.setup(force=options.python_setup_force,
                       paths=options.python_setup_paths,
                       filters=ifilters or [''])
     all_interpreters = set(self._cache.interpreters)
     for target in self.context.targets(is_python_root):
         self.context.log.info('Setting up interpreters for %s' % target)
         closure = target.closure()
         self.context.log.debug('  - Target closure: %d targets' %
                                len(closure))
         # Intersect the interpreters acceptable to every target in the closure.
         compatible = all_interpreters
         for closure_target in closure:
             constraints = getattr(closure_target, 'compatibility', [''])
             compatible = compatible.intersection(
                 self._cache.matches(constraints))
         self.context.log.debug(
             '  - Target minimum compatibility: %s' %
             (' '.join(interp.version_string
                       for interp in compatible)))
         interpreters = self._cache.select_interpreter(
             compatible,
             allow_multiple=options.python_multi)
         self.context.log.debug('  - Selected: %s' % interpreters)
         if not interpreters:
             raise TaskError('No compatible interpreters for %s' % target)
         target.interpreters = interpreters
Beispiel #27
0
 def createtarget(self, lang, gentarget, dependees):
     """Create the synthetic target wrapping the code generated from gentarget."""
     factories = {
         'java': self._create_java_target,
         'python': self._create_python_target,
     }
     factory = factories.get(lang)
     if factory is None:
         raise TaskError('Unrecognized thrift gen lang: %s' % lang)
     return factory(gentarget, dependees)
Beispiel #28
0
    def execute(self, targets):
        """Run the caliper benchmark runner with the allocation agent installed."""
        # For rewriting JDK classes to work, the JAR file has to be listed specifically in
        # the JAR manifest as something that goes in the bootclasspath.
        # The MANIFEST list a jar 'allocation.jar' this is why we have to rename it
        agent_jar = self._bootstrap_utils.get_jvm_build_tools_classpath(
            self._agent_bootstrap_key)[0]
        allocation_jar = os.path.join(os.path.dirname(agent_jar),
                                      "allocation.jar")

        # TODO(Steve Gury): Find a solution to avoid copying the jar every run and being resilient
        # to version upgrade
        shutil.copyfile(agent_jar, allocation_jar)
        os.environ['ALLOCATION_JAR'] = str(allocation_jar)

        benchmark_classpath = self._bootstrap_utils.get_jvm_build_tools_classpath(
            self._benchmark_bootstrap_key)

        exit_code = runjava_indivisible(
            jvmargs=self.java_args,
            classpath=self.classpath(benchmark_classpath),
            main='com.google.caliper.Runner',
            opts=self.caliper_args,
            workunit_name='caliper')
        if exit_code != 0:
            raise TaskError()
Beispiel #29
0
        def parse_override(override):
            """Parse 'org#name=rev_or_url' into ((org, name), replace_fn).

            The returned replace_fn, applied to a dependency template, logs
            the override and returns a forced copy pinned to the given rev
            (or to a SNAPSHOT at the given url).
            """
            match = re.match(r'^([^#]+)#([^=]+)=([^\s]+)$', override)
            if not match:
                raise TaskError('Invalid dependency override: %s' % override)

            org, name, rev_or_url = match.groups()
            is_url = re.match(r'^\w+://.+', rev_or_url) is not None

            def fmt_message(message, template):
                substitutions = dict(
                    overridden='%s#%s;%s' %
                    (template.org, template.module, template.version),
                    rev=rev_or_url,
                    url=rev_or_url)
                return message % substitutions

            def replace_rev(template):
                context.log.info(
                    fmt_message('Overrode %(overridden)s with rev %(rev)s',
                                template))
                return template.extend(version=rev_or_url,
                                       url=None,
                                       force=True)

            def replace_url(template):
                context.log.info(
                    fmt_message(
                        'Overrode %(overridden)s with snapshot at %(url)s',
                        template))
                return template.extend(version='SNAPSHOT',
                                       url=rev_or_url,
                                       force=True)

            return (org, name), (replace_url if is_url else replace_rev)
Beispiel #30
0
def entry_for_one_class(nom, klas):
    """Generate a BUILD dictionary entry for a class.

    nom: name like 'python_binary'
    klas: class like twitter.pants.python_binary
    """
    try:
        args, varargs, varkw, defaults = inspect.getargspec(klas.__init__)
        argspec = inspect.formatargspec(args[1:], varargs, varkw, defaults)
        funcdoc = klas.__init__.__doc__

        methods = []
        for attrname in dir(klas):
            attr = getattr(klas, attrname)
            if not twitter.pants.base.get_builddict_info(attr):
                continue
            if not inspect.ismethod(attr):
                raise TaskError(
                    '@manual.builddict on non-method %s within class %s '
                    'but I only know what to do with methods' % (attrname, nom))
            methods.append(entry_for_one_method(attrname, attr))
    except TypeError:  # __init__ might not be a Python function
        argspec = None
        funcdoc = None
        methods = None

    return entry(nom,
                 classdoc=klas.__doc__,
                 argspec=argspec,
                 funcdoc=funcdoc,
                 methods=methods)