Example #1
    def __init__(self,
                 target,
                 root_dir,
                 run_tracker,
                 interpreter=None,
                 conn_timeout=None):
        self.target = target
        self.interpreter = interpreter or PythonInterpreter.get()
        if not isinstance(target, PythonBinary):
            raise PythonBinaryBuilder.NotABinaryTargetException(
                "Target %s is not a PythonBinary!" % target)

        config = Config.load()
        self.distdir = config.getdefault('pants_distdir')
        distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

        run_info = run_tracker.run_info
        build_properties = {}
        build_properties.update(
            run_info.add_basic_info(run_id=None, timestamp=time.time()))
        build_properties.update(run_info.add_scm_info())

        pexinfo = target.pexinfo.copy()
        pexinfo.build_properties = build_properties
        builder = PEXBuilder(distpath,
                             pex_info=pexinfo,
                             interpreter=self.interpreter)

        self.chroot = PythonChroot(target,
                                   root_dir,
                                   builder=builder,
                                   interpreter=self.interpreter,
                                   conn_timeout=conn_timeout)
Example #2
File: py.py Project: xianxu/pants
 def setup_parser(self, parser, args):
     parser.set_usage("\n" "  %prog py (options) [spec] args\n")
     parser.disable_interspersed_args()
     parser.add_option(
         "-t",
         "--timeout",
         dest="conn_timeout",
         type="int",
         default=Config.load().getdefault('connection_timeout'),
         help="Number of seconds to wait for http connections.")
     parser.add_option("--pex",
                       dest="pex",
                       default=False,
                       action='store_true',
                       help="dump a .pex of this chroot")
     parser.add_option("--resolve",
                       dest="resolve",
                       default=False,
                       action='store_true',
                       help="resolve targets instead of building.")
     parser.add_option("-v",
                       "--verbose",
                       dest="verbose",
                       default=False,
                       action='store_true',
                       help="show verbose output.")
     parser.epilog = """Interact with the chroot of the specified target."""
Example #3
    def __init__(self, target, root_dir, extra_targets=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._cache = BuildCache(
            os.path.join(self._config.get("python-setup", "artifact_cache"), "%s" % PythonIdentity.get())
        )
        self._extra_targets = list(extra_targets) if extra_targets is not None else []
        self._extra_targets.append(self._get_common_python())

        cachedir = self._config.get("python-setup", "cache")
        safe_mkdir(cachedir)
        self._eggcache = cachedir

        local_repo = "file://%s" % os.path.realpath(cachedir)
        self._repos = [local_repo] + self._config.getlist("python-setup", "repos")
        self._fetcher = ReqFetcher(repos=self._repos, cache=cachedir)
        self._index = None
        for index in self._config.getlist("python-setup", "indices"):
            if PythonChroot.can_contact_index(index):
                self._index = index
                break
        self._additional_reqs = set()

        distdir = self._config.getdefault("pants_distdir")
        distpath = tempfile.mktemp(dir=distdir, prefix=target.name)
        self.env = PythonEnvironment(distpath)
Example #4
  def setUp(self):
    with temporary_file() as ini:
      ini.write(
'''
[DEFAULT]
answer: 42
scale: 1.2
path: /a/b/%(answer)s
embed: %(path)s::foo
disclaimer:
  Let it be known
  that.

[a]
fast: True
list: [1, 2, 3, %(answer)s]

[b]
preempt: False
dict: {
    'a': 1,
    'b': %(answer)s,
    'c': ['%(answer)s', %(answer)s]
  }
''')
      ini.close()
      self.config = Config.load(configpath=ini.name)
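A hypothetical follow-up test method, not part of the scraped sources: it sketches how the values written by the setUp above might be read back through the Config accessors used elsewhere in these examples (getdefault, get, getlist). The exact return types and coercion behavior of this legacy Config API are assumptions.
  def test_read_back(self):
    # %(answer)s interpolation is expected to expand inside 'path' and 'embed' (see the ini above).
    print(self.config.getdefault('answer'))   # value comes from [DEFAULT]
    print(self.config.getdefault('path'))     # expected: '/a/b/42' after interpolation
    print(self.config.get('a', 'fast'))       # section-scoped lookup, as in other examples
    print(self.config.getlist('a', 'list'))   # list-valued option; assumed to parse into a Python list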
Example #5
    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 interpreter=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._interpreter = interpreter or PythonInterpreter.get()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver(self._config,
                                       target,
                                       conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp(),
                                              interpreter=self._interpreter)

        # Note: unrelated to the general pants artifact cache.
        self._egg_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            str(self._interpreter.identity))

        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._egg_cache_root)
Example #6
 def __init__(self, targets):
   self._config = Config.load()
   self._targets = targets
   self._resolver = SilentResolver(
     caches = self._config.getlist('python-setup', 'local_eggs') +
               [self._config.get('python-setup', 'install_cache')],
     install_cache = self._config.get('python-setup', 'install_cache'),
     fetcher = fetcher_from_config(self._config))
Example #8
def _run():
    version = get_version()
    if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION:
        _do_exit(version)

    root_dir = get_buildroot()
    if not os.path.exists(root_dir):
        _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' %
                       root_dir)

    if len(sys.argv) < 2 or (len(sys.argv) == 2
                             and sys.argv[1] in _HELP_ALIASES):
        _help(version, root_dir)

    command_class, command_args = _parse_command(root_dir, sys.argv[1:])

    parser = optparse.OptionParser(version=version)
    RcFile.install_disable_rc_option(parser)
    parser.add_option(_LOG_EXIT_OPTION,
                      action='store_true',
                      default=False,
                      dest='log_exit',
                      help='Log an exit message on success or failure.')

    config = Config.load()
    run_tracker = RunTracker(config)
    report = default_report(config, run_tracker)
    run_tracker.start(report)

    url = run_tracker.run_info.get_info('report_url')
    run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

    try:
        command = command_class(run_tracker, root_dir, parser, command_args)

        if command.serialized():

            def onwait(pid):
                print('Waiting on pants process %s to complete' %
                      _process_info(pid),
                      file=sys.stderr)
                return True

            runfile = os.path.join(root_dir, '.pants.run')
            lock = Lock.acquire(runfile, onwait=onwait)
        else:
            lock = Lock.unlocked()
        try:
            result = command.run(lock)
            _do_exit(result)
        except KeyboardInterrupt:
            command.cleanup()
            raise
        finally:
            lock.release()
    finally:
        run_tracker.end()
Example #9
def _run():
  version = get_version()
  if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION:
    _do_exit(version)

  root_dir = get_buildroot()
  if not os.path.exists(root_dir):
    _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir)

  if len(sys.argv) < 2 or (len(sys.argv) == 2 and sys.argv[1] in _HELP_ALIASES):
    _help(version, root_dir)

  command_class, command_args = _parse_command(root_dir, sys.argv[1:])

  parser = optparse.OptionParser(version=version)
  RcFile.install_disable_rc_option(parser)
  parser.add_option(_LOG_EXIT_OPTION,
                    action='store_true',
                    default=False,
                    dest='log_exit',
                    help = 'Log an exit message on success or failure.')

  config = Config.load()
  run_tracker = RunTracker(config)
  report = initial_reporting(config, run_tracker)
  run_tracker.start(report)

  url = run_tracker.run_info.get_info('report_url')
  if url:
    run_tracker.log(Report.INFO, 'See a report at: %s' % url)
  else:
    run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

  command = command_class(run_tracker, root_dir, parser, command_args)
  try:
    if command.serialized():
      def onwait(pid):
        print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr)
        return True
      runfile = os.path.join(root_dir, '.pants.run')
      lock = Lock.acquire(runfile, onwait=onwait)
    else:
      lock = Lock.unlocked()
    try:
      result = command.run(lock)
      _do_exit(result)
    except KeyboardInterrupt:
      command.cleanup()
      raise
    finally:
      lock.release()
  finally:
    run_tracker.end()
    # Must kill nailguns only after run_tracker.end() is called, because there may still
    # be pending background work that needs a nailgun.
    if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \
        or config.get('nailgun', 'autokill', default=False):
      NailgunTask.killall(None)
Example #10
def generate_coverage_config(target):
  cp = configparser.ConfigParser()
  cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
  cp.add_section('html')
  target_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage',
      os.path.dirname(target.address.buildfile.relpath), target.name)
  safe_mkdir(target_dir)
  cp.set('html', 'directory', target_dir)
  return cp
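A minimal usage sketch, not taken from the scraped sources: it writes the ConfigParser returned by generate_coverage_config to a file that coverage.py could be pointed at. The target argument and output directory are placeholders.
import os

def write_coverage_rc(target, output_dir):
  # 'target' is assumed to be a pants python target like the ones used above.
  cp = generate_coverage_config(target)
  rc_path = os.path.join(output_dir, '.coverage_config')
  with open(rc_path, 'w') as fp:
    cp.write(fp)  # ConfigParser.write serializes the generated sections, including [html]
  return rc_path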
Example #11
 def setup_parser(self, parser, args):
   parser.set_usage("\n"
                    "  %prog build (options) [spec] (build args)\n"
                    "  %prog build (options) [spec]... -- (build args)")
   parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                     default=Config.load().getdefault('connection_timeout'),
                     help="Number of seconds to wait for http connections.")
   parser.disable_interspersed_args()
   parser.epilog = """Builds the specified Python target(s). Use ./pants goal for JVM and other targets."""
Example #13
    def setUp(self):
        with temporary_file() as ini:
            ini.write('''
[python-setup]
platforms: [
  'current',
  'linux-x86_64']
''')
            ini.close()
            self.config = Config.load(configpath=ini.name)
Example #14
def profile_classpath(profile,
                      java_runner=None,
                      config=None,
                      ivy_jar=None,
                      ivy_settings=None,
                      workunit_factory=None):
    # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
    # path structure

    java_runner = java_runner or runjava_indivisible

    config = config or Config.load()

    profile_dir = config.get('ivy-profiles', 'workdir')
    profile_libdir = os.path.join(profile_dir, '%s.libs' % profile)
    profile_check = '%s.checked' % profile_libdir
    if not os.path.exists(profile_check):
        # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
        ivy_classpath = [ivy_jar] if ivy_jar else config.getlist(
            'ivy', 'classpath')

        safe_mkdir(profile_libdir)
        ivy_settings = ivy_settings or config.get('ivy', 'ivy_settings')
        ivy_xml = os.path.join(profile_dir, '%s.ivy.xml' % profile)
        ivy_opts = [
            '-settings',
            ivy_settings,
            '-ivy',
            ivy_xml,

            # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
            # needs for scala - isolate this hack to idea.py where it belongs
            '-retrieve',
            '%s/[artifact]-[revision](-[classifier]).[ext]' % profile_libdir,
            '-sync',
            '-symlink',
            '-types',
            'jar',
            'bundle',
            '-confs',
            'default'
        ]
        result = java_runner(classpath=ivy_classpath,
                             main='org.apache.ivy.Main',
                             workunit_factory=workunit_factory,
                             workunit_name='%s:bootstrap' % profile,
                             opts=ivy_opts)
        if result != 0:
            raise TaskError('Failed to load profile %s, ivy exit code %d' %
                            (profile, result))
        touch(profile_check)

    return [
        os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)
    ]
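A hypothetical call site, not from the scraped sources: 'scala-compile' is a made-up profile name, and the returned list holds the jars Ivy retrieved into the profile's lib directory.
import os

jars = profile_classpath('scala-compile')
print(os.pathsep.join(jars))  # e.g. hand the bootstrapped jars to a JVM tool as its classpath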
Example #15
  def setUp(self):
    with temporary_file() as ini:
      ini.write(
'''
[python-setup]
platforms: [
  'current',
  'linux-x86_64']
''')
      ini.close()
      self.config = Config.load(configpath=ini.name)
Example #16
  def __init__(self, target, root_dir, extra_targets=None, builder=None, conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                        '%s' % PythonIdentity.get())
Example #17
File: build.py Project: xianxu/pants
 def setup_parser(self, parser, args):
     parser.set_usage("\n"
                      "  %prog build (options) [spec] (build args)\n"
                      "  %prog build (options) [spec]... -- (build args)")
     parser.add_option(
         "-t",
         "--timeout",
         dest="conn_timeout",
         type="int",
         default=Config.load().getdefault('connection_timeout'),
         help="Number of seconds to wait for http connections.")
     parser.disable_interspersed_args()
     parser.epilog = """Builds the specified Python target(s). Use ./pants goal for JVM and other targets."""
Example #18
  def __init__(self, target, root_dir, extra_targets=None, builder=None, conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())

    artifact_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                       '%s' % PythonIdentity.get())
    self._artifact_cache = FileBasedArtifactCache(None, self._root, artifact_cache_root,
                                                  self._builder.add_dependency_file)
Example #19
 def setup_parser(self, parser, args):
   parser.set_usage("\n"
                    "  %prog py (options) [spec] args\n")
   parser.disable_interspersed_args()
   parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                     default=Config.load().getdefault('connection_timeout'),
                     help="Number of seconds to wait for http connections.")
   parser.add_option("--pex", dest="pex", default=False, action='store_true',
                     help="dump a .pex of this chroot")
   parser.add_option("--resolve", dest="resolve", default=False, action='store_true',
                     help="resolve targets instead of building.")
   parser.add_option("-v", "--verbose", dest="verbose", default=False, action='store_true',
                     help="show verbose output.")
   parser.epilog = """Interact with the chroot of the specified target."""
Example #20
File: build.py Project: alfss/commons
 def setup_parser(self, parser, args):
   parser.set_usage("\n"
                    "  %prog build (options) [spec] (build args)\n"
                    "  %prog build (options) [spec]... -- (build args)")
   parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                     default=Config.load().getdefault('connection_timeout'),
                     help="Number of seconds to wait for http connections.")
   parser.add_option('-i', '--interpreter', dest='interpreter', default=None,
                     help='The interpreter requirement for this chroot.')
   parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                     help='Show verbose output.')
   parser.disable_interspersed_args()
   parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                    'targets.')
Example #21
File: extract.py Project: alfss/commons
  def compiled_idl(cls, idl_dep, generated_deps=None, compiler=None, language=None, namespace_map=None):
    """Marks a jar as containing IDL files that should be fetched and processed locally.

    idl_dep:        A dependency resolvable to a single jar library.
    generated_deps: Dependencies for the code that will be generated from "idl_dep"
    compiler:       The thrift compiler to apply to the fetched thrift IDL files.
    language:       The language to generate code for - supported by some compilers
    namespace_map:  A mapping from IDL declared namespaces to custom namespaces - supported by some
                    compilers.
    """
    deps = [t for t in idl_dep.resolve() if t.is_concrete]
    if not len(deps) == 1:
      raise TaskError('Can only arrange for compiled idl for a single dependency at a time, '
                      'given:\n\t%s' % '\n\t'.join(map(str, deps)))
    jar = deps.pop()
    if not isinstance(jar, JarDependency):
      raise TaskError('Can only arrange for compiled idl from a jar dependency, given: %s' % jar)

    request = (jar, compiler, language)
    namespace_signature = None
    if namespace_map:
      sha = hashlib.sha1()
      for ns_from, ns_to in sorted(namespace_map.items()):
        sha.update(ns_from)
        sha.update(ns_to)
      namespace_signature = sha.hexdigest()
    request += (namespace_signature,)

    if request not in cls._PLACEHOLDER_BY_REQUEST:
      if not cls._EXTRACT_BASE:
        config = Config.load()
        cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
        safe_mkdir(cls._EXTRACT_BASE)
        SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)

      with ParseContext.temp(cls._EXTRACT_BASE):
        # TODO(John Sirois): abstract ivy specific configurations notion away
        jar._configurations.append('idl')
        jar.with_artifact(configuration='idl', classifier='idl')
        target_name = '-'.join(filter(None, (jar.id, compiler, language, namespace_signature)))
        placeholder = JavaThriftLibrary(target_name,
                                        sources=None,
                                        dependencies=[jar] + (generated_deps or []),
                                        compiler=compiler,
                                        language=language,
                                        namespace_map=namespace_map)
        cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
        cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
    return cls._PLACEHOLDER_BY_REQUEST[request]
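A hypothetical invocation, not from extract.py: 'ExtractTask' stands in for whatever class owns compiled_idl (the snippet above does not show it), and the jar dependency, compiler, language and namespace_map values are placeholders chosen to match the docstring.
placeholder = ExtractTask.compiled_idl(thrift_idl_jar,
                                       compiler='thrift',
                                       language='java',
                                       namespace_map={'org.example.idl': 'org.example.gen'})
# The returned JavaThriftLibrary placeholder is what downstream targets depend on; repeated calls
# with the same (jar, compiler, language, namespace_map) reuse the cached placeholder.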
Example #22
  def __init__(self, run_tracker, root_dir, parser, argv):
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
      self.error("A spec argument is required")

    self._config = Config.load()
    self._root = root_dir

    address = Address.parse(root_dir, self.args[0])
    self.target = Target.get(address)
    if self.target is None:
      self.error('%s is not a valid target!' % self.args[0])

    if not self.target.provides:
      self.error('Target must provide an artifact.')
Example #23
def profile_classpath(profile, java_runner=None, config=None, ivy_jar=None, ivy_settings=None, workunit_factory=None):
    # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
    # path structure

    java_runner = java_runner or runjava_indivisible

    config = config or Config.load()

    profile_dir = config.get("ivy-profiles", "workdir")
    profile_libdir = os.path.join(profile_dir, "%s.libs" % profile)
    profile_check = "%s.checked" % profile_libdir
    if not os.path.exists(profile_check):
        # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
        ivy_classpath = [ivy_jar] if ivy_jar else config.getlist("ivy", "classpath")

        safe_mkdir(profile_libdir)
        ivy_settings = ivy_settings or config.get("ivy", "ivy_settings")
        ivy_xml = os.path.join(profile_dir, "%s.ivy.xml" % profile)
        ivy_opts = [
            "-settings",
            ivy_settings,
            "-ivy",
            ivy_xml,
            # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
            # needs for scala - isolate this hack to idea.py where it belongs
            "-retrieve",
            "%s/[artifact]-[revision](-[classifier]).[ext]" % profile_libdir,
            "-sync",
            "-symlink",
            "-types",
            "jar",
            "bundle",
            "-confs",
            "default",
        ]
        result = java_runner(
            classpath=ivy_classpath,
            main="org.apache.ivy.Main",
            workunit_factory=workunit_factory,
            workunit_name="%s:bootstrap" % profile,
            opts=ivy_opts,
        )
        if result != 0:
            raise TaskError("Failed to load profile %s, ivy exit code %s" % (profile, str(result)))
        touch(profile_check)

    return [os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)]
Example #24
  def __init__(self, target, root_dir, extra_targets=None, builder=None):
    self._config = Config.load()

    self._target = target
    self._root = root_dir
    self._cache = BuildCache(os.path.join(self._config.get('python-setup', 'artifact_cache'),
      '%s' % PythonIdentity.get()))
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = PythonResolver([self._target] + self._extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())
    self._platforms = (Platform.current(),)
    self._pythons = (sys.version[:3],)

    # TODO(wickman) Should this be in the binary builder?
    if isinstance(self._target, PythonBinary):
      self._platforms = self._target._platforms
      self._pythons = self._target._interpreters
Example #25
File: setup_py.py Project: xianxu/pants
    def execute(self):
        config = Config.load()
        distdir = config.getdefault('pants_distdir')
        setup_dir = os.path.join(
            distdir, '%s-%s' %
            (self.target.provides._name, self.target.provides._version))
        chroot = Chroot(distdir, name=self.target.provides._name)
        self.write_sources(chroot)
        self.write_setup(chroot)
        if os.path.exists(setup_dir):
            import shutil
            shutil.rmtree(setup_dir)
        os.rename(chroot.path(), setup_dir)

        with pushd(setup_dir):
            cmd = '%s setup.py %s' % (sys.executable, self.options.run
                                      or 'sdist')
            print('Running "%s" in %s' % (cmd, setup_dir))
            extra_args = {} if self.options.run else dict(
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            po = subprocess.Popen(cmd, shell=True, **extra_args)
            po.wait()

        if self.options.run:
            print('Ran %s' % cmd)
            print('Output in %s' % setup_dir)
            return po.returncode
        elif po.returncode != 0:
            print('Failed to run %s!' % cmd)
            for line in po.stdout.read().splitlines():
                print('stdout: %s' % line)
            for line in po.stderr.read().splitlines():
                print('stderr: %s' % line)
            return po.returncode

        expected_tgz = '%s-%s.tar.gz' % (self.target.provides._name,
                                         self.target.provides._version)
        expected_target = os.path.join(setup_dir, 'dist', expected_tgz)
        dist_tgz = os.path.join(distdir, expected_tgz)
        if not os.path.exists(expected_target):
            print('Could not find expected target %s!' % expected_target)
            sys.exit(1)
        safe_delete(dist_tgz)
        os.rename(expected_target, dist_tgz)
        print('Wrote %s' % dist_tgz)
        safe_rmtree(setup_dir)
Example #26
  def iter_generated_sources(cls, target, root, config=None):
    config = config or Config.load()
    # This is sort of facepalmy -- python.new will make this much better.
    for target_type, target_builder in cls.GENERATED_TARGETS.items():
      if isinstance(target, target_type):
        builder_cls = target_builder
        break
    else:
      raise TypeError(
          'write_generated_sources could not find suitable code generator for %s' % type(target))

    builder = builder_cls(target, root, config)
    builder.generate()
    for root, _, files in os.walk(builder.package_root):
      for fn in files:
        target_file = os.path.join(root, fn)
        yield os.path.relpath(target_file, builder.package_root), target_file
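A hypothetical consumer, not from the scraped sources: it copies each generated source into a staging directory. 'CodegenHelper' stands in for whatever class owns iter_generated_sources above, and codegen_target, root_dir and staging_dir are placeholders.
import os
import shutil

def stage_generated_sources(codegen_target, root_dir, staging_dir):
  for rel_path, abs_path in CodegenHelper.iter_generated_sources(codegen_target, root_dir):
    dest = os.path.join(staging_dir, rel_path)
    safe_mkdir(os.path.dirname(dest))  # same helper used throughout these examples
    shutil.copy(abs_path, dest)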
Example #27
def select_binary(base_path, version, name, config=None):
    """Selects a binary matching the current os and architecture.

  Raises TaskError if no binary of the given version and name could be found.
  """
    # TODO(John Sirois): finish doc of the path structure expected under base_path
    config = config or Config.load()
    cachedir = config.getdefault('pants_cachedir',
                                 default=os.path.expanduser('~/.pants.d'))
    baseurl = config.getdefault('pants_support_baseurl')
    timeout_secs = config.getdefault('pants_support_fetch_timeout_secs',
                                     type=int,
                                     default=30)

    sysname, _, release, _, machine = os.uname()
    os_id = _ID_BY_OS[sysname.lower()]
    if os_id:
        middle_path = _PATH_BY_ID[os_id(release, machine)]
        if middle_path:
            binary_path = os.path.join(base_path,
                                       *(middle_path + [version, name]))
            cached_binary_path = os.path.join(cachedir, binary_path)
            if not os.path.exists(cached_binary_path):
                url = posixpath.join(baseurl, binary_path)
                log.info('Fetching %s binary from: %s' % (name, url))
                downloadpath = cached_binary_path + '~'
                try:
                    with closing(
                            urllib_request.urlopen(
                                url, timeout=timeout_secs)) as binary:
                        with safe_open(downloadpath, 'wb') as cached_binary:
                            cached_binary.write(binary.read())

                    os.rename(downloadpath, cached_binary_path)
                    chmod_plus_x(cached_binary_path)
                except (IOError, urllib_error.HTTPError,
                        urllib_error.URLError) as e:
                    raise TaskError('Failed to fetch binary from %s: %s' %
                                    (url, e))
                finally:
                    safe_delete(downloadpath)
            log.debug('Selected %s binary cached at: %s' %
                      (name, cached_binary_path))
            return cached_binary_path
    raise TaskError('No %s binary found for: %s' %
                    (name, (sysname, release, machine)))
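A hypothetical call, not from the scraped sources: the base path, version and binary name are placeholders for whatever support binary a task needs; the return value is the cached, executable download.
import subprocess

protoc = select_binary('bin/protobuf', '2.4.1', 'protoc')
subprocess.check_call([protoc, '--version'])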
Example #28
def safe_args(args,
              max_args=None,
              config=None,
              argfile=None,
              delimiter='\n',
              quoter=None,
              delete=True):
    """
    Yields args if there are fewer than the limit, otherwise writes args to an argfile and yields an
    argument list with one argument formed from the path of the argfile.

    :args The args to work with.
    :max_args The maximum number of args to let through without writing an argfile.  If not specified
              then the maximum will be loaded from config.
    :config Used to lookup the configured maximum number of args that can be passed to a subprocess;
            defaults to the default config and looks for key 'max_subprocess_args' in the DEFAULTS.
    :argfile The file to write args to when there are too many; defaults to a temporary file.
    :delimiter The delimiter to insert between args written to the argfile, defaults to '\n'
    :quoter A function that can take the argfile path and return a single argument value;
            defaults to:
            <code>lambda f: '@' + f</code>
    :delete If True deletes any arg files created upon exit from this context; defaults to True.
  """
    max_args = max_args or (config or Config.load()).getdefault(
        'max_subprocess_args', int, 10)
    if len(args) > max_args:

        def create_argfile(fp):
            fp.write(delimiter.join(args))
            fp.close()
            return [quoter(fp.name) if quoter else '@%s' % fp.name]

        if argfile:
            try:
                with safe_open(argfile, 'w') as fp:
                    yield create_argfile(fp)
            finally:
                if delete and os.path.exists(argfile):
                    os.unlink(argfile)
        else:
            with temporary_file(cleanup=delete) as fp:
                yield create_argfile(fp)
    else:
        yield args
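A hypothetical caller, not from the scraped sources. safe_args is written as a generator, so it is assumed to be wrapped with contextlib.contextmanager in its original module; the compiler command and source list are placeholders (javac happens to understand the default '@argfile' convention).
import subprocess

sources = ['src/Gen%d.java' % i for i in range(5000)]
with safe_args(sources, max_args=100) as args:
    # args is either the original list or a single ['@/path/to/argfile'] entry
    subprocess.check_call(['javac'] + list(args))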
Example #29
    def __init__(self, target, root_dir, extra_targets=None, builder=None):
        self._config = Config.load()

        self._target = target
        self._root = root_dir
        self._cache = BuildCache(
            os.path.join(self._config.get('python-setup', 'artifact_cache'),
                         '%s' % PythonIdentity.get()))
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = PythonResolver([self._target] + self._extra_targets)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())
        self._platforms = (Platform.current(), )
        self._pythons = (sys.version[:3], )

        # TODO(wickman) Should this be in the binary builder?
        if isinstance(self._target, PythonBinary):
            self._platforms = self._target._platforms
            self._pythons = self._target._interpreters
Example #30
File: build.py Project: alfss/commons
  def __init__(self, run_tracker, root_dir, parser, argv):
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
      self.error("A spec argument is required")

    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches([self.options.interpreter]
            if self.options.interpreter else [''])))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    else:
      self.debug('Selected %s' % interpreters[0])
    self.interpreter = interpreters[0]

    try:
      specs_end = self.args.index('--')
      if len(self.args) > specs_end:
        self.build_args = self.args[specs_end+1:len(self.args)+1]
      else:
        self.build_args = []
    except ValueError:
      specs_end = 1
      self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
      try:
        address = Address.parse(root_dir, spec)
      except:
        self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

      try:
        target = Target.get(address)
      except:
        self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

      if not target:
        self.error("Target %s does not exist" % address)
      self.targets.update(tgt for tgt in target.resolve() if tgt.is_concrete)
Example #31
  def execute(self):
    config = Config.load()
    distdir = config.getdefault('pants_distdir')
    setup_dir = os.path.join(distdir, '%s-%s' % (
        self.target.provides._name, self.target.provides._version))
    chroot = Chroot(distdir, name=self.target.provides._name)
    self.write_sources(chroot)
    self.write_setup(chroot)
    if os.path.exists(setup_dir):
      import shutil
      shutil.rmtree(setup_dir)
    os.rename(chroot.path(), setup_dir)

    with pushd(setup_dir):
      cmd = '%s setup.py %s' % (sys.executable, self.options.run or 'sdist')
      print('Running "%s" in %s' % (cmd, setup_dir))
      extra_args = {} if self.options.run else dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      po = subprocess.Popen(cmd, shell=True, **extra_args)
      po.wait()

    if self.options.run:
      print('Ran %s' % cmd)
      print('Output in %s' % setup_dir)
      return po.returncode
    elif po.returncode != 0:
      print('Failed to run %s!' % cmd)
      for line in po.stdout.read().splitlines():
        print('stdout: %s' % line)
      for line in po.stderr.read().splitlines():
        print('stderr: %s' % line)
      return po.returncode

    expected_tgz = '%s-%s.tar.gz' % (self.target.provides._name, self.target.provides._version)
    expected_target = os.path.join(setup_dir, 'dist', expected_tgz)
    dist_tgz = os.path.join(distdir, expected_tgz)
    if not os.path.exists(expected_target):
      print('Could not find expected target %s!' % expected_target)
      sys.exit(1)
    safe_delete(dist_tgz)
    os.rename(expected_target, dist_tgz)
    print('Wrote %s' % dist_tgz)
    safe_rmtree(setup_dir)
Example #32
def safe_args(args,
              max_args=None,
              config=None,
              argfile=None,
              delimiter='\n',
              quoter=None,
              delete=True):
  """
    Yields args if there are fewer than the limit, otherwise writes args to an argfile and yields an
    argument list with one argument formed from the path of the argfile.

    :args The args to work with.
    :max_args The maximum number of args to let through without writing an argfile.  If not specified
              then the maximum will be loaded from config.
    :config Used to lookup the configured maximum number of args that can be passed to a subprocess;
            defaults to the default config and looks for key 'max_subprocess_args' in the DEFAULTS.
    :argfile The file to write args to when there are too many; defaults to a temporary file.
    :delimiter The delimiter to insert between args written to the argfile, defaults to '\n'
    :quoter A function that can take the argfile path and return a single argument value;
            defaults to:
            <code>lambda f: '@' + f</code>
    :delete If True deletes any arg files created upon exit from this context; defaults to True.
  """
  max_args = max_args or (config or Config.load()).getdefault('max_subprocess_args', int, 10)
  if len(args) > max_args:
    def create_argfile(fp):
      fp.write(delimiter.join(args))
      fp.close()
      return [quoter(fp.name) if quoter else '@%s' % fp.name]

    if argfile:
      try:
        with safe_open(argfile, 'w') as fp:
          yield create_argfile(fp)
      finally:
        if delete and os.path.exists(argfile):
          os.unlink(argfile)
    else:
      with temporary_file(cleanup=delete) as fp:
        yield create_argfile(fp)
  else:
    yield args
Example #33
File: py.py Project: alfss/commons
 def setup_parser(self, parser, args):
   parser.set_usage('\n'
                    '  %prog py (options) [spec] args\n')
   parser.disable_interspersed_args()
   parser.add_option('-t', '--timeout', dest='conn_timeout', type='int',
                     default=Config.load().getdefault('connection_timeout'),
                     help='Number of seconds to wait for http connections.')
   parser.add_option('--pex', dest='pex', default=False, action='store_true',
                     help='Dump a .pex of this chroot instead of attempting to execute it.')
   parser.add_option('--ipython', dest='ipython', default=False, action='store_true',
                     help='Run the target environment in an IPython interpreter.')
   parser.add_option('-r', '--req', dest='extra_requirements', default=[], action='append',
                     help='Additional Python requirements to add to this chroot.')
   parser.add_option('-i', '--interpreter', dest='interpreter', default=None,
                     help='The interpreter requirement for this chroot.')
   parser.add_option('-e', '--entry_point', dest='entry_point', default=None,
                     help='The entry point for the generated PEX.')
   parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                     help='Show verbose output.')
   parser.epilog = """Interact with the chroot of the specified target."""
Example #34
def profile_classpath(profile, java_runner=None, config=None, ivy_jar=None, ivy_settings=None,
                      workunit_factory=None):
  # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
  # path structure

  java_runner = java_runner or runjava_indivisible

  config = config or Config.load()

  profile_dir = config.get('ivy-profiles', 'workdir')
  profile_libdir = os.path.join(profile_dir, '%s.libs' % profile)
  profile_check = '%s.checked' % profile_libdir
  if not os.path.exists(profile_check):
    # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
    ivy_classpath = [ivy_jar] if ivy_jar else config.getlist('ivy', 'classpath')

    safe_mkdir(profile_libdir)
    ivy_settings = ivy_settings or config.get('ivy', 'ivy_settings')
    ivy_xml = os.path.join(profile_dir, '%s.ivy.xml' % profile)
    ivy_opts = [
      '-settings', ivy_settings,
      '-ivy', ivy_xml,

      # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
      # needs for scala - isolate this hack to idea.py where it belongs
      '-retrieve', '%s/[artifact]-[revision](-[classifier]).[ext]' % profile_libdir,

      '-sync',
      '-symlink',
      '-types', 'jar', 'bundle',
      '-confs', 'default'
    ]
    result = java_runner(classpath=ivy_classpath, main='org.apache.ivy.Main',
                         workunit_factory=workunit_factory,
                         workunit_name='%s:bootstrap' % profile, opts=ivy_opts)
    if result != 0:
      raise TaskError('Failed to load profile %s, ivy exit code %s' % (profile, str(result)))
    touch(profile_check)

  return [os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)]
Example #35
    def __init__(self, target, args, root_dir):
        self.target = target
        if not isinstance(target, PythonBinary):
            raise PythonBinaryBuilder.NotABinaryTargetException(
                "Target %s is not a PythonBinary!" % target)
        config = Config.load()
        self.distdir = config.getdefault('pants_distdir')
        distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)
        self.builder = PEXBuilder(distpath)

        # configure builder PexInfo options
        for repo in target._repositories:
            self.builder.info().add_repository(repo)
        for index in target._indices:
            self.builder.info().add_index(index)
        self.builder.info().allow_pypi = target._allow_pypi
        self.builder.info().zip_safe = target._zip_safe
        self.builder.info().inherit_path = target._inherit_path
        self.builder.info().entry_point = target._entry_point
        self.builder.info().ignore_errors = target._ignore_errors

        self.chroot = PythonChroot(target, root_dir, builder=self.builder)
Example #36
  def __init__(self, target, args, root_dir, conn_timeout=None):
    self.target = target
    if not isinstance(target, PythonBinary):
      raise PythonBinaryBuilder.NotABinaryTargetException(
        "Target %s is not a PythonBinary!" % target)
    config = Config.load()
    self.distdir = config.getdefault('pants_distdir')
    distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)
    self.builder = PEXBuilder(distpath)

    # configure builder PexInfo options
    for repo in target._repositories:
      self.builder.info().add_repository(repo)
    for index in target._indices:
      self.builder.info().add_index(index)
    self.builder.info().allow_pypi = target._allow_pypi
    self.builder.info().zip_safe = target._zip_safe
    self.builder.info().inherit_path = target._inherit_path
    self.builder.info().entry_point = target._entry_point
    self.builder.info().ignore_errors = target._ignore_errors

    self.chroot = PythonChroot(target, root_dir, builder=self.builder, conn_timeout=conn_timeout)
Example #37
    def __init__(self, target, root_dir, run_tracker, interpreter=None, conn_timeout=None):
        self.target = target
        self.interpreter = interpreter or PythonInterpreter.get()
        if not isinstance(target, PythonBinary):
            raise PythonBinaryBuilder.NotABinaryTargetException("Target %s is not a PythonBinary!" % target)

        config = Config.load()
        self.distdir = config.getdefault("pants_distdir")
        distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

        run_info = run_tracker.run_info
        build_properties = {}
        build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time()))
        build_properties.update(run_info.add_scm_info())

        pexinfo = target.pexinfo.copy()
        pexinfo.build_properties = build_properties
        builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter)

        self.chroot = PythonChroot(
            target, root_dir, builder=builder, interpreter=self.interpreter, conn_timeout=conn_timeout
        )
Example #38
    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver.from_target(self._config,
                                                   target,
                                                   conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())

        artifact_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            '%s' % PythonIdentity.get())
        self._artifact_cache = FileBasedArtifactCache(
            None, self._root, artifact_cache_root,
            self._builder.add_dependency_file)
Example #39
def select_binary(base_path, version, name, config=None):
  """Selects a binary matching the current os and architecture.

  Raises TaskError if no binary of the given version and name could be found.
  """
  # TODO(John Sirois): finish doc of the path structure expected under base_path
  config = config or Config.load()
  cachedir = config.getdefault('pants_cachedir', default=os.path.expanduser('~/.pants.d'))
  baseurl = config.getdefault('pants_support_baseurl')
  timeout_secs = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30)

  sysname, _, release, _, machine = os.uname()
  os_id = _ID_BY_OS[sysname.lower()]
  if os_id:
    middle_path = _PATH_BY_ID[os_id(release, machine)]
    if middle_path:
      binary_path = os.path.join(base_path, *(middle_path + [version, name]))
      cached_binary_path = os.path.join(cachedir, binary_path)
      if not os.path.exists(cached_binary_path):
        url = posixpath.join(baseurl, binary_path)
        log.info('Fetching %s binary from: %s' % (name, url))
        downloadpath = cached_binary_path + '~'
        try:
          with closing(urllib_request.urlopen(url, timeout=timeout_secs)) as binary:
            with safe_open(downloadpath, 'wb') as cached_binary:
              cached_binary.write(binary.read())

          os.rename(downloadpath, cached_binary_path)
          chmod_plus_x(cached_binary_path)
        except (IOError, urllib_error.HTTPError, urllib_error.URLError) as e:
          raise TaskError('Failed to fetch binary from %s: %s' % (url, e))
        finally:
          safe_delete(downloadpath)
      log.debug('Selected %s binary cached at: %s' % (name, cached_binary_path))
      return cached_binary_path
  raise TaskError('No %s binary found for: %s' % (name, (sysname, release, machine)))
Example #40
File: py.py Project: alfss/commons
  def __init__(self, run_tracker, root_dir, parser, argv):
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    self.target = None
    self.extra_targets = []
    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches([self.options.interpreter]
            if self.options.interpreter else [''])))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]

    for req in self.options.extra_requirements:
      with ParseContext.temp():
        self.extra_targets.append(PythonRequirement(req, use_2to3=True))

    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    binaries = []
    for k in range(len(self.args)):
      arg = self.args.pop(0)
      if arg == '--':
        break

      def not_a_target(debug_msg):
        self.debug('Not a target, assuming option: %s.' % debug_msg)
        # We failed to parse the arg as a target or else it was in valid address format but did not
        # correspond to a real target.  Assume this is the 1st of the build args and terminate
        # processing args for target addresses.
        self.args.insert(0, arg)

      target = None
      try:
        address = Address.parse(root_dir, arg)
        target = Target.get(address)
        if target is None:
          not_a_target(debug_msg='Unrecognized target')
          break
      except Exception as e:
        not_a_target(debug_msg=e)
        break

      for resolved in filter(lambda t: t.is_concrete, target.resolve()):
        if isinstance(resolved, PythonBinary):
          binaries.append(resolved)
        else:
          self.extra_targets.append(resolved)

    if len(binaries) == 0:
      # treat as a chroot
      pass
    elif len(binaries) == 1:
      # We found a binary and are done, the rest of the args get passed to it
      self.target = binaries[0]
    else:
      self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
        arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)
      ))

    if self.target is None:
      if not self.extra_targets:
        self.error('No valid target specified!')
      self.target = self.extra_targets.pop(0)
Example #41
 def setUpClass(cls):
   cls.config = Config.load()
Example #42
 def setUpClass(cls):
     cls.config = Config.load()
Example #43
class Goal(Command):
    """Lists installed goals or else executes a named goal."""

    __command__ = 'goal'

    GLOBAL_OPTIONS = [
        Option("-t",
               "--timeout",
               dest="conn_timeout",
               type='int',
               default=Config.load().getdefault('connection_timeout'),
               help="Number of seconds to wait for http connections."),
        Option("-x",
               "--time",
               action="store_true",
               dest="time",
               default=False,
               help="Times goal phases and outputs a report."),
        Option("-e",
               "--explain",
               action="store_true",
               dest="explain",
               default=False,
               help="Explain the execution of goals."),
        Option("-k",
               "--kill-nailguns",
               action="store_true",
               dest="cleanup_nailguns",
               default=False,
               help="Kill nailguns before exiting"),
        Option("-d",
               "--logdir",
               dest="logdir",
               help="[%default] Forks logs to files under this directory."),
        Option(
            "-l",
            "--level",
            dest="log_level",
            type="choice",
            choices=['debug', 'info', 'warn'],
            help="[info] Sets the logging level to one of 'debug', 'info' or 'warn', "
                 "if set."),
        Option("-q",
               "--quiet",
               action="store_true",
               dest="quiet",
               default=False,
               help="Squelches all console output apart from errors."),
        Option("--no-colors",
               dest="no_color",
               action="store_true",
               default=turn_off_colored_logging,
               help="Do not colorize log messages."),
        Option(
            "-n",
            "--dry-run",
            action="store_true",
            dest="dry_run",
            default=False,
            help=
            "Print the commands that would be run, without actually running them."
        ),
        Option(
            "--read-from-artifact-cache",
            "--no-read-from-artifact-cache",
            action="callback",
            callback=_set_bool,
            dest="read_from_artifact_cache",
            default=True,
            help=
            "Whether to read artifacts from cache instead of building them, if configured to do so."
        ),
        Option(
            "--write-to-artifact-cache",
            "--no-write-to-artifact-cache",
            action="callback",
            callback=_set_bool,
            dest="write_to_artifact_cache",
            default=True,
            help="Whether to write artifacts to cache if configured to do so."
        ),

        # NONE OF THE ARTIFACT CACHE FLAGS BELOW DO ANYTHING ANY MORE.
        # TODO: Remove them once all uses of them are killed.
        Option(
            "--verify-artifact-cache",
            "--no-verify-artifact-cache",
            action="callback",
            callback=_set_bool,
            dest="verify_artifact_cache",
            default=False,
            help=
            "Whether to verify that cached artifacts are identical after rebuilding them."
        ),
        Option(
            "--local-artifact-cache-readonly",
            "--no-local-artifact-cache-readonly",
            action="callback",
            callback=_set_bool,
            dest="local_artifact_cache_readonly",
            default=False,
            help=
            "If set, we don't write to local artifact caches, even when writes are enabled."
        ),
        # Note that remote writes are disabled by default, so you have control over who's populating
        # the shared cache.
        Option(
            "--remote-artifact-cache-readonly",
            "--no-remote-artifact-cache-readonly",
            action="callback",
            callback=_set_bool,
            dest="remote_artifact_cache_readonly",
            default=True,
            help=
            "If set, we don't write to remote artifact caches, even when writes are enabled."
        ),
        Option(
            "--all",
            dest="target_directory",
            action="append",
            help=
            "DEPRECATED: Use [dir]: with no flag in a normal target position on the command "
            "line. (Adds all targets found in the given directory's BUILD file. Can be "
            "specified more than once.)"),
        Option(
            "--all-recursive",
            dest="recursive_directory",
            action="append",
            help=
            "DEPRECATED: Use [dir]:: with no flag in a normal target position on the command "
            "line. (Adds all targets found recursively under the given directory. Can be "
            "specified more than once to add more than one root target directory to scan.)"
        ),
    ]

    output = None

    @staticmethod
    def add_global_options(parser):
        for option in Goal.GLOBAL_OPTIONS:
            parser.add_option(option)

    @staticmethod
    def parse_args(args):
        goals = OrderedSet()
        specs = OrderedSet()
        help = False
        explicit_multi = False

        def is_spec(spec):
            return os.sep in spec or ':' in spec

        for i, arg in enumerate(args):
            help = help or 'help' == arg
            if not arg.startswith('-'):
                specs.add(arg) if is_spec(arg) else goals.add(arg)
            elif '--' == arg:
                if specs:
                    raise GoalError(
                        'Cannot intermix targets with goals when using --. Targets should '
                        'appear on the right')
                explicit_multi = True
                del args[i]
                break

        if explicit_multi:
            spec_offset = len(goals) + 1 if help else len(goals)
            specs.update(arg for arg in args[spec_offset:]
                         if not arg.startswith('-'))

        return goals, specs

    @classmethod
    def execute(cls, context, *names):
        parser = OptionParser()
        cls.add_global_options(parser)
        phases = [Phase(name) for name in names]
        Phase.setup_parser(parser, [], phases)
        options, _ = parser.parse_args([])
        context = Context(context.config,
                          options,
                          context.run_tracker,
                          context.target_roots,
                          requested_goals=list(names))
        return cls._execute(context, phases, print_timing=False)

    @staticmethod
    def _execute(context, phases, print_timing):
        engine = GroupEngine(print_timing=print_timing)
        return engine.execute(context, phases)

    # TODO(John Sirois): revisit wholesale locking when we move py support into pants new
    @classmethod
    def serialized(cls):
        # Goal serialization is now handled in goal execution during group processing.
        # The goal command doesn't need to hold the serialization lock; individual goals will
        # acquire the lock if they need to be serialized.
        return False

    def __init__(self, run_tracker, root_dir, parser, args):
        self.targets = []
        Command.__init__(self, run_tracker, root_dir, parser, args)

    @contextmanager
    def check_errors(self, banner):
        errors = {}

        def error(key, include_traceback=False):
            exc_type, exc_value, _ = sys.exc_info()
            msg = StringIO()
            if include_traceback:
                frame = inspect.trace()[-2]
                filename = frame[1]
                lineno = frame[2]
                funcname = frame[3]
                code = ''.join(frame[4]) if frame[4] else None
                traceback.print_list([(filename, lineno, funcname, code)],
                                     file=msg)
            if exc_type:
                msg.write(''.join(
                    traceback.format_exception_only(exc_type, exc_value)))
            errors[key] = msg.getvalue()
            sys.exc_clear()

        yield error

        if errors:
            msg = StringIO()
            msg.write(banner)
            invalid_keys = [key for key, exc in errors.items() if not exc]
            if invalid_keys:
                msg.write('\n  %s' % '\n  '.join(invalid_keys))
            for key, exc in errors.items():
                if exc:
                    msg.write('\n  %s =>\n    %s' %
                              (key, '\n      '.join(exc.splitlines())))
            # The help message for goal is extremely verbose, and will obscure the
            # actual error message, so we don't show it in this case.
            self.error(msg.getvalue(), show_help=False)
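
    # Illustrative usage pattern (an assumption, mirroring the calls in setup_parser
    # below; 'widgets', 'load_widget' and 'WidgetError' are hypothetical names):
    #
    #   with self.check_errors('The following widgets could not be loaded:') as error:
    #       for widget in widgets:
    #           try:
    #               load_widget(widget)
    #           except WidgetError:
    #               error(widget, include_traceback=True)
    #
    # Every key passed to error() is collected, and when the block exits the banner and
    # the per-key messages are reported through self.error() in one shot.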

    def setup_parser(self, parser, args):
        self.config = Config.load()
        Goal.add_global_options(parser)

        # We support attempting zero or more goals.  Multiple goals must be delimited from further
        # options and non goal args with a '--'.  The key permutations we need to support:
        # ./pants goal => goals
        # ./pants goal goals => goals
        # ./pants goal compile src/java/... => compile
        # ./pants goal compile -x src/java/... => compile
        # ./pants goal compile src/java/... -x => compile
        # ./pants goal compile run -- src/java/... => compile, run
        # ./pants goal compile run -- src/java/... -x => compile, run
        # ./pants goal compile run -- -x src/java/... => compile, run

        if not args:
            args.append('goals')

        if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):

            def format_usage(usages):
                left_colwidth = 0
                for left, right in usages:
                    left_colwidth = max(left_colwidth, len(left))
                lines = []
                for left, right in usages:
                    lines.append('  %s%s%s' %
                                 (left, ' ' *
                                  (left_colwidth - len(left) + 1), right))
                return '\n'.join(lines)

            usages = [
                ("%prog goal goals ([spec]...)", Phase('goals').description),
                ("%prog goal help [goal] ([spec]...)",
                 Phase('help').description),
                ("%prog goal [goal] [spec]...",
                 "Attempt goal against one or more targets."),
                ("%prog goal [goal] ([goal]...) -- [spec]...",
                 "Attempts all the specified goals."),
            ]
            parser.set_usage("\n%s" % format_usage(usages))
            parser.epilog = (
                "Either lists all installed goals, provides extra help for a goal or else "
                "attempts to achieve the specified goal for the listed targets."
                """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

            parser.print_help()
            sys.exit(0)
        else:
            goals, specs = Goal.parse_args(args)
            self.requested_goals = goals

            with self.run_tracker.new_workunit(name='setup',
                                               labels=[WorkUnit.SETUP]):
                # Bootstrap goals by loading any configured bootstrap BUILD files
                with self.check_errors(
                        'The following bootstrap_buildfiles cannot be loaded:'
                ) as error:
                    with self.run_tracker.new_workunit(name='bootstrap',
                                                       labels=[WorkUnit.SETUP]):
                        for path in self.config.getlist('goals',
                                                        'bootstrap_buildfiles',
                                                        default=[]):
                            try:
                                buildfile = BuildFile(
                                    get_buildroot(),
                                    os.path.relpath(path, get_buildroot()))
                                ParseContext(buildfile).parse()
                            except (TypeError, ImportError, TaskError,
                                    GoalError):
                                error(path, include_traceback=True)
                            except (IOError, SyntaxError):
                                error(path)
                # Now that we've parsed the bootstrap BUILD files we know about the SCM system,
                # so record its info for this run.
                self.run_tracker.run_info.add_scm_info()

                # Bootstrap user goals by loading any BUILD files implied by targets.
                spec_parser = SpecParser(self.root_dir)
                with self.check_errors(
                        'The following targets could not be loaded:') as error:
                    with self.run_tracker.new_workunit(name='parse',
                                                       labels=[WorkUnit.SETUP]):
                        for spec in specs:
                            try:
                                for target, address in spec_parser.parse(spec):
                                    if target:
                                        self.targets.append(target)
                                        # Force early BUILD file loading if this target is an alias that expands
                                        # to others.
                                        unused = list(target.resolve())
                                    else:
                                        siblings = Target.get_all_addresses(address.buildfile)
                                        prompt = ('did you mean' if len(siblings) == 1
                                                  else 'maybe you meant one of these')
                                        error('%s => %s?:\n    %s' %
                                              (address, prompt,
                                               '\n    '.join(str(a) for a in siblings)))
                            except (TypeError, ImportError, TaskError,
                                    GoalError):
                                error(spec, include_traceback=True)
                            except (IOError, SyntaxError,
                                    TargetDefinitionException):
                                error(spec)

            self.phases = [Phase(goal) for goal in goals]

            rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
            if rcfiles:
                rcfile = RcFile(rcfiles,
                                default_prepend=False,
                                process_default=True)

                # Break down the goals specified on the command line to the full set that will be run so we
                # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
                # register the task class hierarchy fully qualified names so we can apply defaults to
                # baseclasses.

                sections = OrderedSet()
                for phase in Engine.execution_order(self.phases):
                    for goal in phase.goals():
                        sections.add(goal.name)
                        for clazz in goal.task_type.mro():
                            if clazz == Task:
                                break
                            sections.add('%s.%s' %
                                         (clazz.__module__, clazz.__name__))

                augmented_args = rcfile.apply_defaults(sections, args)
                if augmented_args != args:
                    del args[:]
                    args.extend(augmented_args)
                    sys.stderr.write(
                        "(using pantsrc expansion: pants goal %s)\n" %
                        ' '.join(augmented_args))

            Phase.setup_parser(parser, args, self.phases)

    def run(self, lock):
        # TODO(John Sirois): Consider moving to straight python logging.  The divide between the
        # context/work-unit logging and standard python logging doesn't buy us anything.

        # Enable standard python logging for code with no handle to a context/work-unit.
        if self.options.log_level:
            LogOptions.set_stderr_log_level((self.options.log_level
                                             or 'info').upper())
            logdir = self.options.logdir or self.config.get(
                'goals', 'logdir', default=None)
            if logdir:
                safe_mkdir(logdir)
                LogOptions.set_log_dir(logdir)
                log.init('goals')
            else:
                log.init()

        # Update the reporting settings, now that we have flags etc.
        def is_console_task():
            for phase in self.phases:
                for goal in phase.goals():
                    if issubclass(goal.task_type, ConsoleTask):
                        return True
            return False

        is_explain = self.options.explain
        update_reporting(self.options,
                         is_console_task() or is_explain, self.run_tracker)

        if self.options.dry_run:
            print('****** Dry Run ******')

        context = Context(self.config,
                          self.options,
                          self.run_tracker,
                          self.targets,
                          requested_goals=self.requested_goals,
                          lock=lock)

        if self.options.recursive_directory:
            context.log.warn(
                '--all-recursive is deprecated, use a target spec with the form [dir]:: instead'
            )
            for dir in self.options.recursive_directory:
                self.add_target_recursive(dir)

        if self.options.target_directory:
            context.log.warn(
                '--all is deprecated, use a target spec with the form [dir]: instead'
            )
            for dir in self.options.target_directory:
                self.add_target_directory(dir)

        unknown = []
        for phase in self.phases:
            if not phase.goals():
                unknown.append(phase)

        if unknown:
            _list_goals(
                context,
                'Unknown goal(s): %s' % ' '.join(phase.name
                                                 for phase in unknown))
            return 1

        return Goal._execute(context,
                             self.phases,
                             print_timing=self.options.time)

    def cleanup(self):
        # TODO: Make this more selective? Only kill nailguns that affect state? E.g., checkstyle
        # may not need to be killed.
        NailgunTask.killall(log.info)
        sys.exit(1)
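
A minimal usage sketch, not part of the listing: the Goal.execute classmethod above can
drive the same phase machinery programmatically. The goal names and the pre-built
context object are assumptions made purely for illustration.

# Hypothetical driver; assumes 'context' is an existing Context instance and that
# 'clean-all' and 'compile' are goals registered in the repo (illustrative names only).
def run_build(context):
    # Goal.execute rebuilds the option parser, re-wraps the context with the requested
    # goal names, and runs the phases through the GroupEngine via Goal._execute.
    return Goal.execute(context, 'clean-all', 'compile')
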
Example #44
0
  def setup_parser(self, parser, args):
    self.config = Config.load()
    Goal.add_global_options(parser)

    # We support attempting zero or more goals.  Multiple goals must be delimited from further
    # options and non goal args with a '--'.  The key permutations we need to support:
    # ./pants goal => goals
    # ./pants goal goals => goals
    # ./pants goal compile src/java/... => compile
    # ./pants goal compile -x src/java/... => compile
    # ./pants goal compile src/java/... -x => compile
    # ./pants goal compile run -- src/java/... => compile, run
    # ./pants goal compile run -- src/java/... -x => compile, run
    # ./pants goal compile run -- -x src/java/... => compile, run

    if not args:
      args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
      def format_usage(usages):
        left_colwidth = 0
        for left, right in usages:
          left_colwidth = max(left_colwidth, len(left))
        lines = []
        for left, right in usages:
          lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
        return '\n'.join(lines)

      usages = [
        ("%prog goal goals ([spec]...)", Phase('goals').description),
        ("%prog goal help [goal] ([spec]...)", Phase('help').description),
        ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
        ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
      ]
      parser.set_usage("\n%s" % format_usage(usages))
      parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                       "attempts to achieve the specified goal for the listed targets." """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

      parser.print_help()
      sys.exit(0)
    else:
      goals, specs = Goal.parse_args(args)
      self.requested_goals = goals

      with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
        # Bootstrap goals by loading any configured bootstrap BUILD files
        with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
          with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
            for path in self.config.getlist('goals', 'bootstrap_buildfiles', default = []):
              try:
                buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
                ParseContext(buildfile).parse()
              except (TypeError, ImportError, TaskError, GoalError):
                error(path, include_traceback=True)
              except (IOError, SyntaxError):
                error(path)
        # Now that we've parsed the bootstrap BUILD files we know about the SCM system,
        # so record its info for this run.
        self.run_tracker.run_info.add_scm_info()

        # Bootstrap user goals by loading any BUILD files implied by targets.
        spec_parser = SpecParser(self.root_dir)
        with self.check_errors('The following targets could not be loaded:') as error:
          with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
            for spec in specs:
              try:
                for target, address in spec_parser.parse(spec):
                  if target:
                    self.targets.append(target)
                    # Force early BUILD file loading if this target is an alias that expands
                    # to others.
                    unused = list(target.resolve())
                  else:
                    siblings = Target.get_all_addresses(address.buildfile)
                    prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                    error('%s => %s?:\n    %s' % (address, prompt,
                                                  '\n    '.join(str(a) for a in siblings)))
              except (TypeError, ImportError, TaskError, GoalError):
                error(spec, include_traceback=True)
              except (IOError, SyntaxError):
                error(spec)

      self.phases = [Phase(goal) for goal in goals]

      rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
      if rcfiles:
        rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

        # Break down the goals specified on the command line to the full set that will be run so we
        # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
        # register the task class hierarchy fully qualified names so we can apply defaults to
        # baseclasses.

        all_goals = Phase.execution_order(Phase(goal) for goal in goals)
        sections = OrderedSet()
        for goal in all_goals:
          sections.add(goal.name)
          for clazz in goal.task_type.mro():
            if clazz == Task:
              break
            sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

        augmented_args = rcfile.apply_defaults(sections, args)
        if augmented_args != args:
          del args[:]
          args.extend(augmented_args)
          sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args))

      Phase.setup_parser(parser, args, self.phases)
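
The rcfile handling above derives "section" names from the requested goals so that a
pantsrc file can attach default flags at several levels. A minimal self-contained sketch
of that derivation follows; Task here is a stand-in base class and JavadocGen a
hypothetical task type, both introduced purely for illustration.

class Task(object):
    pass

class JavadocGen(Task):
    pass

def rc_sections(goal_name, task_type):
    # Mirror the loop above: the goal name itself, plus the fully qualified name of
    # every class in the task's MRO up to (but excluding) Task.
    sections = [goal_name]
    for clazz in task_type.mro():
        if clazz == Task:
            break
        sections.append('%s.%s' % (clazz.__module__, clazz.__name__))
    return sections

# rc_sections('javadoc', JavadocGen) -> ['javadoc', '__main__.JavadocGen'] when run as
# a script, so defaults can target either the goal name or any class in the hierarchy.
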
Example #45
0
class InternalTarget(Target):
    """A baseclass for targets that support an optional dependency set."""
    class CycleException(Exception):
        """Thrown when a circular dependency is detected."""
        def __init__(self, cycle):
            Exception.__init__(
                self, 'Cycle detected:\n\t%s' %
                (' ->\n\t'.join(str(target.address) for target in cycle)))

    _config = Config.load()

    @classmethod
    def sort_targets(cls, internal_targets):
        """Returns a list of targets that internal_targets depend on sorted from most dependent to
    least."""

        roots = OrderedSet()
        inverted_deps = collections.defaultdict(
            OrderedSet)  # target -> dependent targets
        visited = set()
        path = OrderedSet()

        def invert(target):
            if target in path:
                path_list = list(path)
                cycle_head = path_list.index(target)
                cycle = path_list[cycle_head:] + [target]
                raise InternalTarget.CycleException(cycle)
            path.add(target)
            if target not in visited:
                visited.add(target)
                if getattr(target, 'internal_dependencies', None):
                    for internal_dependency in target.internal_dependencies:
                        if hasattr(internal_dependency,
                                   'internal_dependencies'):
                            inverted_deps[internal_dependency].add(target)
                            invert(internal_dependency)
                else:
                    roots.add(target)
            path.remove(target)

        for internal_target in internal_targets:
            invert(internal_target)

        sorted = []
        visited.clear()

        def topological_sort(target):
            if target not in visited:
                visited.add(target)
                if target in inverted_deps:
                    for dep in inverted_deps[target]:
                        topological_sort(dep)
                sorted.append(target)

        for root in roots:
            topological_sort(root)

        return sorted
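
    # Illustrative note (not part of the original source): if a depends on b and b
    # depends on c, sort_targets([a]) records c as the sole root and the topological
    # pass yields [a, b, c] -- most dependent first, matching the docstring. A cycle
    # such as a -> b -> a is detected in invert() and raised as a CycleException that
    # names the offending path.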

    @classmethod
    def coalesce_targets(cls, internal_targets, discriminator):
        """Returns a list of targets internal_targets depend on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        sorted_targets = InternalTarget.sort_targets(internal_targets)

        # can do no better for any of these:
        # []
        # [a]
        # [a,b]
        if len(sorted_targets) <= 2:
            return sorted_targets

        # For these, we'd like to coalesce if possible, like:
        # [a,b,a,c,a,c] -> [a,a,a,b,c,c]
        # adopt a quadratic worst case solution, when we find a type change edge, scan forward for
        # the opposite edge and then try to swap dependency pairs to move the type back left to its
        # grouping.  If the leftwards migration fails due to a dependency constraint, we just stop
        # and move on leaving "type islands".
        current_type = None

        # main scan left to right no backtracking
        for i in range(len(sorted_targets) - 1):
            current_target = sorted_targets[i]
            if current_type != discriminator(current_target):
                scanned_back = False

                # scan ahead for next type match
                for j in range(i + 1, len(sorted_targets)):
                    look_ahead_target = sorted_targets[j]
                    if current_type == discriminator(look_ahead_target):
                        scanned_back = True

                        # swap this guy as far back as we can
                        for k in range(j, i, -1):
                            previous_target = sorted_targets[k - 1]
                            mismatching_types = current_type != discriminator(
                                previous_target)
                            not_a_dependency = look_ahead_target not in previous_target.internal_dependencies
                            if mismatching_types and not_a_dependency:
                                sorted_targets[k] = sorted_targets[k - 1]
                                sorted_targets[k - 1] = look_ahead_target
                            else:
                                break  # out of k

                        break  # out of j

                if not scanned_back:  # done with coalescing the current type, move on to next
                    current_type = discriminator(current_target)

        return sorted_targets

    def sort(self):
        """Returns a list of targets this target depends on sorted from most dependent to least."""

        return InternalTarget.sort_targets([self])

    def coalesce(self, discriminator):
        """Returns a list of targets this target depends on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        return InternalTarget.coalesce_targets([self], discriminator)

    def __init__(self, name, dependencies, is_meta):
        Target.__init__(self, name, is_meta)

        self._injected_deps = []
        self.processed_dependencies = resolve(dependencies)

        self.add_label('internal')
        self.dependency_addresses = OrderedSet()
        self.dependencies = OrderedSet()
        self.internal_dependencies = OrderedSet()
        self.jar_dependencies = OrderedSet()

        # TODO(John Sirois): if meta targets were truly built outside parse contexts - we could instead
        # just use the more general check: if parsing: delay(doit) else: doit()
        # Fix how target _ids are built / addresses to not require a BUILD file - ie: support anonymous,
        # non-addressable targets - which is what meta-targets really are once created.
        if is_meta:
            # Meta targets are built outside any parse context - so update dependencies immediately
            self.update_dependencies(self.processed_dependencies)
        else:
            # Defer dependency resolution after parsing the current BUILD file to allow for forward
            # references
            self._post_construct(self.update_dependencies,
                                 self.processed_dependencies)

        self._post_construct(self.inject_dependencies)

    def add_injected_dependency(self, spec):
        self._injected_deps.append(spec)

    def inject_dependencies(self):
        self.update_dependencies(resolve(self._injected_deps))

    def update_dependencies(self, dependencies):
        if dependencies:
            for dependency in dependencies:
                if hasattr(dependency, 'address'):
                    self.dependency_addresses.add(dependency.address)
                for resolved_dependency in dependency.resolve():
                    self.dependencies.add(resolved_dependency)
                    if isinstance(resolved_dependency, InternalTarget):
                        self.internal_dependencies.add(resolved_dependency)
                    if hasattr(resolved_dependency, '_as_jar_dependencies'):
                        self.jar_dependencies.update(
                            resolved_dependency._as_jar_dependencies())

    def replace_dependency(self, dependency, replacement):
        self.dependencies.discard(dependency)
        self.internal_dependencies.discard(dependency)
        self.jar_dependencies.discard(dependency)
        self.update_dependencies([replacement])

    def _walk(self, walked, work, predicate=None):
        Target._walk(self, walked, work, predicate)
        for dep in self.dependencies:
            if isinstance(dep, Target) and not dep in walked:
                walked.add(dep)
                if not predicate or predicate(dep):
                    additional_targets = work(dep)
                    dep._walk(walked, work, predicate)
                    if additional_targets:
                        for additional_target in additional_targets:
                            additional_target._walk(walked, work, predicate)
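
The grouping behaviour of coalesce_targets is easiest to see with a couple of toy
targets. The sketch below is illustrative only: FakeTarget is not a real pants class,
and it works purely because sort_targets and coalesce_targets only read the
internal_dependencies attribute (plus the discriminator you supply); it assumes the
InternalTarget class from the example above is in scope.

class FakeTarget(object):
    """A duck-typed stand-in carrying just what the sorting/coalescing code reads."""
    def __init__(self, name, lang, deps=None):
        self.name = name
        self.lang = lang
        self.internal_dependencies = list(deps or [])

    def __repr__(self):
        return self.name

j1, p1, j2 = FakeTarget('j1', 'java'), FakeTarget('p1', 'py'), FakeTarget('j2', 'java')

# With no dependency constraints between them, same-typed targets are pulled together:
# [j1, p1, j2] becomes [j1, j2, p1].
grouped = InternalTarget.coalesce_targets([j1, p1, j2], lambda t: t.lang)
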
Example #46
0
File: goal.py Project: ugodiggi/commons
  def setup_parser(self, parser, args):
    self.config = Config.load()

    Goal.add_global_options(parser)

    # We support attempting zero or more goals.  Multiple goals must be delimited from further
    # options and non goal args with a '--'.  The key permutations we need to support:
    # ./pants goal => goals
    # ./pants goal goals => goals
    # ./pants goal compile src/java/... => compile
    # ./pants goal compile -x src/java/... => compile
    # ./pants goal compile src/java/... -x => compile
    # ./pants goal compile run -- src/java/... => compile, run
    # ./pants goal compile run -- src/java/... -x => compile, run
    # ./pants goal compile run -- -x src/java/... => compile, run

    if not args:
      args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
      def format_usage(usages):
        left_colwidth = 0
        for left, right in usages:
          left_colwidth = max(left_colwidth, len(left))
        lines = []
        for left, right in usages:
          lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
        return '\n'.join(lines)

      usages = [
        ("%prog goal goals ([spec]...)", Phase('goals').description),
        ("%prog goal help [goal] ([spec]...)", Phase('help').description),
        ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
        ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
      ]
      parser.set_usage("\n%s" % format_usage(usages))
      parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                       "attempts to achieve the specified goal for the listed targets." """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

      parser.print_help()
      sys.exit(0)
    else:
      goals, specs = Goal.parse_args(args)

      # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
      ParseContext.enable_pantsnew()

      # Bootstrap goals by loading any configured bootstrap BUILD files
      with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
        with self.timer.timing('parse:bootstrap'):
          for path in self.config.getlist('goals', 'bootstrap_buildfiles', default = []):
            try:
              buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
              ParseContext(buildfile).parse()
            except (TypeError, ImportError, TaskError, GoalError):
              error(path, include_traceback=True)
            except (IOError, SyntaxError):
              error(path)

      # Bootstrap user goals by loading any BUILD files implied by targets
      with self.check_errors('The following targets could not be loaded:') as error:
        with self.timer.timing('parse:BUILD'):
          for spec in specs:
            self.parse_spec(error, spec)

      self.phases = [Phase(goal) for goal in goals]

      rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
      if rcfiles:
        rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

        # Break down the goals specified on the command line to the full set that will be run so we
        # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
        # register the task class hierarchy fully qualified names so we can apply defaults to
        # baseclasses.

        all_goals = Phase.execution_order(Phase(goal) for goal in goals)
        sections = OrderedSet()
        for goal in all_goals:
          sections.add(goal.name)
          for clazz in goal.task_type.mro():
            if clazz == Task:
              break
            sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

        augmented_args = rcfile.apply_defaults(sections, args)
        if augmented_args != args:
          del args[:]
          args.extend(augmented_args)
          print("(using pantsrc expansion: pants goal %s)" % ' '.join(augmented_args))

      Phase.setup_parser(parser, args, self.phases)
Example #47
0
 def load_config():
   with temporary_file() as ini:
     ini.write(config or '')
     ini.close()
     return Config.load()
Example #48
0
File: goal.py Project: soheilhy/commons
  def setup_parser(self, parser, args):
    self.config = Config.load()

    Goal.add_global_options(parser)

    # We support attempting zero or more goals.  Multiple goals must be delimited from further
    # options and non goal args with a '--'.  The key permutations we need to support:
    # ./pants goal => goals
    # ./pants goal goals => goals
    # ./pants goal compile src/java/... => compile
    # ./pants goal compile -x src/java/... => compile
    # ./pants goal compile src/java/... -x => compile
    # ./pants goal compile run -- src/java/... => compile, run
    # ./pants goal compile run -- src/java/... -x => compile, run
    # ./pants goal compile run -- -x src/java/... => compile, run

    if not args:
      args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
      def format_usage(usages):
        left_colwidth = 0
        for left, right in usages:
          left_colwidth = max(left_colwidth, len(left))
        lines = []
        for left, right in usages:
          lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
        return '\n'.join(lines)

      usages = [
        ("%prog goal goals ([spec]...)", Phase('goals').description),
        ("%prog goal help [goal] ([spec]...)", Phase('help').description),
        ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
        ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
      ]
      parser.set_usage("\n%s" % format_usage(usages))
      parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                       "attempts to achieve the specified goal for the listed targets." """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

      parser.print_help()
      sys.exit(0)
    else:
      goals, specs = Goal.parse_args(args)

      # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
      ParseContext.enable_pantsnew()

      # Bootstrap goals by loading any configured bootstrap BUILD files
      with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
        for path in self.config.getlist('goals', 'bootstrap_buildfiles', default = []):
          try:
            buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
            ParseContext(buildfile).parse()
          except (TypeError, ImportError, TaskError, GoalError):
            error(path, include_traceback=True)
          except (IOError, SyntaxError):
            error(path)

      # Bootstrap user goals by loading any BUILD files implied by targets
      with self.check_errors('The following targets could not be loaded:') as error:
        for spec in specs:
          self.parse_spec(error, spec)

      self.phases = [Phase(goal) for goal in goals]

      rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
      if rcfiles:
        rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

        # Break down the goals specified on the command line to the full set that will be run so we
        # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
        # register the task class hierarchy fully qualified names so we can apply defaults to
        # baseclasses.

        all_goals = Phase.execution_order(Phase(goal) for goal in goals)
        sections = OrderedSet()
        for goal in all_goals:
          sections.add(goal.name)
          for clazz in goal.task_type.mro():
            if clazz == Task:
              break
            sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

        augmented_args = rcfile.apply_defaults(sections, args)
        if augmented_args != args:
          del args[:]
          args.extend(augmented_args)
          print("(using pantsrc expansion: pants goal %s)" % ' '.join(augmented_args))

      Phase.setup_parser(parser, args, self.phases)
Example #49
0
    def setup_parser(self, parser, args):
        self.config = Config.load()
        Goal.add_global_options(parser)

        # We support attempting zero or more goals.  Multiple goals must be delimited from further
        # options and non goal args with a '--'.  The key permutations we need to support:
        # ./pants goal => goals
        # ./pants goal goals => goals
        # ./pants goal compile src/java/... => compile
        # ./pants goal compile -x src/java/... => compile
        # ./pants goal compile src/java/... -x => compile
        # ./pants goal compile run -- src/java/... => compile, run
        # ./pants goal compile run -- src/java/... -x => compile, run
        # ./pants goal compile run -- -x src/java/... => compile, run

        if not args:
            args.append('goals')

        if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):

            def format_usage(usages):
                left_colwidth = 0
                for left, right in usages:
                    left_colwidth = max(left_colwidth, len(left))
                lines = []
                for left, right in usages:
                    lines.append('  %s%s%s' %
                                 (left, ' ' *
                                  (left_colwidth - len(left) + 1), right))
                return '\n'.join(lines)

            usages = [
                ("%prog goal goals ([spec]...)", Phase('goals').description),
                ("%prog goal help [goal] ([spec]...)",
                 Phase('help').description),
                ("%prog goal [goal] [spec]...",
                 "Attempt goal against one or more targets."),
                ("%prog goal [goal] ([goal]...) -- [spec]...",
                 "Attempts all the specified goals."),
            ]
            parser.set_usage("\n%s" % format_usage(usages))
            parser.epilog = (
                "Either lists all installed goals, provides extra help for a goal or else "
                "attempts to achieve the specified goal for the listed targets."
                """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

            parser.print_help()
            sys.exit(0)
        else:
            goals, specs = Goal.parse_args(args)
            self.requested_goals = goals

            with self.run_tracker.new_workunit(name='setup',
                                               labels=[WorkUnit.SETUP]):
                # Bootstrap goals by loading any configured bootstrap BUILD files
                with self.check_errors(
                        'The following bootstrap_buildfiles cannot be loaded:'
                ) as error:
                    with self.run_tracker.new_workunit(name='bootstrap',
                                                       labels=[WorkUnit.SETUP]):
                        for path in self.config.getlist('goals',
                                                        'bootstrap_buildfiles',
                                                        default=[]):
                            try:
                                buildfile = BuildFile(
                                    get_buildroot(),
                                    os.path.relpath(path, get_buildroot()))
                                ParseContext(buildfile).parse()
                            except (TypeError, ImportError, TaskError,
                                    GoalError):
                                error(path, include_traceback=True)
                            except (IOError, SyntaxError):
                                error(path)
                # Now that we've parsed the bootstrap BUILD files we know about the SCM system,
                # so record its info for this run.
                self.run_tracker.run_info.add_scm_info()

                # Bootstrap user goals by loading any BUILD files implied by targets.
                spec_parser = SpecParser(self.root_dir)
                with self.check_errors(
                        'The following targets could not be loaded:') as error:
                    with self.run_tracker.new_workunit(name='parse',
                                                       labels=[WorkUnit.SETUP]):
                        for spec in specs:
                            try:
                                for target, address in spec_parser.parse(spec):
                                    if target:
                                        self.targets.append(target)
                                        # Force early BUILD file loading if this target is an alias that expands
                                        # to others.
                                        unused = list(target.resolve())
                                    else:
                                        siblings = Target.get_all_addresses(address.buildfile)
                                        prompt = ('did you mean' if len(siblings) == 1
                                                  else 'maybe you meant one of these')
                                        error('%s => %s?:\n    %s' %
                                              (address, prompt,
                                               '\n    '.join(str(a) for a in siblings)))
                            except (TypeError, ImportError, TaskError,
                                    GoalError):
                                error(spec, include_traceback=True)
                            except (IOError, SyntaxError,
                                    TargetDefinitionException):
                                error(spec)

            self.phases = [Phase(goal) for goal in goals]

            rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
            if rcfiles:
                rcfile = RcFile(rcfiles,
                                default_prepend=False,
                                process_default=True)

                # Break down the goals specified on the command line to the full set that will be run so we
                # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
                # register the task class hierarchy fully qualified names so we can apply defaults to
                # baseclasses.

                sections = OrderedSet()
                for phase in Engine.execution_order(self.phases):
                    for goal in phase.goals():
                        sections.add(goal.name)
                        for clazz in goal.task_type.mro():
                            if clazz == Task:
                                break
                            sections.add('%s.%s' %
                                         (clazz.__module__, clazz.__name__))

                augmented_args = rcfile.apply_defaults(sections, args)
                if augmented_args != args:
                    del args[:]
                    args.extend(augmented_args)
                    sys.stderr.write(
                        "(using pantsrc expansion: pants goal %s)\n" %
                        ' '.join(augmented_args))

            Phase.setup_parser(parser, args, self.phases)
Example #50
0
File: __init__.py Project: xianxu/pants
 def load_config():
     with temporary_file() as ini:
         ini.write(config or '')
         ini.close()
         return Config.load()
Example #51
0
File: goal.py Project: adamsxu/commons
  def setup_parser(self, parser, args):
    self.config = Config.load()

    parser.add_option("-x", "--time", action="store_true", dest = "time", default = False,
                      help = "Times goal phases and outputs a report.")

    parser.add_option("-v", "--log", action="store_true", dest = "log", default = False,
                      help = "[%default] Logs extra build output.")
    parser.add_option("-l", "--level", dest = "log_level",
                      type="choice", choices=['debug', 'info', 'warn'],
                      help = "[info] Sets the logging level to one of 'debug', 'info' or 'warn', "
                             "implies -v if set.")

    parser.add_option("--all", dest="target_directory", action="append",
                      help = "Adds all targets found in the given directory's BUILD file.  Can "
                             "be specified more than once.")
    parser.add_option("--all-recursive", dest="recursive_directory", action="append",
                      help = "Adds all targets found recursively under the given directory.  Can "
                             "be specified more than once to add more than one root target "
                             "directory to scan.")

    # We support attempting zero or more goals.  Multiple goals must be delimited from further
    # options and non goal args with a '--'.  The key permutations we need to support:
    # ./pants goal => goals
    # ./pants goal goals => goals
    # ./pants goal compile src/java/... => compile
    # ./pants goal compile -x src/java/... => compile
    # ./pants goal compile src/java/... -x => compile
    # ./pants goal compile run -- src/java/... => compile, run
    # ./pants goal compile run -- src/java/... -x => compile, run
    # ./pants goal compile run -- -x src/java/... => compile, run

    if not args:
      args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
      def format_usage(usages):
        left_colwidth = 0
        for left, right in usages:
          left_colwidth = max(left_colwidth, len(left))
        lines = []
        for left, right in usages:
          lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
        return '\n'.join(lines)

      usages = [
        ("%prog goal goals ([spec]...)", Phase('goals').description),
        ("%prog goal help [goal] ([spec]...)", Phase('help').description),
        ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
        ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
      ]
      parser.set_usage("\n%s" % format_usage(usages))
      parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                       "attempts to achieve the specified goal for the listed targets.")

      parser.print_help()
      sys.exit(0)
    else:
      goals = []
      help = False
      multi = False
      for i, arg in enumerate(args):
        help = help or 'help' == arg
        goals.append(arg)
        if '--' == arg:
          multi = True
          del args[i]
          goals.pop()
          break
        if arg.startswith('-'):
          break
      if not multi:
        goals = [goals[0]]

      spec_offset = len(goals) + 1 if help else len(goals)
      specs = [arg for arg in args[spec_offset:] if not arg.startswith('-')]
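
      # Illustrative trace (not part of the original source): with
      # args == ['compile', 'run', '--', 'src/java/...'], the loop above gathers
      # ['compile', 'run'] before hitting '--', sets multi, and the remaining non-flag
      # args become the target specs; without an explicit '--' only the first argument
      # is treated as a goal.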

      def parse_build(buildfile):
        # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
        ParseContext(buildfile).parse(PANTS_NEW=True)

      # Bootstrap goals by loading any configured bootstrap BUILD files
      with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
        for path in self.config.getlist('goals', 'bootstrap_buildfiles', default = []):
          try:
            buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
            parse_build(buildfile)
          except (TypeError, ImportError, TaskError, GoalError):
            error(path, include_traceback=True)
          except (IOError, SyntaxError):
            error(path)

      # Bootstrap user goals by loading any BUILD files implied by targets
      self.targets = []
      with self.check_errors('The following targets could not be loaded:') as error:
        for spec in specs:
          try:
            address = Address.parse(get_buildroot(), spec)
            parse_build(address.buildfile)
            target = Target.get(address)
            if target:
              self.targets.append(target)
            else:
              siblings = Target.get_all_addresses(address.buildfile)
              prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
              error('%s => %s?:\n    %s' % (address, prompt,
                                            '\n    '.join(str(a) for a in siblings)))
          except (TypeError, ImportError, TaskError, GoalError):
            error(spec, include_traceback=True)
          except (IOError, SyntaxError):
            error(spec)

      self.phases = [Phase(goal) for goal in goals]
      Phase.setup_parser(parser, args, self.phases)