def test_empty_trash(self):
    """End-to-end check that `clean-all` leaves no trash directory behind."""
    with self.temporary_workdir() as work_dir:
        trash_dir = os.path.join(work_dir, "trash")
        # NOTE(review): `trash_dir + "foo.txt"` concatenates without a path
        # separator, so this creates "<work_dir>/trashfoo.txt" rather than a
        # file inside the trash dir -- behavior preserved here; confirm intent.
        # Create the file with the stdlib instead of shelling out to `touch`
        # (whose exit status was silently discarded).
        with open(trash_dir + "foo.txt", "w"):
            pass
        self.assert_success(
            self.run_pants_with_workdir(["clean-all"], work_dir))
        # os._exists is a private helper of the os module; use the public
        # os.path.exists instead.
        self.assertFalse(os.path.exists(trash_dir))
Esempio n. 2
0
def _linux_open(files):
  """Open each given file with the freedesktop `xdg-open` utility."""
  opener = "xdg-open"
  if not _cmd_exists(opener):
    raise OpenError("The program '{}' isn't in your PATH. Please install and re-run this "
                    "goal.".format(opener))
  # One invocation per file; exit statuses are intentionally ignored.
  for path in list(files):
    subprocess.call([opener, path])
Esempio n. 3
0
def _linux_open(files):
    """Open the given files via `xdg-open`, raising if it is not installed."""
    cmd = "xdg-open"
    if _cmd_exists(cmd):
        for target in list(files):
            subprocess.call([cmd, target])
        return
    raise OpenError(
        "The program '{}' isn't in your PATH. Please install and re-run this "
        "goal.".format(cmd))
Esempio n. 4
0
    def has_python_version(cls, version):
        """Return True if the current system has the specified version of python.

        :param version: A python version string, such as 2.7, 3.
        :returns: True when `python<version> -V` can be launched, False when
            the executable is missing (OSError).
        """
        try:
            # Discard the interpreter's version banner: we only care whether
            # the executable launches, and the original let `-V` output leak
            # onto the caller's console.
            subprocess.call(['python%s' % version, '-V'],
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL)
            return True
        except OSError:
            return False
Esempio n. 5
0
    def execute_codegen(self, target, target_workdir):
        """Run the thrift compiler over each of `target`'s sources.

        :param target: the codegen target whose thrift sources are compiled.
        :param target_workdir: output directory for the generated sources.
        :raises TaskError: if the thrift binary exits non-zero for any source.
        """
        target_cmd = self._thrift_cmd[:]

        # One -I include path per source root found in the closure of
        # gen targets.
        bases = OrderedSet(tgt.target_base for tgt in target.closure()
                           if self.is_gentarget(tgt))
        for base in bases:
            target_cmd.extend(('-I', base))

        # Not every target type declares compiler_args, hence the hasattr.
        if hasattr(target, 'compiler_args'):
            target_cmd.extend(list(target.compiler_args or []))

        target_cmd.extend(('-o', target_workdir))

        # Invoke thrift once per source file, streaming its output into a
        # dedicated workunit.
        for source in target.sources_relative_to_buildroot():
            cmd = target_cmd[:]
            cmd.append(os.path.join(get_buildroot(), source))
            with self.context.new_workunit(name=source,
                                           labels=[WorkUnitLabel.TOOL],
                                           cmd=' '.join(cmd)) as workunit:
                result = subprocess.call(cmd,
                                         stdout=workunit.output('stdout'),
                                         stderr=workunit.output('stderr'))
                if result != 0:
                    raise TaskError('{} ... exited non-zero ({})'.format(
                        self._thrift_binary, result))

        # The thrift compiler generates sources to a gen-[lang] subdir of the `-o` argument.  We
        # relocate the generated sources to the root of the `target_workdir` so that our base class
        # maps them properly.
        gen_dir = os.path.join(target_workdir,
                               'gen-{}'.format(self.thrift_generator))
        for path in os.listdir(gen_dir):
            shutil.move(os.path.join(gen_dir, path), target_workdir)
        os.rmdir(gen_dir)
Esempio n. 6
0
    def execute_codegen(self, target, target_workdir):
        """Run protoc with the Go plugin over `target`'s sources.

        :param target: the codegen target whose .proto sources are compiled.
        :param target_workdir: directory receiving generated Go sources
            (under src/go).
        :raises TaskError: if protoc exits non-zero for any source.
        """
        target_cmd = [self._protoc]

        # protoc discovers protoc-gen-go via $PATH, so prepend the plugin's
        # directory to a copy of the current environment.
        protoc_gen_go = ProtocGenGo.global_instance().select(self.context)
        env = os.environ.copy()
        env['PATH'] = ':'.join([os.path.dirname(protoc_gen_go), env['PATH']])

        # One -I per source root in the closure of gen targets.
        bases = OrderedSet(tgt.target_base for tgt in target.closure()
                           if self.is_gentarget(tgt))
        for base in bases:
            target_cmd.append('-I={}'.format(
                os.path.join(get_buildroot(), base)))

        outdir = os.path.join(target_workdir, 'src', 'go')
        safe_mkdir(outdir)
        target_cmd.append('--go_out={}'.format(outdir))

        # Invoke protoc once per source, each inside its own workunit.
        all_sources = list(target.sources_relative_to_buildroot())
        for source in all_sources:
            file_cmd = target_cmd + [os.path.join(get_buildroot(), source)]
            with self.context.new_workunit(name=source,
                                           labels=[WorkUnitLabel.TOOL],
                                           cmd=' '.join(file_cmd)) as workunit:
                self.context.log.info(' '.join(file_cmd))
                result = subprocess.call(file_cmd,
                                         env=env,
                                         stdout=workunit.output('stdout'),
                                         stderr=workunit.output('stderr'))
                if result != 0:
                    raise TaskError('{} ... exited non-zero ({})'.format(
                        self._protoc, result))
  def execute_codegen(self, target, target_workdir):
    """Compile `target`'s thrift sources, relocating output into `target_workdir`."""
    base_cmd = list(self._thrift_cmd)

    # One -I per source root in the closure of gen targets.
    for root in OrderedSet(t.target_base for t in target.closure() if self.is_gentarget(t)):
      base_cmd += ['-I', root]

    # Targets without a compiler_args attribute contribute nothing here.
    base_cmd += list(getattr(target, 'compiler_args', None) or [])

    base_cmd += ['-o', target_workdir]

    for src in target.sources_relative_to_buildroot():
      full_cmd = base_cmd + [os.path.join(get_buildroot(), src)]
      with self.context.new_workunit(name=src,
                                     labels=[WorkUnitLabel.TOOL],
                                     cmd=' '.join(full_cmd)) as workunit:
        rc = subprocess.call(full_cmd,
                             stdout=workunit.output('stdout'),
                             stderr=workunit.output('stderr'))
        if rc != 0:
          raise TaskError('{} ... exited non-zero ({})'.format(self._thrift_binary, rc))

    # Thrift writes into a gen-<lang>/ subdir of the -o dir; hoist that content
    # up into target_workdir so the base class maps the sources properly.
    gen_dir = os.path.join(target_workdir, 'gen-{}'.format(self.thrift_generator))
    for entry in os.listdir(gen_dir):
      shutil.move(os.path.join(gen_dir, entry), target_workdir)
    os.rmdir(gen_dir)
Esempio n. 8
0
  def execute_codegen(self, target, target_workdir):
    """Run protoc with the Go plugin over `target`'s sources into src/go.

    :param target: the codegen target whose .proto sources are compiled.
    :param target_workdir: directory receiving generated Go sources.
    :raises TaskError: if protoc exits non-zero for any source.
    """
    target_cmd = [self._protoc]

    # protoc locates protoc-gen-go via $PATH; prepend the plugin's directory
    # to a copy of the current environment.
    protoc_gen_go = ProtocGenGo.global_instance().select(self.context)
    env = os.environ.copy()
    env['PATH'] = ':'.join([os.path.dirname(protoc_gen_go), env['PATH']])

    # One -I per source root in the closure of gen targets.
    bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
    for base in bases:
      target_cmd.append('-I={}'.format(os.path.join(get_buildroot(), base)))

    outdir = os.path.join(target_workdir, 'src', 'go')
    safe_mkdir(outdir)
    target_cmd.append('--go_out={}'.format(outdir))

    # One protoc invocation per source, each inside its own workunit.
    all_sources = list(target.sources_relative_to_buildroot())
    for source in all_sources:
      file_cmd = target_cmd + [os.path.join(get_buildroot(), source)]
      with self.context.new_workunit(name=source,
                                     labels=[WorkUnitLabel.TOOL],
                                     cmd=' '.join(file_cmd)) as workunit:
        self.context.log.info(' '.join(file_cmd))
        result = subprocess.call(file_cmd,
                                 env=env,
                                 stdout=workunit.output('stdout'),
                                 stderr=workunit.output('stderr'))
        if result != 0:
          raise TaskError('{} ... exited non-zero ({})'.format(self._protoc, result))
Esempio n. 9
0
  def _generate_thrift(self, target, target_workdir):
    """Compile `target`'s thrift sources and arrange gen-go output under src/go.

    :param target: the codegen target whose thrift sources are compiled.
    :param target_workdir: output directory for the generated sources.
    :raises TaskError: if the thrift binary exits non-zero for any source.
    """
    target_cmd = self._thrift_cmd[:]

    # One -I include path per source root in the closure of gen targets.
    bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
    for base in bases:
      target_cmd.extend(('-I', base))

    target_cmd.extend(('-o', target_workdir))

    all_sources = list(target.sources_relative_to_buildroot())
    # Some subclasses only support a single source file; let them veto.
    if len(all_sources) != 1:
      self._validate_supports_more_than_one_source()

    for source in all_sources:
      file_cmd = target_cmd + [os.path.join(get_buildroot(), source)]
      with self.context.new_workunit(name=source,
                                     labels=[WorkUnitLabel.TOOL],
                                     cmd=' '.join(file_cmd)) as workunit:
        result = subprocess.call(file_cmd,
                                 stdout=workunit.output('stdout'),
                                 stderr=workunit.output('stderr'))
        if result != 0:
          raise TaskError('{} ... exited non-zero ({})'.format(self._thrift_binary, result))

    # thrift emits into gen-go/; move it to the src/go layout expected by
    # downstream Go tooling.
    gen_dir = os.path.join(target_workdir, 'gen-go')
    src_dir = os.path.join(target_workdir, 'src')
    safe_mkdir(src_dir)
    go_dir = os.path.join(target_workdir, 'src', 'go')
    os.rename(gen_dir, go_dir)
Esempio n. 10
0
  def execute(self):
    """Build an apk for each invalidated android_binary and register 'apk' products."""
    binaries = self.context.targets(self.is_android_binary)
    with self.invalidated(binaries) as invalidation_check:
      invalid_targets = []
      for vt in invalidation_check.invalid_vts:
        invalid_targets.extend(vt.targets)
      for binary in invalid_targets:

        # Gather the dex files produced for this binary by earlier tasks.
        dex_files = []
        mapping = self.context.products.get('dex')
        for dex in mapping.get(binary):
          dex_files.append(dex)

        # Collect resource dirs from the binary's transitive dependencies.
        resource_deps = self.context.build_graph.transitive_subgraph_of_addresses([binary.address])
        resource_dirs = [t.resource_dir for t in resource_deps if isinstance(t, AndroidResources)]

        # Priority for resources is left to right so dependency order matters.
        # TODO(mateo): Make resources a first class AndroidBinary attribute to limit the order-dependent parts of the
        # BUILD files.
        args = self._render_args(binary, resource_dirs, dex_files)
        with self.context.new_workunit(name='apk-bundle',
                                       labels=[WorkUnitLabel.MULTITOOL]) as workunit:
          returncode = subprocess.call(args, stdout=workunit.output('stdout'),
                                       stderr=workunit.output('stderr'))
          if returncode:
            raise TaskError('Android aapt tool exited non-zero: {0}'.format(returncode))
    # Register the apk product for every binary (valid or freshly built) so
    # downstream tasks can locate it.
    for binary in binaries:
      apk_name = self.package_name(binary)
      self.context.products.get('apk').add(binary, self.workdir).append(apk_name)
Esempio n. 11
0
    def _generate_thrift(self, target, target_workdir):
        """Compile `target`'s thrift sources and move gen-go output under src/go."""
        cmd_prefix = list(self._thrift_cmd)

        # Include paths: one per source root in the closure of gen targets.
        include_roots = OrderedSet(t.target_base for t in target.closure()
                                   if self.is_gentarget(t))
        for root in include_roots:
            cmd_prefix += ['-I', root]
        cmd_prefix += ['-o', target_workdir]

        sources = list(target.sources_relative_to_buildroot())
        # Subclasses that only support a single source get a chance to object.
        if len(sources) != 1:
            self._validate_supports_more_than_one_source()

        for src in sources:
            full_cmd = cmd_prefix + [os.path.join(get_buildroot(), src)]
            with self.context.new_workunit(name=src,
                                           labels=[WorkUnitLabel.TOOL],
                                           cmd=' '.join(full_cmd)) as workunit:
                exit_code = subprocess.call(full_cmd,
                                            stdout=workunit.output('stdout'),
                                            stderr=workunit.output('stderr'))
                if exit_code != 0:
                    raise TaskError('{} ... exited non-zero ({})'.format(
                        self._thrift_binary, exit_code))

        # Relocate thrift's gen-go output to the src/go layout expected
        # downstream.
        safe_mkdir(os.path.join(target_workdir, 'src'))
        os.rename(os.path.join(target_workdir, 'gen-go'),
                  os.path.join(target_workdir, 'src', 'go'))
Esempio n. 12
0
    def console_output(self, targets):
        """Run cloc over the targets' sources and yield its report lines.

        :param targets: targets whose sources are counted; only target roots
          unless --transitive is set.
        :raises TaskError: if cloc exits non-zero in either execution mode.
        """
        if not self.get_options().transitive:
            targets = self.context.target_roots

        buildroot = get_buildroot()
        with temporary_dir() as tmpdir:
            # Write the paths of all files we want cloc to process to the so-called 'list file'.
            # TODO: 1) list_file, report_file and ignored_file should be relative files within the
            # execution "chroot", 2) list_file should be part of an input files Snapshot, and
            # 3) report_file and ignored_file should be part of an output files Snapshot, when we have
            # that capability.
            list_file = os.path.join(tmpdir, 'list_file')
            with open(list_file, 'w') as list_file_out:
                for target in targets:
                    for source in target.sources_relative_to_buildroot():
                        list_file_out.write(os.path.join(buildroot, source))
                        # Text-mode handle: write a str newline (the original
                        # wrote bytes, a TypeError on Python 3).
                        list_file_out.write('\n')

            report_file = os.path.join(tmpdir, 'report_file')
            ignored_file = os.path.join(tmpdir, 'ignored')

            # TODO: Look at how to make BinaryUtil support Snapshots - such as adding an instrinsic to do
            # network fetch directly into a Snapshot.
            # See http://cloc.sourceforge.net/#options for cloc cmd-line options.
            cmd = (self._get_cloc_script(), '--skip-uniqueness',
                   '--ignored={}'.format(ignored_file),
                   '--list-file={}'.format(list_file),
                   '--report-file={}'.format(report_file))
            if self.context._scheduler is None:
                with self.context.new_workunit(name='cloc',
                                               labels=[WorkUnitLabel.TOOL],
                                               cmd=' '.join(cmd)) as workunit:
                    exit_code = subprocess.call(cmd,
                                                stdout=workunit.output('stdout'),
                                                stderr=workunit.output('stderr'))
            else:
                # TODO: Longer term we need to figure out what to put on $PATH in a remote execution env.
                # Currently, we are adding everything within $PATH to the request.
                env_path = ['PATH', os.environ.get('PATH')]
                req = ExecuteProcessRequest(cmd, env_path)
                execute_process_result, = self.context._scheduler.product_request(
                    ExecuteProcessResult, [req])
                exit_code = execute_process_result.exit_code
            # Fail loudly in both execution modes.  The original only checked
            # in the scheduler branch, and its message referenced an undefined
            # `result` (a NameError on the failure path).
            if exit_code != 0:
                raise TaskError('{} ... exited non-zero ({}).'.format(
                    ' '.join(cmd), exit_code))

            with open(report_file, 'r') as report_file_in:
                for line in report_file_in.read().split('\n'):
                    yield line

            if self.get_options().ignored:
                yield 'Ignored the following files:'
                with open(ignored_file, 'r') as ignored_file_in:
                    for line in ignored_file_in.read().split('\n'):
                        yield line
Esempio n. 13
0
  def console_output(self, targets):
    """Yield cloc's line-count report (and optionally the ignored-file list).

    :param targets: targets whose sources are counted; only target roots
      unless --transitive is set.
    :raises TaskError: if cloc exits non-zero.
    """
    if not self.get_options().transitive:
      targets = self.context.target_roots

    buildroot = get_buildroot()
    with temporary_dir() as tmpdir:
      # Write the paths of all files we want cloc to process to the so-called 'list file'.
      # TODO: 1) list_file, report_file and ignored_file should be relative files within the
      # execution "chroot", 2) list_file should be part of an input files Snapshot, and
      # 3) report_file and ignored_file should be part of an output files Snapshot, when we have
      # that capability.
      list_file = os.path.join(tmpdir, 'list_file')
      with open(list_file, 'w') as list_file_out:
        for target in targets:
          for source in target.sources_relative_to_buildroot():
            list_file_out.write(os.path.join(buildroot, source))
            # Text-mode handle: write a str newline (the original wrote
            # bytes, a TypeError on Python 3).
            list_file_out.write('\n')

      report_file = os.path.join(tmpdir, 'report_file')
      ignored_file = os.path.join(tmpdir, 'ignored')

      # TODO: Look at how to make BinaryUtil support Snapshots - such as adding an instrinsic to do
      # network fetch directly into a Snapshot.
      # See http://cloc.sourceforge.net/#options for cloc cmd-line options.
      cmd = (
        self._get_cloc_script(),
        '--skip-uniqueness',
        '--ignored={}'.format(ignored_file),
        '--list-file={}'.format(list_file),
        '--report-file={}'.format(report_file)
      )
      with self.context.new_workunit(
        name='cloc',
        labels=[WorkUnitLabel.TOOL],
        cmd=' '.join(cmd)) as workunit:
        exit_code = subprocess.call(
          cmd,
          stdout=workunit.output('stdout'),
          stderr=workunit.output('stderr')
        )

        if exit_code != 0:
          raise TaskError('{} ... exited non-zero ({}).'.format(' '.join(cmd), exit_code))

      with open(report_file, 'r') as report_file_in:
        for line in report_file_in.read().split('\n'):
          yield line

      if self.get_options().ignored:
        yield 'Ignored the following files:'
        with open(ignored_file, 'r') as ignored_file_in:
          for line in ignored_file_in.read().split('\n'):
            yield line
Esempio n. 14
0
    def execute(self):
        """Sign each invalidated apk with every configured keystore.

        Afterwards, registers a 'release_apk' product for any sign target
        whose release-signed apk exists on disk.
        """
        # One time setup of the default keystore config file.
        if not os.path.isfile(self.default_config_location):
            self.setup_default_config(self.default_config_location)

        targets = self.context.targets(self.is_signtarget)
        with self.invalidated(targets) as invalidation_check:
            invalid_targets = []
            for vt in invalidation_check.invalid_vts:
                invalid_targets.extend(vt.targets)
            for target in invalid_targets:

                def get_products_path(target):
                    """Get path of target's unsigned apks as created by AaptBuilder."""
                    unsigned_apks = self.context.products.get('apk')
                    packages = unsigned_apks.get(target)
                    if packages:
                        for tgts, products in packages.items():
                            for prod in products:
                                yield os.path.join(tgts, prod)

                packages = list(get_products_path(target))
                for unsigned_apk in packages:
                    keystores = KeystoreResolver.resolve(self.config_file)

                    # One signing pass per keystore, each into its own
                    # build-type output dir.
                    for key in keystores:
                        outdir = self.sign_apk_out(target,
                                                   keystores[key].build_type)
                        safe_mkdir(outdir)
                        args = self._render_args(target, keystores[key],
                                                 unsigned_apk, outdir)
                        with self.context.new_workunit(
                                name='sign_apk',
                                labels=[WorkUnitLabel.MULTITOOL]) as workunit:
                            returncode = subprocess.call(
                                args,
                                stdout=workunit.output('stdout'),
                                stderr=workunit.output('stderr'))
                            if returncode:
                                raise TaskError(
                                    'The SignApk jarsigner process exited non-zero: {0}'
                                    .format(returncode))

        # Register release apks for all sign targets (not only the
        # invalidated ones) when present on disk.
        for target in targets:
            release_path = self.sign_apk_out(target, 'release')
            release_apk = self.signed_package_name(target, 'release')

            if os.path.isfile(os.path.join(release_path, release_apk)):
                self.context.products.get('release_apk').add(
                    target, release_path).append(release_apk)
Esempio n. 15
0
    def execute_codegen(self, target, target_workdir):
        """Run protoc over `target`'s sources, emitting Java (and plugin) output.

        :param target: codegen target whose .proto sources are compiled.
        :param target_workdir: output directory for generated code.
        :raises TaskError: if protoc exits non-zero.
        """
        sources_by_base = self._calculate_sources(target)
        sources = target.sources_relative_to_buildroot()

        bases = OrderedSet()
        # Note that the root import must come first, otherwise protoc can get confused
        # when trying to resolve imports from the root against the import's source root.
        if self.get_options().import_from_root:
            bases.add('.')
        bases.update(sources_by_base.keys())
        bases.update(self._proto_path_imports([target]))

        gen_flag = '--java_out'

        gen = '{0}={1}'.format(gen_flag, target_workdir)

        args = [self.protobuf_binary, gen]

        # Each configured protoc plugin writes alongside the java output.
        if self.plugins:
            for plugin in self.plugins:
                args.append("--{0}_out={1}".format(plugin, target_workdir))

        for base in bases:
            args.append('--proto_path={0}'.format(base))

        args.extend(sources)

        # Tack on extra path entries. These can be used to find protoc plugins
        protoc_environ = os.environ.copy()
        if self._extra_paths:
            protoc_environ['PATH'] = os.pathsep.join(
                self._extra_paths + protoc_environ['PATH'].split(os.pathsep))

        # Note: The test_source_ordering integration test scrapes this output, so modify it with care.
        self.context.log.debug('Executing: {0}'.format('\\\n  '.join(args)))
        with self.context.new_workunit(name='protoc',
                                       labels=[WorkUnitLabel.TOOL],
                                       cmd=' '.join(args)) as workunit:
            result = subprocess.call(args,
                                     env=protoc_environ,
                                     stdout=workunit.output('stdout'),
                                     stderr=workunit.output('stderr'))
            if result != 0:
                raise TaskError('{} ... exited non-zero ({})'.format(
                    self.protobuf_binary, result))
Esempio n. 16
0
    def console_output(self, targets):
        """Yield cloc's report lines for the targets' sources.

        :param targets: targets to count; only target roots unless
          --transitive is set.
        :raises TaskError: if cloc exits non-zero.
        """
        if not self.get_options().transitive:
            targets = self.context.target_roots

        buildroot = get_buildroot()
        with temporary_dir() as tmpdir:
            # Write the paths of all files we want cloc to process to the so-called 'list file'.
            list_file = os.path.join(tmpdir, 'list_file')
            with open(list_file, 'w') as list_file_out:
                for target in targets:
                    for source in target.sources_relative_to_buildroot():
                        list_file_out.write(os.path.join(buildroot, source))
                        # Text-mode handle: write a str newline (the original
                        # wrote bytes, a TypeError on Python 3).
                        list_file_out.write('\n')

            report_file = os.path.join(tmpdir, 'report_file')
            ignored_file = os.path.join(tmpdir, 'ignored')
            cloc_script = self._get_cloc_script()
            # See http://cloc.sourceforge.net/#options for cloc cmd-line options.
            cmd = [
                cloc_script, '--skip-uniqueness',
                '--ignored={}'.format(ignored_file),
                '--list-file={}'.format(list_file),
                '--report-file={}'.format(report_file)
            ]
            with self.context.new_workunit(name='cloc',
                                           labels=[WorkUnitLabel.TOOL],
                                           cmd=' '.join(cmd)) as workunit:
                result = subprocess.call(cmd,
                                         stdout=workunit.output('stdout'),
                                         stderr=workunit.output('stderr'))

            if result != 0:
                raise TaskError('{} ... exited non-zero ({}).'.format(
                    ' '.join(cmd), result))

            with open(report_file, 'r') as report_file_in:
                for line in report_file_in.read().split('\n'):
                    yield line

            if self.get_options().ignored:
                yield 'Ignored the following files:'
                with open(ignored_file, 'r') as ignored_file_in:
                    for line in ignored_file_in.read().split('\n'):
                        yield line
Esempio n. 17
0
  def execute_codegen(self, target, target_workdir):
    """Invoke protoc once for `target`, emitting Java (and plugin) output."""
    sources_by_base = self._calculate_sources(target)
    sources = target.sources_relative_to_buildroot()

    bases = OrderedSet()
    # The root import must come first, otherwise protoc can get confused when
    # trying to resolve imports from the root against the import's source root.
    if self.get_options().import_from_root:
      bases.add('.')
    bases.update(sources_by_base.keys())
    bases.update(self._proto_path_imports([target]))

    args = [self.protobuf_binary, '{0}={1}'.format('--java_out', target_workdir)]
    # Each configured plugin writes alongside the java output.
    for plugin in (self.plugins or []):
      args.append("--{0}_out={1}".format(plugin, target_workdir))
    args.extend('--proto_path={0}'.format(base) for base in bases)
    args.extend(sources)

    # Extra path entries let protoc locate its plugins.
    protoc_environ = os.environ.copy()
    if self._extra_paths:
      protoc_environ['PATH'] = os.pathsep.join(self._extra_paths
                                               + protoc_environ['PATH'].split(os.pathsep))

    # Note: The test_source_ordering integration test scrapes this output, so modify it with care.
    self.context.log.debug('Executing: {0}'.format('\\\n  '.join(args)))
    with self.context.new_workunit(name='protoc',
                                   labels=[WorkUnitLabel.TOOL],
                                   cmd=' '.join(args)) as workunit:
      rc = subprocess.call(args,
                           env=protoc_environ,
                           stdout=workunit.output('stdout'),
                           stderr=workunit.output('stderr'))
      if rc != 0:
        raise TaskError('{} ... exited non-zero ({})'.format(self.protobuf_binary, rc))
Esempio n. 18
0
  def execute(self):
    """Sign each invalidated apk with every configured keystore, then register
    'release_apk' products for release-signed apks found on disk."""
    # One time setup of the default keystore config file.
    if not os.path.isfile(self.default_config_location):
      self.setup_default_config(self.default_config_location)

    targets = self.context.targets(self.is_signtarget)
    with self.invalidated(targets) as invalidation_check:
      invalid_targets = []
      for vt in invalidation_check.invalid_vts:
        invalid_targets.extend(vt.targets)
      for target in invalid_targets:

        def get_products_path(target):
          """Get path of target's unsigned apks as created by AaptBuilder."""
          unsigned_apks = self.context.products.get('apk')
          packages = unsigned_apks.get(target)
          if packages:
            for tgts, products in packages.items():
              for prod in products:
                yield os.path.join(tgts, prod)

        packages = list(get_products_path(target))
        for unsigned_apk in packages:
          keystores = KeystoreResolver.resolve(self.config_file)

          # One signing pass per keystore, each into its build-type outdir.
          for key in keystores:
            outdir = self.sign_apk_out(target, keystores[key].build_type)
            safe_mkdir(outdir)
            args = self._render_args(target, keystores[key], unsigned_apk, outdir)
            with self.context.new_workunit(name='sign_apk',
                                           labels=[WorkUnitLabel.MULTITOOL]) as workunit:
              returncode = subprocess.call(args, stdout=workunit.output('stdout'),
                                           stderr=workunit.output('stderr'))
              if returncode:
                raise TaskError('The SignApk jarsigner process exited non-zero: {0}'
                                .format(returncode))

    # Register release apks for all sign targets (not only the invalidated
    # ones) when present on disk.
    for target in targets:
      release_path = self.sign_apk_out(target, 'release')
      release_apk = self.signed_package_name(target, 'release')

      if os.path.isfile(os.path.join(release_path, release_apk)):
        self.context.products.get('release_apk').add(target, release_path).append(release_apk)
Esempio n. 19
0
  def execute(self):
    """Zipalign every release-signed apk produced for zipalign targets."""
    for target in self.context.targets(self.is_zipaligntarget):

      def get_products_path(tgt):
        """Get path of target's apks that are signed with release keystores by SignApk task."""
        packages = self.context.products.get('release_apk').get(tgt)
        for base_dir, products in (packages or {}).items():
          for product in products:
            yield os.path.join(base_dir, product)

      for package in list(get_products_path(target)):
        safe_mkdir(self.zipalign_out(target))
        cmd = self._render_args(package, target)
        with self.context.new_workunit(name='zipalign', labels=[WorkUnitLabel.MULTITOOL]) as workunit:
          rc = subprocess.call(cmd, stdout=workunit.output('stdout'),
                               stderr=workunit.output('stderr'))
          if rc:
            raise TaskError('The zipalign process exited non-zero: {0}'.format(rc))
Esempio n. 20
0
  def execute(self):
    """Generate R.java via aapt for each binary and its manifest-bearing libraries."""
    # The number of R.java files produced from each library is == |sdks in play for its dependees|.
    # The number of R.java files produced for each android_binary == |android_library deps| + 1
    binaries = self.context.targets(self.is_android_binary)
    self.create_sdk_jar_deps(binaries)
    for binary in binaries:
      # TODO(mateo) add invalidation framework. Adding it here doesn't work right now because the
      # framework can't differentiate between one library that has to be compiled by multiple sdks.

      gentargets = [binary]

      def gather_gentargets(tgt):
        """Gather all AndroidLibrary targets that have a manifest."""
        if isinstance(tgt, AndroidLibrary) and tgt.manifest:
          gentargets.append(tgt)
      binary.walk(gather_gentargets)
      for gen in gentargets:
        aapt_output = self._relative_genfile(gen)
        aapt_file = os.path.join(self.aapt_out(binary), aapt_output)

        resource_deps = self.context.build_graph.transitive_subgraph_of_addresses([gen.address])
        resource_dirs = [t.resource_dir for t in resource_deps if isinstance(t, AndroidResources)]
        if resource_dirs:
          # Generate at most once per aapt_file; later binaries reuse the
          # cached library target.
          if aapt_file not in self._created_library_targets:

            # Priority for resources is left->right, so dependency order matters (see TODO in aapt_builder).
            args = self._render_args(binary, gen.manifest, resource_dirs)
            with self.context.new_workunit(name='aaptgen', labels=[WorkUnitLabel.MULTITOOL]) as workunit:
              returncode = subprocess.call(args,
                                           stdout=workunit.output('stdout'),
                                           stderr=workunit.output('stderr'))
              if returncode:
                raise TaskError('The AaptGen process exited non-zero: {}'.format(returncode))
            new_target = self.create_target(binary, gen)
            self._created_library_targets[aapt_file] = new_target
          gen.inject_dependency(self._created_library_targets[aapt_file].address)
Esempio n. 21
0
def _cmd_exists(cmd):
    return subprocess.call(["/usr/bin/which", cmd],
                           shell=False,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE) == 0
Esempio n. 22
0
def _mac_open(files):
    """Hand the given files to the macOS `open` utility in one invocation."""
    cmd = ['open']
    cmd.extend(files)
    subprocess.call(cmd)
Esempio n. 23
0
def _cmd_exists(cmd):
  return subprocess.call(["/usr/bin/which", cmd], shell=False, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE) == 0
Esempio n. 24
0
def _mac_open(files):
  """Open all given files via the macOS `open` command."""
  subprocess.call(['open'] + [f for f in files])
Esempio n. 25
0
 def _check_call(self, args, failure_msg=None, raise_type=None):
   """Run a git command and funnel its exit status through _check_result."""
   cmdline = self._create_git_cmdline(args)
   self._log_call(cmdline)
   self._check_result(cmdline, subprocess.call(cmdline), failure_msg, raise_type)
Esempio n. 26
0
 def call(args):
   # Thin closure: prepend the captured `cmd` prefix and forward the captured
   # **kwargs to subprocess.call, returning its exit status.
   # NOTE(review): `cmd` and `kwargs` come from an enclosing scope that is not
   # visible here -- confirm against the surrounding function.
   return subprocess.call(cmd + args, **kwargs)
Esempio n. 27
0
 def call(args):
   # Forward to subprocess.call with the closed-over command prefix `cmd` and
   # keyword arguments `kwargs`; returns the child's exit status.
   # NOTE(review): both names are captured from an enclosing scope not shown
   # in this view.
   return subprocess.call(cmd + args, **kwargs)
Esempio n. 28
0
def is_exe(name):
  """Return True if the external `which` lookup succeeds for `name`.

  Opens os.devnull in a context manager so the handle is closed promptly
  (the original leaked the file object opened inline).
  """
  with open(os.devnull, 'w') as devnull:
    result = subprocess.call(['which', name], stdout=devnull, stderr=subprocess.STDOUT)
  return result == 0
Esempio n. 29
0
 def _check_call(self, args, failure_msg=None, raise_type=None):
   """Execute a git command, delegating exit-status handling to _check_result.

   :param args: git sub-command and arguments passed to the cmdline builder.
   :param failure_msg: optional message forwarded to _check_result on failure.
   :param raise_type: optional exception type forwarded to _check_result.
   """
   cmd = self._create_git_cmdline(args)
   self._log_call(cmd)
   result = subprocess.call(cmd)
   self._check_result(cmd, result, failure_msg, raise_type)
Esempio n. 30
0
def is_exe(name):
    """True when the external `which` lookup succeeds for `name`.

    The devnull handle is opened in a `with` block so it is not leaked
    (the original opened it inline and never closed it).
    """
    with open(os.devnull, 'w') as devnull:
        result = subprocess.call(['which', name],
                                 stdout=devnull,
                                 stderr=subprocess.STDOUT)
    return result == 0
 def test_empty_trash_async(self):
   """End-to-end check that `clean-all --async` leaves no trash directory."""
   with self.temporary_workdir() as work_dir:
     trash_dir = os.path.join(work_dir, "trash")
     # NOTE(review): string concatenation creates "<work_dir>/trashfoo.txt",
     # not a file inside the trash dir -- preserved as-is; confirm intent.
     # Create the file with the stdlib instead of shelling out to `touch`.
     with open(trash_dir + "foo.txt", "w"):
       pass
     self.assert_success(self.run_pants_with_workdir(["clean-all", "--async"], work_dir))
     # Use the public os.path.exists rather than the private os._exists.
     self.assertFalse(os.path.exists(trash_dir))