Example #1
async def run_setup_pys(targets: HydratedTargets, options: SetupPyOptions, console: Console,
                        provenance_map: AddressProvenanceMap,
                        distdir: DistDir, workspace: Workspace) -> SetupPy:
  """Run setup.py commands on all exported targets addressed."""
  args = tuple(options.values.args)
  validate_args(args)

  # Get all exported targets, ignoring any non-exported targets that happened to be
  # globbed over, but erroring on any explicitly-requested non-exported targets.

  exported_targets: List[ExportedTarget] = []
  explicit_nonexported_targets: List[HydratedTarget] = []

  for hydrated_target in targets:
    if _is_exported(hydrated_target):
      exported_targets.append(ExportedTarget(hydrated_target))
    elif provenance_map.is_single_address(hydrated_target.address):
      explicit_nonexported_targets.append(hydrated_target)
  if explicit_nonexported_targets:
    raise TargetNotExported(
      'Cannot run setup.py on these targets, because they have no `provides=` clause: '
      f'{", ".join(so.address.reference() for so in explicit_nonexported_targets)}')

  if options.values.transitive:
    # Expand out to all owners of the entire dep closure.
    tht = await Get[TransitiveHydratedTargets](
      BuildFileAddresses([et.hydrated_target.address for et in exported_targets]))
    owners = await MultiGet(
      Get[ExportedTarget](OwnedDependency(ht)) for ht in tht.closure if is_ownable_target(ht)
    )
    exported_targets = list(set(owners))

  chroots = await MultiGet(Get[SetupPyChroot](SetupPyChrootRequest(target))
                           for target in exported_targets)

  if args:
    setup_py_results = await MultiGet(
      Get[RunSetupPyResult](RunSetupPyRequest(exported_target, chroot, tuple(args)))
      for exported_target, chroot in zip(exported_targets, chroots)
    )

    for exported_target, setup_py_result in zip(exported_targets, setup_py_results):
      addr = exported_target.hydrated_target.address.reference()
      console.print_stderr(f'Writing contents of dist dir for {addr} to {distdir.relpath}')
      workspace.materialize_directory(
        DirectoryToMaterialize(setup_py_result.output, path_prefix=str(distdir.relpath))
      )
  else:
    # Just dump the chroot.
    for exported_target, chroot in zip(exported_targets, chroots):
      addr = exported_target.hydrated_target.address.reference()
      provides = exported_target.hydrated_target.adaptor.provides
      setup_py_dir = distdir.relpath / f'{provides.name}-{provides.version}'
      console.print_stderr(f'Writing setup.py chroot for {addr} to {setup_py_dir}')
      workspace.materialize_directory(
        DirectoryToMaterialize(chroot.digest, path_prefix=str(setup_py_dir))
      )

  return SetupPy(0)
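
Note: most examples on this page share one core pattern: produce a Digest in the engine, wrap it in a DirectoryToMaterialize (optionally with a path_prefix resolved against the build root), and write it to disk via Workspace.materialize_directory, which reports the written output_paths. A minimal sketch of that pattern; write_dist is a hypothetical helper, and the digest would come from an upstream rule (e.g. a chroot or binary digest as above):

def write_dist(workspace: Workspace, console: Console, digest: Digest, distdir: DistDir) -> None:
    # Hypothetical helper for illustration only, mirroring the goal rules on this page.
    result = workspace.materialize_directory(
        DirectoryToMaterialize(digest, path_prefix=str(distdir.relpath))
    )
    for path in result.output_paths:
        console.print_stdout(f"Wrote {path}")
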
Example #2
File: test.py Project: wisechengyi/pants
 def materialize(self, console: Console, workspace: Workspace) -> None:
     workspace.materialize_directory(
         DirectoryToMaterialize(
             self.result_digest, path_prefix=str(self.directory_to_materialize_to),
         )
     )
     console.print_stdout(f"\nWrote coverage report to `{self.directory_to_materialize_to}`")
Example #3
async def fmt(console: Console, targets: HydratedTargets, workspace: Workspace,
              union_membership: UnionMembership) -> Fmt:
    aggregated_results = await MultiGet(
        Get[AggregatedFmtResults](FormatTarget, target.adaptor)
        for target in targets
        if FormatTarget.is_formattable(target.adaptor,
                                       union_membership=union_membership))
    individual_results = [
        result for aggregated_result in aggregated_results
        for result in aggregated_result.results
    ]

    if not individual_results:
        return Fmt(exit_code=0)

    # NB: this will fail if there are any conflicting changes, which we want to happen rather than
    # silently having one result override the other. In practice, this should never happen due
    # to our use of an aggregator rule for each distinct language.
    merged_formatted_digest = await Get[Digest](DirectoriesToMerge(
        tuple(aggregated_result.combined_digest
              for aggregated_result in aggregated_results)))
    workspace.materialize_directory(
        DirectoryToMaterialize(merged_formatted_digest))
    for result in individual_results:
        if result.stdout:
            console.print_stdout(result.stdout)
        if result.stderr:
            console.print_stderr(result.stderr)

    # Since the rules to produce FmtResult should use ExecuteRequest, rather than
    # FallibleExecuteProcessRequest, we assume that there were no failures.
    return Fmt(exit_code=0)
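
As the NB comment above notes, DirectoriesToMerge fails on conflicting file contents rather than silently letting one result win. A hedged sketch of that merge step in isolation, using the same rule API as fmt above (merge_and_write is a hypothetical name):

async def merge_and_write(workspace: Workspace, digests: Tuple[Digest, ...]) -> None:
    # Illustrative only: `digests` stands in for the per-formatter result digests.
    # The merge raises if two digests contain the same path with different contents,
    # surfacing conflicting formatter edits instead of silently dropping one.
    merged = await Get[Digest](DirectoriesToMerge(digests))
    workspace.materialize_directory(DirectoryToMaterialize(merged))
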
Example #4
 def create_pex_and_get_all_data(
     self,
     *,
     requirements=PexRequirements(),
     entry_point=None,
     interpreter_constraints=PexInterpreterConstraints(),
     input_files: Optional[Digest] = None,
     additional_pants_args: Tuple[str, ...] = (),
     additional_pex_args: Tuple[str, ...] = (),
 ) -> Dict:
     request = CreatePex(
         output_filename="test.pex",
         requirements=requirements,
         interpreter_constraints=interpreter_constraints,
         entry_point=entry_point,
         input_files_digest=input_files,
         additional_args=additional_pex_args,
     )
     requirements_pex = self.request_single_product(
         Pex,
         Params(
             request,
             create_options_bootstrapper(
                 args=["--backend-packages2=pants.backend.python", *additional_pants_args]
             ),
         ),
     )
     self.scheduler.materialize_directory(
         DirectoryToMaterialize(requirements_pex.directory_digest),
     )
     with zipfile.ZipFile(os.path.join(self.build_root, "test.pex"), "r") as pex:
         with pex.open("PEX-INFO", "r") as pex_info:
             pex_info_content = pex_info.readline().decode()
             pex_list = pex.namelist()
     return {"pex": requirements_pex, "info": json.loads(pex_info_content), "files": pex_list}
Example #5
File: run.py Project: tpasternak/pants
def run(console: Console, workspace: Workspace, runner: InteractiveRunner,
        bfa: BuildFileAddress) -> Run:
    target = bfa.to_address()
    binary = yield Get(CreatedBinary, Address, target)

    with temporary_dir(cleanup=True) as tmpdir:
        dirs_to_materialize = (DirectoryToMaterialize(
            path=str(tmpdir), directory_digest=binary.digest), )
        workspace.materialize_directories(dirs_to_materialize)

        console.write_stdout(f"Running target: {target}\n")
        full_path = str(Path(tmpdir, binary.binary_name))
        run_request = InteractiveProcessRequest(
            argv=[full_path],
            run_in_workspace=True,
        )

        try:
            result = runner.run_local_interactive_process(run_request)
            exit_code = result.process_exit_code
            if result.process_exit_code == 0:
                console.write_stdout(f"{target} ran successfully.\n")
            else:
                console.write_stderr(
                    f"{target} failed with code {result.process_exit_code}!\n")

        except Exception as e:
            console.write_stderr(
                f"Exception when attempting to run {target} : {e}\n")
            exit_code = -1

    yield Run(exit_code)
Example #6
async def create_binary(
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
) -> Binary:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            BinaryConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        ))
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, config)
        for config in targets_to_valid_configs.configurations)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(binary.digest for binary in binaries)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
Example #7
async def create_awslambda(
    console: Console,
    options: AWSLambdaOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    workspace: Workspace,
) -> AWSLambdaGoal:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            AWSLambdaConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        ))
    awslambdas = await MultiGet(
        Get[CreatedAWSLambda](AWSLambdaConfiguration, config)
        for config in targets_to_valid_configs.configurations)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(awslambda.digest for awslambda in awslambdas)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for awslambda, path in zip(awslambdas, result.output_paths):
            print_stdout(
                f"Wrote code bundle to {os.path.relpath(path, buildroot.path)}"
            )
            print_stdout(f"  Runtime: {awslambda.runtime}")
            print_stdout(f"  Handler: {awslambda.handler}")
            print_stdout("")
    return AWSLambdaGoal(exit_code=0)
Example #8
File: pex_test.py Project: OniOni/pants
    def create_pex_and_get_all_data(
            self,
            *,
            requirements=PexRequirements(),
            entry_point=None,
            interpreter_constraints=PexInterpreterConstraints(),
            input_files: Optional[Digest] = None) -> Dict:
        def hashify_optional_collection(iterable):
            return tuple(sorted(iterable)) if iterable is not None else tuple()

        request = CreatePex(
            output_filename="test.pex",
            requirements=requirements,
            interpreter_constraints=interpreter_constraints,
            entry_point=entry_point,
            input_files_digest=input_files,
        )
        requirements_pex = self.request_single_product(
            Pex,
            Params(request, PythonSetup.global_instance(),
                   SubprocessEnvironment.global_instance(),
                   PythonNativeCode.global_instance()))
        self.scheduler.materialize_directory(
            DirectoryToMaterialize(requirements_pex.directory_digest), )
        with zipfile.ZipFile(os.path.join(self.build_root, "test.pex"),
                             "r") as pex:
            with pex.open("PEX-INFO", "r") as pex_info:
                pex_info_content = pex_info.readline().decode()
                pex_list = pex.namelist()
        return {
            'pex': requirements_pex,
            'info': json.loads(pex_info_content),
            'files': pex_list
        }
Example #9
async def create_binary(addresses: BuildFileAddresses, console: Console,
                        workspace: Workspace, options: Binary.Options,
                        options_bootstrapper: OptionsBootstrapper,
                        build_root: BuildRoot) -> Binary:
    with Binary.line_oriented(options, console) as print_stdout:
        global_options = options_bootstrapper.bootstrap_options.for_global_scope(
        )
        pants_distdir = Path(global_options.pants_distdir)
        if not is_child_of(pants_distdir, build_root.pathlib_path):
            console.print_stderr(
                f"When set to an absolute path, `--pants-distdir` must be relative to the build root."
                "You set it to {pants_distdir}. Instead, use a relative path or an absolute path relative to the build root."
            )
            return Binary(exit_code=1)

        relative_distdir = (
            pants_distdir.relative_to(build_root.pathlib_path)
            if pants_distdir.is_absolute()
            else pants_distdir
        )
        print_stdout(f"Generating binaries in `./{relative_distdir}`")

        binaries = await MultiGet(
            Get[CreatedBinary](Address, address.to_address())
            for address in addresses)
        merged_digest = await Get[Digest](DirectoriesToMerge(
            tuple(binary.digest for binary in binaries)))
        result = workspace.materialize_directory(
            DirectoryToMaterialize(merged_digest,
                                   path_prefix=str(relative_distdir)))
        for path in result.output_paths:
            print_stdout(f"Wrote {path}")
    return Binary(exit_code=0)
Example #10
async def generate_pants_ini(console: Console, workspace: Workspace) -> GeneratePantsIni:
  pants_ini_content = dedent(f"""\
    [GLOBAL]
    pants_version: {pants_version}
    """)

  preexisting_snapshot = await Get[Snapshot](PathGlobs(include=('pants.ini',)))
  if preexisting_snapshot.files:
    console.print_stderr(
      "./pants.ini already exists. This goal is only meant to be run the first time you run Pants "
      "in a project.\n\nTo update config values, please directly modify the file."
    )
    return GeneratePantsIni(exit_code=1)

  console.print_stdout(dedent(f"""\
    Adding sensible defaults to ./pants.ini:
      * Pinning `pants_version` to `{pants_version}`.
    """))

  digest = await Get[Digest](InputFilesContent([
    FileContent(path='pants.ini', content=pants_ini_content.encode())
  ]))
  workspace.materialize_directory(DirectoryToMaterialize(digest))

  console.print_stdout(
    "You may modify these values directly in the file at any time. The ./pants script will detect "
    "any changes the next time you run it.\n\nYou are now ready to use Pants!"
  )
  return GeneratePantsIni(exit_code=0)
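
Example #10 also shows the inverse direction: building a Digest from in-memory file content via InputFilesContent before materializing it. A minimal hedged sketch of just that step (write_file is a hypothetical helper):

async def write_file(workspace: Workspace, path: str, text: str) -> None:
    # Mirrors the InputFilesContent -> Digest -> materialize sequence above.
    digest = await Get[Digest](InputFilesContent([
        FileContent(path=path, content=text.encode())
    ]))
    workspace.materialize_directory(DirectoryToMaterialize(digest))
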
Example #11
async def create_binary(
    targets_with_origins: TargetsWithOrigins,
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
) -> Binary:
    valid_config_types_by_target = gather_valid_binary_configuration_types(
        goal_subsytem=options,
        targets_with_origins=targets_with_origins,
        union_membership=union_membership,
        registered_target_types=registered_target_types,
    )
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, valid_config_type.create(
            target))
        for target, valid_config_types in valid_config_types_by_target.items()
        for valid_config_type in valid_config_types)
    merged_digest = await Get[Digest](DirectoriesToMerge(
        tuple(binary.digest for binary in binaries)))
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest,
                               path_prefix=str(distdir.relpath)))
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
Example #12
    def test_workspace_materialize_directories_result(self):
        # TODO(#8336): at some point, this test should require that Workspace only be invoked from a console_rule
        workspace = Workspace(self.scheduler)

        input_files_content = InputFilesContent((
            FileContent(path='a.txt', content=b'hello', is_executable=False),
            FileContent(path='subdir/b.txt',
                        content=b'goodbye',
                        is_executable=False),
        ))

        digest, = self.scheduler.product_request(Digest, [input_files_content])

        with temporary_dir() as tmp_dir:
            path1 = Path(tmp_dir, 'a.txt')
            path2 = Path(tmp_dir, 'subdir', 'b.txt')

            self.assertFalse(path1.is_file())
            self.assertFalse(path2.is_file())

            output = workspace.materialize_directories(
                (DirectoryToMaterialize(path=tmp_dir,
                                        directory_digest=digest), ))

            self.assertEqual(type(output), MaterializeDirectoriesResult)
            materialize_result = output.dependencies[0]
            self.assertEqual(type(materialize_result),
                             MaterializeDirectoryResult)
            self.assertEqual(materialize_result.output_paths, (
                str(Path(tmp_dir, 'a.txt')),
                str(Path(tmp_dir, 'subdir/b.txt')),
            ))
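
Examples #12 and #17 exercise the same test against two generations of this API: the older Workspace.materialize_directories takes a tuple of DirectoryToMaterialize(path=..., directory_digest=...) with an explicit destination and returns a MaterializeDirectoriesResult, while the newer constructor takes the digest positionally with an optional path_prefix resolved against the build root. A side-by-side sketch, hedged to the signatures visible on this page (workspace, tmp_dir, and digest are stand-ins):

# Older API (examples #12, #18): explicit absolute destination path.
output = workspace.materialize_directories(
    (DirectoryToMaterialize(path=tmp_dir, directory_digest=digest),)
)
paths = output.dependencies[0].output_paths

# Newer API (examples #2, #14, #24): positional digest, optional path_prefix
# interpreted relative to the build root.
result = workspace.materialize_directory(
    DirectoryToMaterialize(digest, path_prefix="test/")
)
paths = result.output_paths
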
Example #13
  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes

    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.path.endswith('.java')
    )
    exec_process_request = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=input_snapshot.directory_digest,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(text_type(classes_directory), exec_result.output_directory_digest),
    ))
Example #14
File: fs_test.py Project: OniOni/pants
async def workspace_console_rule(
        console: Console, workspace: Workspace,
        msg: MessageToConsoleRule) -> MockWorkspaceGoal:
    digest = await Get(Digest, InputFilesContent, msg.input_files_content)
    output = workspace.materialize_directory(DirectoryToMaterialize(digest))
    console.print_stdout(output.output_paths[0], end='')
    return MockWorkspaceGoal(exit_code=0)
Example #15
File: fmt.py Project: OniOni/pants
async def fmt(console: Console, targets: HydratedTargets, workspace: Workspace,
              union_membership: UnionMembership) -> Fmt:
    results = await MultiGet(
        Get[FmtResult](TargetWithSources, target.adaptor) for target in targets
        if TargetWithSources.is_formattable_and_lintable(
            target.adaptor, union_membership=union_membership))

    if not results:
        return Fmt(exit_code=0)

    # NB: this will fail if there are any conflicting changes, which we want to happen rather than
    # silently having one result override the other.
    # TODO(#8722): how should we handle multiple auto-formatters touching the same files?
    merged_formatted_digest = await Get[Digest](DirectoriesToMerge(
        tuple(result.digest for result in results)))
    workspace.materialize_directory(
        DirectoryToMaterialize(merged_formatted_digest))
    for result in results:
        if result.stdout:
            console.print_stdout(result.stdout)
        if result.stderr:
            console.print_stderr(result.stderr)

    # Since the rules to produce FmtResult should use ExecuteRequest, rather than
    # FallibleExecuteProcessRequest, we assume that there were no failures.
    return Fmt(exit_code=0)
Example #16
  def create_pex_and_get_pex_info(
    self, *, requirements=None, entry_point=None, interpreter_constraints=None
  ):
    def hashify_optional_collection(iterable):
      return tuple(sorted(iterable)) if iterable is not None else tuple()

    request = RequirementsPexRequest(
      output_filename="test.pex",
      requirements=hashify_optional_collection(requirements),
      interpreter_constraints=hashify_optional_collection(interpreter_constraints),
      entry_point=entry_point,
    )
    requirements_pex = assert_single_element(
      self.scheduler.product_request(RequirementsPex, [Params(
        request,
        PythonSetup.global_instance(),
        PythonNativeCode.global_instance()
      )])
    )
    with temporary_dir() as tmp_dir:
      self.scheduler.materialize_directories((
        DirectoryToMaterialize(path=tmp_dir, directory_digest=requirements_pex.directory_digest),
      ))
      with zipfile.ZipFile(os.path.join(tmp_dir, "test.pex"), "r") as pex:
        with pex.open("PEX-INFO", "r") as pex_info:
          pex_info_content = pex_info.readline().decode()
    return json.loads(pex_info_content)
Example #17
File: fs_test.py Project: OniOni/pants
    def test_workspace_materialize_directories_result(self):
        # TODO(#8336): at some point, this test should require that Workspace only be invoked from a console_rule
        workspace = Workspace(self.scheduler)

        input_files_content = InputFilesContent((
            FileContent(path='a.txt', content=b'hello'),
            FileContent(path='subdir/b.txt', content=b'goodbye'),
        ))

        digest = self.request_single_product(Digest, input_files_content)

        path1 = Path('a.txt')
        path2 = Path('subdir/b.txt')

        assert not path1.is_file()
        assert not path2.is_file()

        output = workspace.materialize_directories(
            (DirectoryToMaterialize(digest), ))

        assert type(output) == MaterializeDirectoriesResult
        materialize_result = output.dependencies[0]
        assert type(materialize_result) == MaterializeDirectoryResult
        assert materialize_result.output_paths == tuple(
            str(Path(self.build_root, p)) for p in [path1, path2])
Example #18
def workspace_console_rule(console: Console, workspace: Workspace,
                           msg: MessageToConsoleRule) -> MockWorkspaceGoal:
    digest = yield Get(Digest, InputFilesContent, msg.input_files_content)
    output = workspace.materialize_directories(
        (DirectoryToMaterialize(path=msg.tmp_dir, directory_digest=digest), ))
    output_path = output.dependencies[0].output_paths[0]
    console.print_stdout(str(Path(msg.tmp_dir, output_path)), end='')
    yield MockWorkspaceGoal(exit_code=0)
Example #19
File: test.py Project: briespoke/pants
 def materialize(self, console: Console, workspace: Workspace) -> Optional[PurePath]:
     workspace.materialize_directory(
         DirectoryToMaterialize(
             self.result_digest, path_prefix=str(self.directory_to_materialize_to),
         )
     )
     console.print_stderr(f"\nWrote coverage report to `{self.directory_to_materialize_to}`")
     return self.report_file
Example #20
  def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
                        compiler_bridge_classpath_entry, dependency_classpath,
                        scalac_classpath_entries):
    zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())

    snapshots = [
      self._zinc.snapshot(self.context._scheduler),
      ctx.target.sources_snapshot(self.context._scheduler),
    ]

    relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
    directory_digests = tuple(
      entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
    )
    if len(directory_digests) != len(relevant_classpath_entries):
      for dep in relevant_classpath_entries:
        if dep.directory_digest is None:
          logger.warning(
            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
            "execution".format(dep)
          )

    snapshots.extend(
      classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
    )

    # TODO: Extract something common from Executor._create_command to make the command line
    # TODO: Lean on distribution for the bin/java appending here
    merged_input_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in snapshots) + directory_digests
    )
    argv = ['.jdk/bin/java'] + jvm_options + [
      '-cp', zinc_relpath,
      Zinc.ZINC_COMPILE_MAIN
    ] + zinc_args
    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in argv]

    req = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=merged_input_digest,
      output_directories=(classes_dir,),
      description="zinc compile for {}".format(ctx.target.address.spec),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_or_raise(
      req, self.name(), [WorkUnitLabel.COMPILER])

    # TODO: Materialize as a batch in do_compile or somewhere
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
    ))

    # TODO: This should probably return a ClasspathEntry rather than a Digest
    return res.output_directory_digest
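
Examples #13 and #20 share the hermetic-execution shape: run an ExecuteProcessRequest over a merged input digest, capture output files or directories, then materialize the resulting output_directory_digest back into the workspace for downstream tasks. A condensed, hedged sketch of that flow; run_and_materialize and its arguments are stand-ins for the task context above, not an API guarantee:

def run_and_materialize(context, input_digest, dest_dir):
    # Condensed from the hermetic compile tasks on this page.
    req = ExecuteProcessRequest(
        argv=('javac', '-d', '.', 'Example.java'),  # illustrative command line
        input_files=input_digest,
        output_directories=('.',),
        description='hermetic compile sketch',
    )
    res = context.execute_process_synchronously_or_raise(
        req, 'sketch', [WorkUnitLabel.COMPILER])
    # Write the captured outputs back into the workspace for downstream tasks.
    context._scheduler.materialize_directories((
        DirectoryToMaterialize(dest_dir, res.output_directory_digest),
    ))
    return res.output_directory_digest
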
Example #21
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    str_jvm_options = [text_type(opt) for opt in self.get_options().jvm_options]
    cmd = [
            distribution.java,
          ] + str_jvm_options + [
            '-cp', os.pathsep.join(tool_classpath),
            main,
          ] + args

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    epr_input_files = self.context._scheduler.merge_directories(
      ((path_globs_input_digest,) if path_globs_input_digest else ())
      + ((input_digest,) if input_digest else ()))

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying.home` because
      # ExecuteProcessRequest requires an existing, local jdk location.
      jdk_home=text_type(distribution.underlying_home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr, exit_code=res.exit_code)

    if output_dir:
      res.output_directory_digest.dump(output_dir)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB: the first element here is the root to materialize into, not the dir to snapshot.
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO: drop a file containing the digest, named e.g. output_dir.digest
    return res
Example #22
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    classpath_for_cmd = os.pathsep.join(tool_classpath)
    cmd = [
      distribution.java,
    ]
    cmd.extend(self.get_options().jvm_options)
    cmd.extend(['-cp', classpath_for_cmd])
    cmd.extend([main])
    cmd.extend(args)

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    if path_globs_input_digest and input_digest:
      epr_input_files = self.context._scheduler.merge_directories(
          (path_globs_input_digest, input_digest))
    else:
      epr_input_files = path_globs_input_digest or input_digest

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr)

    if output_dir:
      dump_digest(output_dir, res.output_directory_digest)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB: the first element here is the root to materialize into, not the dir to snapshot.
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO: drop a file containing the digest, named e.g. output_dir.digest
    return res
Example #23
async def run_repl(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    options: ReplOptions,
    transitive_targets: TransitiveTargets,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Repl:

    # We can guarantee that we will only ever enter this `goal_rule` if there exists an implementer
    # of the `ReplImplementation` union because `LegacyGraphSession.run_goal_rules()` will not
    # execute this rule's body if there are no implementations registered.
    membership: Iterable[Type[
        ReplImplementation]] = union_membership.union_rules[ReplImplementation]
    implementations = {impl.name: impl for impl in membership}

    default_repl = "python"
    repl_shell_name = cast(str, options.values.shell or default_repl)

    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(set(implementations.keys()))
        console.write_stdout(
            f"{repl_shell_name} is not an installed REPL program. Available REPLs: {available}"
        )
        return Repl(-1)

    repl_impl = repl_implementation_cls(targets=Targets(
        tgt for tgt in transitive_targets.closure
        if repl_implementation_cls.is_valid(tgt)))
    repl_binary = await Get[ReplBinary](ReplImplementation, repl_impl)

    with temporary_dir(root_dir=global_options.options.pants_workdir,
                       cleanup=False) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(
            build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(repl_binary.digest,
                                   path_prefix=path_relative_to_build_root))

        full_path = PurePath(tmpdir, repl_binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path, ),
            run_in_workspace=True,
        )

    result = runner.run_local_interactive_process(run_request)
    exit_code = result.process_exit_code

    if exit_code == 0:
        console.write_stdout("REPL exited successfully.")
    else:
        console.write_stdout(f"REPL exited with error: {exit_code}.")

    return Repl(exit_code)
Example #24
File: test_fs.py Project: OniOni/pants
 def test_materialize_directories(self):
     self.prime_store_with_roland_digest()
     digest = Digest(
         "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
         80)
     self.scheduler.materialize_directory(
         DirectoryToMaterialize(digest, path_prefix="test/"))
     assert Path(self.build_root,
                 "test/roland").read_text() == "European Burmese"
Example #25
 def _compile_nonhermetic(self, jvm_options, ctx, classes_directory):
     exit_code = self.runjava(classpath=self.get_zinc_compiler_classpath(),
                              main=Zinc.ZINC_COMPILE_MAIN,
                              jvm_options=jvm_options,
                              args=['@{}'.format(ctx.args_file)],
                              workunit_name=self.name(),
                              workunit_labels=[WorkUnitLabel.COMPILER],
                              dist=self._zinc.dist)
     if exit_code != 0:
         raise self.ZincCompileError('Zinc compile failed.',
                                     exit_code=exit_code)
     self.context._scheduler.materialize_directories(
         (DirectoryToMaterialize(text_type(classes_directory),
                                 self.extra_resources_digest(ctx)), ))
Example #26
async def run(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    build_root: BuildRoot,
    options: RunOptions,
    global_options: GlobalOptions,
) -> Run:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            BinaryConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
            expect_single_config=True,
        ))
    config = targets_to_valid_configs.configurations[0]
    binary = await Get[CreatedBinary](BinaryConfiguration, config)

    workdir = global_options.options.pants_workdir
    with temporary_dir(root_dir=workdir, cleanup=True) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(
            build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(binary.digest,
                                   path_prefix=path_relative_to_build_root))

        console.write_stdout(f"Running target: {config.address}\n")
        full_path = PurePath(tmpdir, binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path, *options.values.args),
            run_in_workspace=True,
        )

        try:
            result = runner.run_local_interactive_process(run_request)
            exit_code = result.process_exit_code
            if result.process_exit_code == 0:
                console.write_stdout(f"{config.address} ran successfully.\n")
            else:
                console.write_stderr(
                    f"{config.address} failed with code {result.process_exit_code}!\n"
                )

        except Exception as e:
            console.write_stderr(
                f"Exception when attempting to run {config.address}: {e!r}\n")
            exit_code = -1

    return Run(exit_code)
Example #27
async def create_awslambda(addresses: BuildFileAddresses, console: Console,
                           options: AWSLambdaOptions, distdir: DistDir,
                           workspace: Workspace) -> AWSLambdaGoal:
    with options.line_oriented(console) as print_stdout:
        print_stdout(f"Generating AWS lambdas in `./{distdir.relpath}`")
        awslambdas = await MultiGet(
            Get[CreatedAWSLambda](Address, address.to_address())
            for address in addresses)
        merged_digest = await Get[Digest](DirectoriesToMerge(
            tuple(awslambda.digest for awslambda in awslambdas)))
        result = workspace.materialize_directory(
            DirectoryToMaterialize(merged_digest,
                                   path_prefix=str(distdir.relpath)))
        for path in result.output_paths:
            print_stdout(f"Wrote {path}")
    return AWSLambdaGoal(exit_code=0)
Example #28
  def _compile_nonhermetic(self, jvm_options, ctx, classes_directory):
    # Populate the resources to merge post-compile onto disk for the nonhermetic case;
    # the addition of the `--post-compile-merge-dir` option is the relevant part.
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), self.post_compile_extra_resources_digest(ctx)),
    ))

    exit_code = self.runjava(classpath=self.get_zinc_compiler_classpath(),
                             main=Zinc.ZINC_COMPILE_MAIN,
                             jvm_options=jvm_options,
                             args=['@{}'.format(ctx.args_file)],
                             workunit_name=self.name(),
                             workunit_labels=[WorkUnitLabel.COMPILER],
                             dist=self._zinc.dist)
    if exit_code != 0:
      raise self.ZincCompileError('Zinc compile failed.', exit_code=exit_code)
Example #29
    def _compile_compiler_bridge(self, context):
        """Compile the compiler bridge to be used by zinc, using our scala bootstrapper. It will
        compile and cache the jar, and materialize it if not already there.

        :param context: The context of the task trying to compile the bridge.
                        This is mostly needed to use its scheduler to create digests of the relevant jars.
        :return: The absolute path to the compiled scala-compiler-bridge jar.
        """
        bridge_jar_name = "scala-compiler-bridge.jar"
        bridge_jar = os.path.join(self._compiler_bridge_cache_dir, bridge_jar_name)
        global_bridge_cache_dir = os.path.join(
            self._zinc_factory.get_options().pants_bootstrapdir,
            fast_relpath(self._compiler_bridge_cache_dir, self._workdir()),
        )
        globally_cached_bridge_jar = os.path.join(global_bridge_cache_dir, bridge_jar_name)

        # Workaround to avoid recompiling the bridge for every integration test
        # We check the bootstrapdir (.cache) for the bridge.
        # If it exists, we make a copy to the buildroot.
        #
        # TODO Remove when action caches are implemented.
        if os.path.exists(globally_cached_bridge_jar):
            # Cache the bridge jar under buildroot, to allow snapshotting
            safe_mkdir(self._relative_to_buildroot(self._compiler_bridge_cache_dir))
            safe_hardlink_or_copy(globally_cached_bridge_jar, bridge_jar)

        if not os.path.exists(bridge_jar):
            res = self._run_bootstrapper(bridge_jar, context)
            context._scheduler.materialize_directory(
                DirectoryToMaterialize(res.output_directory_digest)
            )
            # For the workaround above to work, we need to store a copy of the bridge in
            # the bootstrapdir cache (.cache).
            safe_mkdir(global_bridge_cache_dir)
            safe_hardlink_or_copy(bridge_jar, globally_cached_bridge_jar)

            return ClasspathEntry(bridge_jar, res.output_directory_digest)
        else:
            bridge_jar_snapshot = context._scheduler.capture_snapshots(
                (
                    PathGlobsAndRoot(
                        PathGlobs((self._relative_to_buildroot(bridge_jar),)), get_buildroot()
                    ),
                )
            )[0]
            bridge_jar_digest = bridge_jar_snapshot.directory_digest
            return ClasspathEntry(bridge_jar, bridge_jar_digest)
Example #30
def create_binary(addresses: BuildFileAddresses, console: Console,
                  workspace: Workspace, options: Binary.Options) -> Binary:
    with Binary.line_oriented(options, console) as print_stdout:
        print_stdout("Generating binaries in `dist/`")
        binaries = yield [
            Get(CreatedBinary, Address, address.to_address())
            for address in addresses
        ]
        dirs_to_materialize = tuple(
            DirectoryToMaterialize(path='dist/',
                                   directory_digest=binary.digest)
            for binary in binaries)
        results = workspace.materialize_directories(dirs_to_materialize)
        for result in results.dependencies:
            for path in result.output_paths:
                print_stdout(f"Wrote {path}")
    yield Binary(exit_code=0)