def _default_args_execute_process_request(self, argv=tuple(), env=None):
  env = env or dict()
  return ExecuteProcessRequest.create_with_empty_snapshot(
    argv=argv,
    env=env,
    output_files=(),
  )
Example #2
  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes

    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.path.endswith('.java')
    )
    exec_process_request = ExecuteProcessRequest.create_from_snapshot(
      argv=tuple(cmd),
      snapshot=input_snapshot,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(text_type(classes_directory), exec_result.output_directory_digest),
    ))
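A quick sketch of the output-path mapping performed by the comprehension above, using a hypothetical source root and file name (neither is taken from the example): each `.java` source becomes the `.class` path that `javac -d .` would emit, made relative to the target's source root.

import os

source_root = 'src/java'                                   # hypothetical target_base
java_source = 'src/java/org/pantsbuild/example/Foo.java'   # hypothetical source file
class_output = os.path.relpath(java_source.replace('.java', '.class'), source_root)
assert class_output == 'org/pantsbuild/example/Foo.class'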
  def test_write_file(self):
    scheduler = self.mk_scheduler_in_example_fs(())

    request = ExecuteProcessRequest.create_with_empty_snapshot(
      ("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
      dict(),
      ("roland",)
    )

    execute_process_result = self.execute_expecting_one_result(scheduler, ExecuteProcessResult, request).value

    self.assertEquals(
      execute_process_result.output_directory_digest,
      DirectoryDigest(
        fingerprint=str("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
        serialized_bytes_length=80,
      )
    )

    files_content_result = self.execute_expecting_one_result(
      scheduler,
      FilesContent,
      execute_process_result.output_directory_digest
    ).value

    self.assertEquals(
      (files_content_result.dependencies),
      (FileContent("roland", "European Burmese"),)
    )
def javac_compile_process_result(javac_compile_req):
  java_files = javac_compile_req.javac_sources.java_files
  for java_file in java_files:
    if not java_file.endswith(".java"):
      raise ValueError("Can only compile .java files but got {}".format(java_file))
  sources_snapshot = yield Get(Snapshot, PathGlobs, PathGlobs(java_files, ()))
  process_request = ExecuteProcessRequest.create_from_snapshot(
    argv=javac_compile_req.argv_from_source_snapshot(sources_snapshot),
    env=dict(),
    snapshot=sources_snapshot,
    output_files=tuple(java_file[:-5] + ".class" for java_file in java_files),
  )
  javac_proc_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest, process_request)

  exit_code = javac_proc_result.exit_code
  stdout = javac_proc_result.stdout
  stderr = javac_proc_result.stderr
  if exit_code != 0:
    raise ProcessExecutionFailure(
      exit_code, stdout, stderr, 'javac compilation')

  yield JavacCompileResult(
    stdout,
    stderr,
    javac_proc_result.output_directory_digest,
  )
Example #5
def cat_files_process_request_input_snapshot(cat_exe_req):
    cat_bin = cat_exe_req.shell_cat
    cat_files_snapshot = yield Get(Snapshot, PathGlobs, cat_exe_req.path_globs)
    yield ExecuteProcessRequest.create_from_snapshot(
        argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
        env=tuple(),
        snapshot=cat_files_snapshot,
    )
Example #6
def javac_compile_sources_execute_process_request(javac_compile_req):
    sources_snapshot = yield Get(Snapshot, PathGlobs,
                                 javac_compile_req.javac_sources.path_globs)
    yield ExecuteProcessRequest.create_from_snapshot(
        argv=javac_compile_req.argv_from_source_snapshot(sources_snapshot),
        env=tuple(),
        snapshot=sources_snapshot,
    )
    def test_blows_up_on_invalid_args(self):
        try:
            self._default_args_execute_process_request()
        except ValueError:
            self.fail("should be able to construct without error")

        with self.assertRaises(TypeCheckError):
            self._default_args_execute_process_request(argv=1)
        with self.assertRaises(TypeCheckError):
            self._default_args_execute_process_request(argv='1')
        with self.assertRaises(TypeCheckError):
            self._default_args_execute_process_request(argv=('1', ),
                                                       env='foo=bar')

        with self.assertRaisesRegexp(TypeCheckError, "env"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=(),
                                  input_files='',
                                  output_files=(),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "input_files"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=dict(),
                                  input_files=3,
                                  output_files=(),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "output_files"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=dict(),
                                  input_files=EMPTY_DIRECTORY_DIGEST,
                                  output_files=("blah"),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "timeout"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=dict(),
                                  input_files=EMPTY_DIRECTORY_DIGEST,
                                  output_files=("blah"),
                                  output_directories=(),
                                  timeout_seconds=None,
                                  description='')
Example #8
    def test_blows_up_on_invalid_args(self):
        try:
            self._default_args_execute_process_request()
        except ValueError:
            self.fail("should be able to construct without error")

        with self.assertRaises(TypeCheckError):
            self._default_args_execute_process_request(argv=['1'])
        with self.assertRaises(TypeCheckError):
            self._default_args_execute_process_request(argv=('1', ),
                                                       env=['foo', 'bar'])

        # TODO(cosmicexplorer): we should probably check that the digest info in
        # ExecuteProcessRequest is valid, beyond just checking if it's a string.
        with self.assertRaisesRegexp(TypeCheckError, "env"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=dict(),
                                  input_files='',
                                  output_files=(),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "input_files"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=dict(),
                                  input_files=3,
                                  output_files=(),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "output_files"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=tuple(),
                                  input_files=EMPTY_DIRECTORY_DIGEST,
                                  output_files=("blah"),
                                  output_directories=(),
                                  timeout_seconds=0.1,
                                  description='')
        with self.assertRaisesRegexp(TypeCheckError, "timeout"):
            ExecuteProcessRequest(argv=('1', ),
                                  env=tuple(),
                                  input_files=EMPTY_DIRECTORY_DIGEST,
                                  output_files=("blah"),
                                  output_directories=(),
                                  timeout_seconds=None,
                                  description='')
Example #9
 def test_create_from_snapshot_with_env(self):
     req = ExecuteProcessRequest(
         argv=('foo', ),
         description="Some process",
         env={'VAR': 'VAL'},
         input_files=EMPTY_DIRECTORY_DIGEST,
     )
     self.assertEqual(req.env, ('VAR', 'VAL'))
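The assertion above relies on the env dict being flattened into an interleaved key/value tuple. A minimal sketch of that flattening (not the Pants implementation, only the behaviour the test observes):

def flatten_env(env_dict):
  # {'VAR': 'VAL'} becomes ('VAR', 'VAL'); {'A': '1', 'B': '2'} becomes ('A', '1', 'B', '2').
  flattened = []
  for key, value in env_dict.items():
    flattened.extend((key, value))
  return tuple(flattened)

assert flatten_env({'VAR': 'VAL'}) == ('VAR', 'VAL')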
Example #10
 def _default_args_execute_process_request(self, argv=tuple(), env=None):
     env = env or dict()
     return ExecuteProcessRequest.create_with_empty_snapshot(
         argv=argv,
         description='',
         env=env,
         output_files=(),
     )
 def test_create_from_snapshot_with_env(self):
     req = ExecuteProcessRequest(
         argv=("foo", ),
         description="Some process",
         env={"VAR": "VAL"},
         input_files=EMPTY_DIRECTORY_DIGEST,
     )
     self.assertEqual(req.env, ("VAR", "VAL"))
Example #12
  def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
                        compiler_bridge_classpath_entry, dependency_classpath,
                        scalac_classpath_entries):
    zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())

    snapshots = [
      self._zinc.snapshot(self.context._scheduler),
      ctx.target.sources_snapshot(self.context._scheduler),
    ]

    relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
    directory_digests = tuple(
      entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
    )
    if len(directory_digests) != len(relevant_classpath_entries):
      for dep in relevant_classpath_entries:
        if dep.directory_digest is None:
          logger.warning(
            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
            "execution".format(dep)
          )

    snapshots.extend(
      classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
    )

    # TODO: Extract something common from Executor._create_command to make the command line
    # TODO: Lean on distribution for the bin/java appending here
    merged_input_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in snapshots) + directory_digests
    )
    argv = ['.jdk/bin/java'] + jvm_options + [
      '-cp', zinc_relpath,
      Zinc.ZINC_COMPILE_MAIN
    ] + zinc_args
    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in argv]

    req = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=merged_input_digest,
      output_directories=(classes_dir,),
      description="zinc compile for {}".format(ctx.target.address.spec),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_or_raise(
      req, self.name(), [WorkUnitLabel.COMPILER])

    # TODO: Materialize as a batch in do_compile or somewhere
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
    ))

    # TODO: This should probably return a ClasspathEntry rather than a Digest
    return res.output_directory_digest
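A standalone sketch of the digest-filtering idiom used above, with a hypothetical Entry stand-in for ClasspathEntry (only the directory_digest attribute matters): entries without a digest are warned about and excluded, since they cannot be provided to a hermetic execution.

import logging
from collections import namedtuple

logger = logging.getLogger(__name__)

# Hypothetical stand-in for a classpath entry.
Entry = namedtuple('Entry', ['path', 'directory_digest'])

def digests_with_warnings(entries):
  # Keep only entries that carry a digest, warning about the ones that will be
  # absent from the hermetic execution's inputs.
  digests = tuple(e.directory_digest for e in entries if e.directory_digest)
  if len(digests) != len(entries):
    for e in entries:
      if e.directory_digest is None:
        logger.warning("%s has no Digest, so it won't be present for hermetic execution", e.path)
  return digests

assert digests_with_warnings((Entry('a.jar', 'digest-a'), Entry('b.jar', None))) == ('digest-a',)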
Example #13
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    str_jvm_options = [text_type(opt) for opt in self.get_options().jvm_options]
    cmd = [
            distribution.java,
          ] + str_jvm_options + [
            '-cp', os.pathsep.join(tool_classpath),
            main,
          ] + args

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    epr_input_files = self.context._scheduler.merge_directories(
      ((path_globs_input_digest,) if path_globs_input_digest else ())
      + ((input_digest,) if input_digest else ()))

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying.home` because
      # ExecuteProcessRequest requires an existing, local jdk location.
      jdk_home=text_type(distribution.underlying_home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr, exit_code=res.exit_code)

    if output_dir:
      res.output_directory_digest.dump(output_dir)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB the first element here is the root to materialize into, not the dir to snapshot
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO drop a file containing the digest, named maybe output_dir.digest
    return res
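The pathglobs construction in _runtool_hermetic passes plain files through as-is and turns anything else into a recursive directory glob. A standalone sketch of that idiom (the input list is hypothetical):

import os

def to_path_globs(paths):
  # Files are captured directly; anything else is treated as a directory and globbed recursively.
  return [p if os.path.isfile(p) else '{}/**'.format(p) for p in paths]

# e.g. to_path_globs(['tool.jar', 'generated-sources']) yields
# ['tool.jar', 'generated-sources/**'] when tool.jar is a file and generated-sources is not.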
Example #14
  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    classpath_for_cmd = os.pathsep.join(tool_classpath)
    cmd = [
      distribution.java,
    ]
    cmd.extend(self.get_options().jvm_options)
    cmd.extend(['-cp', classpath_for_cmd])
    cmd.extend([main])
    cmd.extend(args)

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # Don't capture a snapshot if pathglobs is empty.
    path_globs_input_digest = None
    if pathglobs:
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    if path_globs_input_digest and input_digest:
      epr_input_files = self.context._scheduler.merge_directories(
          (path_globs_input_digest, input_digest))
    else:
      epr_input_files = path_globs_input_digest or input_digest

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr)

    if output_dir:
      dump_digest(output_dir, res.output_directory_digest)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB the first element here is the root to materialize into, not the dir to snapshot
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO drop a file containing the digest, named maybe output_dir.digest
    return res
Example #15
 def _default_args_execute_process_request(self, argv=tuple(), env=None):
     env = env or dict()
     return ExecuteProcessRequest(
         argv=argv,
         description='',
         env=env,
         input_files=EMPTY_DIRECTORY_DIGEST,
         output_files=(),
     )
def create_requirements_pex(request, pex_bin, python_setup,
                            pex_build_environment, platform):
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    interpreter_search_paths = create_path_env_var(
        python_setup.interpreter_search_paths)
    env = {
        "PATH": interpreter_search_paths,
        **pex_build_environment.invocation_environment_dict
    }

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
    # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
    # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
    # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
    # necessarily the interpreter that PEX will use to execute the generated .pex file.
    # TODO(#7735): Set --python-setup-interpreter-search-paths differently for the host and target
    # platforms, when we introduce platforms in https://github.com/pantsbuild/pants/issues/7735.
    argv = [
        "python", f"./{pex_bin.executable}", "--output-file",
        request.output_filename
    ]
    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))
    # NOTE: PEX outputs are platform dependent, so in order to get a PEX that we can use locally
    # without cross-building, we specify that our PEX command be run on the current local platform.
    # When we support cross-building through CLI flags, we can configure requests that build a PEX
    # for our local platform that is able to execute on a different platform; but for now, in order
    # to guarantee a correct build, we need to restrict this command to execute on the same platform
    # type that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "the output
    # of this command is intended for `target_platform_constraint` iff it is run on
    # `execution_platform_constraint`".
    execute_process_request = MultiPlatformExecuteProcessRequest(
        {
            (PlatformConstraint(platform.value),
             PlatformConstraint(platform.value)):
            ExecuteProcessRequest(
                argv=tuple(argv),
                env=env,
                input_files=pex_bin.directory_digest,
                description=
                f"Create a requirements PEX: {', '.join(request.requirements)}",
                output_files=(request.output_filename, ))
        })

    result = yield Get(ExecuteProcessResult,
                       MultiPlatformExecuteProcessRequest,
                       execute_process_request)
    yield RequirementsPex(directory_digest=result.output_directory_digest)
Example #17
    def console_output(self, targets):
        if not self.get_options().transitive:
            targets = self.context.target_roots

        buildroot = get_buildroot()
        with temporary_dir() as tmpdir:
            # Write the paths of all files we want cloc to process to the so-called 'list file'.
            # TODO: 1) list_file, report_file and ignored_file should be relative files within the
            # execution "chroot", 2) list_file should be part of an input files Snapshot, and
            # 3) report_file and ignored_file should be part of an output files Snapshot, when we have
            # that capability.
            list_file = os.path.join(tmpdir, 'list_file')
            with open(list_file, 'w') as list_file_out:
                for target in targets:
                    for source in target.sources_relative_to_buildroot():
                        list_file_out.write(os.path.join(buildroot, source))
                        list_file_out.write('\n')

            report_file = os.path.join(tmpdir, 'report_file')
            ignored_file = os.path.join(tmpdir, 'ignored')

            # TODO: Look at how to make BinaryUtil support Snapshots - such as adding an intrinsic to do
            # network fetch directly into a Snapshot.
            # See http://cloc.sourceforge.net/#options for cloc cmd-line options.
            cmd = (self._get_cloc_script(), '--skip-uniqueness',
                   '--ignored={}'.format(ignored_file),
                   '--list-file={}'.format(list_file),
                   '--report-file={}'.format(report_file))
            if self.context._scheduler is None:
                with self.context.new_workunit(name='cloc',
                                               labels=[WorkUnitLabel.TOOL],
                                               cmd=' '.join(cmd)) as workunit:
                    result = subprocess.call(cmd,
                                             stdout=workunit.output('stdout'),
                                             stderr=workunit.output('stderr'))
            else:
                # TODO: Longer term we need to figure out what to put on $PATH in a remote execution env.
                # Currently, we are adding everything within $PATH to the request.
                env_path = ['PATH', os.environ.get('PATH')]
                req = ExecuteProcessRequest(cmd, env_path)
                execute_process_result, = self.context._scheduler.product_request(
                    ExecuteProcessResult, [req])
                exit_code = execute_process_result.exit_code
                if exit_code != 0:
                    raise TaskError('{} ... exited non-zero ({}).'.format(
                        ' '.join(cmd), exit_code))

            with open(report_file, 'r') as report_file_in:
                for line in report_file_in.read().split('\n'):
                    yield line

            if self.get_options().ignored:
                yield 'Ignored the following files:'
                with open(ignored_file, 'r') as ignored_file_in:
                    for line in ignored_file_in.read().split('\n'):
                        yield line
Example #18
    def test_fallible_failing_command_returns_exited_result(self):
        request = ExecuteProcessRequest.create_with_empty_snapshot(
            argv=("/bin/bash", "-c", "exit 1"),
            description='one-cat',
        )

        result = self.scheduler.product_request(FallibleExecuteProcessResult,
                                                [request])[0]

        self.assertEquals(result.exit_code, 1)
Example #19
def get_javac_version_output(javac_version_command):
  javac_version_proc_req = ExecuteProcessRequest(
    argv=javac_version_command.gen_argv(),
    description=javac_version_command.description,
    input_files=EMPTY_DIRECTORY_DIGEST,
  )
  javac_version_proc_result = yield Get(
    ExecuteProcessResult, ExecuteProcessRequest, javac_version_proc_req)

  yield JavacVersionOutput(text_type(javac_version_proc_result.stderr))
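A note on why the rule above reads stderr: older JDKs (8 and earlier) print `javac -version` to stderr rather than stdout. A minimal standalone check outside the engine (hedged sketch; assumes javac on PATH and Python 3.7+):

import subprocess

proc = subprocess.run(['javac', '-version'], capture_output=True)
# Depending on the JDK, the version string may land on stderr (JDK 8 and older)
# or stdout (JDK 9+), so fall back from one to the other.
version_text = (proc.stderr or proc.stdout).decode().strip()
print(version_text)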
Example #20
    def test_non_fallible_failing_command_raises(self):
        request = ExecuteProcessRequest.create_with_empty_snapshot(
            argv=("/bin/bash", "-c", "exit 1"),
            description='one-cat',
        )

        with self.assertRaises(ExecutionError) as cm:
            self.scheduler.product_request(ExecuteProcessResult, [request])
        self.assertIn("process 'one-cat' failed with exit code 1.",
                      str(cm.exception))
Example #21
def cat_files_process_result_concatted(cat_exe_req):
  cat_bin = cat_exe_req.shell_cat
  cat_files_snapshot = yield Get(Snapshot, PathGlobs, cat_exe_req.path_globs)
  process_request = ExecuteProcessRequest(
    argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
    input_files=cat_files_snapshot.directory_digest,
    description='cat some files',
  )
  cat_process_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest, process_request)
  yield Concatted(cat_process_result.stdout.decode('utf-8'))
    def test_non_fallible_failing_command_raises(self):
        request = ExecuteProcessRequest(
            argv=("/bin/bash", "-c", "exit 1"),
            description="one-cat",
            input_files=EMPTY_DIRECTORY_DIGEST,
        )

        with self.assertRaises(ExecutionError) as cm:
            self.request_single_product(ExecuteProcessResult, request)
        self.assertIn("process 'one-cat' failed with exit code 1.", str(cm.exception))
async def cat_files_process_result_concatted(cat_exe_req: CatExecutionRequest) -> Concatted:
    cat_bin = cat_exe_req.shell_cat
    cat_files_snapshot = await Get[Snapshot](PathGlobs, cat_exe_req.path_globs)
    process_request = ExecuteProcessRequest(
        argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
        input_files=cat_files_snapshot.directory_digest,
        description="cat some files",
    )
    cat_process_result = await Get[ExecuteProcessResult](ExecuteProcessRequest, process_request)
    return Concatted(cat_process_result.stdout.decode())
Example #24
  def test_fallible_failing_command_returns_exited_result(self):
    request = ExecuteProcessRequest(
      argv=("/bin/bash", "-c", "exit 1"),
      description='one-cat',
      input_files=EMPTY_DIRECTORY_DIGEST,
    )

    result = self.scheduler.product_request(FallibleExecuteProcessResult, [request])[0]

    self.assertEqual(result.exit_code, 1)
    def test_fallible_failing_command_returns_exited_result(self):
        request = ExecuteProcessRequest(
            argv=("/bin/bash", "-c", "exit 1"),
            description="one-cat",
            input_files=EMPTY_DIRECTORY_DIGEST,
        )

        result = self.request_single_product(FallibleExecuteProcessResult, request)

        self.assertEqual(result.exit_code, 1)
 def test_timeout(self):
     request = ExecuteProcessRequest(
         argv=("/bin/bash", "-c", "/bin/sleep 0.2; /bin/echo -n 'European Burmese'"),
         timeout_seconds=0.1,
         description="sleepy-cat",
         input_files=EMPTY_DIRECTORY_DIGEST,
     )
     result = self.request_single_product(FallibleExecuteProcessResult, request)
     self.assertNotEqual(result.exit_code, 0)
     self.assertIn(b"Exceeded timeout", result.stdout)
     self.assertIn(b"sleepy-cat", result.stdout)
def cat_files_process_result_concatted(cat_exe_req):
  cat_bin = cat_exe_req.shell_cat
  cat_files_snapshot = yield Get(Snapshot, PathGlobs, cat_exe_req.path_globs)
  process_request = ExecuteProcessRequest.create_from_snapshot(
    argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
    env=dict(),
    snapshot=cat_files_snapshot,
    output_files=(),
  )
  cat_process_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest, process_request)
  yield Concatted(str(cat_process_result.stdout))
Example #28
def cat_files_process_result_concatted(cat_exe_req):
    cat_bin = cat_exe_req.shell_cat
    cat_files_snapshot = yield Get(Snapshot, PathGlobs, cat_exe_req.path_globs)
    process_request = ExecuteProcessRequest.create_from_snapshot(
        argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
        snapshot=cat_files_snapshot,
        description='cat some files',
    )
    cat_process_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest,
                                   process_request)
    yield Concatted(text_type(cat_process_result.stdout))
Example #29
 def test_jdk(self):
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, 'roland'), 'w') as f:
             f.write('European Burmese')
         request = ExecuteProcessRequest(
             argv=('/bin/cat', '.jdk/roland'),
             input_files=EMPTY_DIRECTORY_DIGEST,
             description='cat JDK roland',
             jdk_home=temp_dir,
         )
         result = self.request_single_product(ExecuteProcessResult, request)
         self.assertEqual(result.stdout, b'European Burmese')
def cat_files_process_result_concatted(cat_exe_req):
    cat_bin = cat_exe_req.shell_cat
    cat_files_snapshot = yield Get(Snapshot, PathGlobs, cat_exe_req.path_globs)
    process_request = ExecuteProcessRequest.create_from_snapshot(
        argv=cat_bin.argv_from_snapshot(cat_files_snapshot),
        env=dict(),
        snapshot=cat_files_snapshot,
        output_files=(),
    )
    cat_process_result = yield Get(ExecuteProcessResult, ExecuteProcessRequest,
                                   process_request)
    yield Concatted(str(cat_process_result.stdout))
 def test_jdk(self):
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, "roland"), "w") as f:
             f.write("European Burmese")
         request = ExecuteProcessRequest(
             argv=("/bin/cat", ".jdk/roland"),
             input_files=EMPTY_DIRECTORY_DIGEST,
             description="cat JDK roland",
             jdk_home=temp_dir,
         )
         result = self.request_single_product(ExecuteProcessResult, request)
         self.assertEqual(result.stdout, b"European Burmese")
Example #32
    def test_fallible_failing_command_returns_exited_result(self):
        scheduler = self.mk_scheduler_in_example_fs(())

        request = ExecuteProcessRequest.create_with_empty_snapshot(
            argv=("/bin/bash", "-c", "exit 1"),
            description='one-cat',
        )

        result = self.execute_expecting_one_result(
            scheduler, FallibleExecuteProcessResult, request).value

        self.assertEquals(result.exit_code, 1)
Example #33
async def get_javac_version_output(
        javac_version_command: JavacVersionExecutionRequest
) -> JavacVersionOutput:
    javac_version_proc_req = ExecuteProcessRequest(
        argv=javac_version_command.gen_argv(),
        description=javac_version_command.description,
        input_files=EMPTY_DIRECTORY_DIGEST,
    )
    javac_version_proc_result = await Get(ExecuteProcessResult,
                                          ExecuteProcessRequest,
                                          javac_version_proc_req)

    return JavacVersionOutput(javac_version_proc_result.stderr.decode())
Example #34
    def test_platform_on_local_epr_result(self) -> None:

        this_platform = Platform.current

        req = ExecuteProcessRequest(
            argv=("/bin/echo", "test"),
            input_files=EMPTY_DIRECTORY_DIGEST,
            description="Run some program that will exit cleanly.",
        )
        result = self.request_single_product(
            FallibleExecuteProcessResultWithPlatform, req)
        assert result.exit_code == 0
        assert result.platform == this_platform
    def test_not_executable(self):
        file_name = "echo.sh"
        file_contents = b'#!/bin/bash -eu\necho "Hello"\n'

        input_file = InputFilesContent((FileContent(path=file_name, content=file_contents),))
        digest = self.request_single_product(Digest, input_file)

        req = ExecuteProcessRequest(
            argv=("./echo.sh",), input_files=digest, description="cat the contents of this file",
        )

        with self.assertRaisesWithMessageContaining(ExecutionError, "Permission"):
            self.request_single_product(ExecuteProcessResult, req)
  def test_exercise_python_side_of_timeout_implementation(self):
    # Local execution currently doesn't support timeouts,
    # but this allows us to ensure that all of the setup
    # on the python side does not blow up.
    scheduler = self.mk_scheduler_in_example_fs(())

    request = ExecuteProcessRequest.create_with_empty_snapshot(
      ("/bin/bash", "-c", "/bin/sleep 1; echo -n 'European Burmese'"),
      dict(),
      tuple(),
      timeout_seconds=0.1,
      description='sleepy-cat',
    )

    self.execute_expecting_one_result(scheduler, ExecuteProcessResult, request).value
def process_request_from_javac_version(javac_version_exe_req):
  yield ExecuteProcessRequest.create_with_empty_snapshot(
    argv=javac_version_exe_req.gen_argv(),
    env=dict(),
    output_files=())