Example #1
File: fs_test.py Project: wiwa/pants
    def test_accepts_single_file_snapshot(self):
        input_files_content = InputFilesContent(
            (FileContent(path="subdir/a.txt", content=b"test file contents"),)
        )
        snapshot = self.request_single_product(Snapshot, input_files_content)

        assert SingleFileExecutable(snapshot).exe_filename == "./subdir/a.txt"

        input_files_content = InputFilesContent(
            (FileContent(path="some_silly_file_name", content=b"test file contents"),)
        )
        snapshot = self.request_single_product(Snapshot, input_files_content)

        assert SingleFileExecutable(snapshot).exe_filename == "./some_silly_file_name"
Example #2
async def generate_pants_ini(console: Console, workspace: Workspace) -> GeneratePantsIni:
  pants_ini_content = dedent(f"""\
    [GLOBAL]
    pants_version: {pants_version}
    """)

  preexisting_snapshot = await Get[Snapshot](PathGlobs(include=('pants.ini',)))
  if preexisting_snapshot.files:
    console.print_stderr(
      "./pants.ini already exists. This goal is only meant to be run the first time you run Pants "
      "in a project.\n\nTo update config values, please directly modify the file."
    )
    return GeneratePantsIni(exit_code=1)

  console.print_stdout(dedent(f"""\
    Adding sensible defaults to ./pants.ini:
      * Pinning `pants_version` to `{pants_version}`.
    """))

  digest = await Get[Digest](InputFilesContent([
    FileContent(path='pants.ini', content=pants_ini_content.encode())
  ]))
  workspace.materialize_directory(DirectoryToMaterialize(digest))

  console.print_stdout(
    "You may modify these values directly in the file at any time. The ./pants script will detect "
    "any changes the next time you run it.\n\nYou are now ready to use Pants!"
  )
  return GeneratePantsIni(exit_code=0)
Example #3
    def test_snapshot_subset_globs(self) -> None:
        ss = SnapshotSubset(
            directory_digest=self.generate_original_digest(),
            globs=PathGlobs(("a.txt", "c.txt", "subdir2/**")),
        )

        subset_snapshot = self.request_single_product(Snapshot, ss)
        assert set(subset_snapshot.files) == {
            "a.txt",
            "c.txt",
            "subdir2/a.txt",
            "subdir2/nested_subdir/x.txt",
        }
        assert set(subset_snapshot.dirs) == {"subdir2/nested_subdir"}

        content = b"dummy content"
        subset_input = InputFilesContent(
            (
                FileContent(path="a.txt", content=content),
                FileContent(path="c.txt", content=content),
                FileContent(path="subdir2/a.txt", content=content),
                FileContent(path="subdir2/nested_subdir/x.txt", content=content),
            )
        )
        subset_digest = self.request_single_product(Digest, subset_input)
        assert subset_snapshot.directory_digest == subset_digest
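The same subsetting is available from inside an async rule via Get, mirroring how other examples on this page request Digest and Snapshot products. The sketch below is illustrative only: the PySourcesRequest/PySources wrapper types and the rule itself are hypothetical, and the import paths are assumed to match the Pants version these examples come from.

from dataclasses import dataclass

from pants.engine.fs import Digest, PathGlobs, Snapshot, SnapshotSubset
from pants.engine.rules import rule
from pants.engine.selectors import Get


@dataclass(frozen=True)
class PySourcesRequest:
    directory_digest: Digest


@dataclass(frozen=True)
class PySources:
    snapshot: Snapshot


@rule
async def select_py_sources(request: PySourcesRequest) -> PySources:
    # Narrow the digest down to just the Python files, analogous to the
    # SnapshotSubset request exercised by the test above.
    subset = await Get[Snapshot](
        SnapshotSubset(directory_digest=request.directory_digest, globs=PathGlobs(("**/*.py",)))
    )
    return PySources(subset)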
Example #4
 def run_flake8(
     self,
     source_files: List[FileContent],
     *,
     config: Optional[str] = None,
     passthrough_args: Optional[Sequence[str]] = None,
     interpreter_constraints: Optional[Sequence[str]] = None,
     skip: bool = False,
 ) -> LintResult:
     if config is not None:
         self.create_file(relpath=".flake8", contents=config)
     input_snapshot = self.request_single_product(
         Snapshot, InputFilesContent(source_files))
     target = Flake8Target(
         PythonTargetAdaptor(
             sources=EagerFilesetWithSpec('test', {'globs': []},
                                          snapshot=input_snapshot),
             address=Address.parse("test:target"),
             compatibility=interpreter_constraints,
         ))
     flake8_subsystem = global_subsystem_instance(
         Flake8,
         options={
             Flake8.options_scope: {
                 "config": ".flake8" if config else None,
                 "args": passthrough_args or [],
                 "skip": skip,
             }
         })
     return self.request_single_product(
         LintResult,
         Params(target, flake8_subsystem,
                PythonNativeCode.global_instance(),
                PythonSetup.global_instance(),
                SubprocessEnvironment.global_instance()))
Example #5
 def run_black(
   self,
   source_files: List[FileContent],
   *,
   config: Optional[str] = None,
   passthrough_args: Optional[str] = None,
   skip: bool = False,
 ) -> Tuple[LintResult, FmtResult]:
   args = ["--backend-packages2=pants.backend.python.lint.black"]
   if config is not None:
     self.create_file(relpath="pyproject.toml", contents=config)
     args.append("--black-config=pyproject.toml")
   if passthrough_args:
     args.append(f"--black-args='{passthrough_args}'")
   if skip:
     args.append(f"--black-skip")
   input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
   target_adaptor = TargetAdaptor(
     sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
     address=Address.parse("test:target"),
   )
   lint_target = BlackTarget(target_adaptor)
   fmt_target = BlackTarget(target_adaptor, prior_formatter_result_digest=input_snapshot.directory_digest)
   options_bootstrapper = create_options_bootstrapper(args=args)
   lint_result = self.request_single_product(LintResult, Params(lint_target, options_bootstrapper))
   fmt_result = self.request_single_product(FmtResult, Params(fmt_target, options_bootstrapper))
   return lint_result, fmt_result
Example #6
def get_coverage_plugin_input() -> InputFilesContent:
    return InputFilesContent(
        FilesContent((FileContent(
            path=f"{COVERAGE_PLUGIN_MODULE_NAME}.py",
            content=pkg_resources.resource_string(__name__,
                                                  "coverage_plugin/plugin.py"),
        ), )))
Example #7
    def test_pex_execution(self) -> None:
        input_files_content = InputFilesContent(
            (
                FileContent(path="main.py", content=b'print("from main")'),
                FileContent(path="subdir/sub.py", content=b'print("from sub")'),
            )
        )

        input_files = self.request_single_product(Digest, input_files_content)
        pex_output = self.create_pex_and_get_all_data(entry_point="main", input_files=input_files)

        pex_files = pex_output["files"]
        self.assertTrue("pex" not in pex_files)
        self.assertTrue("main.py" in pex_files)
        self.assertTrue("subdir/sub.py" in pex_files)

        init_subsystem(PythonSetup)
        python_setup = PythonSetup.global_instance()
        env = {"PATH": create_path_env_var(python_setup.interpreter_search_paths)}

        req = ExecuteProcessRequest(
            argv=("python", "test.pex"),
            env=env,
            input_files=pex_output["pex"].directory_digest,
            description="Run the pex and make sure it works",
        )
        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, b"from main\n")
Example #8
File: fs_test.py Project: OniOni/pants
    def test_workspace_materialize_directories_result(self):
        # TODO(#8336): at some point, this test should require that Workspace only be invoked from a console_rule
        workspace = Workspace(self.scheduler)

        input_files_content = InputFilesContent((
            FileContent(path='a.txt', content=b'hello'),
            FileContent(path='subdir/b.txt', content=b'goodbye'),
        ))

        digest = self.request_single_product(Digest, input_files_content)

        path1 = Path('a.txt')
        path2 = Path('subdir/b.txt')

        assert not path1.is_file()
        assert not path2.is_file()

        output = workspace.materialize_directories(
            (DirectoryToMaterialize(digest), ))

        assert type(output) == MaterializeDirectoriesResult
        materialize_result = output.dependencies[0]
        assert type(materialize_result) == MaterializeDirectoryResult
        assert materialize_result.output_paths == tuple(
            str(Path(self.build_root, p)) for p in [path1, path2])
Example #9
    def test_workspace_materialize_directories_result(self):
        # TODO(#8336): at some point, this test should require that Workspace only be invoked from a console_rule
        workspace = Workspace(self.scheduler)

        input_files_content = InputFilesContent((
            FileContent(path='a.txt', content=b'hello', is_executable=False),
            FileContent(path='subdir/b.txt',
                        content=b'goodbye',
                        is_executable=False),
        ))

        digest, = self.scheduler.product_request(Digest, [input_files_content])

        with temporary_dir() as tmp_dir:
            path1 = Path(tmp_dir, 'a.txt')
            path2 = Path(tmp_dir, 'subdir', 'b.txt')

            self.assertFalse(path1.is_file())
            self.assertFalse(path2.is_file())

            output = workspace.materialize_directories(
                (DirectoryToMaterialize(path=tmp_dir,
                                        directory_digest=digest), ))

            self.assertEqual(type(output), MaterializeDirectoriesResult)
            materialize_result = output.dependencies[0]
            self.assertEqual(type(materialize_result),
                             MaterializeDirectoryResult)
            self.assertEqual(materialize_result.output_paths, (
                str(Path(tmp_dir, 'a.txt')),
                str(Path(tmp_dir, 'subdir/b.txt')),
            ))
Example #10
File: fmt_test.py Project: mcguigan/pants
 def run_fmt_rule(self, *,
                  targets: List[HydratedTarget]) -> Tuple[Fmt, str]:
     result_digest = self.request_single_product(
         Digest,
         InputFilesContent([
             FileContent(path=str(self.formatted_file),
                         content=self.formatted_content.encode())
         ]))
     console = MockConsole(use_colors=False)
     result: Fmt = run_rule(
         fmt,
         rule_args=[
             console,
             HydratedTargets(targets),
             Workspace(self.scheduler),
             UnionMembership(
                 union_rules={FormatTarget: [PythonTargetAdaptor]})
         ],
         mock_gets=[
             MockGet(product_type=AggregatedFmtResults,
                     subject_type=PythonTargetAdaptor,
                     mock=lambda adaptor: AggregatedFmtResults(
                         (FmtResult(digest=result_digest,
                                    stdout=f"Formatted `{adaptor.name}`",
                                    stderr=""), ),
                         combined_digest=result_digest)),
             MockGet(product_type=Digest,
                     subject_type=DirectoriesToMerge,
                     mock=lambda _: result_digest),
         ],
     )
     return result, console.stdout.getvalue()
Example #11
File: fs_test.py Project: OniOni/pants
 def test(self):
     msg = MessageToConsoleRule(input_files_content=InputFilesContent(
         [FileContent(path='a.txt', content=b'hello')]))
     output_path = Path(self.build_root, 'a.txt')
     self.assert_console_output_contains(str(output_path),
                                         additional_params=[msg])
     assert output_path.read_text() == "hello"
Example #12
 def run_bandit(
   self,
   source_files: List[FileContent],
   *,
   config: Optional[str] = None,
   passthrough_args: Optional[str] = None,
   interpreter_constraints: Optional[str] = None,
   skip: bool = False,
 ) -> LintResult:
   args = ["--backend-packages2=pants.backend.python.lint.bandit"]
   if config:
     # TODO: figure out how to get this file to exist...
     self.create_file(relpath=".bandit", contents=config)
     args.append("--bandit-config=.bandit")
   if passthrough_args:
     args.append(f"--bandit-args={passthrough_args}")
   if skip:
     args.append(f"--bandit-skip")
   input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
   target = BanditTarget(
     PythonTargetAdaptor(
       sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
       address=Address.parse("test:target"),
       compatibility=[interpreter_constraints] if interpreter_constraints else None,
     )
   )
   return self.request_single_product(
     LintResult, Params(target, create_options_bootstrapper(args=args)),
   )
Example #13
File: pex_test.py Project: OniOni/pants
    def test_generic_pex_creation(self) -> None:
        input_files_content = InputFilesContent((
            FileContent(path='main.py', content=b'print("from main")'),
            FileContent(path='subdir/sub.py', content=b'print("from sub")'),
        ))

        input_files = self.request_single_product(Digest, input_files_content)
        pex_output = self.create_pex_and_get_all_data(entry_point='main',
                                                      input_files=input_files)

        pex_files = pex_output['files']
        self.assertTrue('pex' not in pex_files)
        self.assertTrue('main.py' in pex_files)
        self.assertTrue('subdir/sub.py' in pex_files)

        python_setup = PythonSetup.global_instance()
        env = {
            "PATH": create_path_env_var(python_setup.interpreter_search_paths)
        }

        pex = pex_output['pex']

        req = ExecuteProcessRequest(
            argv=('python', 'test.pex'),
            env=env,
            input_files=pex.directory_digest,
            description="Run the pex and make sure it works")
        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, b"from main\n")
Example #14
def get_coveragerc_input(coveragerc_content: str) -> InputFilesContent:
    return InputFilesContent([
        FileContent(
            path='.coveragerc',
            content=coveragerc_content.encode(),
            is_executable=False,
        ),
    ])
Example #15
File: run_test.py Project: OniOni/pants
 def create_mock_binary(self, program_text: bytes) -> CreatedBinary:
   input_files_content = InputFilesContent((
     FileContent(path='program.py', content=program_text, is_executable=True),
   ))
   digest = self.request_single_product(Digest, input_files_content)
   return CreatedBinary(
     binary_name='program.py',
     digest=digest,
   )
Example #16
async def create_coverage_config(
    coverage_config_request: CoverageConfigRequest, source_root_config: SourceRootConfig
) -> CoverageConfig:
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in coverage_config_request.targets), strip_source_roots=False,
        )
    )
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(sources.snapshot))
    source_roots = source_root_config.get_source_roots()

    # Generate a map from source root stripped source to its source root. eg:
    #  {'pants/testutil/subsystem/util.py': 'src/python'}. This is so that coverage reports
    #  referencing /chroot/path/pants/testutil/subsystem/util.py can be mapped back to the actual
    #  sources they reference when generating coverage reports.
    def stripped_file_with_source_root(file_name: str) -> Tuple[str, str]:
        source_root_object = source_roots.find_by_path(file_name)
        source_root = source_root_object.path if source_root_object is not None else ""
        stripped_path = file_name[len(source_root) + 1 :]
        return stripped_path, source_root

    stripped_files_to_source_roots = dict(
        stripped_file_with_source_root(filename)
        for filename in sorted(init_injected.snapshot.files)
    )

    default_config = dedent(
        """
        [run]
        branch = True
        timid = False
        relative_files = True
        """
    )

    config_parser = configparser.ConfigParser()
    config_parser.read_file(StringIO(default_config))
    config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.set(
        COVERAGE_PLUGIN_MODULE_NAME,
        "source_to_target_base",
        json.dumps(stripped_files_to_source_roots),
    )
    config_parser.set(
        COVERAGE_PLUGIN_MODULE_NAME, "test_time", json.dumps(coverage_config_request.is_test_time)
    )

    config_io_stream = StringIO()
    config_parser.write(config_io_stream)
    digest = await Get[Digest](
        InputFilesContent(
            [FileContent(".coveragerc", content=config_io_stream.getvalue().encode())]
        )
    )
    return CoverageConfig(digest)
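For reference, the mapping that stripped_file_with_source_root builds is a plain path split. The standalone snippet below re-creates that transformation outside the rule purely for illustration; the helper name is made up, and the "src/python" source root and sample path follow the comment in the rule above.

def strip_source_root(file_name: str, source_root: str) -> tuple:
    # Mirror of the nested helper above: drop "<source_root>/" from the front of the path.
    return file_name[len(source_root) + 1:], source_root


assert strip_source_root(
    "src/python/pants/testutil/subsystem/util.py", "src/python"
) == ("pants/testutil/subsystem/util.py", "src/python")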
Example #17
async def generate_fortran_from_avro(request: GenerateFortranFromAvroRequest) -> GeneratedSources:
    protocol_files = request.protocol_sources.files

    def generate_fortran(fp: str) -> FileContent:
        parent = str(PurePath(fp).parent).replace("src/avro", "src/fortran")
        file_name = f"{PurePath(fp).stem}.f95"
        return FileContent(str(PurePath(parent, file_name)), b"Generated")

    result = await Get[Snapshot](InputFilesContent([generate_fortran(fp) for fp in protocol_files]))
    return GeneratedSources(result)
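The path rewrite performed by generate_fortran above is easy to check in isolation. A standalone illustration follows; the input path is made up.

from pathlib import PurePath

fp = "src/avro/example/schema.avsc"
parent = str(PurePath(fp).parent).replace("src/avro", "src/fortran")
file_name = f"{PurePath(fp).stem}.f95"
# The generated FileContent ends up at the mirrored src/fortran path.
assert str(PurePath(parent, file_name)) == "src/fortran/example/schema.f95"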
Example #18
 def make_target_with_origin(
     self, source_files: List[FileContent], *, origin: Optional[OriginSpec] = None,
 ) -> TargetAdaptorWithOrigin:
     input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
     adaptor = TargetAdaptor(
         sources=EagerFilesetWithSpec("test", {"globs": []}, snapshot=input_snapshot),
         address=Address.parse("test:target"),
     )
     if origin is None:
         origin = SingleAddress(directory="test", name="target")
     return TargetAdaptorWithOrigin(adaptor, origin)
Example #19
File: fs_test.py Project: tpasternak/pants
  def test(self):
    with temporary_dir() as tmp_dir:
      input_files_content = InputFilesContent((
        FileContent(path='a.txt', content=b'hello', is_executable=False),
      ))

      msg = MessageToConsoleRule(tmp_dir=tmp_dir, input_files_content=input_files_content)
      output_path = str(Path(tmp_dir, 'a.txt'))
      self.assert_console_output_contains(output_path, additional_params=[msg])
      contents = open(output_path).read()
      self.assertEqual(contents, 'hello')
Example #20
  def test_raises_with_multiple_files(self):
    input_files_content = InputFilesContent((
      FileContent(path='a.txt', content=b'test file contents'),
      FileContent(path='b.txt', content=b'more test file contents'),
    ))

    snapshot = self.request_single_product(Snapshot, input_files_content)

    with self.assertRaisesWithMessage(
        SingleFileExecutable.ValidationError,
        f'snapshot {snapshot} used for {SingleFileExecutable} should have exactly 1 file!'):
      SingleFileExecutable(snapshot)
Example #21
 def generate_original_digest(self) -> Digest:
     content = b'dummy content'
     input_files_content = InputFilesContent((
         FileContent(path='a.txt', content=content),
         FileContent(path='b.txt', content=content),
         FileContent(path='c.txt', content=content),
         FileContent(path='subdir/a.txt', content=content),
         FileContent(path='subdir/b.txt', content=content),
         FileContent(path='subdir2/a.txt', content=content),
         FileContent(path='subdir2/nested_subdir/x.txt', content=content),
     ))
     return self.request_single_product(Digest, input_files_content)
Example #22
File: test_test.py Project: pyranja/pants
 def make_ipr(self) -> InteractiveProcessRequest:
     input_files_content = InputFilesContent(
         (FileContent(path="program.py", content=b"def test(): pass"), ))
     digest = self.request_single_product(Digest, input_files_content)
     return InteractiveProcessRequest(
         argv=(
             "/usr/bin/python",
             "program.py",
         ),
         run_in_workspace=False,
         input_digest=digest,
     )
Example #23
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
  if request.py2:
    # TODO: Implement Python 2 support.  This will involve, among other things: merging ancestor
    # __init__.py files into the chroot, detecting packages based on the presence of __init__.py,
    # and inspecting all __init__.py files for the namespace package incantation.
    raise UnsupportedPythonVersion('Running setup.py commands not supported for Python 2.')

  owned_deps = await Get[OwnedDependencies](DependencyOwner(request.exported_target))
  targets = HydratedTargets(od.hydrated_target for od in owned_deps)
  sources = await Get[SetupPySources](SetupPySourcesRequest(targets))
  requirements = await Get[ExportedTargetRequirements](DependencyOwner(request.exported_target))

  # Nest the sources under the src/ prefix.
  src_digest = await Get[Digest](DirectoryWithPrefixToAdd(sources.digest, CHROOT_SOURCE_ROOT))

  # Generate the kwargs to the setup() call.
  setup_kwargs = request.exported_target.hydrated_target.adaptor.provides.setup_py_keywords.copy()
  setup_kwargs.update({
    'package_dir': {'': CHROOT_SOURCE_ROOT},
    'packages': sources.packages,
    'namespace_packages': sources.namespace_packages,
    'package_data': dict(sources.package_data),
    'install_requires': requirements.requirement_strs
  })
  ht = request.exported_target.hydrated_target
  key_to_binary_spec = getattr(ht.adaptor.provides, 'binaries', {})
  keys = list(key_to_binary_spec.keys())
  binaries = await MultiGet(Get[HydratedTarget](
    Address, Address.parse(key_to_binary_spec[key], relative_to=ht.address.spec_path))
    for key in keys)
  for key, binary in zip(keys, binaries):
    if (not isinstance(binary.adaptor, PythonBinaryAdaptor) or
        getattr(binary.adaptor, 'entry_point', None) is None):
      raise InvalidEntryPoint(
        f'The binary {key} exported by {ht.address.reference()} is not a valid entry point.')
    entry_points = setup_kwargs['entry_points'] = setup_kwargs.get('entry_points', {})
    console_scripts = entry_points['console_scripts'] = entry_points.get('console_scripts', [])
    console_scripts.append(f'{key}={binary.adaptor.entry_point}')

  # Generate the setup script.
  setup_py_content = SETUP_BOILERPLATE.format(
    target_address_spec=ht.address.reference(),
    setup_kwargs_str=distutils_repr(setup_kwargs)
  ).encode()
  extra_files_digest = await Get[Digest](
    InputFilesContent([
      FileContent('setup.py', setup_py_content),
      FileContent('MANIFEST.in', 'include *.py'.encode())  # Make sure setup.py is included.
    ]))

  chroot_digest = await Get[Digest](DirectoriesToMerge((src_digest, extra_files_digest)))
  return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs, sort_keys=True))
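The entry-point loop above only appends to setup_kwargs['entry_points']['console_scripts']. The standalone snippet below shows the resulting shape; the binary name and entry point are hypothetical.

setup_kwargs = {}
key, entry_point = "my-tool", "my_project.bin.tool:main"

# Same dict-threading idiom as in the rule above.
entry_points = setup_kwargs["entry_points"] = setup_kwargs.get("entry_points", {})
console_scripts = entry_points["console_scripts"] = entry_points.get("console_scripts", [])
console_scripts.append(f"{key}={entry_point}")

assert setup_kwargs == {"entry_points": {"console_scripts": ["my-tool=my_project.bin.tool:main"]}}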
Example #24
File: fs_test.py Project: wiwa/pants
 def test_add_prefix(self) -> None:
     input_files_content = InputFilesContent((
         FileContent(path="main.py", content=b'print("from main")'),
         FileContent(path="subdir/sub.py", content=b'print("from sub")'),
     ))
     digest = self.request_single_product(Digest, input_files_content)
     output_digest = self.request_single_product(
         Digest, AddPrefix(digest, "outer_dir"))
     snapshot = self.request_single_product(Snapshot, output_digest)
     assert sorted(snapshot.files) == [
         "outer_dir/main.py", "outer_dir/subdir/sub.py"
     ]
     assert sorted(snapshot.dirs) == ["outer_dir", "outer_dir/subdir"]
Example #25
    def test_not_executable(self):
        file_name = "echo.sh"
        file_contents = b'#!/bin/bash -eu\necho "Hello"\n'

        input_file = InputFilesContent((FileContent(path=file_name, content=file_contents),))
        digest = self.request_single_product(Digest, input_file)

        req = ExecuteProcessRequest(
            argv=("./echo.sh",), input_files=digest, description="cat the contents of this file",
        )

        with self.assertRaisesWithMessageContaining(ExecutionError, "Permission"):
            self.request_single_product(ExecuteProcessResult, req)
Example #26
 def make_ipr(self, content: bytes) -> InteractiveProcessRequest:
     input_files_content = InputFilesContent(
         (FileContent(path='program.py',
                      content=content,
                      is_executable=True), ))
     digest = self.request_single_product(Digest, input_files_content)
     return InteractiveProcessRequest(
         argv=(
             "/usr/bin/python",
             "program.py",
         ),
         run_in_workspace=False,
         input_files=digest,
     )
Example #27
async def inject_missing_init_files(request: InjectInitRequest) -> InitInjectedSnapshot:
    """Ensure that every package has an `__init__.py` file in it.

    This will preserve any `__init__.py` files already in the input snapshot.
    """
    snapshot = request.snapshot
    missing_init_files = sorted(identify_missing_init_files(snapshot.files))
    if not missing_init_files:
        return InitInjectedSnapshot(snapshot)
    generated_inits_digest = await Get[Digest](
        InputFilesContent(FileContent(path=fp, content=b"") for fp in missing_init_files)
    )
    result = await Get[Snapshot](MergeDigests((snapshot.digest, generated_inits_digest)))
    return InitInjectedSnapshot(result)
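identify_missing_init_files comes from the Pants source; the simplified re-implementation below is hypothetical and only illustrates the idea: every ancestor directory of a .py file should hold an __init__.py, and only the ones not already present get generated.

from pathlib import PurePath
from typing import Iterable, Set


def identify_missing_init_files_sketch(files: Iterable[str]) -> Set[str]:
    expected: Set[str] = set()
    for path in files:
        if not path.endswith(".py"):
            continue
        for parent in PurePath(path).parents:
            if str(parent) != ".":
                expected.add(str(PurePath(parent, "__init__.py")))
    # Keep only the __init__.py files that are not already in the snapshot.
    return expected - set(files)


assert identify_missing_init_files_sketch(["src/app/main.py", "src/__init__.py"]) == {
    "src/app/__init__.py"
}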
Example #28
File: test_fs.py Project: tpasternak/pants
  def test_add_prefix(self):
    input_files_content = InputFilesContent((
      FileContent(path='main.py', content=b'print("from main")', is_executable=False),
      FileContent(path='subdir/sub.py', content=b'print("from sub")', is_executable=False),
    ))

    digest, = self.scheduler.product_request(Digest, [input_files_content])

    dpa = DirectoryWithPrefixToAdd(digest, "outer_dir")
    output_digest, = self.scheduler.product_request(Digest, [dpa])
    snapshot, = self.scheduler.product_request(Snapshot, [output_digest])

    self.assertEqual(sorted(snapshot.files), ['outer_dir/main.py', 'outer_dir/subdir/sub.py'])
    self.assertEqual(sorted(snapshot.dirs), ['outer_dir', 'outer_dir/subdir'])
Example #29
    def test_executable(self):
        file_name = "echo.sh"
        file_contents = b'#!/bin/bash -eu\necho "Hello"\n'

        input_file = InputFilesContent(
            (FileContent(path=file_name, content=file_contents, is_executable=True),)
        )
        digest = self.request_single_product(Digest, input_file)

        req = ExecuteProcessRequest(
            argv=("./echo.sh",), input_files=digest, description="cat the contents of this file",
        )

        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, b"Hello\n")
Example #30
    def test_file_in_directory_creation(self):
        path = "somedir/filename"
        content = b"file contents"

        input_file = InputFilesContent((FileContent(path=path, content=content),))
        digest = self.request_single_product(Digest, input_file)

        req = ExecuteProcessRequest(
            argv=("/bin/cat", "somedir/filename"),
            input_files=digest,
            description="Cat a file in a directory to make sure that doesn't break",
        )

        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, content)
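Taken together, the examples on this page follow one pattern: build FileContent entries, wrap them in InputFilesContent, turn that into a Digest (request_single_product in tests, await Get[Digest](...) in rules), and hand the digest to an ExecuteProcessRequest, a Workspace, or a further digest operation. The condensed sketch below shows the test-side round trip, assuming the same TestBase helpers used throughout; import paths may differ between Pants versions.

from pants.engine.fs import Digest, FileContent, InputFilesContent
from pants.engine.isolated_process import ExecuteProcessRequest, ExecuteProcessResult
from pants.testutil.test_base import TestBase


class InputFilesContentRoundTripTest(TestBase):
    def test_round_trip(self) -> None:
        # FileContent -> InputFilesContent -> Digest, then run a process over the digest.
        input_files_content = InputFilesContent(
            (FileContent(path="hello.txt", content=b"hello world"),)
        )
        digest = self.request_single_product(Digest, input_files_content)

        result = self.request_single_product(
            ExecuteProcessResult,
            ExecuteProcessRequest(
                argv=("/bin/cat", "hello.txt"),
                input_files=digest,
                description="Print the file that was just synthesized",
            ),
        )
        assert result.stdout == b"hello world"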