Example #1
 def _retrieve_single_product_at_target_base(self, product_mapping, target):
   self.context.log.debug("product_mapping: {}".format(product_mapping))
   self.context.log.debug("target: {}".format(target))
   product = product_mapping.get(target)
   single_base_dir = assert_single_element(product.keys())
   single_product = assert_single_element(product[single_base_dir])
   return single_product
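
The helper above assumes each target maps to a nested {base_dir: [products]} structure containing exactly one base dir that holds exactly one product. An illustrative sketch of that shape, modeled with a plain dict (the paths, target spec, and import path are assumptions):

from pants.util.collections import assert_single_element  # assumed import path

target = "src/python/foo:foo"  # hypothetical target spec
product_mapping = {target: {"/tmp/dist/base": ["/tmp/dist/base/foo-0.0.1.whl"]}}

product = product_mapping.get(target)
base_dir = assert_single_element(product.keys())     # -> "/tmp/dist/base"
artifact = assert_single_element(product[base_dir])  # -> the single wheel path
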
Example #2
  def test_assert_single_element(self):
    single_element = [1]
    self.assertEqual(1, assert_single_element(single_element))

    no_elements = []
    with self.assertRaises(StopIteration):
      assert_single_element(no_elements)

    too_many_elements = [1, 2]
    with self.assertRaises(ValueError) as cm:
      assert_single_element(too_many_elements)
    expected_msg = "iterable [1, 2] has more than one element."
    self.assertEqual(expected_msg, str(cm.exception))
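
The test above pins down the full contract of assert_single_element: return the sole element, raise StopIteration when the iterable is empty, and raise ValueError (with the exact message checked above) when there is more than one element. A minimal sketch consistent with that contract (the real implementation lives in Pants's util code and may differ in detail):

def assert_single_element(iterable):
    """Return the single element of `iterable`.

    Raises StopIteration if the iterable is empty, and ValueError if it
    contains more than one element.
    """
    it = iter(iterable)
    first_item = next(it)  # Propagates StopIteration when empty.
    try:
        next(it)
    except StopIteration:
        return first_item
    raise ValueError("iterable {!r} has more than one element.".format(iterable))
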
Example #3
    def _assert_unpacking(self, module_name):
        # TODO: figure out how to generate a nice fake wheel that the pex resolve will accept instead of
        # depending on a real wheel!
        pex_requirement = PythonRequirement("pex==1.5.3")
        unpacked_wheel_tgt = self._make_unpacked_wheel(
            pex_requirement,
            include_patterns=["pex/pex.py", "pex/__init__.py"],
            module_name=module_name,
            # TODO: `within_data_subdir` is only tested implicitly by the tensorflow_custom_op target
            # in examples/! Make a fake wheel, resolve it, and test that `within_data_subdir`
            # descends into the correct directory!
            within_data_subdir=None,
        )
        context = self.context(target_roots=[unpacked_wheel_tgt])
        unpack_task = self.create_task(context)
        unpack_task.execute()

        expected_files = {"pex/__init__.py", "pex/pex.py"}

        with unpack_task.invalidated([unpacked_wheel_tgt]) as invalidation_check:
            vt = assert_single_element(invalidation_check.all_vts)
            self.assertEqual(vt.target, unpacked_wheel_tgt)
            archives = context.products.get_data(UnpackedArchives,
                                                 dict)[vt.target]
            self.assertEqual(expected_files, set(archives.found_files))
Example #4
 def request_single_product(
   self, product_type: Type["TestBase._P"], subject: Union[Params, Any]
 ) -> "TestBase._P":
   result = assert_single_element(
     self.scheduler.product_request(product_type, [subject])
   )
   return cast(TestBase._P, result)
Example #5
  def test_zipkin_reporter_multi_threads(self):
    ZipkinHandler = zipkin_handler()
    with http_server(ZipkinHandler) as port:
      endpoint = "http://localhost:{}".format(port)
      command = [
        '-ldebug',
        '--reporting-zipkin-endpoint={}'.format(endpoint),
        'compile',
        'examples/src/scala/org/pantsbuild/example/several_scala_targets::'
      ]

      pants_run = self.run_pants(command)
      self.assert_success(pants_run)

      child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
      self.assertTrue(child_processes)

      self.wait_spans_to_be_sent(child_processes)

      trace = assert_single_element(ZipkinHandler.traces.values())

      rsc_task_span = self.find_spans_by_name_and_service_name(trace, 'rsc', 'pants/task')
      self.assertEqual(len(rsc_task_span), 1)
      rsc_task_span_id = rsc_task_span[0]['id']

      compile_workunit_spans = self.find_spans_by_name_and_service_name(
        trace, 'compile', 'pants/workunit'
      )
      self.assertEqual(len(compile_workunit_spans), 3)
      self.assertTrue(all(span['parentId'] == rsc_task_span_id for span in compile_workunit_spans))
Example #6
  def test_zipkin_reports_for_pure_v2_goals(self):
    ZipkinHandler = zipkin_handler()
    with http_server(ZipkinHandler) as port:
      endpoint = "http://localhost:{}".format(port)
      command = [
        '-ldebug',
        '--no-v1',
        '--v2',
        '--reporting-zipkin-endpoint={}'.format(endpoint),
        '--reporting-zipkin-trace-v2',
        'list',
        '3rdparty:'
      ]

      pants_run = self.run_pants(command)
      self.assert_success(pants_run)

      child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
      self.assertTrue(child_processes)

      self.wait_spans_to_be_sent(child_processes)

      trace = assert_single_element(ZipkinHandler.traces.values())

      v2_span_name_part = "Scandir"
      self.assertTrue(any(v2_span_name_part in span['name'] for span in trace),
        "There is no span that contains '{}' in it's name. The trace:{}".format(
        v2_span_name_part, trace
        ))
Example #7
  def test_zipkin_reporter(self):
    ZipkinHandler = zipkin_handler()
    with http_server(ZipkinHandler) as port:
      endpoint = "http://localhost:{}".format(port)
      command = [
        '-ldebug',
        '--reporting-zipkin-endpoint={}'.format(endpoint),
        'cloc',
        'examples/src/java/org/pantsbuild/example/hello/simple',
      ]

      pants_run = self.run_pants(command)
      self.assert_success(pants_run)

      child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
      self.assertTrue(child_processes)

      self.wait_spans_to_be_sent(child_processes)

      trace = assert_single_element(ZipkinHandler.traces.values())

      main_span = self.find_spans_by_name(trace, 'main')
      self.assertEqual(len(main_span), 1)

      parent_id = main_span[0]['id']
      main_children = self.find_spans_by_parentId(trace, parent_id)
      self.assertTrue(main_children)
      self.assertTrue(any(span['name'] == 'cloc' for span in main_children))
Example #8
 def assert_deprecation_warning():
     if deprecation_expected:
         warning = assert_single_element(seen_warnings)
         assert warning.category == DeprecationWarning
         return warning.message
     else:
         assert len(seen_warnings) == 0
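
`seen_warnings` and `deprecation_expected` are captured from an enclosing scope not shown in this snippet. A plausible capture pattern using the standard library (illustrative only; the import path for assert_single_element is an assumption):

import warnings

from pants.util.collections import assert_single_element  # assumed import path

with warnings.catch_warnings(record=True) as seen_warnings:
    warnings.simplefilter("always")
    warnings.warn("this API is deprecated", DeprecationWarning)  # stand-in for the code under test

warning = assert_single_element(seen_warnings)
assert warning.category == DeprecationWarning
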
Example #9
    def test_zipkin_reporter_multi_threads(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "compile",
                "examples/src/scala/org/pantsbuild/example/several_scala_targets::",
            ]

            pants_run = self.run_pants(command)
            self.assert_success(pants_run)

            child_processes = self.find_child_processes_that_send_spans(
                pants_run.stderr_data)
            self.assertTrue(child_processes)

            self.wait_spans_to_be_sent(child_processes)

            trace = assert_single_element(ZipkinHandler.traces.values())

            rsc_task_span = self.find_spans_by_name_and_service_name(
                trace, "rsc", "pants/task")
            self.assertEqual(len(rsc_task_span), 1)
            rsc_task_span_id = rsc_task_span[0]["id"]

            compile_workunit_spans = self.find_spans_by_name_and_service_name(
                trace, "compile", "pants/workunit")
            self.assertEqual(len(compile_workunit_spans), 4)
            self.assertTrue(
                all(span["parentId"] == rsc_task_span_id
                    for span in compile_workunit_spans))
Example #10
    def test_zipkin_reports_for_pure_v2_goals(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                "--no-v1",
                "--v2",
                f"--reporting-zipkin-endpoint={endpoint}",
                "--reporting-zipkin-trace-v2",
                "list",
                "3rdparty:",
            ]

            pants_run = self.run_pants(command)
            self.assert_success(pants_run)

            child_processes = self.find_child_processes_that_send_spans(
                pants_run.stderr_data)
            self.assertTrue(child_processes)

            self.wait_spans_to_be_sent(child_processes)

            trace = assert_single_element(ZipkinHandler.traces.values())

            v2_span_name_part = "snapshot"
            self.assertTrue(
                any(v2_span_name_part in span["name"] for span in trace),
                "There is no span that contains '{}' in it's name. The trace:{}"
                .format(v2_span_name_part, trace),
            )
Example #11
 def assert_deprecation_warning():
   if deprecation_expected:
     warning = assert_single_element(seen_warnings)
     self.assertEqual(warning.category, DeprecationWarning)
     return warning.message
   else:
     self.assertEqual(0, len(seen_warnings))
Example #12
    def wrapper(func):
        if not inspect.isfunction(func):
            raise ValueError(
                "The @rule decorator must be applied innermost of all decorators."
            )

        owning_module = sys.modules[func.__module__]
        source = inspect.getsource(func) or "<string>"
        source_file = inspect.getsourcefile(func)
        beginning_indent = _get_starting_indent(source)
        if beginning_indent:
            source = "\n".join(line[beginning_indent:]
                               for line in source.split("\n"))
        module_ast = ast.parse(source)

        def resolve_type(name):
            resolved = getattr(owning_module, name,
                               None) or owning_module.__builtins__.get(
                                   name, None)
            if resolved is None:
                raise ValueError(
                    f"Could not resolve type `{name}` in top level of module "
                    f"{owning_module.__name__} defined in {source_file}")
            elif not isinstance(resolved, type):
                raise ValueError(
                    f"Expected a `type` constructor for `{name}`, but got: {resolved} (type "
                    f"`{type(resolved).__name__}`) in {source_file}")
            return resolved

        rule_func_node = assert_single_element(
            node for node in ast.iter_child_nodes(module_ast)
            if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))
            and node.name == func.__name__)

        parents_table = {}
        for parent in ast.walk(rule_func_node):
            for child in ast.iter_child_nodes(parent):
                parents_table[child] = parent

        rule_visitor = _RuleVisitor(source_file_name=source_file,
                                    resolve_type=resolve_type)
        rule_visitor.visit(rule_func_node)

        gets = FrozenOrderedSet(rule_visitor.gets)

        # Set our own custom `__line_number__` dunder so that the engine may visualize the line number.
        func.__line_number__ = func.__code__.co_firstlineno

        func.rule = TaskRule(
            return_type,
            parameter_types,
            func,
            input_gets=gets,
            canonical_name=canonical_name,
            desc=desc,
            level=level,
            cacheable=cacheable,
        )

        return func
Example #13
    def test_zipkin_reporter_for_v2_engine(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "--reporting-zipkin-trace-v2",
                "cloc",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]

            pants_run = self.run_pants(command)
            self.assert_success(pants_run)

            child_processes = self.find_child_processes_that_send_spans(
                pants_run.stderr_data)
            self.assertTrue(child_processes)

            self.wait_spans_to_be_sent(child_processes)

            trace = assert_single_element(ZipkinHandler.traces.values())

            v2_span_name_part = "Snapshot"
            self.assertTrue(
                any(v2_span_name_part in span["name"] for span in trace),
                "There is no span that contains '{}' in it's name. The trace:{}"
                .format(v2_span_name_part, trace),
            )
Example #14
    def test_zipkin_reporter(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "cloc",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]

            pants_run = self.run_pants(command)
            self.assert_success(pants_run)

            child_processes = self.find_child_processes_that_send_spans(
                pants_run.stderr_data)
            self.assertTrue(child_processes)

            self.wait_spans_to_be_sent(child_processes)

            trace = assert_single_element(ZipkinHandler.traces.values())

            main_span = self.find_spans_by_name(trace, "main")
            self.assertEqual(len(main_span), 1)

            parent_id = main_span[0]["id"]
            main_children = self.find_spans_by_parentId(trace, parent_id)
            self.assertTrue(main_children)
            self.assertTrue(
                any(span["name"] == "cloc" for span in main_children))
Example #15
  def _do_test_products(self, intransitive):
    self.maxDiff = None
    with self.sample_jarfile('foo') as foo_jar:
      with self.sample_jarfile('bar') as bar_jar:
        foo_coords = M2Coordinate(org='com.example', name='foo', rev='0.0.1')
        bar_coords = M2Coordinate(org='com.example', name='bar', rev='0.0.7')
        unpacked_jar_tgt = self._make_unpacked_jar(
          foo_coords, include_patterns=['a/b/c/*.proto'], intransitive=intransitive)

        context = self.context(target_roots=[unpacked_jar_tgt])
        unpack_task = self.create_task(context)
        self._add_dummy_product(context, unpacked_jar_tgt, foo_jar, foo_coords)
        # We add jar_bar as a product against foo_tgt, to simulate it being an
        # externally-resolved dependency of jar_foo.
        self._add_dummy_product(context, unpacked_jar_tgt, bar_jar, bar_coords)
        unpack_task.execute()

        expected_files = {'a/b/c/foo.proto'}
        if not intransitive:
          expected_files.add('a/b/c/bar.proto')

        with unpack_task.invalidated([unpacked_jar_tgt]) as invalidation_check:
          vt = assert_single_element(invalidation_check.all_vts)
          self.assertEqual(vt.target, unpacked_jar_tgt)
          archives = context.products.get_data(UnpackedArchives, dict)[vt.target]
          self.assertEqual(expected_files, set(archives.found_files))
Example #16
    def create_pex_and_get_pex_info(self,
                                    requirements=None,
                                    entry_point=None,
                                    interpreter_constraints=None):
        def hashify_optional_collection(iterable):
            return tuple(sorted(iterable)) if iterable is not None else tuple()

        request = ResolveRequirementsRequest(
            output_filename="test.pex",
            requirements=hashify_optional_collection(requirements),
            interpreter_constraints=hashify_optional_collection(
                interpreter_constraints),
            entry_point=entry_point,
        )
        requirements_pex = assert_single_element(
            self.scheduler.product_request(ResolvedRequirementsPex, [
                Params(request, PythonSetup.global_instance(),
                       PythonNativeCode.global_instance())
            ]))
        with temporary_dir() as tmp_dir:
            self.scheduler.materialize_directories((DirectoryToMaterialize(
                path=tmp_dir,
                directory_digest=requirements_pex.directory_digest), ))
            with zipfile.ZipFile(os.path.join(tmp_dir, "test.pex"),
                                 "r") as pex:
                with pex.open("PEX-INFO", "r") as pex_info:
                    pex_info_content = pex_info.readline().decode()
        return json.loads(pex_info_content)
Example #17
  def wrapper(func):
    if not inspect.isfunction(func):
      raise ValueError('The @rule decorator must be applied innermost of all decorators.')

    owning_module = sys.modules[func.__module__]
    source = inspect.getsource(func)
    beginning_indent = _get_starting_indent(source)
    if beginning_indent:
      source = "\n".join(line[beginning_indent:] for line in source.split("\n"))
    module_ast = ast.parse(source)

    def resolve_type(name):
      resolved = getattr(owning_module, name, None) or owning_module.__builtins__.get(name, None)
      if resolved is None:
        raise ValueError('Could not resolve type `{}` in top level of module {}'
                         .format(name, owning_module.__name__))
      elif not isinstance(resolved, type):
        raise ValueError('Expected a `type` constructor for `{}`, but got: {} (type `{}`)'
                         .format(name, resolved, type(resolved).__name__))
      return resolved

    gets = OrderedSet()
    rule_func_node = assert_single_element(
      node for node in ast.iter_child_nodes(module_ast)
      if isinstance(node, ast.FunctionDef) and node.name == func.__name__)

    parents_table = {}
    for parent in ast.walk(rule_func_node):
      for child in ast.iter_child_nodes(parent):
        parents_table[child] = parent

    rule_visitor = _RuleVisitor(
      func=func,
      func_node=rule_func_node,
      func_source=source,
      orig_indent=beginning_indent,
      parents_table=parents_table,
    )
    rule_visitor.visit(rule_func_node)
    gets.update(
      Get.create_statically_for_rule_graph(resolve_type(p), resolve_type(s))
      for p, s in rule_visitor.gets)

    # Register dependencies for @console_rule/Goal.
    if is_goal_cls:
      dependency_rules = (optionable_rule(output_type.Options),)
    else:
      dependency_rules = None

    func.rule = TaskRule(
        output_type,
        tuple(input_selectors),
        func,
        input_gets=tuple(gets),
        dependency_rules=dependency_rules,
        cacheable=cacheable,
      )

    return func
Example #18
    def _collect_dist_jar(self, dist_dir):
        # We should only see a single file in the dist dir.
        dist_jar_glob = os.path.join(dist_dir, '*.jar')
        globbed_jars = glob.glob(dist_jar_glob)

        if globbed_jars:
            return assert_single_element(globbed_jars)
        else:
            return None
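
The empty-glob guard above avoids the StopIteration that assert_single_element raises on an empty iterable. An equivalent EAFP sketch (illustrative only, written as a free function; a ValueError for multiple jars still propagates, just as in the original):

import glob
import os

from pants.util.collections import assert_single_element  # assumed import path

def collect_dist_jar(dist_dir):
    # We should only see a single jar in the dist dir, or none at all.
    try:
        return assert_single_element(glob.glob(os.path.join(dist_dir, "*.jar")))
    except StopIteration:
        return None
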
Example #20
  def _add_artifacts(self, dist_target_dir, shared_libs_product, native_artifact_targets):
    all_shared_libs = []
    for tgt in native_artifact_targets:
      product_mapping = shared_libs_product.get(tgt)
      base_dir = assert_single_element(product_mapping.keys())
      shared_lib = assert_single_element(product_mapping[base_dir])
      all_shared_libs.append(shared_lib)

    for shared_lib in all_shared_libs:
      basename = os.path.basename(shared_lib.path)
      # NB: We convert everything to .so here so that the setup.py can just
      # declare .so to build for either platform.
      resolved_outname = re.sub(r'\..*\Z', '.so', basename)
      dest_path = os.path.join(dist_target_dir, resolved_outname)
      safe_mkdir_for(dest_path)
      shutil.copyfile(shared_lib.path, dest_path)

    return all_shared_libs
Example #21
  def test_simple(self):
    self.make_target(':import_whls', PythonRequirementLibrary, requirements=[
      PythonRequirement('foo==123'),
    ])
    target = self.make_target(':foo', UnpackedWheels, libraries=[':import_whls'], module_name='foo')

    self.assertIsInstance(target, UnpackedWheels)
    dependency_specs = [spec for spec in target.compute_dependency_specs(payload=target.payload)]
    self.assertSequenceEqual([':import_whls'], dependency_specs)
    import_whl_dep = assert_single_element(target.all_imported_requirements)
    self.assertIsInstance(import_whl_dep, PythonRequirement)
Example #22
 def matching_addresses(
     self, address_families: Sequence["AddressFamily"]
 ) -> Sequence[Tuple[Address, TargetAdaptor]]:
     single_af = assert_single_element(address_families)
     addr_tgt_pairs = tuple(
         (addr, tgt)
         for addr, tgt in single_af.addresses_to_target_adaptors.items()
         if addr.target_name == self.name)
     # There will be at most one target with a given name in a single AddressFamily.
     assert len(addr_tgt_pairs) <= 1
     return addr_tgt_pairs
Example #24
 def run_black(
         self,
         source_files: List[FileContent],
         *,
         config: Optional[str] = None) -> Tuple[LintResult, FmtResult]:
     if config is not None:
         self.create_file(relpath="pyproject.toml", contents=config)
     input_snapshot = assert_single_element(
         self.scheduler.product_request(Snapshot,
                                        [InputFilesContent(source_files)]))
     target = FormattablePythonTarget(
         TargetAdaptor(
             sources=EagerFilesetWithSpec('test', {'globs': []},
                                          snapshot=input_snapshot),
             address=Address.parse("test:target"),
         ))
     black_subsystem = global_subsystem_instance(
         Black,
         options={
             Black.options_scope: {
                 "config": "pyproject.toml" if config else None
             }
         })
     black_setup = assert_single_element(
         self.scheduler.product_request(BlackSetup, [
             Params(
                 target,
                 black_subsystem,
                 PythonNativeCode.global_instance(),
                 PythonSetup.global_instance(),
                 SubprocessEnvironment.global_instance(),
             )
         ]))
     fmt_and_lint_params = Params(target, black_setup,
                                  PythonSetup.global_instance(),
                                  SubprocessEnvironment.global_instance())
     lint_result: LintResult = assert_single_element(
         self.scheduler.product_request(LintResult, [fmt_and_lint_params]))
     fmt_result: FmtResult = assert_single_element(
         self.scheduler.product_request(FmtResult, [fmt_and_lint_params]))
     return lint_result, fmt_result
Example #25
  def _assert_ctypes_binary_creation(self, toolchain_variant):
    with temporary_dir() as tmp_dir:
      pants_run = self.run_pants(command=['binary', self._binary_target], config={
        GLOBAL_SCOPE_CONFIG_SECTION: {
          'pants_distdir': tmp_dir,
        },
        'native-build-step': {
          'toolchain_variant': toolchain_variant,
        },
      })

      self.assert_success(pants_run)

      # Check that we have selected the appropriate compilers for our selected toolchain variant,
      # for both C and C++ compilation.
      # TODO(#6866): don't parse info logs for testing!
      for compiler_name in self._compiler_names_for_variant[toolchain_variant]:
        self.assertIn("selected compiler exe name: '{}'".format(compiler_name),
                      pants_run.stdout_data)

      for linker_name in self._linker_names_for_variant[toolchain_variant]:
        self.assertIn("selected linker exe name: '{}'".format(linker_name),
                      pants_run.stdout_data)

      # Check for the pex and for the wheel produced for our python_dist().
      pex = os.path.join(tmp_dir, 'bin.pex')
      self.assertTrue(is_executable(pex))

      # The + is because we append the target's fingerprint to the version. We test this version
      # string in test_build_local_python_distributions.py.
      wheel_glob = os.path.join(tmp_dir, 'ctypes_test-0.0.1+*.whl')
      wheel_dist_with_path = assert_single_element(glob.glob(wheel_glob))
      wheel_dist = re.sub('^{}{}'.format(re.escape(tmp_dir), os.path.sep), '', wheel_dist_with_path)

      dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist)
      self.assertEqual(dist_name, 'ctypes_test')
      contains_current_platform = Platform.create().resolve_platform_specific({
        'darwin': lambda: wheel_platform.startswith('macosx'),
        'linux': lambda: wheel_platform.startswith('linux'),
      })
      self.assertTrue(contains_current_platform)

      # Verify that the wheel contains our shared libraries.
      wheel_files = ZipFile(wheel_dist_with_path).namelist()

      dist_versioned_name = '{}-{}.data'.format(dist_name, dist_version)
      for shared_lib_filename in ['libasdf-c_ctypes.so', 'libasdf-cpp_ctypes.so']:
        full_path_in_wheel = os.path.join(dist_versioned_name, 'data', shared_lib_filename)
        self.assertIn(full_path_in_wheel, wheel_files)

      # Execute the binary and ensure its output is correct.
      binary_run_output = invoke_pex_for_output(pex)
      self.assertEqual(b'x=3, f(x)=17\n', binary_run_output)
Example #26
 def test_backup_logging_on_fatal_error(self):
     sink = self._gen_sink_subclass()
     with self.captured_logging(level=logging.ERROR) as captured:
         with temporary_dir() as tmpdir:
             exc_log_path = os.path.join(tmpdir, 'logs', 'exceptions.log')
             touch(exc_log_path)
             # Make the exception log file unreadable.
             os.chmod(exc_log_path, 0)
             sink.set_destination(tmpdir)
             sink.log_exception('XXX')
     single_error_logged = str(assert_single_element(captured.errors()))
     expected_rx_str = (re.escape(
         "pants.base.exception_sink: Problem logging original exception: [Errno 13] Permission denied: '"
     ) + '.*' + re.escape("/logs/exceptions.log'"))
     self.assertRegexpMatches(single_error_logged, expected_rx_str)
Example #27
  def _assert_dist_and_wheel_identity(self, expected_name, expected_version, expected_platform,
                                      dist_target, **kwargs):
    context, synthetic_target, fingerprint_suffix = self._create_distribution_synthetic_target(
      dist_target, **kwargs)
    resulting_dist_req = assert_single_element(synthetic_target.requirements.value)
    expected_snapshot_version = '{}+{}'.format(expected_version, fingerprint_suffix)
    self.assertEquals(
      '{}=={}'.format(expected_name, expected_snapshot_version),
      str(resulting_dist_req.requirement))

    local_wheel_products = context.products.get('local_wheels')
    local_wheel = self.retrieve_single_product_at_target_base(local_wheel_products, dist_target)
    dist, version, platform = name_and_platform(local_wheel)
    self.assertEquals(dist, expected_name)
    self.assertEquals(version, expected_snapshot_version)
    self.assertEquals(platform, expected_platform)
Example #28
 def test_pants_binary(self):
   with temporary_dir() as tmp_dir:
     pex = os.path.join(tmp_dir, 'main.pex')
     command=[
       '--pants-distdir={}'.format(tmp_dir), 'binary', '{}:main'.format(self.fasthello_project)]
     pants_run = self.run_pants(command=command)
     self.assert_success(pants_run)
     # Check that the pex was built.
     self.assertTrue(os.path.isfile(pex))
     # Check that the pex runs.
     output = subprocess.check_output(pex).decode('utf-8')
     self._assert_native_greeting(output)
     # Check that we have exactly one wheel output.
     single_wheel_output = assert_single_element(glob.glob(os.path.join(tmp_dir, '*.whl')))
     self.assertRegexpMatches(os.path.basename(single_wheel_output),
                              r'\A{}'.format(re.escape('fasthello-1.0.0+')))
Example #29
    def test_has_all_imported_req_libs(self):
        def assert_dep(reqA, reqB):
            self.assertEqual(reqA.requirement, reqB.requirement)

        def sort_requirements(reqs):
            return list(sorted(reqs, key=lambda r: str(r.requirement)))

        self.add_to_build_file(
            "BUILD",
            dedent("""
                python_requirement_library(name='lib1',
                  requirements=[
                    python_requirement('testName1==123'),
                  ],
                )
                python_requirement_library(name='lib2',
                  requirements=[
                    python_requirement('testName2==456'),
                    python_requirement('testName3==789'),
                  ],
                )
                unpacked_whls(name='unpacked-lib',
                  libraries=[':lib1', ':lib2'],
                  module_name='foo',
                )
                """),
        )
        lib1 = self.target("//:lib1")
        self.assertIsInstance(lib1, PythonRequirementLibrary)
        assert_dep(assert_single_element(lib1.requirements),
                   PythonRequirement("testName1==123"))

        lib2 = self.target("//:lib2")
        self.assertIsInstance(lib2, PythonRequirementLibrary)
        lib2_reqs = sort_requirements(lib2.requirements)
        self.assertEqual(2, len(lib2_reqs))
        assert_dep(lib2_reqs[0], PythonRequirement("testName2==456"))
        assert_dep(lib2_reqs[1], PythonRequirement("testName3==789"))

        unpacked_lib = self.target("//:unpacked-lib")
        unpacked_req_libs = sort_requirements(
            unpacked_lib.all_imported_requirements)

        self.assertEqual(3, len(unpacked_req_libs))
        assert_dep(unpacked_req_libs[0], PythonRequirement("testName1==123"))
        assert_dep(unpacked_req_libs[1], PythonRequirement("testName2==456"))
        assert_dep(unpacked_req_libs[2], PythonRequirement("testName3==789"))
Example #30
  def address_target_pairs_from_address_families(self, address_families):
    """Return the pair for the single target matching the single AddressFamily, or error.

    :raises: :class:`SingleAddress._SingleAddressResolutionError` if no targets could be found for a
             :class:`SingleAddress` instance.
    :return: list of (Address, Target) pairs with exactly one element.
    """
    single_af = assert_single_element(address_families)
    addr_tgt_pairs = [
      (addr, tgt) for addr, tgt in single_af.addressables.items()
      if addr.target_name == self.name
    ]
    if len(addr_tgt_pairs) == 0:
      raise self._SingleAddressResolutionError(single_af, self.name)
    # There will be at most one target with a given name in a single AddressFamily.
    assert len(addr_tgt_pairs) == 1
    return addr_tgt_pairs
Example #31
  def test_select_argv(self):
    """Test invoking binary_util.py as a standalone script."""
    with temporary_dir() as tmp_dir:
      config_file_loc = os.path.join(tmp_dir, 'pants.ini')
      safe_file_dump(config_file_loc, payload="""\
[GLOBAL]
allow_external_binary_tool_downloads: True
pants_bootstrapdir: {}
""".format(tmp_dir))
      expected_output_glob = os.path.join(
        tmp_dir, 'bin', 'cmake', '*', '*', '3.9.5', 'cmake')
      with environment_as(PANTS_CONFIG_FILES='[{!r}]'.format(config_file_loc)):
        # Ignore the first argument, as per sys.argv.
        output_file = select(['_', 'cmake', '3.9.5', 'cmake.tar.gz'])
      self.assertTrue(is_readable_dir(output_file))
      realized_glob = assert_single_element(glob.glob(expected_output_glob))
      self.assertEqual(os.path.realpath(output_file), os.path.realpath(realized_glob))
Example #32
    def _test_zinc_reports_diagnostic_counts(self, reporting):
        with self.temporary_workdir() as workdir:
            target = "testprojects/src/scala/org/pantsbuild/testproject/compilation_warnings/unused_import_warning:unused_import"
            with self.temporary_cachedir() as cachedir:
                args = ['--compile-rsc-args=+["-S-Ywarn-unused:_"]'
                        ] + (["--compile-rsc-report-diagnostic-counts"]
                             if reporting else [])
                pants_run = self.run_test_compile(
                    workdir,
                    cachedir,
                    target,
                    extra_args=args,
                )
                self.assert_success(pants_run)

            expected_strings = [
                f"Reporting number of diagnostics for: {target}",
                "Error: 0",
                "Warning: 1",
                "Information: 0",
                "Hint: 0",
            ]

            for expected in expected_strings:
                if reporting:
                    self.assertIn(expected, pants_run.stdout_data)
                else:
                    self.assertNotIn(expected, pants_run.stdout_data)
            run_info_path = os.path.join(workdir, "run-tracker", "latest",
                                         "info")
            with open(run_info_path, "r") as run_info:

                def is_target_data_line(line):
                    return line.startswith("target_data: ")

                target_data_line = assert_single_element(
                    filter(is_target_data_line, run_info))
                expected_target_data = (
                    "'diagnostic_counts': {'Error': 0, 'Warning': 1, 'Information': 0, 'Hint': 0}"
                )
                if reporting:
                    self.assertIn(expected_target_data, target_data_line)
                else:
                    self.assertNotIn(expected_target_data, target_data_line)
Example #33
  def test_has_all_imported_req_libs(self):
    def assert_dep(reqA, reqB):
      self.assertEqual(reqA.requirement, reqB.requirement)

    def sort_requirements(reqs):
      return list(sorted(reqs, key=lambda r: str(r.requirement)))

    self.add_to_build_file('BUILD', dedent('''
    python_requirement_library(name='lib1',
      requirements=[
        python_requirement('testName1==123'),
      ],
    )
    python_requirement_library(name='lib2',
      requirements=[
        python_requirement('testName2==456'),
        python_requirement('testName3==789'),
      ],
    )
    unpacked_whls(name='unpacked-lib',
      libraries=[':lib1', ':lib2'],
      module_name='foo',
    )
    '''))
    lib1 = self.target('//:lib1')
    self.assertIsInstance(lib1, PythonRequirementLibrary)
    assert_dep(assert_single_element(lib1.requirements),
               PythonRequirement('testName1==123'))

    lib2 = self.target('//:lib2')
    self.assertIsInstance(lib2, PythonRequirementLibrary)
    lib2_reqs = sort_requirements(lib2.requirements)
    self.assertEqual(2, len(lib2_reqs))
    assert_dep(lib2_reqs[0], PythonRequirement('testName2==456'))
    assert_dep(lib2_reqs[1], PythonRequirement('testName3==789'))

    unpacked_lib = self.target('//:unpacked-lib')
    unpacked_req_libs = sort_requirements(unpacked_lib.all_imported_requirements)

    self.assertEqual(3, len(unpacked_req_libs))
    assert_dep(unpacked_req_libs[0], PythonRequirement('testName1==123'))
    assert_dep(unpacked_req_libs[1], PythonRequirement('testName2==456'))
    assert_dep(unpacked_req_libs[2], PythonRequirement('testName3==789'))
Example #34
    def test_generate_ipex_tensorflow(self):
        with temporary_dir() as tmp_distdir:
            with self.pants_results([
                    f"--pants-distdir={tmp_distdir}",
                    # tensorflow==1.14.0 has a setuptools>=41.0.0 requirement, so the .ipex resolve fails
                    # without this override.
                    f"--pex-builder-wrapper-setuptools-version=41.0.0",
                    "--binary-py-generate-ipex",
                    "binary",
                    "examples/src/python/example/tensorflow_custom_op:show-tf-version",
            ]) as pants_run:
                self.assert_success(pants_run)
                output_ipex = assert_single_element(
                    glob.glob(os.path.join(tmp_distdir, "*")))
                ipex_basename = os.path.basename(output_ipex)
                self.assertEqual(ipex_basename, "show-tf-version.ipex")

                pex_execution_output = subprocess.check_output([output_ipex])
                assert "tf version: 1.14.0" in pex_execution_output.decode()
Example #35
 def test_pydist_binary(self):
   with temporary_dir() as tmp_dir:
     pex = os.path.join(tmp_dir, 'main_with_no_conflict.pex')
     command = [
       '--pants-distdir={}'.format(tmp_dir),
       'binary',
       '{}:main_with_no_conflict'.format(self.hello_install_requires_dir),
     ]
     pants_run = self.run_pants(command=command)
     self.assert_success(pants_run)
     # Check that the pex was built.
     self.assertTrue(os.path.isfile(pex))
     # Check that the pex runs.
     output = subprocess.check_output(pex).decode('utf-8')
     self._assert_nation_and_greeting(output)
     # Check that we have exactly one wheel output.
     single_wheel_output = assert_single_element(glob.glob(os.path.join(tmp_dir, '*.whl')))
     assertRegex(self, os.path.basename(single_wheel_output),
                 r'\A{}'.format(re.escape('hello_with_install_requires-1.0.0+')))
Example #36
  def _get_dist_snapshot_version(self, task, python_dist_target):
    """Get the target's fingerprint, and guess the resulting version string of the built dist.

    Local python_dist() builds are tagged with the versioned target's fingerprint using the
    --tag-build option in the egg_info command. This fingerprint string is slightly modified by
    distutils to ensure a valid version string, and this method finds what that modified version
    string is so we can verify that the produced local dist is being tagged with the correct
    snapshot version.

    The argument we pass to that option begins with a +, which is unchanged. See
    https://www.python.org/dev/peps/pep-0440/ for further information.
    """
    with task.invalidated([python_dist_target], invalidate_dependents=True) as invalidation_check:
      versioned_dist_target = assert_single_element(invalidation_check.all_vts)

    versioned_target_fingerprint = versioned_dist_target.cache_key.hash

    # This performs the normalization that distutils performs to the version string passed to the
    # --tag-build option.
    return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower())
Example #37
  def assert_single_path_by_glob(self, components):
    """Assert that the path components (which are joined into a glob) match exactly one path.

    The matched path may be a file or a directory. This method is used to avoid having to guess
    platform-specific intermediate directory names, e.g. 'x86_64-linux-gnu' or
    'x86_64-apple-darwin17.5.0'.
    """
    glob_path_string = os.path.join(*components)
    expanded_glob = glob.glob(glob_path_string)

    try:
      return assert_single_element(expanded_glob)
    except StopIteration as e:
      raise self.ArchiveFileMappingError(
        "No elements for glob '{}' -- expected exactly one."
        .format(glob_path_string),
        e)
    except ValueError as e:
      raise self.ArchiveFileMappingError(
        "Should have exactly one path matching expansion of glob '{}'."
        .format(glob_path_string),
        e)
Example #38
  def extract_single_dist_for_current_platform(self, reqs, dist_key):
    """Resolve a specific distribution from a set of requirements matching the current platform.

    :param list reqs: A list of :class:`PythonRequirement` to resolve.
    :param str dist_key: The value of `distribution.key` to match for a `distribution` from the
                         resolved requirements.
    :return: The single :class:`pkg_resources.Distribution` matching `dist_key`.
    :raises: :class:`self.SingleDistExtractionError` if no dists or multiple dists matched the given
             `dist_key`.
    """
    distributions = self._resolve_distributions_by_platform(reqs, platforms=['current'])
    try:
      matched_dist = assert_single_element(list(
        dist
        for _, dists in distributions.items()
        for dist in dists
        if dist.key == dist_key
      ))
    except (StopIteration, ValueError) as e:
      raise self.SingleDistExtractionError(
        "Exactly one dist was expected to match name {} in requirements {}: {}"
        .format(dist_key, reqs, e))
    return matched_dist
Example #39
  def _assert_unpacking(self, module_name):
    # TODO: figure out how to generate a nice fake wheel that the pex resolve will accept instead of
    # depending on a real wheel!
    pex_requirement = PythonRequirement('pex==1.5.3')
    unpacked_wheel_tgt = self._make_unpacked_wheel(
      pex_requirement,
      include_patterns=['pex/pex.py', 'pex/__init__.py'],
      module_name=module_name,
      # TODO: `within_data_subdir` is only tested implicitly by the tensorflow_custom_op target
      # in examples/! Make a fake wheel, resolve it, and test that `within_data_subdir`
      # descends into the correct directory!
      within_data_subdir=None)
    context = self.context(target_roots=[unpacked_wheel_tgt])
    unpack_task = self.create_task(context)
    unpack_task.execute()

    expected_files = {'pex/__init__.py', 'pex/pex.py'}

    with unpack_task.invalidated([unpacked_wheel_tgt]) as invalidation_check:
      vt = assert_single_element(invalidation_check.all_vts)
      self.assertEqual(vt.target, unpacked_wheel_tgt)
      archives = context.products.get_data(UnpackedArchives, dict)[vt.target]
      self.assertEqual(expected_files, set(archives.found_files))
Example #40
  def test_strip_prefix(self):
    # Set up files:

    relevant_files = (
      'characters/dark_tower/roland',
      'characters/dark_tower/susannah',
    )
    all_files = (
      'books/dark_tower/gunslinger',
      'characters/altered_carbon/kovacs',
    ) + relevant_files + (
      'index',
    )

    with temporary_dir() as temp_dir:
      safe_file_dump(os.path.join(temp_dir, 'index'), 'books\ncharacters\n')
      safe_file_dump(
        os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
        "Envoy",
        makedirs=True,
      )

      tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
      safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
      safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

      safe_file_dump(
        os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
        "1982",
        makedirs=True,
      )

      snapshot, snapshot_with_extra_files = self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("characters/dark_tower/*",)), text_type(temp_dir)),
        PathGlobsAndRoot(PathGlobs(("**",)), text_type(temp_dir)),
      ))
      # Check that we got the full snapshots that we expect
      self.assertEquals(snapshot.files, relevant_files)
      self.assertEquals(snapshot_with_extra_files.files, all_files)

      # Strip empty prefix:
      zero_prefix_stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, text_type(""))],
      ))
      self.assertEquals(snapshot.directory_digest, zero_prefix_stripped_digest)

      # Strip a non-empty prefix shared by all files:
      stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, text_type("characters/dark_tower"))],
      ))
      self.assertEquals(
        stripped_digest,
        Digest(
          fingerprint='71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c',
          serialized_bytes_length=162,
        )
      )
      expected_snapshot = assert_single_element(self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("*",)), text_type(tower_dir)),
      )))
      self.assertEquals(expected_snapshot.files, ('roland', 'susannah'))
      self.assertEquals(stripped_digest, expected_snapshot.directory_digest)

      # Try to strip a prefix which isn't shared by all files:
      with self.assertRaisesWithMessageContaining(Exception, "Cannot strip prefix characters/dark_tower from root directory Digest(Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>, 243) - root directory contained non-matching directory named: books and file named: index"):
        self.scheduler.product_request(
          Digest,
          [DirectoryWithPrefixToStrip(snapshot_with_extra_files.directory_digest, text_type("characters/dark_tower"))]
        )
Example #41
  def test_ctypes_binary_creation(self, toolchain_variant):
    """Create a python_binary() with all native toolchain variants, and test the result."""
    with temporary_dir() as tmp_dir:
      pants_run = self.run_pants(command=['binary', self._binary_target], config={
        GLOBAL_SCOPE_CONFIG_SECTION: {
          'pants_distdir': tmp_dir,
        },
        'native-build-step': {
          'toolchain_variant': toolchain_variant.value,
        },
      })

      self.assert_success(pants_run)

      # Check that we have selected the appropriate compilers for our selected toolchain variant,
      # for both C and C++ compilation.
      # TODO(#6866): don't parse info logs for testing! There is a TODO in test_cpp_compile.py
      # in the native backend testing to traverse the PATH to find the selected compiler.
      compiler_names_to_check = toolchain_variant.resolve_for_enum_variant({
        'gnu': ['gcc', 'g++'],
        'llvm': ['clang', 'clang++'],
      })
      for compiler_name in compiler_names_to_check:
        self.assertIn("selected compiler exe name: '{}'".format(compiler_name),
                      pants_run.stdout_data)

      # All of our toolchains currently use the C++ compiler's filename as argv[0] for the linker,
      # so there is only one name to check.
      linker_names_to_check = toolchain_variant.resolve_for_enum_variant({
        'gnu': ['g++'],
        'llvm': ['clang++'],
      })
      for linker_name in linker_names_to_check:
        self.assertIn("selected linker exe name: '{}'".format(linker_name),
                      pants_run.stdout_data)

      # Check for the pex and for the wheel produced for our python_dist().
      pex = os.path.join(tmp_dir, 'bin.pex')
      self.assertTrue(is_executable(pex))

      # The + is because we append the target's fingerprint to the version. We test this version
      # string in test_build_local_python_distributions.py.
      wheel_glob = os.path.join(tmp_dir, 'ctypes_test-0.0.1+*.whl')
      wheel_dist_with_path = assert_single_element(glob.glob(wheel_glob))
      wheel_dist = re.sub('^{}{}'.format(re.escape(tmp_dir), os.path.sep), '', wheel_dist_with_path)

      dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist)
      self.assertEqual(dist_name, 'ctypes_test')
      contains_current_platform = Platform.current.resolve_for_enum_variant({
        'darwin': wheel_platform.startswith('macosx'),
        'linux': wheel_platform.startswith('linux'),
      })
      self.assertTrue(contains_current_platform)

      # Verify that the wheel contains our shared libraries.
      wheel_files = ZipFile(wheel_dist_with_path).namelist()

      dist_versioned_name = '{}-{}.data'.format(dist_name, dist_version)
      for shared_lib_filename in ['libasdf-c_ctypes.so', 'libasdf-cpp_ctypes.so']:
        full_path_in_wheel = os.path.join(dist_versioned_name, 'data', shared_lib_filename)
        self.assertIn(full_path_in_wheel, wheel_files)

      # Execute the binary and ensure its output is correct.
      binary_run_output = invoke_pex_for_output(pex)
      self.assertEqual(b'x=3, f(x)=17\n', binary_run_output)
Example #42
 def test_lift_directory_digest_to_snapshot(self):
   digest = self.prime_store_with_roland_digest()
   snapshot = assert_single_element(self.scheduler.product_request(Snapshot, [digest]))
   self.assertEquals(snapshot.files, ("roland",))
   self.assertEquals(snapshot.directory_digest, digest)
Example #43
 def assertWarning(self, w, category, warning_text):
   single_warning = assert_single_element(w)
   self.assertEqual(single_warning.category, category)
   warning_message = single_warning.message
   self.assertEqual(warning_text, text_type(warning_message))
Example #44
 def retrieve_single_product_at_target_base(self, product_mapping, target):
   mapping_for_target = product_mapping.get(target)
   single_base_dir = assert_single_element(list(mapping_for_target.keys()))
   single_product = assert_single_element(mapping_for_target[single_base_dir])
   return single_product