class Test_Security:
    """
    This test class contains security checks on the configuration data, based on our Security Policy.
    """
    @staticmethod
    @pytest.mark.parametrize(
        "config_path, pool_members",
        iterutils.research(tenants, lambda p, k, v: k == "serverAddresses"),
    )
    def test_pool_must_use_private_addresses(config_path, pool_members):
        """
        The Security Policy requires pool members to use private addresses (no public IPs).
        """
        print(f"currently processing: {config_path}, members: ", end="")
        for member in pool_members:
            print(f"member:{member} ", end="")
            assert IPv4Address(member).is_private  # member IP address must be private

    @staticmethod
    @pytest.mark.parametrize(
        "config_path, servicePort",
        iterutils.research(tenants, lambda p, k, v: k == "servicePort"),
    )
    def test_pool_allow_specific_ports(config_path,
                                       servicePort,
                                       allowed_ports=(80, 81, 8000, 8080)):
        """
        Pool members must use a port in the allowed ports list
        """
        print(
            f"currently processing: {config_path}, servicePort: {servicePort}",
            end=" ")
        assert isinstance(servicePort, int)  # servicePort must be an integer
        assert servicePort in allowed_ports  # servicePort must be listed in allowed_ports

    @staticmethod
    @pytest.mark.parametrize(
        "config_path, virtualPort",
        iterutils.research(tenants, lambda p, k, v: k == "virtualPort"),
    )
    def test_disallowed_service_ports(
        config_path,
        virtualPort,
        disallowed_ports=(
            0,  # no wildcard listeners
            6667,  # disallowed port
        )):
        """
        Services must not listen on the disallowed_ports
        """
        print(
            f"currently processing: {config_path}, virtualPort: {virtualPort}",
            end=" ")
        if virtualPort is not None:
            assert isinstance(virtualPort, int)  # virtualPort must be an integer
            assert virtualPort not in disallowed_ports  # virtualPort must not be in disallowed_ports
Example #2
def get_config_t(conf: Dict) -> Type:
    """
    First create the transformation dictionary, then read the config
    dictionary and create the config type.
    """
    # construct a dictionary mapping paths ('k1', ..., 'kn') to transformation names
    transname_dict = dict(research(conf, query=lambda p, k, v: k == _type_spec[9]))

    # remove last key -> 'transformation' and lookup the transformation with correct name
    trans_dict = {
        _path_to_glom_spec(k[:-1]): lookup_trans(v)
        for k, v in transname_dict.items()
    }

    paths = _nodes(conf)
    leaves = _leaves(paths)

    # create a copy of the dictionary, and recursively update the leaf nodes
    _conf = reduce(_update_inplace(_str_to_spec), leaves, deepcopy(conf))

    # walk up the tree, and process the "new" leaf nodes.  using a set takes
    # care of duplicates.
    branches: Set[_Path_t] = _leaves(paths - leaves)
    while branches:
        _conf = reduce(_update_inplace(_nested_type), branches, _conf)
        branches = {path[:-1] for path in branches if path[:-1]}
    config_t = _spec_to_type("config", _conf, bases=(_ConfigIO, ))

    config_t.trans_dict = trans_dict

    return config_t
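The bottom-up pass near the end is the subtle part: each iteration collapses the current set of branch paths by trimming their last key, until the root is reached. A minimal sketch with toy paths (independent of the real _update_inplace/_nested_type helpers) shows how the set shrinks:

# toy branch paths, analogous to _leaves(paths - leaves)
branches = {("foo", "bar", "baz"), ("foo", "qux")}
while branches:
    print(sorted(branches))  # paths processed on this pass
    branches = {path[:-1] for path in branches if path[:-1]}
# pass 1: [('foo', 'bar', 'baz'), ('foo', 'qux')]
# pass 2: [('foo',), ('foo', 'bar')]
# pass 3: [('foo',)]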
Example #3
def make_paths_relative(doc: Dict,
                        base_directory: PurePath,
                        allow_paths_outside_base=False):
    """
    Find all pathlib.Path values in a document structure and make them relative to the given path.

    >>> from copy import deepcopy
    >>> base = PurePath('/tmp/basket')
    >>> doc = {'id': 1, 'fruits': [{'apple': PurePath('/tmp/basket/fruits/apple.txt')}]}
    >>> make_paths_relative(doc, base)
    >>> doc
    {'id': 1, 'fruits': [{'apple': 'fruits/apple.txt'}]}
    >>> # No change if repeated. (relative paths still relative)
    >>> previous = deepcopy(doc)
    >>> make_paths_relative(doc, base)
    >>> doc == previous
    True
    >>> # Relative pathlibs also become relative strings for consistency.
    >>> doc = {'villains': PurePath('the-baron.txt')}
    >>> make_paths_relative(doc, base)
    >>> doc
    {'villains': 'the-baron.txt'}
    """
    for doc_path, value in iterutils.research(
            doc, lambda p, k, v: isinstance(v, PurePath)):
        value: PurePath
        value = relative_path(
            value,
            base_directory,
            allow_paths_outside_base=allow_paths_outside_base)
        docpath_set(doc, doc_path, value.as_posix())
Example #4
def permute_nested_values(dicts: "List[dict]", gen_val: "Callable[[int], Any]"):
    """
    This function permutes the values of a nested mapping, for testing that our merge
    methods work regardless of the value types.

    Assumes the initial dictionaries had integers for values.
    """
    dicts = deepcopy(dicts)
    initial_values = [
        x[1] for x in research(dicts, query=lambda p, k, v: isinstance(v, int))
    ]
    mapping = {k: gen_val(k) for k in initial_values}
    return [remap(d, exit=partial(map_values, mapping)) for d in dicts]
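A minimal usage sketch, assuming map_values swaps each integer leaf for mapping[value] inside remap's exit hook (an assumption about that local helper; the input dicts below are hypothetical):

dicts = [{"a": 1, "nested": {"b": 2}}, {"c": 3}]
permuted = permute_nested_values(dicts, gen_val=lambda n: f"value-{n}")
# expected under the assumption above:
# [{'a': 'value-1', 'nested': {'b': 'value-2'}}, {'c': 'value-3'}]
# the original `dicts` list is left untouched thanks to the deepcopy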
Example #5
def _nodes(conf: Dict) -> Set[_Path_t]:
    """Filter the list of paths for nodes

    Parameters
    ----------
    conf : Dict
        Config dictionary

    Returns
    -------
    Set[_Path_t]
        Set of paths to nodes

    """
    return {path for path, _ in research(conf, query=_is_node)}
Example #6
def validate_item(item: Dict):
    validate_document(item,
                      _ITEM_SCHEMA,
                      schema_folder=_ITEM_SCHEMA_PATH.parent)

    # Should be a valid polygon
    assert "geometry" in item, "Item has no geometry"
    assert item["geometry"], "Item has blank geometry"
    with DebugContext(f"Failing shape:\n{pformat(item['geometry'])}"):
        shape = shapely_shape(item["geometry"])
        assert shape.is_valid, f"Item has invalid geometry: {explain_validity(shape)}"
        assert shape.geom_type in (
            "Polygon",
            "MultiPolygon",
        ), "Unexpected type of shape"

    # href should never be blank if present
    # -> The jsonschema enforces href as required, but it's not checking for emptiness.
    #    (and we've had empty ones in previous prototypes)
    for offset, value in research(item, lambda p, k, v: k == "href"):
        viewable_offset = "→".join(map(repr, offset))
        assert value.strip(), f"href has empty value: {repr(viewable_offset)}"
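The href check at the end can be exercised in isolation: research surfaces every "href" key regardless of nesting depth, so a blank value anywhere in the document trips the assertion. A small sketch against a hypothetical document:

from boltons.iterutils import research

# hypothetical document with one valid and one blank href
item = {"assets": {"thumbnail": {"href": "thumb.jpg"},
                   "data": {"href": "   "}}}

for offset, value in research(item, lambda p, k, v: k == "href"):
    viewable_offset = "→".join(map(repr, offset))
    print(viewable_offset, "ok" if value.strip() else "EMPTY")
# prints one line per href; the blank one under 'data' is flagged EMPTY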
Example #7
def make_paths_relative(
    doc: Dict, base_directory: PurePath, allow_paths_outside_base=False
):
    """
    Find all pathlib.Path values in a document structure and make them relative to the given path.

    >>> from copy import deepcopy
    >>> base = PurePath('/tmp/basket')
    >>> doc = {'id': 1, 'fruits': [{'apple': PurePath('/tmp/basket/fruits/apple.txt')}]}
    >>> make_paths_relative(doc, base)
    >>> doc
    {'id': 1, 'fruits': [{'apple': 'fruits/apple.txt'}]}
    >>> # No change if repeated. (relative paths still relative)
    >>> previous = deepcopy(doc)
    >>> make_paths_relative(doc, base)
    >>> doc == previous
    True
    >>> # Relative pathlibs also become relative strings for consistency.
    >>> doc = {'villains': PurePath('the-baron.txt')}
    >>> make_paths_relative(doc, base)
    >>> doc
    {'villains': 'the-baron.txt'}
    """
    for doc_path, value in iterutils.research(
        doc, lambda p, k, v: isinstance(v, PurePath)
    ):
        value: Path

        if value.is_absolute():
            if base_directory not in value.parents:
                if not allow_paths_outside_base:
                    raise ValueError(
                        f"Path {value.as_posix()!r} is outside path {base_directory.as_posix()!r} "
                        f"(allow_paths_outside_base={allow_paths_outside_base})"
                    )
                continue
            value = value.relative_to(base_directory)

        docpath_set(doc, doc_path, str(value))
Example #8
class Test_RequiredPractices:
    """
    These tests make sure the configuration follows required practices.
    Required practices typically capture good/best practices so that the
    configuration functions as well as possible.
    """
    @staticmethod
    @pytest.mark.parametrize("config_path, _",
                             iterutils.research(tenants,
                                                lambda p, k, v: v == "Pool"))
    def test_pools_have_monitors(config_path, _):
        """
        Require monitor configuration for each service
        """
        # remove 'class' from config_path, which is always the last element
        config_path = config_path[0:-1]
        monitors = iterutils.get_path(tenants, config_path).get("monitors")

        print(f"currently processing: {config_path}, monitors: {monitors}",
              end=" ")
        assert monitors  # not empty and not None
        for monitor in monitors:
            assert monitor  # make sure we don't have an empty entry in the list of monitors either!
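Because the matched path ends with the "class" key itself, trimming the last element and calling iterutils.get_path (also from boltons) returns the enclosing pool object. A minimal sketch with a hypothetical tenants dictionary:

from boltons.iterutils import get_path, research

# hypothetical stand-in for the real `tenants` configuration
tenants = {"TenantA": {"app": {"web_pool": {"class": "Pool",
                                            "monitors": ["http"]}}}}

for config_path, _ in research(tenants, lambda p, k, v: v == "Pool"):
    pool = get_path(tenants, config_path[:-1])  # drop the trailing 'class' key
    print(pool.get("monitors"))
# ['http']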
Example #9
def test_research():
    root = {}

    with pytest.raises(TypeError):
        research(root, query=None)

    root = {'a': 'a'}
    res = research(root, query=lambda p, k, v: v == 'a')
    assert len(res) == 1
    assert res[0] == (('a', ), 'a')

    def broken_query(p, k, v):
        raise RuntimeError()

    with pytest.raises(RuntimeError):
        research(root, broken_query, reraise=True)

    # empty results with default, reraise=False
    assert research(root, broken_query) == []
Example #11
def serve(
    known,
    nginx_command,
    parsed_config,
    parsed_config_str,
    parsed_config_http,
    parsed_config_http_str,
):
    if not os.path.isdir(known.temp_dir):
        os.mkdir(known.temp_dir)
    logger.debug("temp_dir:\t{!r}".format(known.temp_dir))
    _config_files = "nginx.conf", "mime.types"
    nginx_conf_join = partial(
        os.path.join,
        os.path.join(
            os.path.dirname(
                os.path.join(
                    resource_filename(PythonPackageInfo().get_app_name(),
                                      "__init__.py"))),
            "_config",
        ),
    )
    config_files = tuple(map(nginx_conf_join, _config_files))
    deque(map(partial(copy, dst=known.temp_dir), config_files), maxlen=0)
    sites_available = os.path.join(known.temp_dir, "sites-available")
    if not os.path.isdir(sites_available):
        os.mkdir(sites_available)
    server_conf = os.path.join(sites_available, "server.conf")
    if parsed_config is not None:
        pp(parsed_config)
        # the query prints each matching directive as a side effect; `print`
        # returns None, so nothing matches and `research` returns an empty list
        pp(
            research(
                parsed_config,
                query=lambda p, k, v: is_directive(v) and print(
                    "k:", k, ";\nv:", v, ";"),
            ))
    # pp(research(parsed_config, query=lambda p, k, v: is_directive(v)))
    with open(server_conf, "wt") as f:
        if parsed_config_http_str is not None:
            f.write(parsed_config_http_str)
        if parsed_config_str is not None:
            f.write(parsed_config_str)
    # Include this config in the new nginx.conf
    nginx_conf = os.path.join(known.temp_dir, _config_files[0])
    nginx_conf_parsed = crossplane.parse(nginx_conf,
                                         catch_errors=False,
                                         comments=False)
    nginx_conf_parse = next(
        config for config in nginx_conf_parsed["config"]
        if os.path.basename(config["file"]) == "nginx.conf")

    line = count(nginx_conf_parse["parsed"][-1]["block"][-1]["line"])
    del nginx_conf_parse["parsed"][-1]["block"][-1]
    nginx_conf_parse["parsed"].insert(1, {
        "args": ["off"],
        "directive": "daemon"
    })
    nginx_conf_parse["parsed"][-1]["block"] += [
        {
            "args": ["stderr", "warn"],
            "directive": "error_log",
            "line": next(line)
        },
        {
            "args": ["/dev/stdout"],
            "directive": "access_log",
            "line": next(line)
        },
        {
            "args": [os.path.join(sites_available, "*.conf")],
            "directive": "include",
            "includes": [2],
            "line": next(line),
        },
    ]
    config_str = crossplane.build(nginx_conf_parse["parsed"])
    os.remove(nginx_conf)
    with open(nginx_conf, "wt") as f:
        f.write(config_str + os.linesep)
    # logger.error
    print("nginx is running. Stop with: {}".format(" ".join(
        (known.nginx, "-c", nginx_conf, "-s", "stop"))))
    Popen([known.nginx, "-c", nginx_conf] + nginx_command)
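For orientation, the parse → modify → build round trip that serve() performs with crossplane boils down to a few calls. A hedged sketch, assuming an existing nginx.conf at a hypothetical path (and, as in the function above, relying on crossplane.build accepting directive dicts without a "line" key):

import crossplane

payload = crossplane.parse("/etc/nginx/nginx.conf", comments=False)  # hypothetical path
parsed = payload["config"][0]["parsed"]  # list of directive dicts for that file

# prepend a directive, then render the tree back into nginx syntax
parsed.insert(0, {"directive": "daemon", "args": ["off"]})
print(crossplane.build(parsed))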