def test_hr_good_1():
    yaml = """
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-1
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc1
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-2
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc2
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    logger.info("R1 IR: %s", r1["ir"].as_json())

    require_no_errors(r1["ir"])
    require_no_errors(r2["ir"])
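# The require_no_errors()/require_errors() helpers used throughout these tests
# are defined elsewhere in the module. As a rough, commented-out sketch of the
# contract they enforce (the aconf.errors attribute and the 'error' field are
# assumptions, not shown in this section):
#
#     def require_no_errors(ir: IR):
#         assert ir.aconf.errors == {}
#
#     def require_errors(ir: IR, errors: List[Tuple[str, str]]):
#         assert ir.aconf.errors
#
#         flattened = [
#             f"{key}: {err['error']}"
#             for key, errs in ir.aconf.errors.items()
#             for err in errs
#         ]
#         wanted = [f"{key}: {error}" for key, error in errors]
#
#         assert sorted(flattened) == sorted(wanted)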
def test_invalid_forward_client_cert_details():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    forward_client_cert_details: SANITIZE_INVALID
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    require_errors(r1["ir"], [(
        "ambassador.default.1",
        "'forward_client_cert_details' may not be set to 'SANITIZE_INVALID'; it may only be set to one of: SANITIZE, FORWARD_ONLY, APPEND_FORWARD, SANITIZE_SET, ALWAYS_FORWARD_ONLY"
    )])
    require_errors(r2["ir"], [(
        "ambassador.default.1",
        "'forward_client_cert_details' may not be set to 'SANITIZE_INVALID'; it may only be set to one of: SANITIZE, FORWARD_ONLY, APPEND_FORWARD, SANITIZE_SET, ALWAYS_FORWARD_ONLY"
    )])
def test_invalid_set_current_client_cert_details_value():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    set_current_client_cert_details:
      subject: invalid
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    require_errors(r1["ir"], [(
        "ambassador.default.1",
        "'set_current_client_cert_details' value for key 'subject' may only be 'true' or 'false', not 'invalid'"
    )])
    require_errors(r2["ir"], [(
        "ambassador.default.1",
        "'set_current_client_cert_details' value for key 'subject' may only be 'true' or 'false', not 'invalid'"
    )])
def test_invalid_set_current_client_cert_details_key():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    set_current_client_cert_details:
      invalid: true
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    logger.info("R1 IR: %s", r1["ir"].as_json())

    require_errors(r1["ir"], [(
        "ambassador.default.1",
        "'set_current_client_cert_details' may not contain key 'invalid'; it may only contain keys: subject, cert, chain, dns, uri"
    )])
    require_errors(r2["ir"], [(
        "ambassador.default.1",
        "'set_current_client_cert_details' may not contain key 'invalid'; it may only contain keys: subject, cert, chain, dns, uri"
    )])
def test_hr_error_4():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Mapping
metadata:
  name: mapping-1
  namespace: default
spec:
  prefix: /svc1
  service: svc1
  host_redirect: true
  path_redirect: /path/
  prefix_redirect: /prefix/
---
apiVersion: getambassador.io/v2
kind: Mapping
metadata:
  name: mapping-2
  namespace: default
spec:
  prefix: /svc2
  service: svc2
  host_redirect: true
  path_redirect: /path/
  regex_redirect:
    pattern: /regex/
    substitution: /substitution/
---
apiVersion: getambassador.io/v2
kind: Mapping
metadata:
  name: mapping-3
  namespace: default
spec:
  prefix: /svc3
  service: svc3
  host_redirect: true
  prefix_redirect: /prefix/
  regex_redirect:
    pattern: /regex/
    substitution: /substitution/
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    for r in [r1, r2]:
        require_errors(r["ir"], [
            ("mapping-1.default.1",
             "Cannot specify both path_redirect and prefix_redirect. Using path_redirect and ignoring prefix_redirect."),
            ("mapping-2.default.1",
             "Cannot specify both path_redirect and regex_redirect. Using path_redirect and ignoring regex_redirect."),
            ("mapping-3.default.1",
             "Cannot specify both prefix_redirect and regex_redirect. Using prefix_redirect and ignoring regex_redirect."),
        ])
def econf_compile(yaml, envoy_version="V2"):
    # Compile with and without a cache. Neither should produce errors.
    cache = Cache(logger)
    secret_handler = _secret_handler()

    r1 = Compile(logger, yaml, k8s=True, secret_handler=secret_handler, envoy_version=envoy_version)
    r2 = Compile(logger, yaml, k8s=True, secret_handler=secret_handler, cache=cache, envoy_version=envoy_version)

    _require_no_errors(r1["ir"])
    _require_no_errors(r2["ir"])

    # Both should produce the same Envoy config, compared as sorted JSON.
    r1j = json.dumps(r1[envoy_version.lower()].as_dict(), sort_keys=True, indent=2)
    r2j = json.dumps(r2[envoy_version.lower()].as_dict(), sort_keys=True, indent=2)
    assert r1j == r2j

    # Now we can return the Envoy config as a dictionary.
    return r1[envoy_version.lower()].as_dict()
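# A minimal usage sketch for econf_compile() (the test name is hypothetical,
# not part of the original suite). It reuses the forward_client_cert_details
# Module config that test_valid_forward_client_cert_details in this file shows
# to be error-free, and only checks that the helper hands back the Envoy
# config as a plain dict.
def test_econf_compile_usage_sketch():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    forward_client_cert_details: SANITIZE_SET
"""

    econf = econf_compile(yaml)

    # econf_compile() has already asserted that cached and uncached compiles
    # agree, so this sketch only pokes at the returned dictionary. (Specific
    # key names depend on the Envoy config version, so none are asserted here.)
    assert isinstance(econf, dict)
    assert econf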
def compile_with_cachecheck(yaml, envoy_version="V2", errors_ok=False):
    # Compile with and without a cache. Unless errors_ok is set, neither
    # should produce errors.
    cache = Cache(logger)
    secret_handler = _secret_handler()

    r1 = Compile(logger, yaml, k8s=True, secret_handler=secret_handler, envoy_version=envoy_version)
    r2 = Compile(logger, yaml, k8s=True, secret_handler=secret_handler, cache=cache, envoy_version=envoy_version)

    if not errors_ok:
        _require_no_errors(r1["ir"])
        _require_no_errors(r2["ir"])

    # Both should produce the same Envoy config, compared as sorted JSON.
    r1j = json.dumps(r1[envoy_version.lower()].as_dict(), sort_keys=True, indent=2)
    r2j = json.dumps(r2[envoy_version.lower()].as_dict(), sort_keys=True, indent=2)
    assert r1j == r2j

    # All good.
    return r1
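# A companion usage sketch for compile_with_cachecheck() (again, a hypothetical
# test name). Unlike econf_compile(), this helper returns the whole compile
# result, so both the IR and the versioned Envoy config are available under
# the same "ir" and lowercased-version keys that the helpers above index.
def test_compile_with_cachecheck_usage_sketch():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    set_current_client_cert_details:
      subject: true
"""

    r = compile_with_cachecheck(yaml)

    # The IR and the (default V2) Envoy config are both available on the result.
    assert r["ir"]
    assert r["v2"].as_dict()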
def test_valid_forward_client_cert_details():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    forward_client_cert_details: SANITIZE_SET
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    require_no_errors(r1["ir"])
    require_no_errors(r2["ir"])
def test_valid_set_current_client_cert_details():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
  name: ambassador
  namespace: default
spec:
  config:
    set_current_client_cert_details:
      subject: true
      dns: true
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    require_no_errors(r1["ir"])
    require_no_errors(r2["ir"])
def test_hr_error_1():
    yaml = """
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-1
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc1
  host_redirect: true
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-2
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc2
  host_redirect: true
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    # XXX Why are these showing up tagged with "mapping-1.default.1" rather than "mapping-2.default.1"?
    require_errors(
        r1["ir"],
        [("mapping-1.default.1",
          "cannot accept mapping-2 as second host_redirect after mapping-1")])
    require_errors(
        r2["ir"],
        [("mapping-1.default.1",
          "cannot accept mapping-2 as second host_redirect after mapping-1")])
def test_hr_error_2():
    yaml = """
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-1
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc1
  host_redirect: true
---
apiVersion: x.getambassador.io/v3alpha1
kind: AmbassadorMapping
metadata:
  name: mapping-2
  namespace: default
spec:
  hostname: "*"
  prefix: /
  service: svc2
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    # XXX Why are these showing up as "-global-"?
    require_errors(r1["ir"], [(
        "-global-",
        "cannot accept mapping-2 without host_redirect after mapping-1 with host_redirect"
    )])
    require_errors(r2["ir"], [(
        "-global-",
        "cannot accept mapping-2 without host_redirect after mapping-1 with host_redirect"
    )])
def test_hr_error_3():
    yaml = """
---
apiVersion: getambassador.io/v2
kind: Mapping
metadata:
  name: mapping-1
  namespace: default
spec:
  prefix: /
  service: svc1
---
apiVersion: getambassador.io/v2
kind: Mapping
metadata:
  name: mapping-2
  namespace: default
spec:
  prefix: /
  service: svc2
  host_redirect: true
"""

    cache = Cache(logger)

    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)

    # XXX Why are these showing up tagged with "mapping-1.default.1" rather than "mapping-2.default.1"?
    require_errors(r1["ir"], [(
        "mapping-1.default.1",
        "cannot accept mapping-2 with host_redirect after mappings without host_redirect (eg mapping-1)"
    )])
    require_errors(r2["ir"], [(
        "mapping-1.default.1",
        "cannot accept mapping-2 with host_redirect after mappings without host_redirect (eg mapping-1)"
    )])
class Builder:
    def __init__(self, logger: logging.Logger, yaml_file: str, enable_cache=True) -> None:
        self.test_dir = os.path.join(
            os.path.dirname(
                os.environ.get('PYTEST_CURRENT_TEST').split("::")[0]),
            "test_cache_data")

        self.cache: Optional[Cache] = None

        if enable_cache:
            self.cache = Cache(logger)

        # This is a brutal hack: we load all the YAML, store it as objects, then
        # build IR and econf from the re-serialized YAML from these resources.
        # The reason is that it's kind of the only way we can apply deltas in
        # a meaningful way.
        self.resources: Dict[str, Any] = {}

        # Load the initial YAML.
        self.apply_yaml(yaml_file, allow_updates=False)
        self.secret_handler = NullSecretHandler(logger, "/tmp/secrets/src",
                                                "/tmp/secrets/cache", "0")

        # Save builds to make this simpler to call.
        self.builds: List[Tuple[IR, EnvoyConfig]] = []

    def apply_yaml(self, yaml_file: str, allow_updates=True) -> None:
        with open(os.path.join(self.test_dir, yaml_file), "r") as f:
            yaml_data = f.read()

        for rsrc in yaml.safe_load_all(yaml_data):
            # We require kind, metadata.name, and metadata.namespace here.
            kind = rsrc['kind']
            metadata = rsrc['metadata']
            name = metadata['name']
            namespace = metadata['namespace']

            key = f"{kind}-v2-{name}-{namespace}"

            if key in self.resources:
                # This is an attempted update.
                if not allow_updates:
                    raise RuntimeError(f"Cannot update {key}")

                if self.cache is not None:
                    self.cache.invalidate(key)

            self.resources[key] = rsrc

    def delete_yaml(self, yaml_file: str) -> None:
        with open(os.path.join(self.test_dir, yaml_file), "r") as f:
            yaml_data = f.read()

        for rsrc in yaml.safe_load_all(yaml_data):
            # We require kind, metadata.name, and metadata.namespace here.
            kind = rsrc['kind']
            metadata = rsrc['metadata']
            name = metadata['name']
            namespace = metadata['namespace']

            key = f"{kind}-v2-{name}-{namespace}"

            if key in self.resources:
                del self.resources[key]

                if self.cache is not None:
                    self.cache.invalidate(key)

    def build(self) -> Tuple[IR, EnvoyConfig]:
        # Do a build, return IR & econf, but also stash them in self.builds.
        yaml_data = yaml.safe_dump_all(self.resources.values())

        aconf = Config()

        fetcher = ResourceFetcher(logger, aconf)
        fetcher.parse_yaml(yaml_data, k8s=True)

        aconf.load_all(fetcher.sorted())

        ir = IR(aconf,
                cache=self.cache,
                file_checker=lambda path: True,
                secret_handler=self.secret_handler)
        assert ir, "could not create an IR"

        econf = EnvoyConfig.generate(ir, "V2", cache=self.cache)
        assert econf, "could not create an econf"

        self.builds.append((ir, econf))

        return ir, econf

    def invalidate(self, key) -> None:
        assert self.cache is not None, "cannot invalidate when caching is disabled"
        assert self.cache[key] is not None, f"key {key} is not cached"
        self.cache.invalidate(key)

    def check(self, what: str, b1: Tuple[IR, EnvoyConfig], b2: Tuple[IR, EnvoyConfig],
              strip_cache_keys=False) -> None:
        for kind, idx in [("IR", 0), ("econf", 1)]:
            if strip_cache_keys and (idx == 0):
                x1 = self.strip_cache_keys(b1[idx].as_dict())
                j1 = json.dumps(x1, sort_keys=True, indent=4)

                x2 = self.strip_cache_keys(b2[idx].as_dict())
                j2 = json.dumps(x2, sort_keys=True, indent=4)
            else:
                j1 = b1[idx].as_json()
                j2 = b2[idx].as_json()

            match = (j1 == j2)

            output = ""

            if not match:
                l1 = j1.split("\n")
                l2 = j2.split("\n")

                n1 = f"{what} {kind} 1"
                n2 = f"{what} {kind} 2"

                output += "\n--------\n"

                for line in difflib.context_diff(l1, l2, fromfile=n1, tofile=n2):
                    line = line.rstrip()
                    output += line
                    output += "\n"

            assert match, output

    def check_last(self, what: str) -> None:
        build_count = len(self.builds)

        b1 = self.builds[build_count - 2]
        b2 = self.builds[build_count - 1]

        self.check(what, b1, b2)

    def strip_cache_keys(self, node: Any) -> Any:
        # Return a copy of node with any '_cache_key' entries removed, so that
        # cached and uncached builds can be compared directly.
        if isinstance(node, dict):
            output = {}

            for k, v in node.items():
                if k == '_cache_key':
                    continue

                output[k] = self.strip_cache_keys(v)

            return output
        elif isinstance(node, list):
            return [self.strip_cache_keys(x) for x in node]

        return node
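# A minimal usage sketch for Builder. It is deliberately not named test_* so
# pytest does not collect it: the fixture file names below are assumptions
# (Builder only requires that they live under the test's test_cache_data/
# directory), and the real cache tests drive Builder with their own fixtures.
def _builder_usage_sketch() -> None:
    builder = Builder(logger, "base.yaml")  # "base.yaml" is a hypothetical fixture

    # Build twice with no resource changes in between...
    builder.build()
    builder.build()

    # ...and require the cached rebuild to produce identical IR and econf.
    builder.check_last("no changes")

    # Applying updated YAML invalidates the affected cache keys before the
    # next build picks up the changes:
    #     builder.apply_yaml("delta.yaml")   # also a hypothetical fixture
    #     builder.build()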