def test_version_map(path, schema_tags):
    """Validate a version map file's name, YAML framing, keys, and tag list."""
    assert VALID_FILENAME_RE.match(
        path.name
    ) is not None, f"{path.name} is an invalid version map filename"
    assert_yaml_header_and_footer(path)
    version_map = load_yaml(path)
    assert set(version_map.keys()) == {"FILE_FORMAT", "YAML_VERSION", "tags"}
    assert version_map["FILE_FORMAT"] in VALID_FILE_FORMAT_VERSIONS
    assert version_map["YAML_VERSION"] in VALID_YAML_VERSIONS
    # Each listed base/version pair must resolve to a schema tag that exists.
    for base, version in version_map["tags"].items():
        tag = f"{base}-{version}"
        assert tag in schema_tags, f"{path.name} specifies missing tag {tag}"
    # NOTE(review): mapping keys are necessarily unique after YAML parsing, so
    # this assertion can never fail; detecting duplicates would require
    # scanning the raw file text. Kept as-is to preserve behavior.
    assert len(version_map["tags"].keys()) == len(set(
        version_map["tags"].keys())), f"{path.name} contains duplicate tags"
    tags_in_file_order = list(version_map["tags"].keys())
    tags_sorted = sorted(tags_in_file_order)
    if tags_in_file_order != tags_sorted:
        sorted_list = "\n".join(
            f"""{tag}: {version_map["tags"][tag]}""" for tag in tags_sorted)
        message = f"{path.name} tag list is not sorted. Try this order instead:\n{sorted_list}"
        assert False, message
def _assert_latest_schema_correct(path):
    """Assert that a latest-version schema only references latest-version ids."""
    __tracebackhide__ = True
    schema = load_yaml(path)
    # Deprecated schemas are exempt from the "latest refs only" rule.
    if is_deprecated(schema["id"]):
        return
    # Keep external refs only (skip local "#..." fragments and the
    # metaschema), with any trailing fragment stripped off.
    refs = []
    for r in list_refs(schema):
        if r.startswith("#") or r == METASCHEMA_ID:
            continue
        refs.append(r.split("#")[0])
    for ref in refs:
        ref_id = ref_to_id(schema["id"], ref)
        assert ref_id in latest_schema_ids, (
            f"{path.name} is the latest version of a schema, "
            f"but references {ref}, which is not latest")
    for example_id in list_example_ids(schema):
        assert example_id in latest_schema_ids, (
            f"{path.name} is the latest version of a schema, "
            f"but its examples include {example_id}, which is not latest")
    for description_id in list_description_ids(schema):
        # Ids lacking a "-version" suffix carry no version to validate.
        if "-" not in description_id:
            continue
        assert description_id in latest_schema_ids, (
            f"{path.name} is the latest version of a schema, "
            f"but its description includes a ref to {description_id}, "
            "which is not latest")
def test_version_map_tags_retained(path, previous_path):
    """
    Confirm that non-deprecated tags were not lost between successive
    version maps.
    """
    current = load_yaml(path)
    previous = load_yaml(previous_path)
    # Everything the previous map carried, minus deliberately deprecated bases.
    expected_tags = set(previous["tags"].keys()) - DEPRECATED_TAG_BASES
    present_tags = set(current["tags"].keys())
    missing_tags = expected_tags - present_tags
    if missing_tags:
        assert False, (
            f"{path.name} is missing schemas that were present in the "
            "previous version. If this was intentional, update the deprecation "
            "list in tests/common.py, otherwise add the following missing schemas: "
            f"""{", ".join(missing_tags)}""")
def _assert_schema_correct(path):
    """Assert that the schema file at *path* is well-formed and self-consistent.

    Checks the filename pattern, YAML framing, required keys ($schema, id,
    title, description), agreement between filename-derived id/tag and the
    schema's own values, global id/tag uniqueness, and that embedded examples
    and description refs do not point at an outdated version of this schema.

    Fix: corrected the "descriptioon" typo in the final error message.
    """
    __tracebackhide__ = True
    assert VALID_SCHEMA_FILENAME_RE.match(
        path.name
    ) is not None, f"{path.name} is an invalid schema filename"
    assert_yaml_header_and_footer(path)
    schema = load_yaml(path)
    assert "$schema" in schema, f"{path.name} is missing $schema key"
    assert schema[
        "$schema"] == METASCHEMA_ID, f"{path.name} has wrong $schema value (expected {METASCHEMA_ID})"
    expected_id = path_to_id(path)
    expected_tag = path_to_tag(path)
    assert "id" in schema, f"{path.name} is missing id key (expected {expected_id})"
    assert schema[
        "id"] == expected_id, f"{path.name} id doesn't match filename (expected {expected_id})"
    if "tag" in schema:
        assert schema[
            "tag"] == expected_tag, f"{path.name} tag doesn't match filename (expected {expected_tag})"
    assert "title" in schema, f"{path.name} is missing title key"
    assert len(schema["title"].strip()
               ) > 0, f"{path.name} title must have content"
    assert "description" in schema, f"{path.name} is missing description key"
    assert len(schema["description"].strip()
               ) > 0, f"{path.name} description must have content"
    # id (and tag, when present) must be globally unique across all schemas.
    assert len(id_to_schema[
        schema["id"]]) == 1, f"{path.name} does not have a unique id"
    if "tag" in schema:
        assert len(tag_to_schema[
            schema["tag"]]) == 1, f"{path.name} does not have a unique tag"
    id_base, _ = split_id(schema["id"])
    # An example referring to this same schema base must use the current id.
    for example_id in list_example_ids(schema):
        example_id_base, _ = split_id(example_id)
        assert not (
            example_id_base == id_base and example_id != schema["id"]
        ), f"{path.name} contains an example with an outdated tag"
    # Likewise for refs embedded in the description text.
    for description_id in list_description_ids(schema):
        # Ids without a "-version" suffix carry no version info to check.
        if len(description_id.rsplit("-", 1)) > 1:
            description_id_base, _ = split_id(description_id)
            assert not (
                description_id_base == id_base
                and description_id != schema["id"]
            ), f"{path.name} description contains an outdated ref"
def test_transform(path, assert_schema_correct):
    """Transform schemas (except the base domain) must build on another transform."""
    assert_schema_correct(path)
    # The root domain schema is the one transform with nothing to extend.
    if path.name == "domain-1.0.0.yaml":
        return
    schema = load_yaml(path)
    message = f"{path.name} must include a base or other transform schema"
    if "allOf" in schema:
        clauses = schema["allOf"]
        assert any("$ref" in clause and clause["$ref"] in REFS
                   for clause in clauses), message
    elif "$ref" in schema:
        assert schema["$ref"] in REFS, message
    else:
        assert False, message
def test_latest_version_map(latest_schema_tags):
    """
    The current latest version map has some special requirements.
    """
    vm = load_yaml(LATEST_PATH)
    # Map each tag base to its latest version string.
    tag_base_to_version = {
        base: version
        for base, version in (tag.rsplit("-", 1) for tag in latest_schema_tags)
    }
    expected_tag_bases = {
        base for base in tag_base_to_version if not is_deprecated(base)
    }
    vm_tag_bases = set(vm["tags"].keys())
    missing_tag_bases = expected_tag_bases - vm_tag_bases
    if missing_tag_bases:
        insert_list = "\n".join(
            sorted(f"""{base}-{tag_base_to_version[base]}"""
                   for base in missing_tag_bases))
        message = (
            f"{LATEST_PATH.name} must include the latest version of "
            "every non-deprecated schema with a tag. Update the deprecation "
            "list in tests/common.py, or add the following missing schemas: \n"
            f"{insert_list}")
        assert False, message
    # Every expected base is present; now its version must also be current.
    stale_tag_bases = sorted(
        base for base in expected_tag_bases
        if vm["tags"][base] != tag_base_to_version[base])
    if len(stale_tag_bases) > 0:
        update_list = "\n".join(
            f"""{base}: {vm["tags"][base]} --> {tag_base_to_version[base]}"""
            for base in stale_tag_bases)
        message = (
            f"{LATEST_PATH.name} must include the latest version of "
            "every non-deprecated schema with a tag. Update the following: \n"
            f"{update_list}")
        assert False, message
def task(task):
    """Render and push an EVPN/VXLAN fabric configuration to one switch.

    Builds the device configuration from Jinja templates plus the
    switches/vlans/vrfs YAML inventory, loads it as a NAPALM merge
    candidate, and commits unless this is a dry run or the diff is empty.

    Fixes: removed an unused ``re.compile`` that was rebuilt (and never
    used) on every interface iteration; ``peers`` now defaults to an empty
    list so a bgp-as switch with an unrecognized role no longer raises
    NameError.

    NOTE(review): the parameter shadows the function name ``task`` — kept
    for interface compatibility with the nornir runner.
    """
    dry_run = task.is_dry_run()
    napalm = task.host.get_connection("napalm", task.nornir.config)
    # Short interface prefixes used in the inventory -> full interface names.
    intf_types = {
        "et": "Ethernet",
        "lo": "Loopback",
        "ma": "Management",
    }
    # Any of these roles is treated as a leaf for BGP peering purposes.
    leaf_roles = {
        "compute",
        "service",
        "storage",
    }
    root = Path(__file__).parent
    templates = Path(f"{root}/templates")
    # fmt: off
    evpn_af_evpn_template = load_template(LOAD_METHOD, f"{templates}/jinja_evpn_af_evpn.j2")
    evpn_af_ipv4_template = load_template(LOAD_METHOD, f"{templates}/jinja_evpn_af_ipv4.j2")
    evpn_bgp_template = load_template(LOAD_METHOD, f"{templates}/jinja_evpn_bgp.j2")
    evpn_bgp_overlay_neighbor_template = load_template(LOAD_METHOD, f"{templates}/jinja_evpn_bgp_overlay_neighbor.j2")
    evpn_bgp_underlay_neighbor_template = load_template(LOAD_METHOD, f"{templates}/jinja_evpn_bgp_underlay_neighbor.j2")
    interface_template = load_template(LOAD_METHOD, f"{templates}/jinja_interface.j2")
    mlag_template = load_template(LOAD_METHOD, f"{templates}/jinja_mlag.j2")
    mlag_ibgp_template = load_template(LOAD_METHOD, f"{templates}/jinja_mlag_ibgp.j2")
    # fmt: on
    switches = load_yaml(LOAD_METHOD, f"{root}/switches.yaml")

    # Partition the fabric into sorted leaf and spine name lists.
    leaves = []
    spines = []
    for sw in switches:
        sw_name = sw.get("name")
        if sw["role"] in leaf_roles:
            leaves.append(sw_name)
        elif sw["role"] == "spine":
            spines.append(sw_name)
    leaves.sort()
    spines.sort()

    # Inventory entry for the switch this task instance is running against.
    name = str(task.host)
    sw = next(s for s in switches if s["name"] == name)

    # Routed (addressed Ethernet) interfaces on this switch, and the expected
    # remote address of each point-to-point link.
    intfs_routed = []
    for intf in sw["interfaces"]:
        intf_name = list(intf.keys())[0]
        intf_type, _ = get_interface(intf_name)
        intf_ip = intf.get(intf_name, {}).get("ip")
        if intf_type == "et" and intf_ip:
            intfs_routed.append(intf)
    peer_ips = []
    for intf in intfs_routed:
        intf_name = list(intf.keys())[0]
        intf_ip = intf.get(intf_name, {}).get("ip")
        peer_ips.append(get_peer_ip(intf_ip))

    config = []
    config.extend(["hostname {name}".format(name=sw["name"])])

    # Per-interface configlets.
    for intf in sw.get("interfaces"):
        intf_name = list(intf.keys())[0]
        intf_type, intf_number = get_interface(intf_name)
        intf_ip = intf.get(intf_name, {}).get("ip")
        intf_description = intf.get(intf_name, {}).get("desc")
        configlet = interface_template.render(
            description=intf_description,
            ip=intf_ip,
            number=intf_number,
            type_=intf_types[intf_type],
        )
        config.extend(configlet.split("\n"))

    if "mlag" in sw:
        mlag = sw.get("mlag", {})
        ip = mlag.get("ip")
        interfaces = []
        for intf in mlag.get("interfaces", []):
            intf_type, intf_number = get_interface(intf)
            interfaces.append(
                "{type_}{number}".format(
                    number=intf_number,
                    type_=intf_types[intf_type],
                )
            )
        configlet = mlag_template.render(
            domain=sw.get("bgp-as"),
            interfaces=interfaces,
            ip=ip,
            peer_ip=get_peer_ip(ip),
            port_channel=mlag.get("port-channel"),
            vlan=mlag.get("vlan"),
        )
        config.extend(configlet.split("\n"))

    if "bgp-as" in sw:
        # Router-id is the Loopback0 address without its /32 mask.
        router_id = next(
            intf.get("lo0", {}).get("ip")
            for intf in sw.get("interfaces", [])
            if "lo0" in intf
        ).replace("/32", "")
        configlet = evpn_bgp_template.render(
            bgp_as=sw.get("bgp-as"),
            router_id=router_id,
        )
        config.extend(configlet.split("\n"))

        # Leaves peer with every spine and vice versa; any other role gets
        # no overlay/underlay neighbors (previously this raised NameError).
        peers = []
        if sw.get("role") in leaf_roles:
            peers = spines
        elif sw.get("role") == "spine":
            peers = leaves
        for peer in peers:
            pr = next(s for s in switches if s.get("name") == peer)
            peer_ip = next(
                intf.get("lo0", {}).get("ip")
                for intf in pr.get("interfaces", [])
                if "lo0" in intf
            ).replace("/32", "")
            peer_as = pr.get("bgp-as")
            configlet = evpn_bgp_overlay_neighbor_template.render(
                neighbor_as=peer_as,
                neighbor_ip=peer_ip,
                neighbor_name="{peer} lo0".format(peer=peer),
            )
            config.extend(configlet.split("\n"))
            # Underlay neighbors: the peer's interface addresses that face us.
            for intf in pr.get("interfaces", []):
                intf_name = list(intf.keys())[0]
                intf_ip = intf.get(intf_name, {}).get("ip").split("/")[0]
                if intf_ip in peer_ips:
                    configlet = evpn_bgp_underlay_neighbor_template.render(
                        neighbor_as=peer_as,
                        neighbor_ip=intf_ip,
                        neighbor_name="{peer} {intf}".format(
                            peer=peer,
                            intf=intf_name,
                        ),
                    )
                    config.extend(configlet.split("\n"))

        if "mlag" in sw:
            configlet = mlag_ibgp_template.render(
                bgp_as=sw.get("bgp-as"),
                mlag_neighbor=get_peer_ip(sw.get("mlag", {}).get("ip")),
            )
            config.extend(configlet.split("\n"))

        configlet = evpn_af_evpn_template.render()
        config.extend(configlet.split("\n"))

        # Advertise every loopback network in the IPv4 address family.
        networks = []
        for intf in sw["interfaces"]:
            intf_name = list(intf.keys())[0]
            if intf_name.startswith("lo"):
                networks.append(intf.get(intf_name, {}).get("ip"))
        configlet = evpn_af_ipv4_template.render(networks=networks)
        config.extend(configlet.split("\n"))

        vxlan_template = load_template(LOAD_METHOD, f"{templates}/jinja_vxlan.j2")
        vlans = load_yaml(LOAD_METHOD, f"{root}/vlans.yaml")
        vrfs = load_yaml(LOAD_METHOD, f"{root}/vrfs.yaml")
        for vlan in vlans:
            vlan["network"] = IPNetwork(vlan.get("gateway")).cidr
        bgp_as = sw.get("bgp-as")
        sw_intfs = sw.get("interfaces", [])
        lo0_ip = get_interface_ip("lo0", sw_intfs)
        lo100_ip = get_interface_ip("lo100", sw_intfs)
        router_id = lo0_ip
        # VTEP source interface: Loopback100 when present, else Loopback0.
        vxlan_src = "100" if lo100_ip else "0"
        configlet = vxlan_template.render(
            bgp_as=bgp_as,
            router_id=router_id,
            vlans=vlans,
            vxlan_src=vxlan_src,
            vrfs=vrfs,
        )
        config.extend(configlet.split("\n"))

    configuration = "\n".join(config)
    # Merge (not replace) the rendered config; the replace path is kept but
    # currently disabled by the hard-coded flag.
    filename = None
    replace = False
    if replace:
        napalm.load_replace_candidate(filename=filename, config=configuration)
    else:
        napalm.load_merge_candidate(filename=filename, config=configuration)
    diff = napalm.compare_config()
    if not dry_run and diff:
        napalm.commit_config()
    else:
        napalm.discard_config()
    return Result(host=task.host, diff=diff, changed=len(diff) > 0)
def test_asdf_schema(path):
    """A schema file must carry the YAML framing and parse without errors."""
    assert_yaml_header_and_footer(path)
    # An exception from the loader is the failure condition here;
    # no further assertions are needed.
    load_yaml(path)
def latest_schemas():
    """Load and return every latest-version schema found under SCHEMAS_PATH."""
    return list(map(load_yaml, list_latest_schema_paths(SCHEMAS_PATH)))