def config_backup(job_result, data, backup_root_folder):
    """Nornir play to backup configurations from devices.

    Args:
        job_result: Job result object handed to NornirLogger for log output.
        data: Job input data; ``data.get("debug")`` toggles debug logging.
        backup_root_folder: Root folder under which device backups are written.
    """
    start_time = datetime.now()
    logger = NornirLogger(__name__, job_result, data.get("debug"))
    settings = GoldenConfigSettings.objects.get(id="aaaaaaaa-0000-0000-0000-000000000001")
    verify_global_settings(logger, settings, ["backup_path_template", "intended_path_template"])

    # Build the Nornir inventory from Nautobot, seeding each host with the run timestamp
    inventory_options = {
        "credentials_class": NORNIR_SETTINGS.get("credentials"),
        "params": NORNIR_SETTINGS.get("inventory_params"),
        "queryset": get_allowed_os(data),
        "defaults": {"now": start_time},
    }
    nornir_obj = InitNornir(
        runner=NORNIR_SETTINGS.get("runner"),
        logging={"enabled": False},
        inventory={"plugin": "nautobot-inventory", "options": inventory_options},
    )

    # Attach the Golden Config processor and execute the backup task
    nornir_obj.with_processors([ProcessGoldenConfig(logger)]).run(
        task=run_backup,
        name="BACKUP CONFIG",
        logger=logger,
        global_settings=settings,
        backup_root_folder=backup_root_folder,
    )
    logger.log_debug("Completed configuration from devices.")
def config_backup(job_result, data, backup_root_folder):
    """Nornir play to backup configurations from devices.

    Args:
        job_result: Job result object handed to NornirLogger for log output.
        data: Job input data; ``data.get("debug")`` toggles debug logging.
        backup_root_folder: Root folder under which device backups are written.
    """
    now = datetime.now()
    logger = NornirLogger(__name__, job_result, data.get("debug"))
    global_settings = GoldenConfigSetting.objects.first()
    verify_global_settings(logger, global_settings, ["backup_path_template"])

    # Map platform.slug -> [{"regex": ...}, ...] for the netutils removal func.
    # setdefault replaces the original "check then create empty list" pattern.
    remove_regex_dict = {}
    for regex in ConfigRemove.objects.all():
        remove_regex_dict.setdefault(regex.platform.slug, []).append({"regex": regex.regex})

    # Map platform.slug -> [{"replace": ..., "regex": ...}, ...] for the netutils
    # replacement func.
    replace_regex_dict = {}
    for regex in ConfigReplace.objects.all():
        replace_regex_dict.setdefault(regex.platform.slug, []).append(
            {"replace": regex.replace, "regex": regex.regex}
        )

    nornir_obj = InitNornir(
        runner=NORNIR_SETTINGS.get("runner"),
        logging={"enabled": False},
        inventory={
            "plugin": "nautobot-inventory",
            "options": {
                "credentials_class": NORNIR_SETTINGS.get("credentials"),
                "params": NORNIR_SETTINGS.get("inventory_params"),
                "queryset": get_job_filter(data),
                "defaults": {"now": now},
            },
        },
    )
    nr_with_processors = nornir_obj.with_processors([ProcessGoldenConfig(logger)])
    nr_with_processors.run(
        task=run_backup,
        name="BACKUP CONFIG",
        logger=logger,
        global_settings=global_settings,
        remove_regex_dict=remove_regex_dict,
        replace_regex_dict=replace_regex_dict,
        backup_root_folder=backup_root_folder,
    )
    logger.log_debug("Completed configuration from devices.")
def main():
    """Execution begins here."""
    # Initialize Nornir, parse CLI args, and read the MDT collector address
    nornir = InitNornir()
    args = process_args()
    collector_ip = nornir.inventory.groups["devices"]["mdt"]["collector_ip_addr"]

    # Only register the Grafana dashboard processor when a collector is defined
    if collector_ip:
        nornir = nornir.with_processors([ProcGrafanaDashboard(collector_ip)])

    # Walk the inventory, collecting one SLA entry and one schedule per host
    entries = []
    schedules = []
    for hostname, host_attrs in nornir.inventory.hosts.items():
        print(f"Building SLA entry for {hostname}")
        entries.append(build_sla.entry(host_attrs, tag=hostname))
        schedules.append(build_sla.schedule(host_attrs))

    # Assemble the RPC payload: entry list, schedule list, responder enablement
    merge_sla = build_sla.wrapper(
        operation="merge",
        entry=entries,
        schedule=schedules,
        responder=None,
    )
    print("Constructed common SLA config")

    # The MDT config is simpler and more static; build it from the group data
    replace_mdt = build_mdt.subscription(nornir.inventory.groups["devices"].data["mdt"])
    print("Constructed common MDT config")

    # Apply the common merge_sla / replace_mdt payloads to every device
    nornir.run(
        task=manage_probes,
        merge_sla=merge_sla,
        replace_mdt=replace_mdt,
        rebuild=args.rebuild,
    )
def main():
    """Execution starts here."""
    # Register processors that write JSON and CSV output files
    nornir = InitNornir().with_processors([ProcJSON(), ProcCSV()])

    # NETCONF filter in xpath form targeting the IP SLA statistics subtree
    sla_filter = ("xpath", "/ip-sla-stats")

    # Collect the SLA statistics from each device over NETCONF
    nornir.run(task=collect_sla_stats, sla_filter=sla_filter)
def config_intended(job_result, data, jinja_root_path, intended_root_folder):
    """Nornir play to generate intended configurations.

    Args:
        job_result: Job result object handed to NornirLogger for log output.
        data: Job input data; ``data.get("debug")`` toggles debug logging.
        jinja_root_path: Root folder containing the Jinja templates.
        intended_root_folder: Root folder where rendered configs are written.
    """
    start_time = datetime.now()
    logger = NornirLogger(__name__, job_result, data.get("debug"))
    settings = GoldenConfigSettings.objects.get(id="aaaaaaaa-0000-0000-0000-000000000001")
    verify_global_settings(
        logger,
        settings,
        ["jinja_path_template", "intended_path_template", "sot_agg_query"],
    )

    # Build the Nornir inventory from Nautobot, seeding each host with the run timestamp
    nornir_obj = InitNornir(
        runner=NORNIR_SETTINGS.get("runner"),
        logging={"enabled": False},
        inventory={
            "plugin": "nautobot-inventory",
            "options": {
                "credentials_class": NORNIR_SETTINGS.get("credentials"),
                "params": NORNIR_SETTINGS.get("inventory_params"),
                "queryset": get_allowed_os(data),
                "defaults": {"now": start_time},
            },
        },
    )

    # Render the intended configuration for every device in scope
    nornir_obj.with_processors([ProcessGoldenConfig(logger)]).run(
        task=run_template,
        name="RENDER CONFIG",
        logger=logger,
        global_settings=settings,
        job_result=job_result,
        jinja_root_path=jinja_root_path,
        intended_root_folder=intended_root_folder,
    )
def config_compliance(job_result, data, backup_root_path, intended_root_folder):
    """Nornir play to run configuration compliance against devices.

    Args:
        job_result: Job result object handed to NornirLogger for log output.
        data: Job input data; ``data.get("debug")`` toggles debug logging.
        backup_root_path: Root folder holding the backed-up configurations.
        intended_root_folder: Root folder holding the intended configurations.
    """
    start_time = datetime.now()
    features = get_features()
    logger = NornirLogger(__name__, job_result, data.get("debug"))
    settings = GoldenConfigSetting.objects.first()
    verify_global_settings(logger, settings, ["backup_path_template", "intended_path_template"])

    # Build the Nornir inventory from Nautobot, seeding each host with the run timestamp
    nornir_obj = InitNornir(
        runner=NORNIR_SETTINGS.get("runner"),
        logging={"enabled": False},
        inventory={
            "plugin": "nautobot-inventory",
            "options": {
                "credentials_class": NORNIR_SETTINGS.get("credentials"),
                "params": NORNIR_SETTINGS.get("inventory_params"),
                "queryset": get_job_filter(data),
                "defaults": {"now": start_time},
            },
        },
    )

    # Compare backup vs intended per feature for every device in scope
    nornir_obj.with_processors([ProcessGoldenConfig(logger)]).run(
        task=run_compliance,
        name="RENDER COMPLIANCE TASK GROUP",
        logger=logger,
        global_settings=settings,
        backup_root_path=backup_root_path,
        intended_root_folder=intended_root_folder,
        features=features,
    )
    logger.log_debug("Completed Compliance for devices.")
def main(args):
    """Execution begins here."""
    # Initialize Nornir and register the terse/CSV/JSON output processors
    init_nornir = InitNornir()
    nornir = init_nornir.with_processors([ProcTerse(), ProcCSV(), ProcJSON()])

    # Kick off the validation task, forwarding the parsed CLI arguments
    aggregate = nornir.run(task=run_checks, args=args)

    # Print every failed check; exit with rc=1 if any host reported one
    any_failed = False
    for host, multi_result in aggregate.items():
        failed_checks = multi_result[0].result
        if not failed_checks:
            continue
        print(f"{host} error: at least one check is invalid")
        for check in failed_checks:
            check_id = check.get("id", "no_id")
            print(f"{host[:12]:<12} {check_id[:24]:<24} -> {check['reason']}")
        any_failed = True

    if any_failed:
        sys.exit(1)
def main():
    """Run the get_version task against all EOS devices, printing each result."""
    # Load the inventory and keep only hosts in the "eos" group
    eos_devices = InitNornir(config_file="config.yaml").filter(F(groups__contains="eos"))
    # Attach the result-printing processor and execute the task
    eos_devices.with_processors([PrintResult()]).run(task=get_version)
from nornir import InitNornir
from nornir3_demo.plugins.tasks import acmeos
from nornir3_demo.plugins.processors.rich import ProgressBar

# Build the inventory from the ACME plugin; the progress bar needs the host count
nr = InitNornir(inventory={"plugin": "ACMEInventory"})
host_count = len(nr.inventory.hosts)

# Attach a progress bar sized to the inventory, then run the OS upgrade task
nr.with_processors([ProgressBar(host_count)]).run(task=acmeos.upgrade_os, version="5.3.2")
def main():
    """Run the get_version task against all EOS devices with a simple processor."""
    # Load the inventory and keep only hosts in the "eos" group
    eos_devices = InitNornir(config_file="config.yaml").filter(F(groups__contains="eos"))
    # NOTE(review): num_workers as a run() kwarg is Nornir 2.x style; in Nornir 3
    # worker count belongs to the runner config — confirm which version is in use
    eos_devices.with_processors([SimpleProcessor()]).run(task=get_version, num_workers=2)
#!/usr/bin/env python
from nornir import InitNornir
from nornir3_demo.plugins.processors.rich import ProgressBar
from nornir3_demo.plugins.runners.dc_aware import DCAwareRunner
from nornir3_demo.plugins.tasks import acmeos

# Build the inventory from the ACME plugin; the progress bar needs the host count
nr = InitNornir(inventory={"plugin": "ACMEInventory"})
host_total = len(nr.inventory.hosts)

# Swap in the custom DC-aware runner and attach the progress bar processor
dc_runner = DCAwareRunner(num_workers=100)
nr = nr.with_processors([ProgressBar(host_total)]).with_runner(dc_runner)
nr.run(task=acmeos.upgrade_os, version="5.3.1")

# Print the runner's report so we can see which hosts failed and which were skipped
print()
for report_entry in dc_runner.report():
    print(report_entry)
##### /// START \\\ #####
nr = InitNornir(config_file="config.yaml", dry_run=True)

# Collect "show interfaces switchport" from every switch and parse it per device
SWPORTS = {}
switchport_results = nr.run(netmiko_send_command, command_string='show interfaces switchport')
for switch_name, task_result in switchport_results.items():
    SWPORTS[switch_name] = parse_output(
        platform="cisco_ios",
        command="show interfaces switchport",
        data=str(task_result[0]),
    )

# Gather the MAC address table via NAPALM, saving results into a dict
mac_address_table_data = {}
nr.with_processors([SaveResultToDict(mac_address_table_data), PrintResult()]).run(
    task=napalm_get, getters=["mac_address_table"]
)

# Gather interface details via NAPALM, saving results into a dict
interfaces_data = {}
nr.with_processors([SaveResultToDict(interfaces_data), PrintResult()]).run(
    task=napalm_get, getters=["interfaces"]
)

print("TRYING TO FIND MAC: {}".format(MAC_TO_FIND))

# Record every port whose interface MAC address matches the target MAC
MACDB = {}
for switch in SWPORTS:
    MACDB[switch] = {}
    switch_interfaces = interfaces_data['napalm_get'][switch]["result"]["interfaces"]
    for port, port_details in switch_interfaces.items():
        if port_details['mac_address'] == MAC_TO_FIND:
            MACDB[switch][port] = 'static SVI'
from nornir import InitNornir
from nornir_utils.plugins.functions import print_result
from nornir_utils.plugins.processors import PrintResult
from pprint import pprint
from processors import PrintProgress, SaveResultToDict, SaveResultToList
from napalm_tasks import my_napalm_get
from netmiko_tasks import my_netmiko_command

# Build Nornir in dry-run mode and attach the stock PrintResult processor
nr = InitNornir(config_file="config.yaml", dry_run=True)
nr_with_processors = nr.with_processors([PrintResult()])

# Run the NAPALM getter task and print the aggregated results
results = nr_with_processors.run(task=my_napalm_get)
print_result(results)
import logging

from nornir import InitNornir
from nornir3_demo.plugins.tasks import acmeos
from nornir3_demo.plugins.processors.logger import Logger
from nornir3_demo.plugins.processors.rich import ProgressBar

# Build the inventory from the ACME plugin; the progress bar needs the host count
nr = InitNornir(inventory={"plugin": "ACMEInventory"})
host_count = len(nr.inventory.hosts)

# Any number of processors can be stacked at once: here a progress bar
# plus a debug-level file logger
processors = [
    ProgressBar(host_count),
    Logger("upgrade_os.log", log_level=logging.DEBUG),
]
nr.with_processors(processors).run(task=acmeos.upgrade_os, version="5.3.2")