def test_link_deletion(user_client: FlaskClient) -> None:
    """Import the Europe topology, delete every link in `links`, and verify
    that devices are untouched while the link count drops to 38."""
    create_from_file(user_client, "europe.xls")
    for name in links:
        obj = fetch("Link", name=name)
        user_client.post(f"/delete/link/{obj.id}")
    assert len(fetch_all("Device")) == 33
    assert len(fetch_all("Link")) == 38
def test_device_deletion(user_client: FlaskClient) -> None:
    """Import the Europe topology, delete every router in `routers`, and
    verify the remaining device and link counts."""
    create_from_file(user_client, "europe.xls")
    for name in routers:
        obj = fetch("Device", name=name)
        user_client.post(f"/delete/device/{obj.id}")
    assert len(fetch_all("Device")) == 18
    assert len(fetch_all("Link")) == 18
def poller_service(self, payload: dict) -> dict:
    """Run every configuration-backup service, then recompute the pools that
    filter on the device's current configuration."""
    backup_services = (
        service
        for service in fetch_all("Service")
        if getattr(service, "configuration_backup_service", False)
    )
    for service in backup_services:
        service.try_run()
    for pool in fetch_all("Pool"):
        if pool.device_current_configuration:
            pool.compute_pool()
    return {"success": True}
def test_base_services(user_client: FlaskClient) -> None:
    """Create a netmiko configuration service then a file-transfer service,
    checking both the per-subtype count and the global service count."""
    scenarios = (
        ("NetmikoConfigurationService", netmiko_ping, 3, 26),
        ("NetmikoFileTransferService", file_transfer_service, 1, 27),
    )
    for cls, payload, subtype_count, total_count in scenarios:
        user_client.post(f"/update/{cls}", data=payload)
        assert len(fetch_all(cls)) == subtype_count
        assert len(fetch_all("Service")) == total_count
def test_pool_management(user_client: FlaskClient) -> None:
    """Create two pools over the Europe topology, check their membership
    counts, then delete them and re-check the pool total."""
    create_from_file(user_client, "europe.xls")
    for payload in (pool1, pool2):
        user_client.post("/update/pool", data=payload)
    first = fetch("Pool", name="pool1")
    second = fetch("Pool", name="pool2")
    assert len(first.devices) == 21
    assert len(first.links) == 20
    assert len(second.devices) == 12
    assert len(second.links) == 4
    assert len(fetch_all("Pool")) == 8
    for pool in (first, second):
        user_client.post(f"/delete/pool/{pool.id}")
    assert len(fetch_all("Pool")) == 6
def test_manual_object_creation(user_client: FlaskClient) -> None:
    """Create two devices per device subtype, then fully mesh the first three
    devices with a link of every link subtype, and verify the totals."""
    for subtype in device_subtypes:
        for description in ("desc1", "desc2"):
            user_client.post(
                "/update/device", data=define_device(subtype, description)
            )
    for subtype in link_subtypes:
        first_three = fetch_all("Device")[:3]
        for source in first_three:
            for destination in first_three:
                user_client.post(
                    "/update/link",
                    data=define_link(subtype, source.name, destination.name),
                )
    assert len(fetch_all("Device")) == 44
    assert len(fetch_all("Link")) == 82
def test_user_management(user_client: FlaskClient) -> None:
    """Create three users (4 total with the fixture's admin), then delete
    one and verify the count drops back to 3."""
    for user in ("user1", "user2", "user3"):
        dict_user = {
            "list_fields": "permissions",
            "name": user,
            "email": f"{user}@test.com",
            "permissions": ["Admin"],
            "password": user,
        }
        user_client.post("/update/user", data=dict_user)
    assert len(fetch_all("User")) == 4
    user1 = fetch("User", name="user1")
    # f-string instead of str.format, for consistency with the other
    # deletion tests in this suite
    user_client.post(f"/delete/user/{user1.id}")
    assert len(fetch_all("User")) == 3
def object_import(request: dict, file: FileStorage) -> str:
    """Import devices and links from an uploaded Excel workbook.

    When ``request["replace"]`` is truthy, all existing devices are deleted
    first. Each row of the "Device" and "Link" sheets is turned into an
    object via ``factory``; rows that fail are logged and the import is
    reported as partial. Pools are recomputed once at the end (per-row pool
    updates are suppressed with ``dont_update_pools``).
    """
    if request["replace"]:
        delete_all("Device")
    result = "Topology successfully imported."
    if allowed_file(secure_filename(file.filename), {"xls", "xlsx"}):
        book = open_workbook(file_contents=file.read())
        for obj_type in ("Device", "Link"):
            try:
                sheet = book.sheet_by_name(obj_type)
            except XLRDError:
                # sheet missing for this type: nothing to import
                continue
            header = sheet.row_values(0)
            for row_index in range(1, sheet.nrows):
                values = dict(zip(header, sheet.row_values(row_index)))
                values["dont_update_pools"] = True
                try:
                    factory(obj_type, **values).serialized
                except Exception as failure:
                    info(f"{str(values)} could not be imported ({str(failure)})")
                    result = "Partial import (see logs)."
        db.session.commit()
    for pool in fetch_all("Pool"):
        pool.compute_pool()
    db.session.commit()
    info("Inventory import: Done.")
    return result
def job(self, payload: dict, device: Device) -> dict:
    """Archive all job logs into a .tgz and upload it to `device` over SSH.

    Writes one JSON file per job under ``logs/job_logs/logs_<now>/``, tars
    the directory, transfers the archive to ``self.destination_path``, then
    optionally removes the local folder and/or archive.

    :param payload: job payload (unused here).
    :param device: target device providing ip_address/username/password.
    :return: success flag and a message naming the remote destination.
    """
    path_backup = Path.cwd() / "logs" / "job_logs"
    now = strip_all(str(datetime.now()))
    path_dir = path_backup / f"logs_{now}"
    source = path_backup / f"logs_{now}.tgz"
    makedirs(path_dir)
    for job in fetch_all("Job"):
        with open(path_dir / f"{job.name}.json", "w") as log_file:
            dump(job.logs, log_file)
    with open_tar(source, "w:gz") as tar:
        tar.add(path_dir, arcname="/")
    ssh_client = SSHClient()
    ssh_client.set_missing_host_key_policy(AutoAddPolicy())
    ssh_client.connect(
        device.ip_address,
        username=device.username,
        password=device.password,
        look_for_keys=False,
    )
    destination = f"{self.destination_path}/logs_{now}.tgz"
    try:
        self.transfer_file(ssh_client, [(source, destination)])
    finally:
        # always release the SSH connection, even if the transfer raises
        ssh_client.close()
    if self.delete_folder:
        rmtree(path_dir)
    if self.delete_archive:
        remove(source)
    return {
        "success": True,
        "result": f"logs stored in {destination} ({device.ip_address})",
    }
def update_pools(pool_id: str) -> bool:
    """Recompute one pool by id, or every pool when `pool_id` is "all",
    then commit the changes."""
    targets = (
        fetch_all("Pool")
        if pool_id == "all"
        else [fetch("Pool", id=int(pool_id))]
    )
    for pool in targets:
        pool.compute_pool()
    db.session.commit()
    return True
def export_to_google_earth() -> bool:
    """Render every device as a KML point and every link as a KML line,
    styled per subtype, and save the result as a .kmz named by the form."""
    kml_file = Kml()
    label_size = request.form["label_size"]
    line_width = request.form["line_width"]
    for device in fetch_all("Device"):
        point = kml_file.newpoint(name=device.name)
        point.coords = [(device.longitude, device.latitude)]
        point.style = google_earth_styles[device.subtype]
        point.style.labelstyle.scale = label_size
    for link in fetch_all("Link"):
        line = kml_file.newlinestring(name=link.name)
        line.coords = [
            (link.source.longitude, link.source.latitude),
            (link.destination.longitude, link.destination.latitude),
        ]
        line.style = google_earth_styles[link.subtype]
        line.style.linestyle.width = line_width
    kml_file.save(app.path / "google_earth" / f'{request.form["name"]}.kmz')
    return True
def update(self, **kwargs: Any) -> None:
    """Apply property updates, then refresh this object's membership in
    every auto-updating pool.

    Honors the ``dont_update_pools`` flag (set by bulk imports, which
    recompute pools once at the end) — matching the behavior of the other
    ``update`` overrides in this codebase.
    """
    super().update(**kwargs)
    if kwargs.get("dont_update_pools", False):
        return
    for pool in fetch_all("Pool"):
        if pool.never_update:
            continue
        if pool.object_match(self):
            pool.devices.append(self)
        elif self in pool.devices:
            pool.devices.remove(self)
def test_create_logs(user_client: FlaskClient) -> None:
    """Persist two Log records directly through the ORM and count them."""
    for content in (log1, log2):
        entry = Log(ip_address="192.168.1.88", content=content, log_rules=[])
        db.session.add(entry)
        db.session.commit()
    assert len(fetch_all("Log")) == 2
def update(self, **kwargs: Any) -> None:
    """Apply property updates and, unless pool updates are suppressed via
    ``dont_update_pools``, re-evaluate this object's membership in every
    auto-updating pool."""
    super().update(**kwargs)
    if kwargs.get("dont_update_pools", False):
        return
    updatable_pools = (
        pool for pool in fetch_all("Pool") if not pool.never_update
    )
    for pool in updatable_pools:
        if pool.object_match(self):
            pool.devices.append(self)
        elif self in pool.devices:
            pool.devices.remove(self)
def cluster_monitoring(self, payload: dict) -> dict:
    """Poll the liveness endpoint of every registered instance and refresh
    its database record with the properties the endpoint returns."""
    parameters = get_one("Parameters")
    protocol = parameters.cluster_scan_protocol
    timeout = parameters.cluster_scan_timeout
    for instance in fetch_all("Instance"):
        url = f"{protocol}://{instance.ip_address}/rest/is_alive"
        factory("Instance", **get(url, timeout=timeout).json())
    return {"success": True}
def database_helpers() -> bool:
    """Delete the requested object types and prune each job's logs down to
    entries newer than the supplied cutoff date."""
    # NOTE(review): request.form["deletion_types"] is star-unpacked; if the
    # form sends a plain string this unpacks per-character — confirm the
    # caller posts a list-valued field.
    delete_all(*request.form["deletion_types"])
    clear_logs_date = request.form["clear_logs_date"]
    if clear_logs_date:
        clear_date = datetime.strptime(clear_logs_date, "%d/%m/%Y %H:%M:%S")
        for job in fetch_all("Job"):
            kept_logs = {}
            for date, log in job.logs.items():
                if datetime.strptime(date, "%Y-%m-%d-%H:%M:%S.%f") > clear_date:
                    kept_logs[date] = log
            job.logs = kept_logs
    db.session.commit()
    return True
def dashboard() -> dict:
    """Build the dashboard payload: diagram properties, per-class object
    counters, and counts of currently running/scheduled activity."""

    def count_with_status(cls: str, status: str) -> int:
        # one status-filtered count per activity tile
        return sum(1 for obj in fetch_all(cls) if obj.status == status)

    on_going = {
        "Running services": count_with_status("Service", "Running"),
        "Running workflows": count_with_status("Workflow", "Running"),
        "Scheduled tasks": count_with_status("Task", "Active"),
    }
    counters = {cls: len(fetch_all_visible(cls)) for cls in classes}
    counters.update(on_going)
    return dict(
        properties=type_to_diagram_properties,
        default_properties=default_diagrams_properties,
        counters=counters,
    )
def update(self, **kwargs: Any) -> None:
    """Resolve source/destination device names into ids when provided, apply
    the update, then refresh this link's membership in auto-updating pools."""
    if "source_name" in kwargs:
        source = fetch("Device", name=kwargs.pop("source_name"))
        destination = fetch("Device", name=kwargs.pop("destination_name"))
        kwargs["source"] = kwargs["source_id"] = source.id
        kwargs["destination"] = kwargs["destination_id"] = destination.id
    super().update(**kwargs)
    for pool in fetch_all("Pool"):
        if pool.never_update:
            continue
        if pool.object_match(self):
            pool.links.append(self)
        elif self in pool.links:
            pool.links.remove(self)
def calendar() -> dict:
    """Return all scheduled tasks, each with its next run time converted to
    the [year, month, day, hour, minute] list the JS calendar expects."""
    tasks = {}
    for task in fetch_all("Task"):
        date = task.next_run_time
        if not date:
            continue
        # JavaScript months run 0-11, so the parsed month is shifted down
        # by one before being spliced back into the date string.
        python_month = search(r".*-(\d{2})-.*", date).group(1)  # type: ignore
        month = "{:02}".format((int(python_month) - 1) % 12)
        rewritten = sub(
            r"(\d+)-(\d+)-(\d+) (\d+):(\d+).*",
            r"\1," + month + r",\3,\4,\5",
            date,
        )
        js_date = [int(part) for part in rewritten.split(",")]
        tasks[task.name] = {**task.serialized, "date": js_date}
    return dict(tasks=tasks, scheduling_form=SchedulingForm(request.form))
def update_database_configurations_from_git(self, app: Flask) -> None:
    """Sync device data and stored configurations from the git
    "configurations" repository back into the database, then recompute the
    pools that filter on current configuration."""
    for entry in scandir(app.path / "git" / "configurations"):
        if entry.name == ".git":
            continue
        device = fetch("Device", name=entry.name)
        if not device:
            continue
        with open(Path(entry.path) / "data.yml") as data:
            # NOTE(review): yaml `load` without an explicit Loader — unsafe
            # on untrusted repositories; confirm the repo is trusted.
            parameters = load(data)
            device.update(**parameters)
        with open(Path(entry.path) / entry.name) as f:
            time = parameters["last_update"]
            device.current_configuration = device.configurations[time] = f.read()
    db.session.commit()
    for pool in fetch_all("Pool"):
        if pool.device_current_configuration:
            pool.compute_pool()
def get_counters(property: str, type: str) -> Counter:
    """Count occurrences of each value of `property` across all objects of
    `type`; pretty property names are mapped back to attribute names."""
    attribute = reverse_pretty_names.get(property, property)
    return Counter(str(getattr(obj, attribute)) for obj in fetch_all(type))
def test_object_creation_europe(user_client: FlaskClient) -> None:
    """Importing europe.xls yields 33 devices and 49 links."""
    create_from_file(user_client, "europe.xls")
    for model, expected in (("Device", 33), ("Link", 49)):
        assert len(fetch_all(model)) == expected
def test_object_creation_type(user_client: FlaskClient) -> None:
    """Importing device_counters.xls yields 27 devices and no links."""
    create_from_file(user_client, "device_counters.xls")
    for model, expected in (("Device", 27), ("Link", 0)):
        assert len(fetch_all(model)) == expected
def reset_status() -> bool:
    """Clear the is_running flag on every job and persist the change."""
    for stale_job in fetch_all("Job"):
        stale_job.is_running = False
    db.session.commit()
    return True
def get_logs(device_id: int) -> Union[str, bool]:
    """Return the newline-joined contents of all logs whose source matches
    the device's IP address, or True when no logs match.

    :param device_id: database id of the device whose logs are requested.
    """
    # fetch the device once, not once per log entry as before
    device_ip = fetch("Device", id=device_id).ip_address
    device_logs = [
        log.content for log in fetch_all("Log") if log.source == device_ip
    ]
    return "\n".join(device_logs) or True
def test_getters_service(user_client: FlaskClient) -> None:
    """Posting the getters payload results in 5 NapalmGettersService objects."""
    user_client.post("/update/NapalmGettersService", data=getters_dict)
    services = fetch_all("NapalmGettersService")
    assert len(services) == 5
def test_netmiko_napalm_config(user_client: FlaskClient) -> None:
    """Create an instant task then a scheduled task over the Europe topology,
    checking the task count after each creation."""
    create_from_file(user_client, "europe.xls")
    for payload, expected in ((instant_task, 3), (scheduled_task, 4)):
        user_client.post("/update/task", data=payload)
        assert len(fetch_all("Task")) == expected
def test_ansible_services(user_client: FlaskClient) -> None:
    """Creating the ansible playbook service yields one instance of that
    subtype and 26 services overall."""
    user_client.post("/update/AnsiblePlaybookService", data=ansible_service)
    assert len(fetch_all("AnsiblePlaybookService")) == 1
    assert len(fetch_all("Service")) == 26
def test_rest_api_basic(user_client: FlaskClient) -> None:
    """Exercise the REST API over HTTP with basic auth: create a device,
    update an existing device via POST, verify PUT leaves an existing
    service untouched, then create a new service and a new workflow."""
    base = "http://192.168.105.2:5000/rest/instance"
    auth = HTTPBasicAuth("admin", "admin")
    assert len(fetch_all("Device")) == 28
    post(
        f"{base}/device",
        json={"name": "new_router", "model": "Cisco"},
        auth=auth,
    )
    # assert len(fetch_all("Device")) == 29
    result = get(f"{base}/device/Washington", auth=auth).json()
    assert result["model"] == "Arista" and len(result) == 21
    # POSTing an existing name updates the object in place
    post(
        f"{base}/device",
        json={"name": "Washington", "model": "Cisco"},
        auth=auth,
    )
    result = get(f"{base}/device/Washington", auth=auth).json()
    assert result["model"] == "Cisco" and len(result) == 21
    result = get(f"{base}/service/get_facts", auth=auth).json()
    assert result["description"] == "Getter: get_facts" and len(result) == 39
    # the PUT below must leave the description unchanged (asserted after)
    put(
        f"{base}/service",
        json={"name": "get_facts", "description": "Get facts"},
        auth=auth,
    )
    result = get(f"{base}/service/get_facts", auth=auth).json()
    assert result["description"] == "Getter: get_facts" and len(result) == 39
    assert len(fetch_all("Service")) == 25
    result = post(
        f"{base}/service",
        json={"name": "new_service", "vendor": "Cisco"},
        auth=auth,
    ).json()
    assert result["vendor"] == "Cisco" and len(fetch_all("Service")) == 26
    assert len(fetch_all("Workflow")) == 5
    result = post(
        f"{base}/workflow",
        json={"name": "new_workflow", "description": "New"},
        auth=auth,
    ).json()
    assert result["description"] == "New" and len(fetch_all("Workflow")) == 6
def get_cluster_status() -> dict:
    """Collect each attribute ("status", "cpu_load") across all instances,
    returning one list of values per attribute."""
    instances = fetch_all("Instance")
    status = {}
    for attr in ("status", "cpu_load"):
        status[attr] = [getattr(instance, attr) for instance in instances]
    return status