def scheduler_job(
    job_id: int,
    aps_job_id: Optional[str] = None,
    targets: Optional[Set[Device]] = None,
    payload: Optional[dict] = None,
) -> None:
    with controller.app.app_context():
        task = fetch("Task", creation_time=aps_job_id)
        job = fetch("Job", id=job_id)
        if targets:
            # The scheduler passes device ids: resolve them to Device objects.
            targets = {fetch("Device", id=device_id) for device_id in targets}
        results, now = job.try_run(targets=targets, payload=payload)
        parameters = get_one("Parameters")
        if job.push_to_git and parameters.git_automation:
            # Write the results to the git automation repository and push them.
            path_git_folder = Path.cwd() / "git" / "automation"
            with open(path_git_folder / job.name, "w") as file:
                file.write(str_dict(results))
            repo = Repo(str(path_git_folder))
            try:
                repo.git.add(A=True)
                repo.git.commit(m=f"Automatic commit ({job.name})")
            except GitCommandError:
                pass
            repo.remotes.origin.push()
        # One-off tasks (no frequency) are deactivated once they have run.
        if task and not task.frequency:
            task.is_active = False
        db.session.commit()

def delete_node(workflow_id: int, job_id: int) -> dict:
    workflow, job = fetch("Workflow", id=workflow_id), fetch("Job", id=job_id)
    workflow.jobs.remove(job)
    now = str(datetime.now())
    workflow.last_modified = now
    db.session.commit()
    return {"job": job.serialized, "update_time": now}

def __init__(self, **kwargs: Any) -> None:
    end = fetch("Service", name="End")
    default = [fetch("Service", name="Start"), end]
    self.jobs.extend(default)
    super().__init__(**kwargs)
    if self.name not in end.positions:
        end.positions[self.name] = (500, 0)

def create_default_workflows() -> None:
    name = "Configuration Management Workflow"
    workflow = factory(
        "Workflow",
        **{
            "name": name,
            "description": "Poll configuration and push to gitlab",
            "use_workflow_targets": False,
            "creator": fetch("User", name="admin").id,
        },
    )
    # Indices 0 and 1 are the Start and End services added by the Workflow
    # constructor; the poller and git push services land at indices 2 and 3.
    workflow.jobs.extend(
        [
            fetch("Service", name="poller_service"),
            fetch("Service", name="git_push_configurations"),
        ]
    )
    edges = [(0, 2, True), (2, 3, True), (2, 3, False), (3, 1, True)]
    for x, y, edge_type in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y} ({edge_type})",
                "workflow": workflow.id,
                "subtype": "success" if edge_type else "failure",
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    positions = [(-30, 0), (20, 0), (0, -20), (0, 30)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions[name] = x * 10, y * 10

def update_pools(pool_id: str) -> bool:
    if pool_id == "all":
        for pool in fetch_all("Pool"):
            pool.compute_pool()
    else:
        fetch("Pool", id=int(pool_id)).compute_pool()
    db.session.commit()
    return True

def save_positions(workflow_id: int) -> str:
    now = str(datetime.now())
    workflow = fetch("Workflow", id=workflow_id)
    workflow.last_modified = now
    session["workflow"] = workflow.id
    for job_id, position in request.json.items():
        job = fetch("Job", id=job_id)
        job.positions[workflow.name] = (position["x"], position["y"])
    db.session.commit()
    return now

def notify(self, results: dict, time: str) -> None:
    fetch("Job", name=self.send_notification_method).try_run(
        {
            "job": self.serialized,
            "results": self.results,
            "runtime": time,
            "result": results["results"]["success"],
            "content": self.build_notification(results, time),
        }
    )

def start(name: str, devices: str, payload: str) -> None:
    if devices:
        # "devices" is a comma-separated string of device names.
        targets = {
            fetch("Device", name=device_name)
            for device_name in devices.split(",")
        }
    else:
        targets = set()
    if payload:
        payload = loads(payload)
    job = fetch("Job", name=name)
    echo(str_dict(job.try_run(targets=targets, payload=payload)[0]))

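# Hedged usage sketch, assuming this function is exposed as a CLI command; the
# job name, device names, and payload below are hypothetical examples:
#   start("netmiko_check_vrf_test", "Washington,Austin", '{"key": "value"}')
# "payload" is a JSON string parsed with loads() before being passed to try_run.
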
def test_pool_management(user_client: FlaskClient) -> None:
    create_from_file(user_client, "europe.xls")
    user_client.post("/update/pool", data=pool1)
    user_client.post("/update/pool", data=pool2)
    p1, p2 = fetch("Pool", name="pool1"), fetch("Pool", name="pool2")
    assert len(p1.devices) == 21
    assert len(p1.links) == 20
    assert len(p2.devices) == 12
    assert len(p2.links) == 4
    assert len(fetch_all("Pool")) == 8
    user_client.post(f"/delete/pool/{p1.id}")
    user_client.post(f"/delete/pool/{p2.id}")
    assert len(fetch_all("Pool")) == 6

def post(self) -> Union[str, dict]:
    try:
        errors, targets, data = [], set(), request.get_json()
        job = fetch("Job", name=data["name"])
        if job.is_running:
            return {"error": "Job is already running."}
        handle_asynchronously = data.get("async", False)
        # Targets can be given by device name, device IP address, or pool name.
        for device_name in data.get("devices", []):
            device = fetch("Device", name=device_name)
            if device:
                targets.add(device)
            else:
                errors.append(f"No device with the name '{device_name}'")
        for device_ip in data.get("ip_addresses", []):
            device = fetch("Device", ip_address=device_ip)
            if device:
                targets.add(device)
            else:
                errors.append(f"No device with the IP address '{device_ip}'")
        for pool_name in data.get("pools", []):
            pool = fetch("Pool", name=pool_name)
            if pool:
                targets |= set(pool.devices)
            else:
                errors.append(f"No pool with the name '{pool_name}'")
        if errors and not targets:
            return {"errors": errors}
    except Exception as e:
        info(f"REST API run_job endpoint failed ({str(e)})")
        return str(e)
    if handle_asynchronously:
        # Defer execution to the scheduler; only device ids are serialized.
        scheduler.add_job(
            id=str(datetime.now()),
            func=scheduler_job,
            run_date=datetime.now(),
            args=[job.id, None, [d.id for d in targets], data.get("payload")],
            trigger="date",
        )
        return {**job.serialized, "errors": errors}
    else:
        return {
            **job.try_run(targets=targets, payload=data.get("payload"))[0],
            "errors": errors,
        }

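# A hedged sketch of the JSON body this endpoint accepts, inferred from the keys
# read above ("name", "devices", "ip_addresses", "pools", "async", "payload");
# the values are hypothetical examples:
# {
#     "name": "my_job",
#     "devices": ["Washington", "Austin"],
#     "ip_addresses": ["192.168.1.1"],
#     "pools": ["pool1"],
#     "async": true,
#     "payload": {"key": "value"}
# }
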
def results(id: int, runtime: str) -> str:
    job = fetch("Job", id=id)
    if not job:
        message = "The associated job has been deleted."
    else:
        message = job.results.get(runtime, "Results have been removed")
    return f"<pre>{dumps(message, indent=4)}</pre>"

def test_link_deletion(user_client: FlaskClient) -> None:
    create_from_file(user_client, "europe.xls")
    for link_name in links:
        link = fetch("Link", name=link_name)
        user_client.post(f"/delete/link/{link.id}")
    assert len(fetch_all("Device")) == 33
    assert len(fetch_all("Link")) == 38

def get_diff(device_id: int, v1: str, v2: str) -> dict:
    device = fetch("Device", id=device_id)
    d1, d2 = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in (v1, v2)]
    first = device.configurations[d1].splitlines()
    second = device.configurations[d2].splitlines()
    opcodes = SequenceMatcher(None, first, second).get_opcodes()
    return {"first": first, "second": second, "opcodes": opcodes}

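# Hedged usage sketch: v1 and v2 are configuration timestamps in the
# "%Y-%m-%d %H:%M:%S.%f" format parsed above (the values are hypothetical):
#   get_diff(device_id=1, v1="2019-06-01 12:00:00.000000", v2="2019-06-02 12:00:00.000000")
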
def connection(device_id: int) -> dict:
    parameters, device = get_one("Parameters"), fetch("Device", id=device_id)
    # Build the GoTTY command line that exposes a terminal to the device.
    cmd = [str(app.path / "applications" / "gotty"), "-w"]
    port, protocol = parameters.get_gotty_port(), request.form["protocol"]
    address = getattr(device, request.form["address"])
    cmd.extend(["-p", str(port)])
    if "accept-once" in request.form:
        cmd.append("--once")
    if "multiplexing" in request.form:
        cmd.extend(f"tmux new -A -s gotty{port}".split())
    if app.config["GOTTY_BYPASS_KEY_PROMPT"]:
        options = "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
    else:
        options = ""
    if protocol == "telnet":
        cmd.extend(f"telnet {address}".split())
    elif "authentication" in request.form:
        if request.form["credentials"] == "device":
            login, pwd = device.username, device.password
        else:
            login, pwd = current_user.name, current_user.password
        cmd.extend(f"sshpass -p {pwd} ssh {options} {login}@{address}".split())
    else:
        cmd.extend(f"ssh {options} {address}".split())
    if protocol != "telnet":
        cmd.extend(f"-p {device.port}".split())
    Popen(cmd)
    return {
        "device": device.name,
        "port": port,
        "redirection": app.config["GOTTY_PORT_REDIRECTION"],
        "server_addr": app.config["ENMS_SERVER_ADDR"],
    }

def test_device_deletion(user_client: FlaskClient) -> None:
    create_from_file(user_client, "europe.xls")
    for device_name in routers:
        device = fetch("Device", name=device_name)
        user_client.post(f"/delete/device/{device.id}")
    assert len(fetch_all("Device")) == 18
    assert len(fetch_all("Link")) == 18

def update(self, **kwargs: Any) -> None:
    if "source_name" in kwargs:
        kwargs["source"] = fetch("Device", name=kwargs.pop("source_name")).id
        kwargs["destination"] = fetch(
            "Device", name=kwargs.pop("destination_name")
        ).id
        kwargs.update(
            {"source_id": kwargs["source"], "destination_id": kwargs["destination"]}
        )
    super().update(**kwargs)
    for pool in fetch_all("Pool"):
        if pool.never_update:
            continue
        if pool.object_match(self):
            pool.links.append(self)
        elif self in pool.links:
            pool.links.remove(self)

def add_jobs_to_workflow(workflow_id: int) -> Dict[str, Any]:
    workflow = fetch("Workflow", id=workflow_id)
    jobs = objectify("Job", request.form["add_jobs"])
    for job in jobs:
        job.workflows.append(workflow)
    now = str(datetime.now())
    workflow.last_modified = now
    db.session.commit()
    return {"jobs": [job.serialized for job in jobs], "update_time": now}

def create_default_services() -> None:
    admin = fetch("User", name="admin").id
    for service in (
        {
            "type": "SwissArmyKnifeService",
            "name": "Start",
            "description": "Start point of a workflow",
            "creator": admin,
            "hidden": True,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "End",
            "description": "End point of a workflow",
            "creator": admin,
            "hidden": True,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "mail_feedback_notification",
            "description": "Mail notification (service logs)",
            "creator": admin,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "slack_feedback_notification",
            "description": "Slack notification (service logs)",
            "creator": admin,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "mattermost_feedback_notification",
            "description": "Mattermost notification (service logs)",
            "creator": admin,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "cluster_monitoring",
            "description": "Monitor eNMS cluster",
            "creator": admin,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "git_push_configurations",
            "description": "Push configurations to Gitlab",
            "creator": admin,
        },
        {
            "type": "SwissArmyKnifeService",
            "name": "poller_service",
            "description": "Configuration Management Poller",
            "creator": admin,
            "hidden": True,
        },
    ):
        factory(service.pop("type"), **service)

def workflow_builder() -> dict:
    workflow = fetch("Workflow", id=session.get("workflow", None))
    return dict(
        workflow=workflow.serialized if workflow else None,
        add_job_form=AddJobForm(request.form),
        workflow_builder_form=WorkflowBuilderForm(request.form),
        workflow_creation_form=JobForm(request.form),
        compare_results_form=CompareResultsForm(request.form),
        service_form=JobForm(request.form),
        services_classes=sorted(service_classes),
    )

def create_default_users() -> None:
    if not fetch("User", name="admin"):
        factory(
            "User",
            **{
                "name": "admin",
                "email": "*****@*****.**",
                "password": "******",
                "permissions": ["Admin"],
            },
        )

def create_default_tasks(app: Flask) -> None:
    tasks = [
        {
            "aps_job_id": "Poller",
            "name": "Poller",
            "description": "Back-up device configurations",
            "job": fetch("Workflow", name="Configuration Management Workflow").id,
            "frequency": 3600,
        },
        {
            "aps_job_id": "Cluster Monitoring",
            "name": "Cluster Monitoring",
            "description": "Monitor eNMS cluster",
            "job": fetch("Service", name="cluster_monitoring").id,
            "frequency": 15,
            "is_active": app.config["CLUSTER"],
        },
    ]
    for task in tasks:
        if not fetch("Task", name=task["name"]):
            factory("Task", **task)

def test_user_management(user_client: FlaskClient) -> None:
    for user in ("user1", "user2", "user3"):
        dict_user = {
            "list_fields": "permissions",
            "name": user,
            "email": f"{user}@test.com",
            "permissions": ["Admin"],
            "password": user,
        }
        user_client.post("/update/user", data=dict_user)
    assert len(fetch_all("User")) == 4
    user1 = fetch("User", name="user1")
    user_client.post(f"/delete/user/{user1.id}")
    assert len(fetch_all("User")) == 3

def create_napalm_workflow() -> None:
    admin = fetch("User", name="admin").id
    devices = [
        fetch("Device", name="Washington").id,
        fetch("Device", name="Austin").id,
    ]
    services = [
        factory(
            "NapalmConfigurationService",
            **{
                "name": "napalm_create_vrf_test",
                "description": 'Create a VRF "test" with Napalm',
                "waiting_time": 0,
                "devices": devices,
                "creator": admin,
                "driver": "eos",
                "vendor": "Arista",
                "operating_system": "eos",
                "content_type": "simple",
                "action": "load_merge_candidate",
                "content": "vrf definition test\n",
            },
        )
    ]
    services.extend(
        [
            fetch("Job", name="netmiko_check_vrf_test"),
            fetch("Job", name="netmiko_delete_vrf_test"),
            fetch("Job", name="netmiko_check_no_vrf_test"),
        ]
    )
    workflow = factory(
        "Workflow",
        **{
            "name": "Napalm_VRF_workflow",
            "description": "Create and delete a VRF with Napalm",
            "creator": admin,
            "devices": devices,
            "vendor": "Arista",
            "operating_system": "eos",
        },
    )
    workflow.jobs.extend(services)
    edges = [(0, 2), (2, 3), (3, 4), (4, 5), (5, 1)]
    for x, y in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y}",
                "workflow": workflow.id,
                "subtype": "success",
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    positions = [(-20, 0), (20, 0), (0, -15), (0, -5), (0, 5), (0, 15)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions["Napalm_VRF_workflow"] = x * 10, y * 10

def add_edge(workflow_id: int, subtype: str, source: int, dest: int) -> dict:
    workflow_edge = factory(
        "WorkflowEdge",
        **{
            "name": f"{workflow_id}-{subtype}:{source}->{dest}",
            "workflow": workflow_id,
            "subtype": subtype,
            "source": source,
            "destination": dest,
        },
    )
    now = str(datetime.now())
    fetch("Workflow", id=workflow_id).last_modified = now
    db.session.commit()
    return {"edge": workflow_edge.serialized, "update_time": now}

def create_workflow_of_workflows() -> None:
    admin = fetch("User", name="admin").id
    devices = [fetch("Device", name="Washington").id]
    workflow = factory(
        "Workflow",
        **{
            "name": "Workflow_of_workflows",
            "description": "Test the inner workflow system",
            "devices": devices,
            "creator": admin,
            "vendor": "Arista",
            "operating_system": "eos",
        },
    )
    workflow.jobs.extend(
        [
            fetch("Job", name="payload_transfer_workflow"),
            fetch("Job", name="get_interfaces"),
            fetch("Job", name="Napalm_VRF_workflow"),
        ]
    )
    edges = [(0, 2), (2, 3), (3, 4), (4, 1)]
    for x, y in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y}",
                "workflow": workflow.id,
                "subtype": "success",
                "devices": devices,
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    positions = [(-30, 0), (30, 0), (0, -20), (0, 0), (0, 20)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions["Workflow_of_workflows"] = x * 10, y * 10

def update_database_configurations_from_git(self, app: Flask) -> None:
    for dir in scandir(app.path / "git" / "configurations"):
        if dir.name == ".git":
            continue
        device = fetch("Device", name=dir.name)
        if device:
            with open(Path(dir.path) / "data.yml") as data:
                parameters = load(data)
                device.update(**parameters)
            with open(Path(dir.path) / dir.name) as f:
                time = parameters["last_update"]
                device.current_configuration = device.configurations[time] = f.read()
    db.session.commit()
    for pool in fetch_all("Pool"):
        if pool.device_current_configuration:
            pool.compute_pool()

def run_job(job_id: int) -> dict:
    job = fetch("Job", id=job_id)
    if job.is_running:
        return {"error": "Job is already running."}
    targets = job.compute_targets()
    if hasattr(job, "has_targets"):
        if job.has_targets and not targets:
            return {"error": "Set devices or pools as targets first."}
        if not job.has_targets and targets:
            return {"error": "This service should not have targets configured."}
    scheduler.add_job(
        id=str(datetime.now()),
        func=scheduler_job,
        run_date=datetime.now(),
        args=[job.id],
        trigger="date",
    )
    return job.serialized

def update(self, **kwargs: Any) -> None:
    serial = rel.get(self.__tablename__, rel["Service"])
    for property, value in kwargs.items():
        property_type = property_types.get(property, None)
        if property in serial:
            # Single relationship: the value is an id, fetch the related object.
            value = fetch(serial[property], id=value)
        elif property[:-1] in serial:
            # Plural relationship ("devices" -> "device"): a list of ids.
            value = objectify(serial[property[:-1]], value)
        elif property in boolean_properties:
            value = kwargs[property] not in (None, False)
        elif "regex" in property:
            value = property in kwargs
        elif property_type == "dict" and type(value) == str:
            value = loads(value) if value else {}
        elif property_type in ["float", "int"]:
            # Fall back to the column default when no value is provided.
            default_value = getattr(self.__table__.c, property).default
            if default_value and not value:
                value = default_value.arg
            value = {"float": float, "int": int}[property_type](value or 0)
        setattr(self, property, value)

def duplicate_workflow(workflow_id: int) -> dict:
    parent_workflow = fetch("Workflow", id=workflow_id)
    new_workflow = factory("Workflow", **request.form)
    for job in parent_workflow.jobs:
        new_workflow.jobs.append(job)
        job.positions[new_workflow.name] = job.positions[parent_workflow.name]
    for edge in parent_workflow.edges:
        subtype, src, destination = edge.subtype, edge.source, edge.destination
        new_workflow.edges.append(
            factory(
                "WorkflowEdge",
                **{
                    "name": f"{new_workflow.id}-{subtype}:{src.id}->{destination.id}",
                    "workflow": new_workflow.id,
                    "subtype": subtype,
                    "source": src.id,
                    "destination": destination.id,
                },
            )
        )
    db.session.commit()
    return new_workflow.serialized

def request_loader(request: Request) -> User:
    return fetch("User", name=request.form.get("name"))