def __init__(self, **kwargs: Any) -> None:
    """Workflow constructor: every workflow implicitly contains the
    built-in "Start" and "End" services as its first two jobs.

    Note: self.jobs is extended BEFORE super().__init__ runs, so the
    Start/End services always occupy indices 0 and 1 (the seeding code
    elsewhere in this file relies on that ordering).
    """
    end = fetch("Service", name="End")
    default = [fetch("Service", name="Start"), end]
    self.jobs.extend(default)
    super().__init__(**kwargs)
    # Give the "End" node a default canvas position for this workflow
    # if it does not have one yet.
    if self.name not in end.positions:
        end.positions[self.name] = (500, 0)
def create_default_workflows() -> None:
    """Seed the built-in "Configuration Management Workflow".

    The workflow polls device configurations (poller_service) and pushes
    them to GitLab (git_push_configurations).
    """
    name = "Configuration Management Workflow"
    workflow = factory(
        "Workflow",
        **{
            "name": name,
            "description": "Poll configuration and push to gitlab",
            "use_workflow_targets": False,
            "creator": fetch("User", name="admin").id,
        },
    )
    # Indices 0 and 1 are the implicit Start/End services added by the
    # Workflow constructor; the two services below land at indices 2 and 3.
    workflow.jobs.extend([
        fetch("Service", name="poller_service"),
        fetch("Service", name="git_push_configurations"),
    ])
    # (source job index, destination job index, True=success / False=failure)
    edges = [(0, 2, True), (2, 3, True), (2, 3, False), (3, 1, True)]
    for x, y, edge_type in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y} ({edge_type})",
                "workflow": workflow.id,
                "subtype": "success" if edge_type else "failure",
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    # Canvas coordinates for the workflow builder, scaled by 10.
    positions = [(-30, 0), (20, 0), (0, -20), (0, 30)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions[name] = x * 10, y * 10
def post(self) -> Union[str, dict]:
    """REST endpoint: run a job against a set of target devices.

    Targets may be given by device name, by IP address, or via pools.
    With "async" in the payload the run is handed to the scheduler and
    the serialized job is returned; otherwise the job runs inline and
    its first result is returned. On any lookup failure the error text
    is logged and returned as a string.
    """
    payload = request.get_json()
    job = fetch("Job", name=payload["name"])
    run_asynchronously = payload.get("async", False)
    try:
        targets = set()
        for device_name in payload.get("devices", ""):
            targets.add(fetch("Device", name=device_name))
        for ip_address in payload.get("ip_addresses", ""):
            targets.add(fetch("Device", ip_address=ip_address))
        for pool_name in payload.get("pools", ""):
            targets.update(fetch("Pool", name=pool_name).devices)
    except Exception as e:
        info(f"REST API run_job endpoint failed ({str(e)})")
        return str(e)
    if not run_asynchronously:
        return job.try_run(targets=targets)[0]
    scheduler.add_job(
        id=str(datetime.now()),
        func=scheduler_job,
        run_date=datetime.now(),
        args=[job.id, None, [d.id for d in targets]],
        trigger="date",
    )
    return job.serialized
def update_pools(pool_id: str) -> bool:
    """Recompute pool membership for one pool, or for all of them.

    :param pool_id: a pool database id, or the literal string "all".
    :return: always True (consumed by the frontend as a success flag).
    """
    if pool_id == "all":
        pools = fetch_all("Pool")
    else:
        pools = [fetch("Pool", id=int(pool_id))]
    for pool in pools:
        pool.compute_pool()
    db.session.commit()
    return True
def save_positions(workflow_id: int) -> bool:
    """Persist the canvas coordinates of every job of a workflow.

    The request body maps job ids to {"x": ..., "y": ...} positions.
    Also remembers this workflow as the active one in the session.
    """
    workflow = fetch("Workflow", id=workflow_id)
    session["workflow"] = workflow.id
    for job_id, coordinates in request.json.items():
        fetch("Job", id=job_id).positions[workflow.name] = (
            coordinates["x"],
            coordinates["y"],
        )
    db.session.commit()
    return True
def update(self, **kwargs: Any) -> None:
    """Resolve endpoint device names into database ids, then delegate
    to the generic update.

    When "source_name" is present, "destination_name" is expected too;
    both are replaced by the matching device ids under the
    source/destination and source_id/destination_id keys.
    """
    if "source_name" in kwargs:
        source_id = fetch("Device", name=kwargs.pop("source_name")).id
        destination_id = fetch("Device", name=kwargs.pop("destination_name")).id
        kwargs["source"] = kwargs["source_id"] = source_id
        kwargs["destination"] = kwargs["destination_id"] = destination_id
    super().update(**kwargs)
def notify(self, results: dict, time: str) -> None:
    """Send a job-completion notification through the configured
    notification service (mail / slack / mattermost)."""
    notification_payload = {
        "job": self.serialized,
        "logs": self.logs,
        "runtime": time,
        "result": results["success"],
        "content": self.build_notification(results, time),
    }
    fetch("Job", name=self.send_notification_method).try_run(
        notification_payload
    )
def test_pool_management(user_client: FlaskClient) -> None:
    """Create two pools, check their computed members, then delete them."""
    create_from_file(user_client, "europe.xls")
    for pool_data in (pool1, pool2):
        user_client.post("/update/pool", data=pool_data)
    first = fetch("Pool", name="pool1")
    second = fetch("Pool", name="pool2")
    assert len(first.devices) == 21
    assert len(first.links) == 20
    assert len(second.devices) == 12
    assert len(second.links) == 4
    assert len(fetch_all("Pool")) == 5
    for pool in (first, second):
        user_client.post(f"/delete/pool/{pool.id}")
    assert len(fetch_all("Pool")) == 3
def test_link_deletion(user_client: FlaskClient) -> None:
    """Delete a known set of links and verify the remaining counts."""
    create_from_file(user_client, "europe.xls")
    for name in links:
        link_id = fetch("Link", name=name).id
        user_client.post(f"/delete/link/{link_id}")
    assert len(fetch_all("Device")) == 33
    assert len(fetch_all("Link")) == 38
def logs(id: int, runtime: str) -> str:
    """Return the logs of one job run as pre-formatted JSON HTML.

    Falls back to an explanatory message when the job or the runtime's
    logs no longer exist.
    """
    job = fetch("Job", id=id)
    if job:
        message = job.logs.get(runtime, "Logs have been removed")
    else:
        message = "The associated job has been deleted."
    return f"<pre>{dumps(message, indent=4)}</pre>"
def test_device_deletion(user_client: FlaskClient) -> None:
    """Delete all routers and verify the remaining device/link counts."""
    create_from_file(user_client, "europe.xls")
    for name in routers:
        device_id = fetch("Device", name=name).id
        user_client.post(f"/delete/device/{device_id}")
    assert len(fetch_all("Device")) == 18
    assert len(fetch_all("Link")) == 18
def get_diff(device_id: int, v1: str, v2: str) -> dict:
    """Compare two stored configurations of a device.

    :param v1, v2: timestamps ("%Y-%m-%d %H:%M:%S.%f") keying the
        device's configurations dict.
    :return: both configurations split into lines plus the
        SequenceMatcher opcodes describing how to turn one into the other.
    """
    device = fetch("Device", id=device_id)
    timestamp_format = "%Y-%m-%d %H:%M:%S.%f"
    d1 = datetime.strptime(v1, timestamp_format)
    d2 = datetime.strptime(v2, timestamp_format)
    first = device.configurations[d1].splitlines()
    second = device.configurations[d2].splitlines()
    return {
        "first": first,
        "second": second,
        "opcodes": SequenceMatcher(None, first, second).get_opcodes(),
    }
def connection(device_id: int) -> dict:
    """Spawn a GoTTY web-terminal process connected to a device.

    The form selects the protocol (ssh/telnet), which device address
    attribute to use, the credential source, and gotty options
    (accept-once, tmux multiplexing). Returns the connection details
    the frontend needs to open the terminal.
    """
    parameters, device = get_one("Parameters"), fetch("Device", id=device_id)
    cmd = [str(app.path / "applications" / "gotty"), "-w"]
    port, protocol = parameters.get_gotty_port(), request.form["protocol"]
    # e.g. "ip_address" or "name" — whichever attribute the form chose.
    address = getattr(device, request.form["address"])
    cmd.extend(["-p", str(port)])
    if "accept-once" in request.form:
        cmd.append("--once")
    if "multiplexing" in request.form:
        cmd.extend(f"tmux new -A -s gotty{port}".split())
    if app.config["GOTTY_BYPASS_KEY_PROMPT"]:
        options = "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
    else:
        options = ""
    if protocol == "telnet":
        cmd.extend(f"telnet {address}".split())
    elif "authentication" in request.form:
        if request.form["credentials"] == "device":
            login, pwd = device.username, device.password
        else:
            login, pwd = current_user.name, current_user.password
        # NOTE(review): sshpass receives the password on the command line,
        # which exposes it in the process list — consider the SSHPASS env
        # variable instead. Confirm before changing: gotty re-runs this cmd.
        cmd.extend(f"sshpass -p {pwd} ssh {options} {login}@{address}".split())
    else:
        cmd.extend(f"ssh {options} {address}".split())
    if protocol != "telnet":
        cmd.extend(f"-p {device.port}".split())
    Popen(cmd)
    return {
        "device": device.name,
        "port": port,
        "redirection": app.config["GOTTY_PORT_REDIRECTION"],
        "server_addr": app.config["ENMS_SERVER_ADDR"],
    }
def add_jobs_to_workflow(workflow_id: int) -> List[dict]:
    """Attach the jobs selected in the form to a workflow and return
    their serialized representations."""
    workflow = fetch("Workflow", id=workflow_id)
    new_jobs = objectify("Job", request.form["add_jobs"])
    for new_job in new_jobs:
        new_job.workflows.append(workflow)
    workflow.last_modified = str(datetime.now())
    db.session.commit()
    return [new_job.serialized for new_job in new_jobs]
def update_database_configurations_from_git(self, app: Flask) -> None:
    """Import the configuration files pulled from git into the database.

    Each file in git/configurations is named after a device; its content
    is stored both as the device's current configuration and under the
    most recent configuration timestamp.
    """
    for file in scandir(app.path / "git" / "configurations"):
        device = fetch("Device", name=file.name)
        if not device:
            continue
        time = max(device.configurations, default=datetime.now())
        with open(file) as f:
            configuration = f.read()
        device.configurations[time] = configuration
        device.current_configurations = configuration
    db.session.commit()
def create_default_services() -> None:
    """Seed the built-in SwissArmyKnife services.

    Hidden services (Start, End, poller_service) are internal plumbing
    and do not appear in the service list of the UI.
    """
    admin = fetch("User", name="admin").id
    # (name, description, hidden)
    defaults = [
        ("Start", "Start point of a workflow", True),
        ("End", "End point of a workflow", True),
        ("mail_feedback_notification", "Mail notification (service logs)", False),
        ("slack_feedback_notification", "Slack notification (service logs)", False),
        (
            "mattermost_feedback_notification",
            "Mattermost notification (service logs)",
            False,
        ),
        ("cluster_monitoring", "Monitor eNMS cluster", False),
        ("git_push_configurations", "Push configurations to Gitlab", False),
        ("poller_service", "Configuration Management Poller", True),
    ]
    for name, description, hidden in defaults:
        properties = {
            "name": name,
            "description": description,
            "creator": admin,
        }
        if hidden:
            properties["hidden"] = True
        factory("SwissArmyKnifeService", **properties)
def create_default_users() -> None:
    """Create the admin account on first startup (idempotent)."""
    if fetch("User", name="admin"):
        return
    factory(
        "User",
        name="admin",
        email="*****@*****.**",
        password="******",
        permissions=["Admin"],
    )
def workflow_builder() -> dict:
    """Template context for the workflow builder page: the session's
    current workflow (if any) plus every form the page renders."""
    workflow = fetch("Workflow", id=session.get("workflow", None))
    serialized_workflow = workflow.serialized if workflow else None
    return dict(
        workflow=serialized_workflow,
        add_job_form=AddJobForm(request.form),
        workflow_builder_form=WorkflowBuilderForm(request.form),
        workflow_creation_form=JobForm(request.form),
        compare_logs_form=CompareLogsForm(request.form),
        service_form=JobForm(request.form),
        services_classes=list(service_classes),
    )
def create_default_tasks(app: Flask) -> None:
    """Seed the two recurring scheduler tasks (idempotent by name)."""
    poller_task = {
        "aps_job_id": "Poller",
        "name": "Poller",
        "description": "Back-up device configurations",
        "job": fetch("Workflow", name="Configuration Management Workflow").id,
        "frequency": 3600,
    }
    monitoring_task = {
        "aps_job_id": "Cluster Monitoring",
        "name": "Cluster Monitoring",
        "description": "Monitor eNMS cluster",
        "job": fetch("Service", name="cluster_monitoring").id,
        "frequency": 15,
        # Only active when running as part of a cluster.
        "is_active": app.config["CLUSTER"],
    }
    for task in (poller_task, monitoring_task):
        if not fetch("Task", name=task["name"]):
            factory("Task", **task)
def test_user_management(user_client: FlaskClient) -> None:
    """Create three users, then delete one, checking counts each time."""
    for name in ("user1", "user2", "user3"):
        user_client.post(
            "/update/user",
            data={
                "list_fields": "permissions",
                "name": name,
                "email": f"{name}@test.com",
                "permissions": ["Admin"],
                "password": name,
            },
        )
    assert len(fetch_all("User")) == 4
    first_user = fetch("User", name="user1")
    user_client.post(f"/delete/user/{first_user.id}")
    assert len(fetch_all("User")) == 3
def add_edge(workflow_id: int, subtype: str, source: int, dest: int) -> dict:
    """Create an edge between two jobs of a workflow and bump the
    workflow's last-modified timestamp."""
    edge_properties = {
        "name": f"{workflow_id}-{subtype}:{source}->{dest}",
        "workflow": workflow_id,
        "subtype": subtype,
        "source": source,
        "destination": dest,
    }
    new_edge = factory("WorkflowEdge", **edge_properties)
    fetch("Workflow", id=workflow_id).last_modified = str(datetime.now())
    db.session.commit()
    return new_edge.serialized
def create_napalm_workflow() -> None:
    """Seed the "Napalm_VRF_workflow" example workflow.

    Creates a Napalm service that loads a VRF "test", chains it with
    three pre-existing netmiko jobs (check / delete / check-no-vrf),
    and lays the jobs out on the workflow-builder canvas.

    Fix: dropped the pointless f-string prefixes on the two constant
    job names (they contained no placeholders).
    """
    admin = fetch("User", name="admin").id
    devices = [
        fetch("Device", name="Washington").id,
        fetch("Device", name="Austin").id,
    ]
    services = [
        factory(
            "NapalmConfigurationService",
            **{
                "name": "napalm_create_vrf_test",
                "description": 'Create a VRF "test" with Napalm',
                "waiting_time": 0,
                "devices": devices,
                "creator": admin,
                "driver": "eos",
                "vendor": "Arista",
                "operating_system": "eos",
                "content_type": "simple",
                "action": "load_merge_candidate",
                "content": "vrf definition test\n",
            },
        )
    ]
    services.extend([
        fetch("Job", name="netmiko_check_vrf_test"),
        fetch("Job", name="netmiko_delete_vrf_test"),
        fetch("Job", name="netmiko_check_no_vrf_test"),
    ])
    workflow = factory(
        "Workflow",
        **{
            "name": "Napalm_VRF_workflow",
            "description": "Create and delete a VRF with Napalm",
            "creator": admin,
            "devices": devices,
            "vendor": "Arista",
            "operating_system": "eos",
        },
    )
    workflow.jobs.extend(services)
    # Indices 0/1 are the implicit Start/End jobs added by the Workflow
    # constructor; the four services above occupy indices 2-5.
    edges = [(0, 2), (2, 3), (3, 4), (4, 5), (5, 1)]
    for x, y in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y}",
                "workflow": workflow.id,
                "subtype": "success",
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    # Canvas coordinates, scaled by 10.
    positions = [(-20, 0), (20, 0), (0, -15), (0, -5), (0, 5), (0, 15)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions["Napalm_VRF_workflow"] = x * 10, y * 10
def scheduler_job(job_id: int, aps_job_id: Optional[str] = None,
                  targets: Optional[Set[Device]] = None) -> None:
    """APScheduler entry point: run a job inside a Flask app context.

    :param job_id: database id of the Job to run.
    :param aps_job_id: creation time of the Task that scheduled this run
        (used to find the Task back), or None for ad-hoc runs.
    :param targets: optional collection of device ids to run against
        (despite the Set[Device] annotation, ids are what the callers
        in this file pass — they are resolved to Device objects below).
    """
    with scheduler.app.app_context():
        task = fetch("Task", creation_time=aps_job_id)
        job = fetch("Job", id=job_id)
        if targets:
            targets = {fetch("Device", id=device_id) for device_id in targets}
        results, now = job.try_run(targets=targets)
        parameters = get_one("Parameters")
        # Optionally archive the results in the git automation repository.
        if job.push_to_git and parameters.git_automation:
            path_git_folder = Path.cwd() / "git" / "automation"
            with open(path_git_folder / job.name, "w") as file:
                file.write(str_dict(results))
            repo = Repo(str(path_git_folder))
            try:
                repo.git.add(A=True)
                repo.git.commit(m=f"Automatic commit ({job.name})")
            except GitCommandError:
                # Commit fails when there is nothing new to commit; the
                # push below still runs for previously committed work.
                pass
            repo.remotes.origin.push()
        # A task without a frequency is one-shot: deactivate it after run.
        if task and not task.frequency:
            task.is_active = False
        db.session.commit()
def create_workflow_of_workflows() -> None:
    """Seed "Workflow_of_workflows", which tests nesting workflows as
    jobs inside another workflow."""
    admin = fetch("User", name="admin").id
    devices = [fetch("Device", name="Washington").id]
    workflow = factory(
        "Workflow",
        **{
            "name": "Workflow_of_workflows",
            "description": "Test the inner workflow system",
            "devices": devices,
            "creator": admin,
            "vendor": "Arista",
            "operating_system": "eos",
        },
    )
    # Indices 0/1 are the implicit Start/End jobs; the three inner jobs
    # below (two of them workflows) occupy indices 2-4.
    workflow.jobs.extend([
        fetch("Job", name="payload_transfer_workflow"),
        fetch("Job", name="get_interfaces"),
        fetch("Job", name="Napalm_VRF_workflow"),
    ])
    edges = [(0, 2), (2, 3), (3, 4), (4, 1)]
    for x, y in edges:
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y}",
                "workflow": workflow.id,
                "subtype": "success",
                # NOTE(review): a "devices" property on an edge looks out
                # of place (no other edge factory in this file passes it)
                # — confirm WorkflowEdge actually uses it.
                "devices": devices,
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    # Canvas coordinates, scaled by 10.
    positions = [(-30, 0), (30, 0), (0, -20), (0, 0), (0, 20)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions[
            "Workflow_of_workflows"] = x * 10, y * 10
def update(self, **kwargs: Any) -> None:
    """Generic model update: coerce each incoming property to the type
    the model expects before setting it.

    The elif chain is order-sensitive: relationship resolution (by id or
    by list of ids) takes precedence over boolean / regex / dict /
    numeric coercion.
    """
    # Relationship map for this model; services share the "Service" map.
    serial = rel.get(self.__tablename__, rel["Service"])
    for property, value in kwargs.items():
        property_type = property_types.get(property, None)
        if property in serial:
            # Scalar relationship: value is an id — resolve to the object.
            value = fetch(serial[property], id=value)
        elif property[:-1] in serial:
            # Plural relationship ("...s"): value is a list of ids.
            value = objectify(serial[property[:-1]], value)
        elif property in boolean_properties:
            # HTML forms send no key / "false-ish" values for unchecked.
            value = kwargs[property] not in (None, False)
        elif "regex" in property:
            # Regex flags are booleans driven by key presence alone.
            value = property in kwargs
        elif property_type == "dict" and type(value) == str:
            # Dicts arrive JSON-encoded; empty string means empty dict.
            value = loads(value) if value else {}
        elif property_type in ["float", "int"]:
            default_value = getattr(self.__table__.c, property).default
            # Fall back to the column default when the form sent nothing.
            if default_value and not value:
                value = default_value.arg
            value = {"float": float, "int": int}[property_type](value or 0)
        setattr(self, property, value)
def run_job(job_id: int) -> dict:
    """Schedule an immediate one-shot run of a job.

    Returns an error dict when the job is already running or its target
    configuration is inconsistent; otherwise returns the serialized job.
    """
    job = fetch("Job", id=job_id)
    if job.is_running:
        return {"error": "Job is already running."}
    targets = job.compute_targets()
    if hasattr(job, "has_targets"):
        if job.has_targets and not targets:
            return {"error": "Set devices or pools as targets first."}
        elif not job.has_targets and targets:
            return {"error": "This service should not have targets configured."}
    scheduler.add_job(
        id=str(datetime.now()),
        func=scheduler_job,
        run_date=datetime.now(),
        args=[job.id],
        trigger="date",
    )
    return job.serialized
def duplicate_workflow(workflow_id: int) -> dict:
    """Clone a workflow: same jobs (with their canvas positions copied
    over) and equivalent edges, under the properties from the form."""
    original = fetch("Workflow", id=workflow_id)
    duplicate = factory("Workflow", **request.form)
    for job in original.jobs:
        duplicate.jobs.append(job)
        job.positions[duplicate.name] = job.positions[original.name]
    for edge in original.edges:
        cloned_edge = factory(
            "WorkflowEdge",
            **{
                "name": (
                    f"{duplicate.id}-{edge.subtype}:"
                    f"{edge.source.id}->{edge.destination.id}"
                ),
                "workflow": duplicate.id,
                "subtype": edge.subtype,
                "source": edge.source.id,
                "destination": edge.destination.id,
            },
        )
        duplicate.edges.append(cloned_edge)
    db.session.commit()
    return duplicate.serialized
def login() -> Union[Response, str]:
    """Authenticate a user via one of three methods and render/redirect.

    POST: try Local User, LDAP Domain or TACACS authentication; on
    success log the user in and redirect to the dashboard, on any
    failure abort with 403.
    GET: render the login form (choices limited to the methods enabled
    by USE_LDAP / USE_TACACS), or redirect straight to the dashboard if
    already authenticated.
    """
    if request.method == "POST":
        name, password = request.form["name"], request.form["password"]
        try:
            if request.form["authentication_method"] == "Local User":
                user = fetch("User", name=name)
                # NOTE(review): plaintext password comparison — local
                # passwords appear to be stored unhashed; confirm and
                # consider hashing.
                if user and password == user.password:
                    login_user(user)
                    return redirect(url_for("base_blueprint.dashboard"))
            elif request.form["authentication_method"] == "LDAP Domain":
                with Connection(
                    ldap_client,
                    user=f'{app.config["LDAP_USERDN"]}\\{name}',
                    password=password,
                    auto_bind=True,
                    authentication=NTLM,
                ) as connection:
                    connection.search(
                        app.config["LDAP_BASEDN"],
                        f"(&(objectClass=person)(samaccountname={name}))",
                        search_scope=SUBTREE,
                        get_operational_attributes=True,
                        attributes=["cn", "memberOf", "mail"],
                    )
                    json_response = loads(
                        connection.response_to_json())["entries"][0]
                    if json_response:
                        # Create (or update) a local user mirroring the
                        # LDAP account; admin rights are granted when any
                        # configured admin group appears in memberOf.
                        user = {
                            "name": name,
                            "password": password,
                            "email": json_response["attributes"].get("mail", ""),
                        }
                        if any(group in s
                               for group in app.config["LDAP_ADMIN_GROUP"]
                               for s in json_response["attributes"]["memberOf"]):
                            user["permissions"] = ["Admin"]
                        new_user = factory("User", **user)
                        login_user(new_user)
                        return redirect(url_for("base_blueprint.dashboard"))
            elif request.form["authentication_method"] == "TACACS":
                if tacacs_client.authenticate(name, password).valid:
                    user = factory("User", **{
                        "name": name,
                        "password": password
                    })
                    login_user(user)
                    return redirect(url_for("base_blueprint.dashboard"))
            # No method matched or authentication did not succeed.
            abort(403)
        except Exception as e:
            info(f"Authentication failed ({str(e)})")
            abort(403)
    if not current_user.is_authenticated:
        login_form = LoginForm(request.form)
        # Choices are (value, label) pairs — identical here, hence "* 2".
        authentication_methods = [("Local User", ) * 2]
        if USE_LDAP:
            authentication_methods.append(("LDAP Domain", ) * 2)
        if USE_TACACS:
            authentication_methods.append(("TACACS", ) * 2)
        login_form.authentication_method.choices = authentication_methods
        return render_template("login.html", login_form=login_form)
    return redirect(url_for("base_blueprint.dashboard"))
def request_loader(request: Request) -> User:
    """Flask-Login request loader: resolve the user from the form's
    "name" field."""
    name = request.form.get("name")
    return fetch("User", name=name)
def user_loader(id: int) -> User:
    """Flask-Login user loader: fetch the user by primary key."""
    user = fetch("User", id=id)
    return user