def create_default_workflows():
    """Build the configuration management workflow and lay out its jobs."""
    name = 'Configuration Management Workflow'
    workflow = factory('Workflow', **{
        'name': name,
        'description': 'Poll configuration and push to gitlab',
        'use_workflow_targets': False,
        'creator': fetch('User', name='admin').id
    })
    for service_name in ('configuration_backup', 'git_push_configurations'):
        workflow.jobs.append(fetch('Service', name=service_name))
    # (source, destination, success?) triples; indices 0/1 are the
    # workflow's built-in Start/End services.
    for x, y, edge_type in ((0, 2, True), (2, 3, True), (2, 3, False),
                            (3, 1, True)):
        factory('WorkflowEdge', **{
            'name': f'{workflow.name} {x} -> {y} ({edge_type})',
            'workflow': workflow.id,
            'subtype': 'success' if edge_type else 'failure',
            'source': workflow.jobs[x].id,
            'destination': workflow.jobs[y].id
        })
    for index, (x, y) in enumerate(((-30, 0), (20, 0), (0, -20), (0, 30))):
        workflow.jobs[index].positions[name] = x * 10, y * 10
def __init__(self, **kwargs):
    """Seed the workflow with the mandatory 'Start' and 'End' services."""
    for service_name in ('Start', 'End'):
        self.jobs.append(fetch('Service', name=service_name))
    super().__init__(**kwargs)
def post(self):
    """Run the job named in the JSON payload, asynchronously by default.

    Targets are resolved from device names, IP addresses and pool names;
    any lookup failure is logged and returned as the response body.
    """
    payload = request.get_json()
    job = fetch('Job', name=payload['name'])
    handle_asynchronously = payload.get('async', True)
    try:
        targets = set()
        for device_name in payload.get('devices', ''):
            targets.add(fetch('Device', name=device_name))
        for ip_address in payload.get('ip_addresses', ''):
            targets.add(fetch('Device', ip_address=ip_address))
        for pool_name in payload.get('pools', ''):
            targets.update(fetch('Pool', name=pool_name).devices)
    except Exception as e:
        info(f'REST API run_job endpoint failed ({str(e)})')
        return str(e)
    if not handle_asynchronously:
        return job.try_run(targets=targets)[0]
    scheduler.add_job(
        id=str(datetime.now()),
        func=scheduler_job,
        run_date=datetime.now(),
        args=[job.id, None, [d.id for d in targets]],
        trigger='date'
    )
    return job.serialized
def post(self) -> Union[str, dict]:
    """Run the job named in the JSON payload; schedule it when 'async' is set.

    Targets are resolved from device names, IP addresses and pool names;
    any lookup failure is logged and returned as the response body.
    """
    payload = request.get_json()
    job = fetch("Job", name=payload["name"])
    run_immediately = not payload.get("async", False)
    try:
        targets = set()
        for device_name in payload.get("devices", ""):
            targets.add(fetch("Device", name=device_name))
        for ip_address in payload.get("ip_addresses", ""):
            targets.add(fetch("Device", ip_address=ip_address))
        for pool_name in payload.get("pools", ""):
            targets.update(fetch("Pool", name=pool_name).devices)
    except Exception as e:
        info(f"REST API run_job endpoint failed ({str(e)})")
        return str(e)
    if run_immediately:
        return job.try_run(targets=targets)[0]
    scheduler.add_job(
        id=str(datetime.now()),
        func=scheduler_job,
        run_date=datetime.now(),
        args=[job.id, None, [d.id for d in targets]],
        trigger="date",
    )
    return job.serialized
def create_workflow_of_workflows():
    """Assemble 'Workflow_of_workflows', which chains inner workflows."""
    admin = fetch('User', name='admin').id
    devices = [fetch('Device', name='Washington').id]
    workflow = factory('Workflow', **{
        'name': 'Workflow_of_workflows',
        'description': 'Test the inner workflow system',
        'devices': devices,
        'creator': admin,
        'vendor': 'Arista',
        'operating_system': 'eos'
    })
    for job_name in ('payload_transfer_workflow', 'get_interfaces',
                     'Napalm_VRF_workflow'):
        workflow.jobs.append(fetch('Job', name=job_name))
    # Indices 0/1 are the workflow's built-in Start/End services.
    for x, y in ((0, 2), (2, 3), (3, 4), (4, 1)):
        factory('WorkflowEdge', **{
            'name': f'{workflow.name} {x} -> {y}',
            'workflow': workflow.id,
            'subtype': 'success',
            'devices': devices,
            'source': workflow.jobs[x].id,
            'destination': workflow.jobs[y].id
        })
    layout = ((-30, 0), (30, 0), (0, -20), (0, 0), (0, 20))
    for index, (x, y) in enumerate(layout):
        workflow.jobs[index].positions[
            'Workflow_of_workflows'] = x * 10, y * 10
def __init__(self, **kwargs):
    """Attach the 'Start'/'End' services and give 'End' a default position."""
    start = fetch('Service', name='Start')
    end = fetch('Service', name='End')
    self.jobs.append(start)
    self.jobs.append(end)
    super().__init__(**kwargs)
    # Only set a default canvas position if none was stored previously.
    if self.name not in end.positions:
        end.positions[self.name] = (500, 0)
def create_default_workflows() -> None:
    """Create the configuration management workflow and position its jobs."""
    name = "Configuration Management Workflow"
    workflow = factory(
        "Workflow",
        **{
            "name": name,
            "description": "Poll configuration and push to gitlab",
            "use_workflow_targets": False,
            "creator": fetch("User", name="admin").id,
        },
    )
    for service_name in ("poller_service", "git_push_configurations"):
        workflow.jobs.append(fetch("Service", name=service_name))
    # (source, destination, success?) triples; indices 0/1 are the
    # workflow's built-in Start/End services.
    for x, y, edge_type in ((0, 2, True), (2, 3, True), (2, 3, False),
                            (3, 1, True)):
        factory(
            "WorkflowEdge",
            **{
                "name": f"{workflow.name} {x} -> {y} ({edge_type})",
                "workflow": workflow.id,
                "subtype": "success" if edge_type else "failure",
                "source": workflow.jobs[x].id,
                "destination": workflow.jobs[y].id,
            },
        )
    for index, (x, y) in enumerate(((-30, 0), (20, 0), (0, -20), (0, 30))):
        workflow.jobs[index].positions[name] = x * 10, y * 10
def scheduler_job(job_id, aps_job_id=None, targets=None):
    """Execute a scheduled job inside the app context.

    Marks a one-shot task as completed and, when the job is configured to
    push to git, writes the results into the automation repository and
    commits/pushes them (a failed commit, e.g. nothing to commit, is
    silently ignored).
    """
    with scheduler.app.app_context():
        job = fetch('Job', id=job_id)
        if targets:
            targets = [fetch('Device', id=i) for i in targets]
        results, now = job.try_run(targets=targets)
        task = fetch('Task', creation_time=aps_job_id)
        if task and not task.frequency:
            task.status = 'Completed'
        parameters = get_one('Parameters')
        if job.push_to_git and parameters.git_repository_automation:
            git_folder = Path.cwd() / 'git' / 'automation'
            with open(git_folder / job.name, 'w') as results_file:
                results_file.write(str_dict(results))
            repository = Repo(str(git_folder))
            try:
                repository.git.add(A=True)
                repository.git.commit(m=f'Automatic commit ({job.name})')
            except GitCommandError:
                pass
            repository.remotes.origin.push()
        db.session.commit()
def create_payload_transfer_workflow():
    """Build the example 'payload_transfer_workflow' (ReST call + getters)."""
    definitions = [{
        'name': 'GET_Washington',
        'type': service_classes['rest_call_service'],
        'description': 'Use GET ReST call on Washington',
        'username': '******',
        'password': '******',
        'waiting_time': 0,
        'devices': [fetch(Device, name='Washington')],
        'content_match': '',
        'call_type': 'GET',
        'url': 'http://127.0.0.1:5000/rest/object/device/Washington',
        'payload': ''
    }]
    for getter in ('get_facts', 'get_interfaces', 'get_interfaces_ip',
                   'get_config'):
        definitions.append({
            'name': f'{getter}',
            'type': service_classes['napalm_getters_service'],
            'description': f'Getter: {getter}',
            'waiting_time': 0,
            'devices': [fetch(Device, name='Washington')],
            'driver': 'eos',
            'content_match': '',
            'getters': [getter]
        })
    definitions.append({
        'name': 'process_payload1',
        'type': service_classes['swiss_army_knife_service'],
        'description': 'Process Payload in example workflow',
        'waiting_time': 0,
        'devices': [fetch(Device, name='Washington')]
    })
    services = []
    for definition in definitions:
        services.append(factory(definition.pop('type'), **definition))
    workflow = factory(Workflow, **{
        'name': 'payload_transfer_workflow',
        'description': 'ReST call, Napalm getters, etc',
        'vendor': 'Arista',
        'operating_system': 'eos'
    })
    workflow.jobs.extend(services)
    # Position each job on the workflow canvas, then wire the edges.
    layout = ((-20, 0), (50, 0), (-5, 0), (-5, -10), (15, 10), (15, -10),
              (30, -10), (30, 0))
    for index, (x, y) in enumerate(layout):
        workflow.jobs[index].positions['payload_transfer_workflow'] = (
            x * 10, y * 10)
    for x, y in ((0, 2), (2, 3), (2, 4), (3, 5), (5, 6), (6, 7), (4, 7),
                 (7, 1)):
        factory(WorkflowEdge, **{
            'name': f'{workflow.name} {x} -> {y}',
            'workflow': workflow,
            'type': True,
            'source': workflow.jobs[x],
            'destination': workflow.jobs[y]
        })
def save_pool_objects(pool_id):
    """Overwrite a pool's device and link lists from the submitted form."""
    pool = fetch(Pool, id=pool_id)
    pool.devices = [
        fetch(Device, id=device_id)
        for device_id in request.form.getlist('devices')
    ]
    pool.links = [
        fetch(Link, id=link_id)
        for link_id in request.form.getlist('links')
    ]
    db.session.commit()
    return jsonify(pool.name)
def update_pools(pool):
    """Recompute one pool (by id) or every pool when 'all' is passed."""
    if pool == 'all':
        # Distinct loop name: the original reused the parameter here.
        for pool_object in fetch_all('Pool'):
            pool_object.compute_pool()
    else:
        fetch('Pool', id=pool).compute_pool()
    db.session.commit()
    return True
def save_positions(workflow_id):
    """Persist the canvas coordinates posted for each job of a workflow."""
    workflow = fetch(Workflow, id=workflow_id)
    session['workflow'] = workflow.serialized
    for job_id, position in request.json.items():
        coordinates = (position['x'], position['y'])
        fetch(Job, id=job_id).positions[workflow.name] = coordinates
    db.session.commit()
    return jsonify({'success': True})
def update_pools(pool_id: str) -> bool:
    """Recompute a single pool by id, or all pools when 'all' is given."""
    if pool_id == "all":
        pools = fetch_all("Pool")
    else:
        pools = [fetch("Pool", id=int(pool_id))]
    for pool in pools:
        pool.compute_pool()
    db.session.commit()
    return True
def save_positions(workflow_id):
    """Store each job's (x, y) position for the given workflow."""
    workflow = fetch('Workflow', id=workflow_id)
    session['workflow'] = workflow.id
    for job_id, position in request.json.items():
        coordinates = (position['x'], position['y'])
        fetch('Job', id=job_id).positions[workflow.name] = coordinates
    db.session.commit()
    return True
def save_positions(workflow_id: int) -> bool:
    """Store each job's (x, y) position for the given workflow."""
    workflow = fetch("Workflow", id=workflow_id)
    session["workflow"] = workflow.id
    for job_id, position in request.json.items():
        coordinates = (position["x"], position["y"])
        fetch("Job", id=job_id).positions[workflow.name] = coordinates
    db.session.commit()
    return True
def update(self, **kwargs):
    """Resolve source/destination device names into ids before updating.

    Bug fix: the lookup is now guarded, so calling update() without a
    'source_name' key no longer raises a KeyError; updates that do not
    touch the endpoints pass straight through to the parent update.
    """
    if 'source_name' in kwargs:
        source = fetch('Device', name=kwargs['source_name']).id
        destination = fetch('Device', name=kwargs['destination_name']).id
        kwargs.update({
            'source_id': source,
            'destination_id': destination,
            'source': source,
            'destination': destination
        })
    super().update(**kwargs)
def add_edge(wf_id, type, source, dest):
    """Create a workflow edge between two jobs and return it serialized."""
    edge_properties = {
        'name': f'{wf_id}-{type}:{source}->{dest}',
        'workflow': fetch(Workflow, id=wf_id),
        # The URL carries the edge type as the string 'true'/'false'.
        'type': type == 'true',
        'source': fetch(Job, id=source),
        'destination': fetch(Job, id=dest)
    }
    workflow_edge = factory(WorkflowEdge, **edge_properties)
    return jsonify(workflow_edge.serialized)
def update(self, **kwargs):
    """Translate endpoint device names into ids before delegating the update."""
    if 'source_name' in kwargs:
        source = fetch('Device', name=kwargs.pop('source_name'))
        destination = fetch('Device', name=kwargs.pop('destination_name'))
        # Same insertion order as before: ids first, then relationships.
        for prop, value in (
            ('source_id', source.id),
            ('destination_id', destination.id),
            ('source', source.id),
            ('destination', destination.id),
        ):
            kwargs[prop] = value
    super().update(**kwargs)
def notify(self, results, time):
    """Run the configured notification job with this job's results."""
    notification_job = fetch('Job', name=self.send_notification_method)
    notification_job.try_run({
        'job': self.serialized,
        'logs': self.logs,
        'runtime': time,
        'result': self.build_notification(results, time)
    })
def create_napalm_workflow():
    """Create 'Napalm_VRF_workflow': create a VRF with Napalm, validate it
    with Netmiko checks, then roll the configuration back.

    Bug fix: the workflow edges were created with 'subtype': True, a
    boolean, whereas edge subtypes are the strings 'success'/'failure'
    elsewhere in this module — success edges now use 'success'. Also
    removed a placeholder-free f-string.
    """
    services, admin = [], fetch('User', name='admin').id
    devices = [
        fetch('Device', name='Washington').id,
        fetch('Device', name='Austin').id
    ]
    for service in ({
        'type': 'NapalmConfigurationService',
        'name': 'napalm_create_vrf_test',
        'description': 'Create a VRF "test" with Napalm',
        'waiting_time': 0,
        'devices': devices,
        'creator': admin,
        'driver': 'eos',
        'vendor': 'Arista',
        'operating_system': 'eos',
        'content_type': 'simple',
        'action': 'load_merge_candidate',
        'content': 'vrf definition test\n'
    }, {
        'type': 'NapalmRollbackService',
        'name': 'Napalm eos Rollback',
        'driver': 'eos',
        'description': 'Rollback a configuration with Napalm eos',
        'devices': devices,
        'creator': admin,
        'waiting_time': 0
    }):
        instance = factory(service.pop('type'), **service)
        services.append(instance)
    services.insert(1, fetch('Job', name='netmiko_check_vrf_test'))
    services.append(fetch('Job', name='netmiko_check_no_vrf_test'))
    workflow = factory('Workflow', **{
        'name': 'Napalm_VRF_workflow',
        'description': 'Create and delete a VRF with Napalm',
        'creator': admin,
        'vendor': 'Arista',
        'operating_system': 'eos'
    })
    workflow.jobs.extend(services)
    edges = [(0, 2), (2, 3), (3, 4), (4, 5), (5, 1)]
    for x, y in edges:
        factory('WorkflowEdge', **{
            'name': f'{workflow.name} {x} -> {y}',
            'workflow': workflow.id,
            'subtype': 'success',  # was the boolean True
            'source': workflow.jobs[x].id,
            'destination': workflow.jobs[y].id
        })
    positions = [(-20, 0), (20, 0), (0, -15), (0, -5), (0, 5), (0, 15)]
    for index, (x, y) in enumerate(positions):
        workflow.jobs[index].positions['Napalm_VRF_workflow'] = x * 10, y * 10
def edit_workflow():
    """Create/update a workflow from the submitted form and serialize it."""
    form = dict(request.form.to_dict())
    # Unchecked checkboxes are absent from the form: normalize them to 'off'.
    for prop in boolean_properties:
        form.setdefault(prop, 'off')
    form['devices'] = [
        fetch(Device, id=device_id)
        for device_id in request.form.getlist('devices')
    ]
    form['pools'] = [
        fetch(Pool, id=pool_id)
        for pool_id in request.form.getlist('pools')
    ]
    return jsonify(factory(Workflow, **form).serialized)
def factory(cls, **kwargs):
    """Fetch-or-create an instance of cls, matched by 'id' (consumed from
    kwargs) or by 'name', updated with the remaining properties."""
    instance = (
        fetch(cls, id=kwargs.pop('id'))
        if 'id' in kwargs
        else fetch(cls, name=kwargs['name'])
    )
    if instance:
        instance.update(**kwargs)
    else:
        instance = cls(**kwargs)
        db.session.add(instance)
    db.session.commit()
    return instance
def test_pool_management(user_client):
    """Create two pools, check their contents, then delete them."""
    create_from_file(user_client, 'europe.xls')
    for data in (pool1, pool2):
        user_client.post('/update/pool', data=data)
    p1, p2 = fetch('Pool', name='pool1'), fetch('Pool', name='pool2')
    for pool, device_count, link_count in ((p1, 21, 20), (p2, 12, 4)):
        assert len(pool.devices) == device_count
        assert len(pool.links) == link_count
    assert len(fetch_all('Pool')) == 5
    for pool in (p1, p2):
        user_client.post(f'/delete/pool/{pool.id}')
    assert len(fetch_all('Pool')) == 3
def test_pool_management(user_client):
    """Create two pools, check their contents, then delete them."""
    create_from_file(user_client, 'europe.xls')
    for data in (pool1, pool2):
        user_client.post('/objects/process_pool', data=data)
    p1, p2 = fetch(Pool, name='pool1'), fetch(Pool, name='pool2')
    for pool, device_count, link_count in ((p1, 21, 20), (p2, 12, 4)):
        assert len(pool.devices) == device_count
        assert len(pool.links) == link_count
    assert len(Pool.query.all()) == 5
    for pool in (p1, p2):
        user_client.post(f'/objects/delete_pool/{pool.id}')
    assert len(Pool.query.all()) == 3
def test_pool_management(user_client: FlaskClient) -> None:
    """Create two pools, check their contents, then delete them."""
    create_from_file(user_client, "europe.xls")
    for data in (pool1, pool2):
        user_client.post("/update/pool", data=data)
    p1, p2 = fetch("Pool", name="pool1"), fetch("Pool", name="pool2")
    for pool, device_count, link_count in ((p1, 21, 20), (p2, 12, 4)):
        assert len(pool.devices) == device_count
        assert len(pool.links) == link_count
    assert len(fetch_all("Pool")) == 5
    for pool in (p1, p2):
        user_client.post(f"/delete/pool/{pool.id}")
    assert len(fetch_all("Pool")) == 3
def create_example_services():
    """Create example services (BGP config, file transfer, backups)."""
    admin = fetch('User', name='admin').id
    example_services = ({
        'type': 'ConfigureBgpService',
        'name': 'napalm_configure_bgp_1',
        'description': 'Configure BGP Peering with Napalm',
        'devices': [fetch('Device', name='Washington').id],
        'creator': admin,
        'local_as': 100,
        'loopback': 'Lo100',
        'loopback_ip': '100.1.1.1',
        'neighbor_ip': '100.1.2.1',
        'remote_as': 200,
        'vrf_name': 'configure_BGP_test',
        'waiting_time': 0
    }, {
        'type': 'GenericFileTransferService',
        'name': 'test_file_transfer_service',
        'description': 'Test the file transfer service',
        'devices': [fetch('Device', name='Aserver').id],
        'creator': admin,
        'direction': 'get',
        'protocol': 'scp',
        'source_file': '/media/sf_VM/eNMS/tests/file_transfer/a.bin',
        'destination_file': '/media/sf_VM/eNMS/tests/file_transfer/b.bin',
        'missing_host_key_policy': True
    }, {
        'type': 'LogBackupService',
        'name': 'test_log_backup_service',
        'description': 'Test the log backup service',
        'devices': [fetch('Device', name='Aserver').id],
        'creator': admin,
        'protocol': 'scp',
        'destination_ip_address': '127.0.0.1',
        'destination_path': '/media/sf_VM/eNMS/tests/file_transfer',
        'delete_archive': True,
        'delete_folder': True
    }, {
        'type': 'DatabaseBackupService',
        'name': 'test_database_backup_service',
        'description': 'Test the log backup service',
        'devices': [fetch('Device', name='Aserver').id],
        'creator': admin,
        'protocol': 'scp',
        'destination_ip_address': '127.0.0.1',
        'destination_path': '/media/sf_VM/eNMS/tests/file_transfer',
        'delete_archive': True,
        'delete_folder': True
    })
    for service_properties in example_services:
        service_type = service_properties.pop('type')
        factory(service_type, **service_properties)
def configure_poller():
    """Update the configuration poller's git remote, targets and frequency."""
    parameters = get_one('Parameters')
    remote_repository = request.form['remote_git_repository']
    # Clone the repository only when the stored remote actually changed.
    if parameters.git_repository_configurations != remote_repository:
        Repo.clone_from(remote_repository, app.path / 'git' / 'configurations')
        parameters.git_repository_configurations = remote_repository
    service = fetch('Service', name='configuration_backup')
    task = fetch('Task', name='configuration_backup')
    service.devices = objectify('Device', request.form.get('devices', ''))
    service.pools = objectify('Pool', request.form.get('pools', ''))
    task.frequency = request.form['polling_frequency']
    db.session.commit()
    task.reschedule()
    return True
def create_default_services():
    """Create the built-in services required by eNMS workflows."""
    admin = fetch('User', name='admin').id
    # Hidden Start/End endpoints of every workflow.
    for name, description in (
        ('Start', 'Start point of a workflow'),
        ('End', 'End point of a workflow'),
    ):
        factory('SwissArmyKnifeService', **{
            'name': name,
            'description': description,
            'creator': admin,
            'hidden': True
        })
    # Visible utility services.
    for name, description in (
        ('mail_feedback_notification', 'Mail notification (service logs)'),
        ('slack_feedback_notification', 'Slack notification (service logs)'),
        ('mattermost_feedback_notification',
         'Mattermost notification (service logs)'),
        ('cluster_monitoring', 'Monitor eNMS cluster'),
        ('git_push_configurations', 'Push configurations to Gitlab'),
    ):
        factory('SwissArmyKnifeService', **{
            'name': name,
            'description': description,
            'creator': admin
        })
    factory('ConfigurationBackupService', **{
        'name': 'configuration_backup',
        'description': 'Back up device configurations',
        'pools': [fetch('Pool', name='Devices only').id],
        'multiprocessing': True,
        'creator': admin
    })
def connection(device_id: int) -> dict:
    """Spawn a GoTTY web terminal connected to the given device.

    Builds the gotty command line from the submitted form (protocol,
    address attribute, credentials, one-shot and multiplexing flags),
    launches it as a subprocess, and returns the connection details for
    the client.
    """
    parameters, device = get_one("Parameters"), fetch("Device", id=device_id)
    cmd = [str(app.path / "applications" / "gotty"), "-w"]
    port, protocol = parameters.get_gotty_port(), request.form["protocol"]
    # The form selects which device attribute serves as the address
    # (presumably an IP/hostname field — confirm against the form).
    address = getattr(device, request.form["address"])
    cmd.extend(["-p", str(port)])
    if "accept-once" in request.form:
        cmd.append("--once")
    if "multiplexing" in request.form:
        # Attach to (or create) a tmux session named after the port.
        cmd.extend(f"tmux new -A -s gotty{port}".split())
    if app.config["GOTTY_BYPASS_KEY_PROMPT"]:
        options = "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
    else:
        options = ""
    if protocol == "telnet":
        cmd.extend(f"telnet {address}".split())
    elif "authentication" in request.form:
        if request.form["credentials"] == "device":
            login, pwd = device.username, device.password
        else:
            login, pwd = current_user.name, current_user.password
        # NOTE(review): the password ends up on the sshpass command line,
        # visible in the process list — confirm this is acceptable.
        cmd.extend(f"sshpass -p {pwd} ssh {options} {login}@{address}".split())
    else:
        cmd.extend(f"ssh {options} {address}".split())
    if protocol != "telnet":
        # ssh's -p (remote port), distinct from gotty's -p above.
        cmd.extend(f"-p {device.port}".split())
    Popen(cmd)
    return {
        "device": device.name,
        "port": port,
        "redirection": app.config["GOTTY_PORT_REDIRECTION"],
        "server_addr": app.config["ENMS_SERVER_ADDR"],
    }
def get_diff(device_id: int, v1: str, v2: str) -> dict:
    """Diff two stored configurations of a device, keyed by timestamp.

    Returns both configurations split into lines along with the
    SequenceMatcher opcodes describing how to turn one into the other.
    """
    device = fetch("Device", id=device_id)
    timestamp_format = "%Y-%m-%d %H:%M:%S.%f"
    first, second = (
        device.configurations[
            datetime.strptime(version, timestamp_format)
        ].splitlines()
        for version in (v1, v2)
    )
    opcodes = SequenceMatcher(None, first, second).get_opcodes()
    return {"first": first, "second": second, "opcodes": opcodes}