def test_link_deletion(user_client):
    """Import the Europe topology, delete every known link, and verify counts."""
    create_from_file(user_client, "europe.xls")
    for name in links:
        instance = db.fetch("link", name=name)
        user_client.post(f"/delete_instance/link/{instance.id}")
    # Devices are untouched; only the deleted links are gone.
    assert len(db.fetch_all("device")) == 33
    assert len(db.fetch_all("link")) == 38
def verify_password(username, password):
    """Authenticate a username/password pair and log the user in on success.

    Returns True on success, False for an unknown user or empty password;
    falls through (None) when authentication itself fails.
    """
    account = db.fetch("user", name=username, allow_none=True)
    if not account or not password:
        return False
    if app.authenticate_user(name=username, password=password):
        login_user(account)
        return True
def test_device_deletion(user_client):
    """Import the Europe topology, delete all routers, and verify counts."""
    create_from_file(user_client, "europe.xls")
    for name in routers:
        instance = db.fetch("device", name=name)
        user_client.post(f"/delete_instance/device/{instance.id}")
    # Deleting devices cascades to their attached links.
    assert len(db.fetch_all("device")) == 18
    assert len(db.fetch_all("link")) == 18
def get_runtimes(self, type, id):
    """Return sorted, deduplicated runtime pairs for a service.

    Each element is (parent_runtime, parent_runtime): the duplicated
    tuple matches the (value, label) shape the UI drop-downs expect.
    """
    matching_runs = db.fetch("run", allow_none=True, all_matches=True, service_id=id)
    pairs = {(run.parent_runtime,) * 2 for run in matching_runs}
    return sorted(pairs)
def get_device_logs(self, device_id):
    """Return all log entries whose source matches the device's IP address.

    The device is fetched exactly once; the original re-fetched it inside
    the comprehension, issuing one database query per log entry.
    """
    ip_address = db.fetch("device", id=device_id).ip_address
    device_logs = [log.name for log in db.fetch_all("log") if log.source == ip_address]
    return "\n".join(device_logs)
def verify_password(username, password):
    """Check `password` against the stored credential for `username`.

    Returns False for a missing username/password or an unknown user.
    The original called `db.fetch` without `allow_none=True`, so an
    unknown username raised instead of failing authentication cleanly
    (the other `verify_password` variant in this file uses allow_none).
    """
    if not username or not password:
        return False
    user = db.fetch("user", name=username, allow_none=True)
    if not user:
        return False
    # `hashed` (renamed from `hash`, which shadowed the builtin) selects
    # argon2 verification vs. plain string comparison.
    hashed = app.settings["security"]["hash_user_passwords"]
    verify = argon2.verify if hashed else str.__eq__
    return verify(password, app.get_password(user.password))
def run_service(self, path, **kwargs):
    """Launch a service (possibly nested in a workflow) identified by `path`.

    `path` is a ">"-separated chain of service ids. Returns the serialized
    service, the new runtime, and the requesting user's name.
    """
    path_ids = str(path).split(">")
    if kwargs.get("restart_from_top_level_workflow", False):
        # Restarting from the top: record the full restart path and run the
        # outermost (first) service of the chain.
        kwargs[
            "restart_path"] = f"{path}>{'-'.join(kwargs['start_services'])}"
        service_id = path_ids[0]
    else:
        # Normal run: the innermost (last) id of the path is the target.
        service_id = path_ids[-1]
    # Strip form-only fields that must not reach the run payload.
    for property in ("user", "csrf_token", "form_type"):
        kwargs.pop(property, None)
    kwargs["creator"] = getattr(current_user, "name", "")
    service = db.fetch("service", id=service_id)
    kwargs["runtime"] = runtime = self.get_time()
    if kwargs.get("asynchronous", True):
        # Default: schedule a one-shot job running now, so the request
        # returns without waiting for the service to finish.
        self.scheduler.add_job(
            id=runtime,
            func=self.run,
            run_date=datetime.now(),
            args=[service_id],
            kwargs=kwargs,
            trigger="date",
        )
    else:
        # Synchronous mode: block until the run completes.
        service.run(runtime=runtime)
    return {
        "service": service.serialized,
        "runtime": runtime,
        "user": current_user.name,
    }
def desktop_connection(self, id, **kwargs):
    """Open an SSH session to a device for the desktop (WebSSH) connection.

    Credentials come from the device record, the current user, or the
    submitted form, depending on kwargs["credentials"]. Returns connection
    details on success, or {"error": ...} on failure.
    """
    device = db.fetch("device", id=id, rbac="connect")
    # Select the (username, password) pair for the requested mode.
    credentials = ((device.username, self.get_password(device.password))
                   if kwargs["credentials"] == "device" else
                   (current_user.name, self.get_password(current_user.password))
                   if kwargs["credentials"] == "user" else
                   (kwargs["username"], kwargs["password"]))
    # The UUID doubles as the one-time username of the gateway session.
    uuid, port = str(uuid4()), self.get_ssh_port()
    session = db.factory(
        "session",
        name=uuid,
        user=current_user.name,
        timestamp=self.get_time(),
        device=device.id,
    )
    db.session.commit()
    try:
        ssh_connection = SshConnection(device.ip_address, *credentials,
                                       session.id, uuid, port)
        # Run the SSH session in a background thread so the HTTP request
        # returns immediately with the connection parameters.
        Thread(
            target=ssh_connection.start_session,
            args=(session.id, uuid, port),
        ).start()
        return {
            "port": port,
            "username": uuid,
            "device_name": device.name,
            "device_ip": device.ip_address,
        }
    except Exception as exc:
        return {"error": exc.args}
def desktop_connection(self, id, **kwargs):
    """Open an SSH session to a device for the desktop (WebSSH) connection.

    Rejects authentication methods disabled in the SSH settings, creates a
    session record, then starts the SSH gateway in a background thread.
    Returns connection details on success, {"alert"/"error": ...} otherwise.
    """
    # Refuse credential modes (device/user/custom) disabled in settings.
    if not self.settings["ssh"]["credentials"][kwargs["credentials"]]:
        return {"alert": "Unauthorized authentication method."}
    device = db.fetch("device", id=id, rbac="connect")
    # The UUID doubles as the one-time username of the gateway session.
    uuid, port = str(uuid4()), self.get_ssh_port()
    session = db.factory(
        "session",
        name=uuid,
        user=current_user.name,
        timestamp=self.get_time(),
        device=device.id,
    )
    db.session.commit()
    try:
        credentials = self.get_credentials(device, **kwargs)
        args = (session.id, uuid, port)
        ssh_connection = SshConnection(device.ip_address, credentials, *args)
        # Background thread: the HTTP request returns immediately.
        Thread(target=ssh_connection.start_session, args=args).start()
        return {
            "port": port,
            "username": uuid,
            "device_name": device.name,
            "device_ip": device.ip_address,
        }
    except Exception as exc:
        return {"error": exc.args}
def get_workflow_tree(self, full_path):
    """Build the jstree node structure for the workflow rooted at `full_path`.

    `full_path` is a ">"-separated chain of service ids; nodes lying on
    that path are rendered opened.
    """
    def rec(service, path=""):
        # Extend the ">"-separated path with this service's id.
        path += ">" * bool(path) + str(service.id)
        # Structural nodes are not displayed in the tree.
        if service.scoped_name in ("Start", "End", "Placeholder"):
            return
        return {
            "data": {"path": path, **service.base_properties},
            "id": service.id,
            "state": {"opened": full_path.startswith(path)},
            "text": service.scoped_name,
            # Only workflows have expandable children; filter(None, ...)
            # drops the structural nodes rec() returned as None.
            "children": sorted(
                filter(None, [rec(child, path) for child in service.services]),
                key=lambda node: node["text"].lower(),
            )
            if service.type == "workflow"
            else False,
            "a_attr": {
                "class": "no_checkbox",
                # Pink for shared services, blue otherwise.
                "style": (
                    f"color: #{'FF1694' if service.shared else '6666FF'};"
                    "width: 100%"
                ),
            },
            "type": service.type,
        }
    return rec(db.fetch("workflow", id=full_path.split(">")[0]))
def authenticate_user(self, **kwargs):
    """Authenticate a user via the database or an external method.

    Returns the user object on success, False otherwise. For external
    methods, a first successful login creates the local user record.
    """
    name, password = kwargs["name"], kwargs["password"]
    if not name or not password:
        return False
    user = db.fetch("user", allow_none=True, name=name)
    default_method = self.settings["authentication"]["default"]
    # A user's own method overrides the instance default; an explicit
    # kwarg overrides both.
    user_method = getattr(user, "authentication", default_method)
    method = kwargs.get("authentication_method", user_method)
    if method not in self.settings["authentication"]["methods"]:
        return False
    elif method == "database":
        # Bug fix: the original dereferenced `user.password` before the
        # truthiness check on `user`, raising AttributeError for unknown
        # usernames instead of returning False.
        if not user:
            return False
        hashed = self.settings["security"]["hash_user_passwords"]
        verify = argon2.verify if hashed else str.__eq__
        user_password = self.get_password(user.password)
        success = user_password and verify(password, user_password)
        return user if success else False
    else:
        # External authentication: delegate to `<method>_authentication`.
        response = getattr(self, f"{method}_authentication")(user, name, password)
        if not response:
            return False
        elif not user:
            # First login via an external method: create the local record.
            user = db.factory("user", authentication=method, **response)
            db.session.commit()
        return user
def update_database_configurations_from_git(self):
    """Sync device configuration data from the local git "network_data" tree.

    For each device directory: load timestamps.json, update the device's
    last_<property>_<timestamp> fields and the configuration properties
    themselves, then recompute any pool that filters on those properties.
    """
    for dir in scandir(self.path / "network_data"):
        device = db.fetch("device", allow_none=True, name=dir.name)
        timestamp_path = Path(dir.path) / "timestamps.json"
        # Directories that do not match a known device are skipped.
        if not device:
            continue
        try:
            with open(timestamp_path) as file:
                timestamps = load(file)
        except Exception:
            # Missing or malformed timestamps file: treat as empty.
            timestamps = {}
        for property in self.configuration_properties:
            for timestamp, value in timestamps.get(property, {}).items():
                setattr(device, f"last_{property}_{timestamp}", value)
            filepath = Path(dir.path) / property
            if not filepath.exists():
                continue
            with open(filepath) as file:
                setattr(device, property, file.read())
    db.session.commit()
    # Recompute dynamic pools that filter on any configuration property.
    for pool in db.fetch_all("pool"):
        if any(
                getattr(pool, f"device_{property}")
                for property in self.configuration_properties):
            pool.compute_pool()
def get_service_state(self, path, runtime=None):
    """Return a service's serialized form, its runtimes, and one run's state.

    `path` is a ">"-separated id chain; only the last id is used.
    `runtime` may be a specific parent runtime, "latest" (the default) or
    "normal" (no state is attached).
    """
    service_id, state = path.split(">")[-1], None
    service = db.fetch("service", id=service_id, allow_none=True)
    # A missing service is surfaced as an access-rights error.
    if not service:
        raise db.rbac_error
    runs = db.fetch_all("run", service_id=service_id)
    if not runtime:
        runtime = "latest"
    if runs and runtime != "normal":
        if runtime == "latest":
            # assumes fetch_all returns runs oldest-first, so the last
            # element is the most recent run — TODO confirm ordering
            runtime = runs[-1].parent_runtime
        latest_runs = [r for r in runs if r.parent_runtime == runtime]
        if latest_runs:
            state = latest_runs[0].get_state()
    return {
        "service": service.to_dict(include=["services", "edges", "superworkflow"]),
        # (value, label) pairs for the runtime drop-down, newest first.
        "runtimes": sorted(
            set((run.parent_runtime, f"{run.parent_runtime} ({run.creator})")
                for run in runs),
            reverse=True,
        ),
        "state": state,
        "runtime": runtime,
    }
def copy_service_in_workflow(self, workflow_id, **kwargs):
    """Copy services into a workflow: shared reference ("shallow") or duplicate ("deep")."""
    unique_ids = list(set(kwargs["services"].split(",")))
    candidates = db.objectify("service", unique_ids)
    workflow = db.fetch("workflow", id=workflow_id)
    added, errors = [], []
    if kwargs["mode"] == "shallow":
        # Shallow mode only accepts shared services not already present.
        for candidate in candidates:
            if not candidate.shared:
                errors.append(f"'{candidate.name}' is not a shared service.")
            elif candidate in workflow.services:
                errors.append(f"This workflow already contains '{candidate.name}'.")
    if errors:
        return {"alert": errors}
    for candidate in candidates:
        if kwargs["mode"] == "deep":
            candidate = candidate.duplicate(workflow)
        else:
            workflow.services.append(candidate)
        added.append(candidate)
    workflow.last_modified = self.get_time()
    db.session.commit()
    return {
        "services": [instance.serialized for instance in added],
        "update_time": workflow.last_modified,
    }
def compare(self, type, device_name, v1, v2):
    """Return a unified diff between two results or two git network-data versions."""
    if type in ("result", "device_result"):
        first = self.str_dict(db.fetch("result", id=v1).result)
        second = self.str_dict(db.fetch("result", id=v2).result)
    else:
        first = self.get_git_network_data(device_name, v1)[type]
        second = self.get_git_network_data(device_name, v2)[type]
    diff_lines = unified_diff(
        first.splitlines(),
        second.splitlines(),
        fromfile="-",
        tofile="-",
        lineterm="",
    )
    return "\n".join(diff_lines)
def reset_run_status(self):
    """After a reload, mark every run still flagged as "Running" as aborted."""
    stale_runs = db.fetch("run", all_matches=True, allow_none=True, status="Running")
    for stale_run in stale_runs:
        stale_run.status = "Aborted (RELOAD)"
    db.session.commit()
def get(self, model):
    """REST GET: return matching instances' properties (positions excluded)."""
    query = request.args.to_dict()
    matches = db.fetch(model, all_matches=True, **query)
    return [instance.get_properties(exclude=["positions"]) for instance in matches]
def get(self, cls):
    """REST GET: return properties of all instances matching the query string."""
    matches = db.fetch(cls, all_matches=True, **request.args.to_dict())
    properties = []
    for instance in matches:
        properties.append(instance.get_properties(exclude=["positions"]))
    return properties
def post(self):
    """REST endpoint: run a service against named devices, IPs and pools.

    Resolves names to ids, then either schedules the run (when "async"
    is true) or runs it inline and returns its result.
    """
    data = {
        "trigger": "REST",
        "creator": request.authorization["username"],
        **request.get_json(force=True),
    }
    errors, devices, pools = [], [], []
    service = db.fetch("service", name=data["name"])
    handle_asynchronously = data.get("async", False)
    # Resolve target devices by name.
    for device_name in data.get("devices", ""):
        device = db.fetch("device", name=device_name)
        if device:
            devices.append(device.id)
        else:
            errors.append(
                f"No device with the name '{device_name}'")
    # Resolve target devices by IP address.
    for device_ip in data.get("ip_addresses", ""):
        device = db.fetch("device", ip_address=device_ip)
        if device:
            devices.append(device.id)
        else:
            errors.append(
                f"No device with the IP address '{device_ip}'")
    # Resolve target pools by name.
    for pool_name in data.get("pools", ""):
        pool = db.fetch("pool", name=pool_name)
        if pool:
            pools.append(pool.id)
        else:
            errors.append(f"No pool with the name '{pool_name}'")
    # Any unresolved name aborts the run.
    if errors:
        return {"errors": errors}
    if devices or pools:
        data.update({"devices": devices, "pools": pools})
    data["runtime"] = runtime = app.get_time()
    if handle_asynchronously:
        # One-shot scheduler job running now: return the runtime without
        # waiting for the service to finish.
        app.scheduler.add_job(
            id=runtime,
            func=app.run,
            run_date=datetime.now(),
            args=[service.id],
            kwargs=data,
            trigger="date",
        )
        return {"errors": errors, "runtime": runtime}
    else:
        # Synchronous: run now and include the run's result.
        return {**app.run(service.id, **data), "errors": errors}
def save_positions(self, workflow_id, **kwargs):
    """Persist node/label positions for a workflow; return the update time.

    kwargs maps ids to {"x": ..., "y": ...}. Ids containing "-" are
    label ids; all others are service ids.
    """
    now, old_position = self.get_time(), None
    workflow = db.fetch("workflow", allow_none=True, id=workflow_id, rbac="edit")
    if not workflow:
        return
    for id, position in kwargs.items():
        new_position = [position["x"], position["y"]]
        if "-" not in id:
            # Service node: positions are stored per workflow name.
            service = db.fetch("service", id=id, rbac="edit")
            old_position = service.positions.get(workflow.name)
            service.positions[workflow.name] = new_position
        elif id in workflow.labels:
            # Rebuild the label dict with the new positions first.
            old_position = workflow.labels[id].pop("positions")
            workflow.labels[id] = {"positions": new_position, **workflow.labels[id]}
        # NOTE(review): when an id contains "-" but is not a known label,
        # neither branch runs and `old_position` keeps its value from a
        # previous iteration, so the comparison below may be stale —
        # confirm this is intended.
        if new_position != old_position:
            workflow.last_modified = now
    return now
def stop_workflow(self, runtime):
    """Request that a running workflow stop; return True when a stop was issued."""
    run = db.fetch("run", allow_none=True, runtime=runtime)
    if not run or run.status != "Running":
        # Nothing to stop: fall through (None), as callers expect.
        return
    if self.redis_queue:
        # Distributed mode: signal the stop through Redis.
        self.redis("set", f"stop/{runtime}", "true")
    else:
        self.run_stop[run.parent_runtime] = True
    return True
def create_label(self, workflow_id, x, y, **kwargs):
    """Create a workflow label at (x, y) and return it with its generated id."""
    workflow = db.fetch("workflow", id=workflow_id)
    label_id = str(uuid4())
    new_label = {
        "positions": [x, y],
        "content": kwargs["text"],
        "alignment": kwargs["alignment"],
    }
    workflow.labels[label_id] = new_label
    return {"id": label_id, **new_label}
def add_instances_in_bulk(self, **kwargs):
    """Attach a batch of instances (by id and/or name) to a relation target."""
    target = db.fetch(kwargs["relation_type"], id=kwargs["relation_id"])
    if target.type == "pool" and not target.manually_defined:
        return {"alert": "Adding objects to a dynamic pool is not allowed."}
    model, property = kwargs["model"], kwargs["property"]
    to_add = set(db.objectify(model, kwargs["instances"]))
    if kwargs["names"]:
        # Names are comma-separated; every one must resolve to an instance.
        for name in (value.strip() for value in kwargs["names"].split(",")):
            found = db.fetch(model, allow_none=True, name=name)
            if not found:
                return {"alert": f"{model.capitalize()} '{name}' does not exist."}
            to_add.add(found)
    # Skip anything already attached to the target.
    to_add -= set(getattr(target, property))
    relation = getattr(target, property)
    for instance in to_add:
        relation.append(instance)
    target.last_modified = self.get_time()
    self.update_rbac(*to_add)
    return {"number": len(to_add), "target": target.base_properties}
def handle(self):
    """Dispatch an incoming syslog datagram to every configured event."""
    source_address = self.client_address[0]
    device = db.fetch("device", allow_none=True, ip_address=source_address)
    # Prefer the device name when the sender's IP maps to a known device.
    log_properties = {
        "source": device.name if device else source_address,
        "content": str(bytes.decode(self.request[0].strip())),
    }
    for event in db.fetch_all("event"):
        event.match_log(**log_properties)
def post(self):
    """REST POST: create a manually-defined pool from device and link names."""
    data = request.get_json(force=True)
    device_ids = [
        db.fetch("device", name=name).id for name in data.get("devices", "")
    ]
    link_ids = [db.fetch("link", name=name).id for name in data.get("links", "")]
    db.factory(
        "pool",
        **{
            "name": data["name"],
            "devices": device_ids,
            "links": link_ids,
            "manually_defined": True,
        },
    )
    return data
def update(self, type, **kwargs):
    """Create or update an instance of `type` and return it serialized.

    Returns {"alert": ...} on failure; the transaction is rolled back.
    """
    try:
        # An empty "id" means the form is creating a brand-new instance.
        must_be_new = kwargs.get("id") == ""
        for arg in ("name", "scoped_name"):
            if arg in kwargs:
                kwargs[arg] = kwargs[arg].strip()
        kwargs["last_modified"] = self.get_time()
        kwargs["creator"] = kwargs["user"] = getattr(current_user, "name", "")
        instance = db.factory(type, must_be_new=must_be_new, **kwargs)
        if kwargs.get("original"):
            # Duplication: copy the original instance's data onto the clone.
            db.fetch(type, id=kwargs["original"]).duplicate(clone=instance)
        db.session.flush()
        return instance.serialized
    except Exception as exc:
        db.session.rollback()
        # Unique-constraint violations get a user-friendly message.
        if isinstance(exc, IntegrityError):
            return {"alert": f"There is already a {type} with the same parameters."}
        return {"alert": str(exc)}
def get_service_logs(self, service, runtime):
    """Return run logs: persisted content when available, live queue otherwise."""
    log_instance = db.fetch(
        "service_log", allow_none=True, runtime=runtime, service_id=service
    )
    if log_instance:
        log = log_instance.content
    else:
        # No persisted log yet: drain the live log queue instead.
        queued = self.log_queue(runtime, service, mode="get")
        log = "\n".join(queued or [])
    # "refresh" tells the client to keep polling while the run is live.
    return {"logs": log, "refresh": not log_instance}
def compare(self, type, id, v1, v2, context_lines):
    """Unified diff between two results or two git versions of device data."""
    if type in ("result", "device_result"):
        first = self.str_dict(db.fetch("result", id=v1).result)
        second = self.str_dict(db.fetch("result", id=v2).result)
    else:
        device = db.fetch("device", id=id)
        # The git lookup also normalizes the version labels.
        data1, v1 = self.get_git_network_data(device.name, v1)
        data2, v2 = self.get_git_network_data(device.name, v2)
        first, second = data1[type], data2[type]
    diff_lines = unified_diff(
        first.splitlines(),
        second.splitlines(),
        fromfile=f"V1 ({v1})",
        tofile=f"V2 ({v2})",
        lineterm="",
        n=int(context_lines),
    )
    return "\n".join(diff_lines)
def post(self):
    """REST endpoint: run a service against named devices, IPs and pools.

    Resolves names to ids, then either runs the service in a background
    thread (when "async" is true) or inline, returning its result.
    """
    data = {
        "trigger": "REST",
        "creator": request.authorization["username"],
        **request.get_json(force=True),
    }
    errors, devices, pools = [], [], []
    # rbac="run": the requesting user must be allowed to run the service.
    service = db.fetch("service", name=data["name"], rbac="run")
    handle_asynchronously = data.get("async", False)
    # Resolve target devices by name.
    for device_name in data.get("devices", ""):
        device = db.fetch("device", name=device_name)
        if device:
            devices.append(device.id)
        else:
            errors.append(
                f"No device with the name '{device_name}'")
    # Resolve target devices by IP address.
    for device_ip in data.get("ip_addresses", ""):
        device = db.fetch("device", ip_address=device_ip)
        if device:
            devices.append(device.id)
        else:
            errors.append(
                f"No device with the IP address '{device_ip}'")
    # Resolve target pools by name.
    for pool_name in data.get("pools", ""):
        pool = db.fetch("pool", name=pool_name)
        if pool:
            pools.append(pool.id)
        else:
            errors.append(f"No pool with the name '{pool_name}'")
    # Any unresolved name aborts the run.
    if errors:
        return {"errors": errors}
    if devices or pools:
        data.update({
            "target_devices": devices,
            "target_pools": pools
        })
    data["runtime"] = runtime = app.get_time()
    if handle_asynchronously:
        # Fire-and-forget thread: return the runtime immediately.
        Thread(target=app.run, args=(service.id, ), kwargs=data).start()
        return {"errors": errors, "runtime": runtime}
    else:
        # Synchronous: run now and include the run's result.
        return {**app.run(service.id, **data), "errors": errors}
def create_label(self, workflow_id, x, y, label_id, **kwargs):
    """Create or update a workflow label; generate an id when "undefined"."""
    workflow = db.fetch("workflow", id=workflow_id, rbac="edit")
    if label_id == "undefined":
        label_id = str(uuid4())
    new_label = {
        "positions": [x, y],
        "content": kwargs["text"],
        "alignment": kwargs["alignment"],
    }
    workflow.labels[label_id] = new_label
    return {"id": label_id, **new_label}