Example #1
 def query_opennms(self, **kwargs: str) -> None:
     login, password = self.opennms_login, kwargs["password"]
     Session.commit()
     json_devices = http_get(
         self.opennms_devices,
         headers={"Accept": "application/json"},
         auth=(login, password),
     ).json()["node"]
     devices = {
         device["id"]: {
             "name": device.get("label", device["id"]),
             "description": device["assetRecord"].get("description", ""),
             "location": device["assetRecord"].get("building", ""),
             "vendor": device["assetRecord"].get("manufacturer", ""),
             "model": device["assetRecord"].get("modelNumber", ""),
             "operating_system": device.get("operatingSystem", ""),
             "os_version": device["assetRecord"].get("sysDescription", ""),
             "longitude": device["assetRecord"].get("longitude", 0.0),
             "latitude": device["assetRecord"].get("latitude", 0.0),
         }
         for device in json_devices
     }
     for device in list(devices):
         link = http_get(
             f"{self.opennms_rest_api}/nodes/{device}/ipinterfaces",
             headers={"Accept": "application/json"},
             auth=(login, password),
         ).json()
         for interface in link["ipInterface"]:
             if interface["snmpPrimary"] == "P":
                 devices[device]["ip_address"] = interface["ipAddress"]
                 factory("Device", **devices[device])
Example #2
 def import_jobs(self, **kwargs: Any) -> None:
     jobs = kwargs["jobs_to_import"]
     path = self.path / "projects" / "exported_jobs"
     for file in scandir(path / "services"):
         if file.name == ".gitkeep" or file.name not in jobs:
             continue
         with open(file.path, "r") as instance_file:
             instance = yaml.load(instance_file)
             model = instance.pop("type")
             factory(model, **self.objectify(model, instance))
     Session.commit()
     for workflow in listdir(path / "workflows"):
         if workflow == ".gitkeep" or workflow not in jobs:
             continue
         workflow_name = workflow.split(".")[0]
         with open_tar(path / "workflows" / workflow) as tar_file:
             tar_file.extractall(path=path / "workflows")
         for instance_type in ("jobs", "workflow", "edges"):
             path_job = path / "workflows" / workflow_name / instance_type
             for file in scandir(path_job):
                 with open(path_job / file.name, "r") as instance_file:
                     instance = yaml.load(instance_file)
                     model = instance.pop("type")
                     factory(model, **self.objectify(model, instance))
             Session.commit()
         rmtree(path / "workflows" / workflow_name)
Example #3
 def migration_import(self, folder="migrations", **kwargs):
     status, models = "Import successful.", kwargs["import_export_types"]
     skip_update_pools_after_import = kwargs.get(
         "skip_update_pools_after_import", False
     )
     if kwargs.get("empty_database_before_import", False):
         for model in models:
             delete_all(model)
             Session.commit()
     workflow_edges, workflow_services = [], {}
     folder_path = self.path / "files" / folder / kwargs["name"]
     for model in models:
         path = folder_path / f"{model}.yaml"
         if not path.exists():
             continue
         with open(path, "r") as migration_file:
             instances = yaml.load(migration_file)
             if model == "workflow_edge":
                 workflow_edges = deepcopy(instances)
                 continue
             for instance in instances:
                 instance_type = (
                     instance.pop("type") if model == "service" else model
                 )
                 if instance_type == "workflow":
                     workflow_services[instance["name"]] = instance.pop("services")
                 try:
                     instance = self.objectify(instance_type, instance)
                     factory(
                         instance_type, **{"dont_update_pools": True, **instance}
                     )
                     Session.commit()
                 except Exception:
                     info(
                         f"{str(instance)} could not be imported :"
                         f"{chr(10).join(format_exc().splitlines())}"
                     )
                     status = "Partial import (see logs)."
     try:
         for name, services in workflow_services.items():
             workflow = fetch("workflow", name=name)
             workflow.services = [
                 fetch("service", name=service_name) for service_name in services
             ]
         Session.commit()
         for edge in workflow_edges:
             for property in ("source", "destination", "workflow"):
                 edge[property] = fetch("service", name=edge[property]).id
             factory("workflow_edge", **edge)
             Session.commit()
         for service in fetch_all("service"):
             service.set_name()
         if not skip_update_pools_after_import:
             for pool in fetch_all("pool"):
                 pool.compute_pool()
         self.log("info", status)
     except Exception:
         info(chr(10).join(format_exc().splitlines()))
         status = "Partial import (see logs)."
     return status
Example #4
 def topology_import(self, file):
     book = open_workbook(file_contents=file.read())
     status = "Topology successfully imported."
     for obj_type in ("device", "link"):
         try:
             sheet = book.sheet_by_name(obj_type)
         except XLRDError:
             continue
         properties = sheet.row_values(0)
         for row_index in range(1, sheet.nrows):
             values = {"dont_update_pools": True}
             for index, property in enumerate(properties):
                 if not property:
                     continue
                 func = field_conversion[property_types.get(property, "str")]
                 values[property] = func(sheet.row_values(row_index)[index])
             try:
                 factory(obj_type, **values).serialized
             except Exception as exc:
                 info(f"{str(values)} could not be imported ({str(exc)})")
                 status = "Partial import (see logs)."
         Session.commit()
     for pool in fetch_all("pool"):
         pool.compute_pool()
     self.log("info", status)
     return status
Example #5
 def duplicate_workflow(self, workflow_id, **kwargs):
     parent_workflow = fetch("workflow", id=workflow_id)
     new_workflow = factory("workflow", **kwargs)
     Session.commit()
     for service in parent_workflow.services:
         new_workflow.services.append(service)
         service.positions[new_workflow.name] = service.positions[
             parent_workflow.name]
     Session.commit()
     for edge in parent_workflow.edges:
         subtype, src, destination = edge.subtype, edge.source, edge.destination
         new_workflow.edges.append(
             factory(
                 "workflow_edge",
                 **{
                     "name": f"{new_workflow.id}-{subtype}:{src.id}->{destination.id}",
                     "workflow": new_workflow.id,
                     "subtype": subtype,
                     "source": src.id,
                     "destination": destination.id,
                 },
             )
         )
     return new_workflow.serialized
Example #6
 def duplicate_workflow(self, workflow_id: int, **kwargs: Any) -> dict:
     parent_workflow = fetch("Workflow", id=workflow_id)
     new_workflow = factory("Workflow", **kwargs)
     Session.commit()
     for job in parent_workflow.jobs:
         new_workflow.jobs.append(job)
         job.positions[new_workflow.name] = job.positions[
             parent_workflow.name]
     Session.commit()
     for edge in parent_workflow.edges:
         subtype, src, destination = edge.subtype, edge.source, edge.destination
         new_workflow.edges.append(
             factory(
                 "WorkflowEdge",
                 **{
                     "name": f"{new_workflow.id}-{subtype}:{src.id}->{destination.id}",
                     "workflow": new_workflow.id,
                     "subtype": subtype,
                     "source": src.id,
                     "destination": destination.id,
                 },
             )
         )
     return new_workflow.serialized
Example #7
 def create_result(self, results: dict, device: Optional["Device"] = None) -> None:
     self.success = results["success"]
     result_kw = {"run": self, "result": results}
     if device:
         result_kw["device"] = device.id
     factory("Result", **result_kw)
Example #8
 def configure_server_id(self):
     factory(
         "server",
         **{
             "name": str(getnode()),
             "description": "Localhost",
             "ip_address": "0.0.0.0",
             "status": "Up",
         },
     )
Example #9
 def log(self, severity, content):
     factory(
         "changelog",
         **{
             "severity": severity,
             "content": content,
             "user": getattr(current_user, "name", "admin"),
         },
     )
     self.log_severity[severity](content)
Example #10
 def cluster_monitoring(self, run: "Run", payload: dict) -> dict:
     protocol = controller.cluster_scan_protocol
     for instance in fetch_all("Instance"):
         factory(
             "Instance",
             **get(
                 f"{protocol}://{instance.ip_address}/rest/is_alive",
                 timeout=controller.cluster_scan_timeout,
             ).json(),
         )
     return {"success": True}
Example #11
 def cluster_monitoring(self, run, payload):
     protocol = app.settings["cluster"]["scan_protocol"]
     for instance in fetch_all("instance"):
         factory(
             "instance",
             **get(
                 f"{protocol}://{instance.ip_address}/rest/is_alive",
                 timeout=app.settings["cluster"]["scan_timeout"],
             ).json(),
         )
     return {"success": True}
Example #12
 def scan_cluster(self, **kwargs: Union[float, str]) -> None:
     for ip_address in IPv4Network(self.cluster_scan_subnet):
         try:
             server = http_get(
                 f"{self.cluster_scan_protocol}://{ip_address}/rest/is_alive",
                 timeout=self.cluster_scan_timeout,
             ).json()
             if self.cluster_id != server.pop("cluster_id"):
                 continue
             factory("Server", **{**server, **{"ip_address": str(ip_address)}})
         except ConnectionError:
             continue
Example #13
 def scan_cluster(self, **kwargs):
     protocol = self.config["cluster"]["scan_protocol"]
     for ip_address in IPv4Network(self.config["cluster"]["scan_subnet"]):
         try:
             server = http_get(
                 f"{protocol}://{ip_address}/rest/is_alive",
                 timeout=self.config["cluster"]["scan_timeout"],
             ).json()
             if self.config["cluster"]["id"] != server.pop("cluster_id"):
                 continue
             factory("server", **{**server, **{"ip_address": str(ip_address)}})
         except ConnectionError:
             continue
Example #14
 def create_result(self, results, device=None):
     self.success = results["success"]
     result_kw = {
         "run": self,
         "result": results,
         "service": self.service_id
     }
     if self.service.type == "workflow":
         result_kw["workflow"] = self.service_id
     elif self.workflow_id:
         result_kw["workflow"] = self.workflow_id
     if device:
         result_kw["device"] = device.id
     factory("result", **result_kw)
Example #15
File: automation.py (project mww012/eNMS)
 def create_result(self, results, device=None):
     self.success = results["success"]
     result_kw = {
         "run": self,
         "result": results,
         "service": self.service_id,
         "parent_runtime": self.parent_runtime,
     }
     if self.workflow_id:
         result_kw["workflow"] = self.workflow_id
     if self.parent_device_id:
         result_kw["parent_device"] = self.parent_device_id
     if device:
         result_kw["device"] = device.id
     factory("result", **result_kw)
Example #16
 def device_iteration(self, payload, device):
     derived_devices = self.compute_devices_from_query(
         self.iteration_devices, self.iteration_devices_property,
         **locals())
     derived_run = factory(
         "run",
         **{
             "service": self.service.id,
             "devices": [derived_device.id for derived_device in derived_devices],
             "workflow": self.workflow.id,
             "parent_device": device.id,
             "restart_run": self.restart_run,
             "parent": self,
             "parent_runtime": self.parent_runtime,
         },
     )
     derived_run.properties = self.properties
     return derived_run.run(payload)["success"]
Example #17
 def duplicate(self, workflow=None, clone=None):
     if not clone:
         clone = super().duplicate(workflow)
     clone_services = {}
     Session.commit()
     for service in self.services:
         if service.shared:
             service_clone = service
             if service not in clone.services:
                 clone.services.append(service)
         else:
             service_clone = service.duplicate(clone)
         service_clone.positions[clone.name] = service.positions.get(
             self.name, (0, 0))
         clone_services[service.id] = service_clone
     Session.commit()
     for edge in self.edges:
         clone.edges.append(
             factory(
                 "workflow_edge",
                 **{
                     "workflow": clone.id,
                     "subtype": edge.subtype,
                     "source": clone_services[edge.source.id].id,
                     "destination": clone_services[edge.destination.id].id,
                 },
             ))
         Session.commit()
     return clone
Example #18
 def handoffssh(self, id, **kwargs):
     device = fetch("device", id=id)
     credentials = (
         (device.username, device.password)
         if kwargs["credentials"] == "device"
         else (current_user.name, current_user.password)
         if kwargs["credentials"] == "user"
         else (kwargs["username"], kwargs["password"])
     )
     uuid, port = str(uuid4()), self.get_ssh_port()
     session = factory(
         "session",
         name=uuid,
         user=current_user.name,
         timestamp=self.get_time(),
         device=device.id,
     )
     Session.commit()
     try:
         ssh_connection = SshConnection(device.ip_address, *credentials,
                                        session.id, uuid, port)
         Thread(
             target=ssh_connection.start_session,
             args=(session.id, uuid, port),
         ).start()
         return {
             "port": port,
             "username": uuid,
             "device_name": device.name,
             "device_ip": device.ip_address,
         }
     except Exception as exc:
         return {"error": exc.args}
Example #19
File: automation.py (project mww012/eNMS)
 def run(service, **kwargs):
     run_kwargs = {
         key: kwargs.pop(key)
         for key in (
             "creator",
             "restart_path",
             "start_services",
             "runtime",
             "task",
             "devices",
             "pools",
         ) if kwargs.get(key)
     }
     restart_run = fetch(
         "run",
         allow_none=True,
         runtime=kwargs.get("restart_runtime"),
     )
     if restart_run:
         run_kwargs["restart_run"] = restart_run
     initial_payload = fetch("service", id=service).initial_payload
     run = factory("run", service=service, **run_kwargs)
     run.properties = kwargs
     payload = {**initial_payload, **kwargs}
     return run.run(payload)
Example #20
File: automation.py (project mww012/eNMS)
 def device_iteration(self, payload, device):
     derived_devices = self.compute_devices_from_query(
         self.service.iteration_devices,
         self.service.iteration_devices_property,
         **locals(),
     )
     derived_run = factory(
         "run",
         **{
             "service": self.service.id,
             "devices": [derived_device.id for derived_device in derived_devices],
             "workflow": self.workflow.id,
             "parent_device": device.id,
             "restart_run": self.restart_run,
             "parent": self,
             "parent_runtime": self.parent_runtime,
         },
     )
     derived_run.properties = self.properties
     success = derived_run.run(payload)["success"]
     key = "success" if success else "failure"
     self.run_state["summary"][key].append(device.name)
     return success
Example #21
File: base.py (project swamim/eNMS)
 def update(self, instance_type, **kwargs):
     try:
         must_be_new = kwargs.get("id") == ""
         for arg in ("name", "scoped_name"):
             if arg in kwargs:
                 kwargs[arg] = kwargs[arg].strip()
         kwargs["last_modified"] = self.get_time()
         kwargs["creator"] = kwargs["user"] = getattr(
             current_user, "name", "admin")
         instance = factory(instance_type,
                            must_be_new=must_be_new,
                            **kwargs)
         if kwargs.get("original"):
             fetch(instance_type,
                   id=kwargs["original"]).duplicate(clone=instance)
         Session.flush()
         return instance.serialized
     except Exception as exc:
         Session.rollback()
         if isinstance(exc, IntegrityError):
             return {
                 "alert": f"There already is a {instance_type} with the same name"
             }
         return {"alert": str(exc)}
Example #22
File: rest.py (project swamim/eNMS)
 def post(self):
     data = request.get_json(force=True)
     factory(
         "pool",
         **{
             "name": data["name"],
             "devices": [
                 fetch("device", name=name).id for name in data.get("devices", "")
             ],
             "links": [
                 fetch("link", name=name).id for name in data.get("links", "")
             ],
             "never_update": True,
         },
     )
     Session.commit()
     return data
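For reference, a request body that would drive the handler above could look like the following sketch; the pool, device, and link names are purely illustrative (they are not part of the original source) and would have to match objects that already exist in the database.

# Illustrative request payload for the pool-creation endpoint above
# (all names below are hypothetical examples).
payload = {
    "name": "core-routers",
    "devices": ["router1", "router2"],
    "links": ["router1-router2"],
}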
Example #23
 def duplicate(self, **kwargs):
     properties = {
         k: v
         for (k, v) in self.get_properties().items()
         if k not in ("id", "name")
     }
     instance = factory(self.type, **{**properties, **kwargs})
     return instance
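A minimal usage sketch for the method above, assuming an existing instance fetched by name and a new name passed as a keyword override; the identifiers below are illustrative only.

# Hypothetical usage of duplicate(): copy an instance under a new name.
original = fetch("service", name="backup_config")
clone = original.duplicate(name="backup_config_copy")
Session.commit()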
Example #24
 def post(self, cls: str) -> dict:
     try:
         data = request.get_json(force=True)
         object_data = controller.objectify(cls.capitalize(), data)
         result = factory(cls, **object_data).serialized
         Session.commit()
         return result
     except Exception as exc:
         return abort(500, message=f"Update failed ({exc})")
Example #25
 def post(self, cls):
     try:
         data = request.get_json(force=True)
         object_data = app.objectify(cls, data)
         result = factory(cls, **object_data).serialized
         Session.commit()
         return result
     except Exception as exc:
         return abort(500, message=f"Update failed ({exc})")
Example #26
 def query_netbox(self, **kwargs: str) -> None:
     nb = netbox_api(kwargs["netbox_address"], token=kwargs["netbox_token"])
     for device in nb.dcim.devices.all():
         device_ip = device.primary_ip4 or device.primary_ip6
         factory(
             "Device",
             **{
                 "name": device.name,
                 "ip_address": str(device_ip).split("/")[0],
                 "subtype": str(device.device_role),
                 "model": str(device.device_type),
                 "location": str(device.site),
                 "vendor": str(device.device_type.manufacturer),
                 "operating_system": str(device.platform),
                 "longitude": str(nb.dcim.sites.get(name=device.site).longitude),
                 "latitude": str(nb.dcim.sites.get(name=device.site).latitude),
             },
         )
Example #27
def run_job(job: int, **kwargs: Any) -> dict:
    run_kwargs = {
        key: kwargs.pop(key)
        for key in ("creator", "runtime", "task", "restart_runtime")
        if kwargs.get(key)
    }
    run = factory("Run", job=job, **run_kwargs)
    run.properties = kwargs
    return run.run(kwargs.get("payload"))
Example #28
File: base.py (project ammoam/eNMS)
 def update(self, cls: str, **kwargs: Any) -> dict:
     try:
         instance = factory(cls, **kwargs)
         Session.commit()
         return instance.serialized
     except JSONDecodeError:
         return {"error": "Invalid JSON syntax (JSON field)"}
     except IntegrityError:
         return {"error": "An object with the same name already exists"}
Example #29
 def migration_import(self, **kwargs: Any) -> str:
     status, types = "Import successful.", kwargs["import_export_types"]
     if kwargs.get("empty_database_before_import", False):
         for type in types:
             delete_all(type)
             Session.commit()
     workflow_edges: list = []
     for cls in types:
         path = (self.path / "projects" / "migrations" / kwargs["name"] /
                 f"{cls}.yaml")
         with open(path, "r") as migration_file:
             objects = yaml.load(migration_file)
             if cls == "Workflow":
                 workflow_jobs = {
                     workflow["name"]: workflow.pop("jobs")
                     for workflow in objects
                 }
             if cls == "WorkflowEdge":
                 workflow_edges = deepcopy(objects)
             if cls == "Service":
                 objects.sort(key=lambda s: s["type"] == "IterationService")
             for obj in objects:
                 obj_cls = obj.pop("type") if cls == "Service" else cls
                 obj = self.objectify(obj_cls, obj)
                 try:
                     factory(obj_cls, **obj)
                     Session.commit()
                 except Exception as e:
                     info(f"{str(obj)} could not be imported ({str(e)})")
                     if cls in ("Service", "Workflow"):
                         Session.commit()
                     status = "Partial import (see logs)."
                 if cls not in ("Service", "Workflow"):
                     Session.commit()
     for name, jobs in workflow_jobs.items():
         fetch("Workflow",
               name=name).jobs = [fetch("Job", name=name) for name in jobs]
         Session.commit()
     for edge in workflow_edges:
         for property in ("source", "destination", "workflow"):
             edge[property] = fetch("Job", name=edge[property]).id
         factory("WorkflowEdge", **edge)
         Session.commit()
     return status
Example #30
 def migration_import(self, folder="migrations", **kwargs):
     status, models = "Import successful.", kwargs["import_export_types"]
     if kwargs.get("empty_database_before_import", False):
         for model in models:
             delete_all(model)
             Session.commit()
     workflow_edges, workflow_services = [], {}
     folder_path = self.path / "projects" / folder / kwargs["name"]
     for model in models:
         path = folder_path / f"{model}.yaml"
         if not path.exists():
             continue
         with open(path, "r") as migration_file:
             instances = yaml.load(migration_file)
             if model == "workflow_edge":
                 workflow_edges = deepcopy(instances)
                 continue
             for instance in instances:
                 instance_type = (instance.pop("type")
                                  if model == "service" else model)
                 if instance_type == "workflow":
                     workflow_services[instance["name"]] = instance.pop(
                         "services")
                 instance = self.objectify(instance_type, instance)
                 try:
                     factory(instance_type, **instance)
                     Session.commit()
                 except Exception as e:
                     info(
                         f"{str(instance)} could not be imported ({str(e)})"
                     )
                     status = "Partial import (see logs)."
     for name, services in workflow_services.items():
         fetch("workflow", name=name).services = [
             fetch("service", name=service_name)
             for service_name in services
         ]
         Session.commit()
     for edge in workflow_edges:
         for property in ("source", "destination", "workflow"):
             edge[property] = fetch("service", name=edge[property]).id
         factory("workflow_edge", **edge)
         Session.commit()
     return status
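All of the examples above funnel keyword arguments through factory(model, **kwargs). The sketch below only captures the fetch-or-create contract these call sites appear to rely on; it is an assumption inferred from the usage shown here, not the actual eNMS implementation, and the models registry and update() call are illustrative names.

# Sketch only: the behavior the call sites above appear to assume from factory().
def factory_sketch(model, **kwargs):
    # Reuse an existing instance matched by name, otherwise create a new one.
    instance = fetch(model, allow_none=True, name=kwargs.get("name"))
    if instance:
        instance.update(**kwargs)  # assumed update-by-keyword method
    else:
        instance = models[model](**kwargs)  # "models" registry is illustrative
        Session.add(instance)
    return instance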