Example #1
 def export_topology(self, **kwargs):
     workbook = Workbook()
     filename = kwargs["export_filename"]
     if "." not in filename:
         filename += ".xls"
     for obj_type in ("device", "link"):
         sheet = workbook.add_sheet(obj_type)
         for index, property in enumerate(table_properties[obj_type]):
             sheet.write(0, index, property)
             for obj_index, obj in enumerate(fetch_all(obj_type), 1):
                 sheet.write(obj_index, index, getattr(obj, property))
     workbook.save(self.path / "files" / "spreadsheets" / filename)
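
The Workbook / add_sheet / write / save calls above follow the xlwt API. As a minimal standalone sketch, assuming xlwt is installed and using two hypothetical columns:

# Minimal sketch assuming xlwt; the "name" and "model" properties are hypothetical.
from xlwt import Workbook

workbook = Workbook()
sheet = workbook.add_sheet("device")
for column, prop in enumerate(("name", "model")):
    sheet.write(0, column, prop)              # header row
    sheet.write(1, column, f"sample {prop}")  # first data row
workbook.save("topology.xls")
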
Example #2
 def calendar_init(self, type):
     results = {}
     for instance in fetch_all(type):
         if getattr(instance, "workflow", None):
             continue
         date = getattr(instance, "next_run_time" if type == "task" else "runtime")
         if date:
             results[instance.name] = {
                 "start": self.convert_date(date),
                 **instance.serialized,
             }
     return results
Example #3
 def handle(self) -> None:
     address = self.client_address[0]
     device = fetch("Device", allow_none=True, ip_address=address)
     properties = {
         "source": device.name if device else address,
         "content": str(bytes.decode(self.request[0].strip())),
     }
     for event in fetch_all("Event"):
         event.match_log(**properties)
     log = factory("Syslog", **properties)
     Session.add(log)
     Session.commit()
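
handle() above has the shape of a socketserver datagram handler: self.request is a (payload, socket) pair and client_address holds the sender's IP. A minimal sketch of how such a handler is typically wired up, assuming UDP and leaving out the database helpers:

# Minimal sketch using the standard library's socketserver; the fetch/factory/
# Session calls from the example above are omitted and port 5140 is arbitrary.
import socketserver

class SyslogHandler(socketserver.BaseRequestHandler):
    def handle(self):
        payload, _socket = self.request       # UDP request: (bytes, socket)
        print(self.client_address[0], payload.strip().decode())

if __name__ == "__main__":
    with socketserver.UDPServer(("0.0.0.0", 5140), SyslogHandler) as server:
        server.serve_forever()
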
Example #4
 def update(self, **kwargs: Any) -> None:
     super().update(**kwargs)
     if kwargs.get("dont_update_pools", False):
         return
     for pool in fetch_all("Pool"):
         if pool.never_update:
             continue
         match = pool.object_match(self)
         if match and self not in pool.devices:
             pool.devices.append(self)
         if self in pool.devices and not match:
             pool.devices.remove(self)
Example #5
 def get_service_state(self, service_id, runtime="latest"):
     state, service = None, fetch("service", id=service_id)
     runs = fetch_all("run", service_id=service_id)
     if runs and runtime != "normal":
         if runtime == "latest":
             runtime = runs[-1].runtime
         state = self.run_db.get(runtime) or fetch("run", runtime=runtime).state
     return {
         "service": service.to_dict(include=["services", "edges"]),
         "runtimes": [(r.runtime, r.creator) for r in runs],
         "state": state,
         "runtime": runtime,
     }
Example #6
 def migration_import(self, folder="migrations", **kwargs):
     status, models = "Import successful.", kwargs["import_export_types"]
     if kwargs.get("empty_database_before_import", False):
         for model in models:
             delete_all(model)
             Session.commit()
     workflow_edges, workflow_services = [], {}
     folder_path = self.path / "files" / folder / kwargs["name"]
     for model in models:
         path = folder_path / f"{model}.yaml"
         if not path.exists():
             continue
         with open(path, "r") as migration_file:
             instances = yaml.load(migration_file)
             if model == "workflow_edge":
                 workflow_edges = deepcopy(instances)
                 continue
             for instance in instances:
                 instance_type = (instance.pop("type")
                                  if model == "service" else model)
                 if instance_type == "workflow":
                     workflow_services[instance["name"]] = instance.pop(
                         "services")
                 try:
                     instance = self.objectify(instance_type, instance)
                     factory(instance_type, **instance)
                     Session.commit()
                 except Exception:
                     info(f"{str(instance)} could not be imported :"
                          f"{chr(10).join(format_exc().splitlines())}")
                     status = "Partial import (see logs)."
     try:
         for name, services in workflow_services.items():
             workflow = fetch("workflow", name=name)
             workflow.services = [
                 fetch("service", name=service_name)
                 for service_name in services
             ]
         Session.commit()
         for edge in workflow_edges:
             for property in ("source", "destination", "workflow"):
                 edge[property] = fetch("service", name=edge[property]).id
             factory("workflow_edge", **edge)
             Session.commit()
         for service in fetch_all("service"):
             service.set_name()
         self.log("info", status)
     except Exception:
         info(chr(10).join(format_exc().splitlines()))
         status = "Partial import (see logs)."
     return status
Example #7
def test_base_services(user_client: FlaskClient) -> None:
    number_of_services = len(fetch_all("Service"))
    user_client.post("/update/NetmikoConfigurationService", data=netmiko_ping)
    assert len(fetch_all("NetmikoConfigurationService")) == 3
    user_client.post("/update/NetmikoFileTransferService",
                     data=file_transfer_service)
    assert len(fetch_all("NetmikoFileTransferService")) == 1
    number_of_napalm_services = len(fetch_all("NapalmGettersService"))
    user_client.post("/update/NapalmGettersService", data=getters_dict)
    assert len(
        fetch_all("NapalmGettersService")) == number_of_napalm_services + 1
    number_of_ansible_services = len(fetch_all("AnsiblePlaybookService"))
    user_client.post("/update/AnsiblePlaybookService", data=ansible_service)
    assert len(
        fetch_all("AnsiblePlaybookService")) == number_of_ansible_services + 1
    assert len(fetch_all("Service")) == number_of_services + 4
Example #8
 def count_models(self):
     return {
         "counters": {
             instance_type: count(instance_type)
             for instance_type in properties["dashboard"]
         },
         "properties": {
             instance_type: Counter(
                 str(getattr(instance, properties["dashboard"][instance_type][0]))
                 for instance in fetch_all(instance_type)
             )
             for instance_type in properties["dashboard"]
         },
     }
Example #9
 def update(self, **kwargs):
     super().update(**kwargs)
     if kwargs.get("dont_update_pools", False):
         return
     for pool in fetch_all("pool"):
         if pool.never_update:
             continue
         match = pool.object_match(self)
         relation, number = f"{self.class_type}s", f"{self.class_type}_number"
         if match and self not in pool.devices:
             getattr(pool, relation).append(self)
             setattr(pool, number, getattr(pool, number) + 1)
         if self in pool.devices and not match:
             getattr(pool, relation).remove(self)
             setattr(pool, number, getattr(pool, number) - 1)
Example #10
 def count_models(self):
     return {
         "counters": {
             instance_type: count(instance_type)
             for instance_type in diagram_classes
         },
         "properties": {
             instance_type: Counter(
                 str(
                     getattr(instance,
                             type_to_diagram_properties[instance_type][0]))
                 for instance in fetch_all(instance_type))
             for instance_type in diagram_classes
         },
     }
Example #11
 def count_models(self) -> dict:
     return {
         "counters": {
             **{cls: count(cls)
                for cls in diagram_classes},
             **{
                 "active-Service": count("Service", status="Running"),
                 "active-Workflow": count("Workflow", status="Running"),
                 "active-Task": count("Task", status="Active"),
             },
         },
         "properties": {
             cls: Counter(
                 str(getattr(instance, type_to_diagram_properties[cls][0]))
                 for instance in fetch_all(cls))
             for cls in diagram_classes
         },
     }
Example #12
 def update_database_configurations_from_git(self):
     for dir in scandir(self.path / "network_data"):
         device = fetch("device", allow_none=True, name=dir.name)
         if not device:
             continue
         with open(Path(dir.path) / "data.yml") as data:
             parameters = yaml.load(data)
             device.update(**{"dont_update_pools": True, **parameters})
         for data in ("configuration", "operational_data"):
             filepath = Path(dir.path) / data
             if not filepath.exists():
                 continue
             with open(filepath) as file:
                 setattr(device, data, file.read())
     Session.commit()
     for pool in fetch_all("pool"):
         if pool.device_configuration or pool.device_operational_data:
             pool.compute_pool()
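
Recent PyYAML versions require an explicit Loader argument for yaml.load; yaml.safe_load avoids that. A small sketch of the same per-directory read, assuming a hypothetical network_data/<device>/data.yml layout:

# Minimal sketch assuming PyYAML and a hypothetical network_data directory layout.
from os import scandir
from pathlib import Path

import yaml

for entry in scandir("network_data"):
    data_file = Path(entry.path) / "data.yml"
    if not data_file.exists():
        continue
    with open(data_file) as data:
        parameters = yaml.safe_load(data)     # no Loader argument needed
    print(entry.name, parameters)
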
Example #13
 def get_service_state(self, path, runtime=None):
     service_id = path.split(">")[-1]
     state, service = None, fetch("service", id=service_id)
     runs = fetch_all("run", service_id=service_id)
     if not runtime:
         runtime = "latest"
     else:
         session["path"] = path
     if runs and runtime != "normal":
         if runtime == "latest":
             runtime = runs[-1].parent_runtime
         state = self.run_db.get(runtime) or fetch("run", runtime=runtime).state
     return {
         "service": service.to_dict(include=["services", "edges"]),
         "runtimes": [(r.parent_runtime, r.creator) for r in runs],
         "state": state,
         "runtime": runtime,
     }
Example #14
 def update(self, **kwargs: Any) -> None:
     if "source_name" in kwargs:
         kwargs["source"] = fetch("Device", name=kwargs.pop("source_name")).id
         kwargs["destination"] = fetch(
             "Device", name=kwargs.pop("destination_name")
         ).id
     kwargs.update(
         {"source_id": kwargs["source"], "destination_id": kwargs["destination"]}
     )
     super().update(**kwargs)
     if kwargs.get("dont_update_pools", False):
         return
     for pool in fetch_all("Pool"):
         if pool.never_update:
             continue
         if pool.object_match(self):
             pool.links.append(self)
         elif self in pool.links:
             pool.links.remove(self)
Example #15
 def update_database_configurations_from_git(self) -> None:
     for dir in scandir(self.path / "git" / "configurations"):
         if dir.name == ".git":
             continue
         device = fetch("Device", allow_none=True, name=dir.name)
         if device:
             with open(Path(dir.path) / "data.yml") as data:
                 parameters = load(data)
                 device.update(**parameters)
                 config_file = Path(dir.path) / dir.name
                 if not config_file.exists():
                     continue
                 with open(config_file) as f:
                     device.current_configuration = device.configurations[
                         str(parameters["last_update"])] = f.read()
     Session.commit()
     for pool in fetch_all("Pool"):
         if pool.device_current_configuration:
             pool.compute_pool()
Example #16
 def export_topology(self, **kwargs):
     workbook = Workbook()
     filename = kwargs["export_filename"]
     if "." not in filename:
         filename += ".xls"
     for obj_type in ("device", "link"):
         sheet = workbook.add_sheet(obj_type)
         for index, property in enumerate(model_properties[obj_type]):
             if property in (
                     "id",
                     "source_id",
                     "destination_id",
                     "configuration",
                     "operational_data",
             ):
                 continue
             sheet.write(0, index, property)
             for obj_index, obj in enumerate(fetch_all(obj_type), 1):
                 sheet.write(obj_index, index, getattr(obj, property))
     workbook.save(self.path / "files" / "spreadsheets" / filename)
Example #17
 def calendar_init(self) -> dict:
     tasks = {}
     for task in fetch_all("Task"):
         # JavaScript months range from 0 to 11, so we subtract 1 from the
         # month for the date to be displayed properly in the calendar
         date = task.next_run_time
         if not date:
             continue
         python_month = search(r".*-(\d{2})-.*", date).group(1)  # type: ignore
         month = "{:02}".format((int(python_month) - 1) % 12)
         js_date = [
             int(i)
             for i in sub(
                 r"(\d+)-(\d+)-(\d+) (\d+):(\d+).*",
                 r"\1," + month + r",\3,\4,\5",
                 date,
             ).split(",")
         ]
         tasks[task.name] = {**task.serialized, **{"date": js_date}}
     return tasks
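
The regular expressions above only shift the month into JavaScript's 0-11 range. Applied to a hypothetical next_run_time of "2020-03-15 10:30:00", the conversion yields [2020, 2, 15, 10, 30]:

# Standalone illustration of the month shift on a sample date string.
from re import search, sub

date = "2020-03-15 10:30:00"  # hypothetical next_run_time value
python_month = search(r".*-(\d{2})-.*", date).group(1)
month = "{:02}".format((int(python_month) - 1) % 12)
js_date = [
    int(i)
    for i in sub(
        r"(\d+)-(\d+)-(\d+) (\d+):(\d+).*", r"\1," + month + r",\3,\4,\5", date
    ).split(",")
]
print(js_date)  # [2020, 2, 15, 10, 30]
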
Example #18
 def topology_import(self, file: BinaryIO) -> str:
     book = open_workbook(file_contents=file.read())
     result = "Topology successfully imported."
     for obj_type in ("Device", "Link"):
         try:
             sheet = book.sheet_by_name(obj_type)
         except XLRDError:
             continue
         properties = sheet.row_values(0)
         for row_index in range(1, sheet.nrows):
             values = {"dont_update_pools": True}
             for index, property in enumerate(properties):
                 func = field_conversion[property_types[property]]
                 values[property] = func(sheet.row_values(row_index)[index])
             try:
                 factory(obj_type, **values).serialized
             except Exception as e:
                 info(f"{str(values)} could not be imported ({str(e)})")
                 result = "Partial import (see logs)."
         Session.commit()
     for pool in fetch_all("Pool"):
         pool.compute_pool()
     return result
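
topology_import reads the spreadsheet back with what looks like the xlrd API (open_workbook, sheet_by_name, row_values). A minimal reading sketch, assuming an .xls file with a "device" sheet like the one produced by the export examples:

# Minimal sketch assuming xlrd; "topology.xls" is a hypothetical file name.
from xlrd import open_workbook

book = open_workbook("topology.xls")
sheet = book.sheet_by_name("device")
properties = sheet.row_values(0)  # header row
for row_index in range(1, sheet.nrows):
    row = dict(zip(properties, sheet.row_values(row_index)))
    print(row)
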
Example #19
 def get_workflow_services(self, id, node):
     parents = list(self.get_parent_workflows(fetch("workflow", id=id)))
     if node == "all":
         return [{
             "id": "standalone",
             "text": "Standalone services",
             "children": True,
             "state": {
                 "disabled": True
             },
             "a_attr": {
                 "class": "no_checkbox",
                 "style": "color: #000000"
             },
             "type": "category",
         }] + sorted(
             ({
                 "data": {
                     "id": workflow.id
                 },
                 "text": workflow.name,
                 "children": True,
                 "type": "workflow",
                 "state": {
                     "disabled": workflow in parents
                 },
                 "a_attr": {
                     "class": "no_checkbox" if workflow in parents else "",
                     "style": "color: #6666FF",
                 },
             } for workflow in fetch_all("workflow")
              if not workflow.workflows),
             key=itemgetter("text"),
         )
     elif node == "standalone":
         return sorted(
             ({
                 "data": {
                     "id": service.id
                 },
                 "text": service.scoped_name,
                 "a_attr": {
                     "style":
                     (f"color: #{'FF1694' if service.shared else '6666FF'}"
                      ),
                 },
             } for service in fetch_all("service")
              if not service.workflows and service.type != "workflow"),
             key=itemgetter("text"),
         )
     else:
         return sorted(
             ({
                 "data": {
                     "id": service.id
                 },
                 "text": service.scoped_name,
                 "children": service.type == "workflow",
                 "type":
                 "workflow" if service.type == "workflow" else "service",
                 "state": {
                     "disabled": service in parents
                 },
                 "a_attr": {
                     "class":
                     "no_checkbox" if service in parents else "",
                     "style":
                     (f"color: #{'FF1694' if service.shared else '6666FF'}"
                      ),
                 },
             } for service in fetch("workflow", id=node).services
              if service.scoped_name not in ("Start", "End")),
             key=itemgetter("text"),
         )
Example #20
 def get_cluster_status(self):
     return {
         attr: [getattr(server, attr) for server in fetch_all("server")]
         for attr in ("status", "cpu_load")
     }
Example #21
def test_object_creation_europe(user_client: FlaskClient) -> None:
    create_from_file(user_client, "europe.xls")
    assert len(fetch_all("Device")) == 33
    assert len(fetch_all("Link")) == 49
Example #22
 def get_all(self, instance_type):
     return [
         instance.get_properties() for instance in fetch_all(instance_type)
     ]
Example #23
def test_object_creation_type(user_client: FlaskClient) -> None:
    create_from_file(user_client, "device_counters.xls")
    assert len(fetch_all("Device")) == 27
    assert len(fetch_all("Link")) == 0
Example #24
 def counters(self, property: str, type: str) -> Counter:
     return Counter(str(getattr(instance, property)) for instance in fetch_all(type))
Example #25
 def get_view_topology(self) -> dict:
     return {
         "devices": [d.view_properties for d in fetch_all("Device")],
         "links": [d.view_properties for d in fetch_all("Link")],
     }
Example #26
 def update_all_pools(self) -> None:
     for pool in fetch_all("Pool"):
         pool.compute_pool()
Example #27
 def workflow_name_update(workflow: Base, new_name: str, old_name: str,
                          *args: Any) -> None:
     for job in fetch_all("Job"):
         if old_name in job.positions:
             job.positions[new_name] = job.positions.pop(old_name)
Example #28
 def workflow_name_update(workflow, new_name, old_name, *args):
     for service in fetch_all("service"):
         if old_name in service.positions:
             service.positions[new_name] = service.positions.pop(old_name)
Example #29
 def get_top_level_workflows(self):
     return [
         workflow.get_properties() for workflow in fetch_all("workflow")
         if not workflow.workflows
     ]
Example #30
 def get_all(self, cls):
     return [instance.get_properties() for instance in fetch_all(cls)]
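
None of the examples show the fetch_all / fetch / count / factory helpers themselves. Purely as an assumption, here is a minimal sketch of what SQLAlchemy-backed versions might look like, using a module-level scoped session and a hypothetical name-to-class registry:

# Hypothetical sketch only, not the actual implementation behind the examples above.
from sqlalchemy.orm import scoped_session, sessionmaker

Session = scoped_session(sessionmaker())  # assumed to be bound to an engine elsewhere
models = {}                               # hypothetical registry: model name -> mapped class

def fetch_all(model, **kwargs):
    # Every instance of the model matching the given column values.
    return Session.query(models[model]).filter_by(**kwargs).all()

def fetch(model, allow_none=False, **kwargs):
    # A single matching instance; optionally tolerate a missing one.
    instance = Session.query(models[model]).filter_by(**kwargs).first()
    if instance is None and not allow_none:
        raise LookupError(f"No {model} found matching {kwargs}")
    return instance

def count(model, **kwargs):
    return Session.query(models[model]).filter_by(**kwargs).count()

def factory(model, **kwargs):
    # Create the instance if it does not exist yet, then update its properties.
    instance = fetch(model, allow_none=True, name=kwargs.get("name"))
    if instance is None:
        instance = models[model]()
        Session.add(instance)
    for key, value in kwargs.items():
        setattr(instance, key, value)
    return instance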