Example #1
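 # Update instance attributes from keyword arguments, resolving relationship and
 # boolean values; refresh last_modified with controller.get_time() if anything changed.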
 def update(self, **kwargs: Any) -> None:
     relation = relationships[self.__tablename__]
     modified = False
     for property, value in kwargs.items():
         if not hasattr(self, property):
             continue
         property_type = property_types.get(property, None)
         if property in relation:
             if relation[property]["list"]:
                 value = objectify(relation[property]["model"], value)
             else:
                 value = fetch(relation[property]["model"], id=value)
         if property_type == "bool":
             value = value not in (False, "false")
         old_value = getattr(self, property)
         if old_value != value:
             if (
                 isinstance(value, list)
                 and old_value
                 and set(value) == set(old_value)
             ):
                 continue
             modified = True
             setattr(self, property, value)
     if modified:
         self.last_modified = controller.get_time()
Example #2
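 # Dump every job's logs to JSON files, archive the folder as a .tgz, and upload it
 # to the device over SSH; optionally delete the local folder and archive afterwards.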
 def job(self,
         payload: dict,
         device: Device,
         parent: Optional[Job] = None) -> dict:
     path_backup = Path.cwd() / "logs" / "job_logs"
     now = controller.strip_all(controller.get_time())
     path_dir = path_backup / f"logs_{now}"
     source = path_backup / f"logs_{now}.tgz"
     makedirs(path_dir)
     for job in fetch_all("Job"):
         with open(path_dir / f"{job.name}.json", "w") as log_file:
             dump(job.logs, log_file)
     with open_tar(source, "w:gz") as tar:
         tar.add(path_dir, arcname="/")
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False,
     )
     destination = f"{self.sub(self.destination_path, locals())}/logs_{now}.tgz"
     self.transfer_file(ssh_client, [(source, destination)])
     ssh_client.close()
     if self.delete_folder:
         rmtree(path_dir)
     if self.delete_archive:
         remove(source)
     return {
         "success": True,
         "result": f"logs stored in {destination} ({device.ip_address})",
     }
Example #3
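 # Export a timestamped migration backup, archive it, transfer it to the device
 # over SSH, and optionally clean up the local folder and archive.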
 def job(self, run: "Run", payload: dict, device: Device) -> dict:
     now = controller.strip_all(controller.get_time())
     source = Path.cwd() / "projects" / "migrations" / f"backup_{now}.tgz"
     controller.migrate_export(
         Path.cwd(), {"import_export_types": import_classes, "name": f"backup_{now}"}
     )
     with open_tar(source, "w:gz") as tar:
         tar.add(
             Path.cwd() / "projects" / "migrations" / f"backup_{now}", arcname="/"
         )
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False,
     )
     destination = f"{run.sub(run.destination_path, locals())}/backup_{now}.tgz"
     run.transfer_file(ssh_client, [(source, destination)])
     ssh_client.close()
     if run.delete_folder:
         rmtree(Path.cwd() / "projects" / "migrations" / f"backup_{now}")
     if run.delete_archive:
         remove(source)
     return {
         "success": True,
         "result": f"backup stored in {destination} ({device.ip_address})",
     }
Example #4
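 # Run the job: initialize the run, build the results, store them (with the job's
 # logs) under the current runtime, and finalize the run.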
 def run(
     self,
     payload: Optional[dict] = None,
     targets: Optional[Set["Device"]] = None,
     parent: Optional["Job"] = None,
     task: Optional["Task"] = None,
     origin: Optional["Job"] = None,
 ) -> Tuple[dict, str]:
     current_job = parent or self
     runtime = controller.get_time()
     self.init_run(parent)
     results = self.build_results(payload, targets, parent, origin)
     self.results[runtime] = {**results, "logs": list(current_job.logs)}
     self.end_run(runtime, results, parent, task)
     return results, runtime
Example #5
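 # REST endpoint: resolve device and pool names from the request payload into ids,
 # then run the job immediately or schedule it via the controller's scheduler when "async" is set.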
 def post(self) -> Union[str, dict]:
     try:
         errors, data = [], request.get_json(force=True)
         devices, pools = [], []
         job = fetch("Job", name=data["name"])
         handle_asynchronously = data.get("async", False)
         for device_name in data.get("devices", ""):
             device = fetch("Device", name=device_name)
             if device:
                 devices.append(device.id)
             else:
                 errors.append(f"No device with the name '{device_name}'")
         for device_ip in data.get("ip_addresses", ""):
             device = fetch("Device", ip_address=device_ip)
             if device:
                 devices.append(device.id)
             else:
                 errors.append(f"No device with the IP address '{device_ip}'")
         for pool_name in data.get("pools", ""):
             pool = fetch("Pool", name=pool_name)
             if pool:
                 pools.append(pool.id)
             else:
                 errors.append(f"No pool with the name '{pool_name}'")
         if errors:
             return {"errors": errors}
     except Exception as e:
         info(f"REST API run_job endpoint failed ({str(e)})")
         return str(e)
     if devices or pools:
         data.update({"devices": devices, "pools": pools})
     data["runtime"] = runtime = controller.get_time()
     if handle_asynchronously:
         controller.scheduler.add_job(
             id=runtime,
             func=run_job,
             run_date=datetime.now(),
             args=[job.id],
             kwargs=data,
             trigger="date",
         )
         return {"errors": errors, "runtime": runtime}
     else:
         return {**run_job(job.id, **data), "errors": errors}
Example #6
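 # REST endpoint: build the set of target devices from names, IP addresses, and pools,
 # then run the job synchronously or schedule it as a threaded job when "async" is set.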
 def post(self) -> Union[str, dict]:
     try:
         errors, targets, data = [], set(), request.get_json(force=True)
         job = fetch("Job", name=data["name"])
         if job.is_running:
             return {"error": "Job is already running."}
         handle_asynchronously = data.get("async", False)
         for device_name in data.get("devices", ""):
             device = fetch("Device", name=device_name)
             if device:
                 targets.add(device)
             else:
                 errors.append(f"No device with the name '{device_name}'")
         for device_ip in data.get("ip_addresses", ""):
             device = fetch("Device", ip_address=device_ip)
             if device:
                 targets.add(device)
             else:
                 errors.append(f"No device with the IP address '{device_ip}'")
         for pool_name in data.get("pools", ""):
             pool = fetch("Pool", name=pool_name)
             if pool:
                 targets |= set(pool.devices)
             else:
                 errors.append(f"No pool with the name '{pool_name}'")
         if errors and not targets:
             return {"errors": errors}
     except Exception as e:
         info(f"REST API run_job endpoint failed ({str(e)})")
         return str(e)
     if handle_asynchronously:
         controller.scheduler.add_job(
             id=controller.get_time(),
             func=threaded_job,
             run_date=datetime.now(),
             args=[job.id, None, [d.id for d in targets], data.get("payload")],
             trigger="date",
         )
         return {**job.serialized, "errors": errors}
     else:
         return {
             **job.run(targets=targets, payload=data.get("payload"))[0],
             "errors": errors,
         }
Example #7
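 # Run the job for the device (or once per iteration target), timestamp the results
 # with controller.get_time(), and update the run's completed/failed counters.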
 def get_results(self,
                 payload: dict,
                 device: Optional["Device"] = None) -> dict:
     self.log(
         "info",
         f"Running {self.job.type}{f' on {device.name}' if device else ''}")
     results: Dict[Any, Any] = {"runtime": controller.get_time()}
     try:
         args = (device, ) if device else ()
         if self.job.iteration_targets:
             targets_results = {
                 target: self.job.job(self, {**payload, "value": target}, *args)
                 for target in self.eval(self.job.iteration_targets, **locals())
             }
             results.update({
                 "results": targets_results,
                 "success": all(r["success"] for r in targets_results.values()),
             })
         else:
             results.update(self.job.job(self, payload, *args))
     except Exception:
         results.update({
             "success": False,
             "result": chr(10).join(format_exc().splitlines())
         })
     self.log(
         "info",
         f"Finished running {self.job.type} '{self.job.name}'"
         f"({'SUCCESS' if results['success'] else 'FAILURE'})"
         f"{f' on {device.name}' if device else ''}",
     )
     completed, failed = self.get_state("completed"), self.get_state("failed")
     self.set_state(failed=failed + 1 - results["success"], completed=completed + 1)
     return results
Example #8
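 # Execute the run with logging and error handling; on completion record the status,
 # end time, state, and logs, store the result, and send a notification if configured.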
 def run(self, payload: Optional[dict] = None) -> dict:
     try:
         self.log("info", f"{self.job.type} {self.job.name}: Starting")
         self.set_state(status="Running", type=self.job.type)
         controller.job_db[self.job.id]["runs"] += 1
         Session.commit()
         results = self.job.build_results(self, payload or self.initial_payload)
         self.close_connection_cache()
         self.log("info", f"{self.job.type} {self.job.name}: Finished")
     except Exception:
         result = (f"Running {self.job.type} '{self.job.name}'"
                   " raised the following exception:\n"
                   f"{chr(10).join(format_exc().splitlines())}\n\n"
                   "Run aborted...")
         self.log("error", result)
         results = {"success": False, "results": result}
     finally:
         status = f"Completed ({'success' if results['success'] else 'failure'})"
         self.status = status  # type: ignore
         self.set_state(status=status)
         controller.job_db[self.job.id]["runs"] -= 1
         results["endtime"] = self.endtime = controller.get_time(
         )  # type: ignore
         results["state"] = controller.run_db.pop(self.runtime)
         results["logs"] = controller.run_logs.pop(
             self.runtime)  # type: ignore
         if self.task and not self.task.frequency:
             self.task.is_active = False
         results["properties"] = {
             "run": self.properties,
             "service": self.job.to_dict(True),
         }
         self.create_result(results)
         Session.commit()
     if not self.workflow and self.send_notification:
         self.notify(results)
     return results
Example #9
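 # Initialize the task: record the creation time, default the scheduler job id
 # (aps_job_id) to it, and schedule the task if it is active.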
 def __init__(self, **kwargs: Any) -> None:
     super().update(**kwargs)
     self.creation_time = controller.get_time()
     self.aps_job_id = kwargs.get("aps_job_id", self.creation_time)
     if self.is_active:
         self.schedule()
Example #10
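 # Initialize the run: default the runtime to the current time and use it as the
 # parent runtime when none is provided.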
 def __init__(self, **kwargs: Any) -> None:
     self.runtime = kwargs.get("runtime") or controller.get_time()  # type: ignore
     if not kwargs.get("parent_runtime"):
         self.parent_runtime = self.runtime
     super().__init__(**kwargs)