def job(self, payload: dict, device: Device) -> dict:
    """Dump all job logs, archive them, and upload the archive to *device*.

    Writes one ``<job name>.json`` file per job into a timestamped folder
    under ``logs/job_logs``, packs that folder into a ``.tgz`` archive,
    uploads it via SSH to ``self.destination_path`` on the device, then
    optionally deletes the local folder and/or archive.

    :param payload: job payload (unused here; part of the job interface).
    :param device: target device supplying ``ip_address``/``username``/
        ``password`` — presumably a project ORM object (TODO confirm).
    :return: dict with a ``success`` flag and a human-readable ``result``.
    """
    path_backup = Path.cwd() / "logs" / "job_logs"
    now = strip_all(str(datetime.now()))
    path_dir = path_backup / f"logs_{now}"
    source = path_backup / f"logs_{now}.tgz"
    makedirs(path_dir)
    # One JSON file per job, named after the job.
    for job in fetch_all("Job"):
        with open(path_dir / f"{job.name}.json", "w") as log_file:
            dump(job.logs, log_file)
    with open_tar(source, "w:gz") as tar:
        tar.add(path_dir, arcname="/")
    ssh_client = SSHClient()
    ssh_client.set_missing_host_key_policy(AutoAddPolicy())
    ssh_client.connect(
        device.ip_address,
        username=device.username,
        password=device.password,
        look_for_keys=False,
    )
    destination = f"{self.destination_path}/logs_{now}.tgz"
    try:
        self.transfer_file(ssh_client, [(source, destination)])
    finally:
        # Always release the SSH connection, even if the transfer fails
        # (the original leaked the connection on any transfer exception).
        ssh_client.close()
    if self.delete_folder:
        rmtree(path_dir)
    if self.delete_archive:
        remove(source)
    return {
        "success": True,
        "result": f"logs stored in {destination} ({device.ip_address})",
    }
def job(self, payload: dict, device: Device) -> dict:
    """Export application data, archive it, and upload it to *device*.

    Runs ``migrate_export`` into a timestamped ``migrations/backup_<now>``
    folder, packs that folder into a ``.tgz`` archive, uploads it via SSH
    to ``self.destination_path`` on the device, then optionally deletes
    the local folder and/or archive.

    :param payload: job payload (unused here; part of the job interface).
    :param device: target device supplying ``ip_address``/``username``/
        ``password`` — presumably a project ORM object (TODO confirm).
    :return: dict with a ``success`` flag and a human-readable ``result``.
    """
    now = strip_all(str(datetime.now()))
    # Hoisted so the export folder is spelled once for tar-ing and cleanup.
    backup_dir = Path.cwd() / "migrations" / f"backup_{now}"
    source = Path.cwd() / "migrations" / f"backup_{now}.tgz"
    migrate_export(
        Path.cwd(),
        {
            "import_export_types": list(import_properties),
            "name": f"backup_{now}",
        },
    )
    with open_tar(source, "w:gz") as tar:
        tar.add(backup_dir, arcname="/")
    ssh_client = SSHClient()
    ssh_client.set_missing_host_key_policy(AutoAddPolicy())
    ssh_client.connect(
        device.ip_address,
        username=device.username,
        password=device.password,
        look_for_keys=False,
    )
    destination = f"{self.destination_path}/backup_{now}.tgz"
    try:
        self.transfer_file(ssh_client, [(source, destination)])
    finally:
        # Always release the SSH connection, even if the transfer fails
        # (the original leaked the connection on any transfer exception).
        ssh_client.close()
    if self.delete_folder:
        rmtree(backup_dir)
    if self.delete_archive:
        remove(source)
    return {
        "success": True,
        "result": f"backup stored in {destination} ({device.ip_address})",
    }