def run(self, id, snapshot=True, dry_run=False):
    if not self.datastore.exists('backup', ('id', '=', id)):
        raise TaskException(errno.ENOENT, 'Backup {0} not found'.format(id))

    # Check for previous manifest
    manifest = None
    snapshots = []
    backup = self.datastore.get_by_id('backup', id)

    try:
        manifest = self.run_subtask_sync('backup.query', id)
        if manifest:
            snapshots = manifest['snapshots']
    except RpcException as err:
        if err.code != errno.ENOENT:
            raise

    if snapshot:
        self.run_subtask_sync(
            'volume.snapshot_dataset',
            backup['dataset'],
            True,
            365 * 24 * 60 * 60,
            'backup',
            True
        )

    self.set_progress(0, 'Calculating send delta')

    actions, send_size = self.run_subtask_sync(
        'replication.calculate_delta',
        backup['dataset'],
        backup['dataset'],
        snapshots,
        True,
        True
    )

    if dry_run:
        return actions

    new_manifest, snaps = self.generate_manifest(backup, manifest, actions)

    for idx, i in enumerate(snaps):
        ds, tosnap = i['name'].split('@')
        rfd, wfd = os.pipe()
        progress = float(idx) / len(snaps) * 100
        self.set_progress(progress, 'Uploading stream of {0}'.format(i['name']))
        # zfs.send streams into the write end of the pipe while the provider's
        # put task drains the read end; both subtasks run concurrently.
        self.join_subtasks(
            self.run_subtask(
                'backup.{0}.put'.format(backup['provider']),
                backup['properties'],
                i['filename'],
                FileDescriptor(rfd)
            ),
            self.run_subtask('zfs.send', ds, i.get('anchor'), tosnap, FileDescriptor(wfd)),
        )

    self.set_progress(100, 'Writing backup manifest')
    self.upload(backup['provider'], backup['properties'], MANIFEST_FILENAME, dumps(new_manifest, indent=4))

def run(self, id, dataset=None, snapshot=None):
    backup = self.datastore.get_by_id('backup', id)
    if not backup:
        raise TaskException(errno.ENOENT, 'Backup {0} not found'.format(id))

    manifest = self.run_subtask_sync('backup.query', id)
    if not manifest:
        raise TaskException(errno.ENOENT, 'No valid backup found in specified location')

    if not dataset:
        dataset = manifest['dataset']

    created_datasets = []
    snapshots = manifest['snapshots']
    unique_datasets = list(set(map(lambda s: s['name'].split('@')[0], snapshots)))
    unique_datasets.sort(key=lambda d: d.count('/'))
    provider = backup['provider']
    total = len(snapshots)
    done = 0

    for i in unique_datasets:
        snaps = list(filter(lambda s: s['name'].split('@')[0] == i, snapshots))
        snap = first_or_default(lambda s: not s['incremental'], snaps)
        local_dataset = i.replace(manifest['dataset'], dataset, 1)

        while True:
            self.set_progress(done / total * 100, 'Receiving {0} into {1}'.format(snap['name'], local_dataset))
            if local_dataset != dataset and local_dataset not in created_datasets:
                self.run_subtask_sync(
                    'zfs.create_dataset',
                    local_dataset,
                    'FILESYSTEM'
                )
                created_datasets.append(local_dataset)

            rfd, wfd = os.pipe()
            self.join_subtasks(
                self.run_subtask(
                    'backup.{0}.get'.format(provider),
                    backup['properties'],
                    snap['filename'],
                    FileDescriptor(wfd)
                ),
                self.run_subtask('zfs.receive', local_dataset, FileDescriptor(rfd), True)
            )

            if snap['name'] == snapshot:
                break

            snap = first_or_default(lambda s: '{0}@{1}'.format(i, s['anchor']) == snap['name'], snaps)
            if not snap:
                break

            done += 1

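# A minimal, self-contained sketch (not from the source) of the anchor-chain
# walk that the restore loop above performs: start from the full
# (non-incremental) snapshot, then repeatedly pick the snapshot whose anchor
# points at the one just received. The manifest entries are made up here.
def first_or_default(pred, items, default=None):
    return next((i for i in items if pred(i)), default)

snaps = [
    {'name': 'tank/data@snap-1', 'incremental': False, 'anchor': None},
    {'name': 'tank/data@snap-2', 'incremental': True, 'anchor': 'snap-1'},
    {'name': 'tank/data@snap-3', 'incremental': True, 'anchor': 'snap-2'},
]

order = []
snap = first_or_default(lambda s: not s['incremental'], snaps)
while snap:
    order.append(snap['name'])
    # Same matching rule as above: '{dataset}@{anchor}' equals the previous name
    snap = first_or_default(lambda s: 'tank/data@{0}'.format(s['anchor']) == snap['name'], snaps)

assert order == ['tank/data@snap-1', 'tank/data@snap-2', 'tank/data@snap-3']
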
def run(self, path):
    file = open(path, 'wb+')
    self.join_subtasks(self.run_subtask(
        'debug.collect',
        FileDescriptor(file.fileno()),
        progress_callback=lambda p, m, e=None: self.chunk_progress(0, 100, '', p, m, e)
    ))

def run(self, path, logs=True, cores=False):
    fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    self.run_subtask_sync(
        'debug.collect',
        FileDescriptor(fd),
        logs,
        cores,
        progress_callback=lambda p, m, e=None: self.chunk_progress(0, 100, '', p, m, e)
    )

def run(self, context, args, kwargs, opargs):
    if not kwargs:
        raise CommandException(_("Download requires more arguments. For help see 'help download'"))
    if 'path' not in kwargs:
        raise CommandException(_("Please specify path to the target debug file. "
                                 "For help see 'help download'"))

    p = Path(kwargs['path'])
    with p.open('w') as fo:
        context.call_task_sync('debug.collect', FileDescriptor(fd=fo.fileno(), close=False))

def run(self, context, args, kwargs, opargs):
    if not kwargs:
        raise CommandException(_("Upload requires more arguments. For help see 'help upload'"))
    if 'path' not in kwargs:
        raise CommandException(_("Please specify path to the source config file. "
                                 "For help see 'help upload'"))

    p = Path(kwargs['path'])
    with p.open('r') as fo:
        output_msg(_('Restoring the Database. Reboot will occur immediately after the restore operation.'))
        context.call_task_sync('database.restore', FileDescriptor(fd=fo.fileno(), close=False))

def upload(self, provider, props, path, data):
    rfd, wfd = os.pipe()

    def worker():
        # Feed the payload into the pipe, then close the write end so the
        # consuming task sees EOF.
        x = os.write(wfd, data.encode('utf-8'))
        os.close(wfd)
        logger.info('written {0} bytes'.format(x))

    thr = threading.Thread(target=worker)
    thr.start()
    self.dispatcher.call_task_sync('backup.{0}.put'.format(provider), props, path, FileDescriptor(rfd))
    thr.join(timeout=1)

def download(self, provider, props, path):
    rfd, wfd = os.pipe()
    result = None

    with os.fdopen(rfd, 'rb') as fd:
        def worker():
            # Drain the read end while the provider's get task writes to the
            # other side; read() returns once the task closes wfd.
            nonlocal result
            result = fd.read()

        thr = threading.Thread(target=worker)
        thr.start()
        self.run_subtask_sync('backup.{0}.get'.format(provider), props, path, FileDescriptor(wfd))
        thr.join(timeout=1)

    return result.decode('utf-8')

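# Standalone sketch of the pipe-and-thread pattern that upload()/download()
# above rely on; pure stdlib, no dispatcher required. A background thread
# services one end of the pipe while the consumer (standing in for the
# subtask holding the FileDescriptor) drains the other end.
import os
import threading

def pipe_roundtrip(payload):
    rfd, wfd = os.pipe()

    def writer():
        # Write the payload, then close the write end so the reader sees EOF
        # instead of blocking forever.
        os.write(wfd, payload)
        os.close(wfd)

    thr = threading.Thread(target=writer)
    thr.start()
    with os.fdopen(rfd, 'rb') as f:
        data = f.read()
    thr.join()
    return data

assert pipe_roundtrip(b'hello') == b'hello'
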
def run(self):
    rfd, wfd = os.pipe()

    def feed():
        with os.fdopen(wfd, 'w') as f:
            for i in range(0, 100):
                f.write(str(uuid.uuid4()) + '\n')

    t = threading.Thread(target=feed)
    t.start()
    url, = self.join_subtasks(self.run_subtask('file.prepare_url_download', FileDescriptor(rfd)))
    t.join(timeout=1)
    return url

def submit_with_download(self, task_name, args, sender, env=None):
    task_metadata = self.dispatcher.tasks[task_name]._get_metadata()
    schema = task_metadata['schema']
    url_list = []

    if schema is None:
        raise RpcException(errno.ENOENT, "Task {0} has no schema associated with it".format(task_name))

    for idx, arg in enumerate(schema):
        if arg.get('type') == 'fd':
            # The task writes into wfd; the file endpoint streams rfd out to
            # the client that presents the issued token.
            rfd, wfd = os.pipe()
            url_list.append("/dispatcher/filedownload?token={0}".format(
                self.dispatcher.token_store.issue_token(FileToken(
                    user=sender.user,
                    lifetime=60,
                    direction='download',
                    file=FileObjectPosix(rfd, 'rb', close=True),
                    name=args[idx]
                ))
            ))
            args[idx] = FileDescriptor(wfd)

    task_id = self.submit(task_name, args, sender, env)
    return task_id, url_list

def submit_with_upload(self, task_name, args, sender, env=None):
    task_metadata = self.dispatcher.tasks[task_name]._get_metadata()
    schema = task_metadata['schema']
    if schema is None:
        raise RpcException(errno.ENOENT, "Task {0} has no schema associated with it".format(task_name))

    upload_token_list = []
    for idx, arg in enumerate(schema):
        if arg.get('type') == 'fd':
            # Mirror image of submit_with_download: the client's upload is
            # written into wfd via the issued token and the task reads rfd.
            rfd, wfd = os.pipe()
            token = self.dispatcher.token_store.issue_token(FileToken(
                user=sender.user,
                lifetime=600,
                direction='upload',
                file=FileObjectPosix(wfd, 'wb', close=True),
                name=str(uuid.uuid4()),
                size=None
            ))
            upload_token_list.append(token)
            args[idx] = FileDescriptor(rfd)

    task_id = self.submit(task_name, args, sender, env)
    return task_id, upload_token_list

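# Hedged client-side sketch: fetching one of the URLs returned by
# submit_with_download(). Only the '/dispatcher/filedownload?token=...' path
# format comes from the code above; the base URL, the use of plain HTTP and
# the requests library are assumptions made for illustration.
import requests

def fetch_download(base_url, url_path, dest_path):
    resp = requests.get(base_url + url_path, stream=True)
    resp.raise_for_status()
    with open(dest_path, 'wb') as out:
        for chunk in resp.iter_content(chunk_size=65536):
            out.write(chunk)
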
def run(self):
    with open(FACTORY_DB, 'r') as fd:
        self.run_subtask_sync('database.restore', FileDescriptor(fd.fileno(), close=False))

def run(self):
    r, w = os.pipe()
    self.run_subtask_sync('test.nested_fd', {'file_descriptor': FileDescriptor(w)})
    return os.read(r, 1024).decode('utf-8')

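# Hypothetical counterpart, not taken from the source: a task such as
# 'test.nested_fd' presumably unpacks the FileDescriptor from its argument
# dict and writes into it. The '.fd' attribute and the signature below are
# assumptions for illustration only.
def run(self, args):
    fd = args['file_descriptor'].fd
    try:
        os.write(fd, 'hello from the nested task'.encode('utf-8'))
    finally:
        os.close(fd)  # close so the parent's os.read() sees data and EOF
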