def collect_debug(dispatcher):
    """Produce debug-bundle attachments for the service subsystem.

    Yields the rc.conf file, the `servicectl list` output, and a JSON
    dump of the `service.query` RPC result.
    """
    yield AttachFile('rc.conf', '/etc/rc.conf')
    yield AttachCommandOutput('servicectl-list', ['/usr/local/sbin/servicectl', 'list'])
    services = list(dispatcher.call_sync('service.query'))
    yield AttachData('service-query', dumps(services, indent=4))
def run(self, id, snapshot=True, dry_run=False):
    """Execute a backup: compute the delta since the last manifest,
    stream each snapshot to the configured provider, and upload the
    new manifest.

    :param id: datastore id of the backup entry
    :param snapshot: take a fresh recursive snapshot of the dataset first
    :param dry_run: stop after delta calculation and return the actions
    :raises TaskException: ENOENT when the backup entry does not exist
    """
    if not self.datastore.exists('backup', ('id', '=', id)):
        raise TaskException(errno.ENOENT, 'Backup {0} not found'.format(id))

    # Check for previous manifest
    manifest = None
    snapshots = []
    backup = self.datastore.get_by_id('backup', id)
    try:
        manifest = self.run_subtask_sync('backup.query', id)
        if manifest:
            snapshots = manifest['snapshots']
    except RpcException as err:
        # ENOENT simply means this is the first run (no manifest yet);
        # anything else is a real failure and must propagate.
        if err.code != errno.ENOENT:
            raise

    if snapshot:
        # Recursive snapshot with a one-year lifetime, tagged 'backup'.
        self.run_subtask_sync(
            'volume.snapshot_dataset',
            backup['dataset'],
            True,
            365 * 24 * 60 * 60,
            'backup',
            True
        )

    self.set_progress(0, 'Calculating send delta')
    actions, send_size = self.run_subtask_sync(
        'replication.calculate_delta',
        backup['dataset'],
        backup['dataset'],
        snapshots,
        True,
        True
    )

    if dry_run:
        return actions

    new_manifest, snaps = self.generate_manifest(backup, manifest, actions)

    for idx, i in enumerate(snaps):
        # Snapshot names are '<dataset>@<snapname>'.
        ds, tosnap = i['name'].split('@')
        # Pipe connects zfs.send (writer) to the provider's put (reader).
        rfd, wfd = os.pipe()
        progress = float(idx) / len(snaps) * 100
        self.set_progress(progress, 'Uploading stream of {0}'.format(i['name']))
        # Both subtasks run together; the reader must be started alongside
        # the writer or the pipe would block.
        self.join_subtasks(
            self.run_subtask(
                'backup.{0}.put'.format(backup['provider']),
                backup['properties'],
                i['filename'],
                FileDescriptor(rfd)
            ),
            self.run_subtask('zfs.send', ds, i.get('anchor'), tosnap, FileDescriptor(wfd)),
        )

    self.set_progress(100, 'Writing backup manifest')
    self.upload(backup['provider'], backup['properties'], MANIFEST_FILENAME, dumps(new_manifest, indent=4))
def pack(namespace, name, args):
    """Serialize a message envelope to JSON.

    The envelope carries the namespace, name and args plus a freshly
    generated UUID under 'id' so responses can be correlated.
    """
    envelope = {
        'namespace': namespace,
        'name': name,
        'args': args,
        'id': str(uuid.uuid4()),
    }
    return dumps(envelope)
def format_value(value, vt):
    """Render *value* for JSON output according to its ValueType *vt*."""
    if value is None:
        # Only STRING renders a missing value as the literal text "none".
        return "none" if vt == ValueType.STRING else None
    if vt == ValueType.BOOLEAN:
        # Coerce to bool and serialize (matches the generic dumps() path).
        return dumps(bool(value))
    if vt == ValueType.STRING:
        return str(value)
    if vt == ValueType.TEXT_FILE:
        # Show only a short preview of the file contents.
        return str(value[:10] + '(...)')
    if vt == ValueType.SET:
        return set(value)
    if vt == ValueType.ARRAY:
        return list(value)
    if vt == ValueType.PASSWORD:
        # Never echo secrets back to the user.
        return "*****"
    if vt == ValueType.DATE:
        return '{:%Y-%m-%d %H:%M:%S}'.format(value)
    return dumps(value)
def save_config(conf_path, name_mod, entry, file_perms=None):
    """Persist *entry* as JSON to conf_path/.config-<name_mod>.json.

    When *file_perms* is given, a chmod is attempted afterwards and
    OSError is silently ignored (best-effort on odd filesystems).
    """
    target = os.path.join(conf_path, '.config-{0}.json'.format(name_mod))
    serialized = dumps(entry)
    with open(target, 'w', encoding='utf-8') as fh:
        fh.write(serialized)
    if not file_perms:
        return
    with contextlib.suppress(OSError):
        os.chmod(target, file_perms)
def output_table(table):
    """Print the table as a JSON array of {column label: formatted value}."""
    rows = [
        {
            col.label: JsonOutputFormatter.format_value(resolve_cell(row, col.accessor), col.vt)
            for col in table.columns
        }
        for row in table.data
    ]
    six.print_(dumps(rows, indent=4))
def save_config(conf_path, name_mod, entry, file_perms=None, version=None):
    """Persist *entry* as JSON to conf_path/.config-<name_mod>.json.

    :param file_perms: optional mode for a best-effort chmod afterwards
                       (OSError is ignored).
    :param version: when given, stored under the '%version' key in the
                    written file.

    Fix: previously the caller's *entry* dict was mutated in place when
    a version was supplied; the version is now written into a shallow
    copy so the argument is left untouched.
    """
    file_name = os.path.join(conf_path, f'.config-{name_mod}.json')
    if version:
        # Shallow copy — do not mutate the caller's dict.
        entry = {**entry, '%version': version}
    with open(file_name, 'w', encoding='utf-8') as conf_file:
        conf_file.write(dumps(entry))
    if file_perms:
        with contextlib.suppress(OSError):
            os.chmod(file_name, file_perms)
def run(self, id, snapshot=True, dry_run=False):
    """Execute a backup: compute the delta since the last manifest,
    stream each snapshot to the configured provider, and upload the
    new manifest.

    :param id: datastore id of the backup entry
    :param snapshot: accepted for interface compatibility (see note below)
    :param dry_run: stop after delta calculation and return the actions

    Fixes:
    - ``zfs.send`` was invoked with ``i.get('anchor')`` as BOTH the
      from- and to-snapshot, producing an empty/incorrect stream; the
      target snapshot is now derived from the snapshot name, and the
      dataset and snapshot parts are passed separately.
    - ``RpcException`` from ``backup.query`` was swallowed
      unconditionally; only ENOENT (no previous manifest) is expected,
      anything else is now re-raised.
    """
    import errno  # local import: module-level availability not guaranteed here

    # Check for previous manifest
    manifest = None
    snapshots = []
    backup = self.datastore.get_by_id('backup', id)
    try:
        manifest, = self.join_subtasks(self.run_subtask('backup.query', id))
        if manifest:
            snapshots = manifest['snapshots']
    except RpcException as err:
        # No manifest yet just means this is the first run.
        if err.code != errno.ENOENT:
            raise

    # NOTE(review): the `snapshot` flag is accepted but not acted upon in
    # this revision -- TODO confirm whether a fresh snapshot should be
    # taken first (the newer task revision runs volume.snapshot_dataset).

    self.set_progress(0, 'Calculating send delta')
    (actions, send_size), = self.join_subtasks(self.run_subtask(
        'replication.calculate_delta',
        backup['dataset'],
        backup['dataset'],
        snapshots,
        True,
        True
    ))

    if dry_run:
        return actions

    new_manifest, snaps = self.generate_manifest(backup, manifest, actions)

    for idx, i in enumerate(snaps):
        # Snapshot names are '<dataset>@<snapname>'.
        ds, tosnap = i['name'].split('@')
        # Pipe connects zfs.send (writer) to the provider's put (reader).
        rfd, wfd = os.pipe()
        progress = float(idx) / len(snaps) * 100
        self.set_progress(progress, 'Uploading stream of {0}'.format(i['name']))
        self.join_subtasks(
            self.run_subtask('zfs.send', ds, i.get('anchor'), tosnap, FileDescriptor(wfd)),
            self.run_subtask(
                'backup.{0}.put'.format(backup['provider']),
                backup['properties'],
                i['filename'],
                FileDescriptor(rfd)
            )
        )

    self.set_progress(100, 'Writing backup manifest')
    self.upload(backup['provider'], backup['properties'], MANIFEST_FILENAME, dumps(new_manifest, indent=4))
def output_msg(data, **kwargs):
    """Print *data* as indented JSON; extra keyword args are accepted
    for interface compatibility and ignored by this formatter."""
    rendered = dumps(data, indent=4)
    six.print_(rendered)
def output_tree(data, children, label):
    """Print tree *data* as a flat indented JSON array.

    *children* and *label* belong to the formatter interface; the JSON
    formatter does not use them.
    """
    nodes = list(data)
    six.print_(dumps(nodes, indent=4))
def output_dict(data, key_label, value_label):
    """Print *data* as an indented JSON object.

    *key_label* and *value_label* belong to the formatter interface;
    the JSON formatter does not use them.
    """
    mapping = dict(data)
    six.print_(dumps(mapping, indent=4))
def output_list(data, label):
    """Print *data* as an indented JSON array.

    *label* belongs to the formatter interface; the JSON formatter
    does not use it.
    """
    items = list(data)
    six.print_(dumps(items, indent=4))
def collect_debug(dispatcher):
    """Yield attachments describing service state for a debug archive."""
    # Static config file and command output first, then the live RPC dump.
    yield AttachFile('rc.conf', '/etc/rc.conf')
    yield AttachCommandOutput('servicectl-list', ['/usr/local/sbin/servicectl', 'list'])
    query_result = dispatcher.call_sync('service.query')
    yield AttachData('service-query', dumps(list(query_result), indent=4))
def collect_debug(dispatcher):
    """Yield the alert filter configuration as a debug attachment."""
    filters = list(dispatcher.call_sync('alert.filter.query'))
    yield AttachData('alert-filter-query', dumps(filters, indent=4))
def output_object(obj):
    """Print an object's items as a JSON mapping of name to formatted value."""
    output = {
        item.name: JsonOutputFormatter.format_value(item.value, item.vt)
        for item in obj
    }
    six.print_(dumps(output, indent=4))
def process_hook(self, cmd, plugin, tar):
    """Handle one debug-collection command and append its payload to *tar*.

    Supported ``cmd['type']`` values: AttachData (verbatim payload),
    AttachRPC (dump of an RPC result), AttachCommandOutput (captured
    command output, including failures), AttachDirectory/AttachFile
    (filesystem paths added directly).

    Fix: tar member sizes were previously computed from the *str*
    length before UTF-8 encoding; for non-ASCII payloads the recorded
    size was smaller than the encoded bytes, truncating/corrupting the
    archive entry. Payloads are now encoded first and the size is
    taken from the actual bytes.
    """
    def _add_bytes(name, payload):
        # Encode once, then size the tar member from the real byte count.
        if not isinstance(payload, bytes):
            payload = payload.encode('utf-8')
        info = tarfile.TarInfo(os.path.join(plugin, name))
        info.size = len(payload)
        tar.addfile(info, io.BytesIO(payload))

    if cmd['type'] == 'AttachData':
        _add_bytes(cmd['name'], cmd['data'])

    if cmd['type'] == 'AttachRPC':
        try:
            result = self.dispatcher.call_sync(cmd['rpc'], *cmd['args'])
            if hasattr(result, '__next__'):
                # Materialize streaming RPC results before serializing.
                result = list(result)
        except RpcException as err:
            self.add_warning(TaskWarning(
                err.code,
                f'{plugin}: Cannot add output of {cmd["rpc"]} call, error: {err.message}'
            ))
        else:
            _add_bytes(cmd['name'], dumps(result, debug=True, indent=4))

    if cmd['type'] == 'AttachCommandOutput':
        try:
            out, _ = system(*cmd['command'], shell=cmd['shell'], decode=cmd['decode'], merge_stderr=True)
        except SubprocessException as err:
            # Record the failure itself instead of dropping the attachment.
            out = 'Exit code: {0}\n'.format(err.returncode)
            if cmd['decode']:
                # Fixed format string: had a stray ':' after the newline.
                out += 'Output:\n{0}'.format(err.out)
        _add_bytes(cmd['name'], out)

    if cmd['type'] in ('AttachDirectory', 'AttachFile'):
        try:
            tar.add(
                cmd['path'],
                arcname=os.path.join(plugin, cmd['name']),
                recursive=cmd.get('recursive')
            )
        except OSError as err:
            self.add_warning(TaskWarning(
                err.errno,
                '{0}: Cannot add file {1}, error: {2}'.format(plugin, cmd['path'], err.strerror)
            ))
            logger.error(
                "Error occured when adding {0} to the tarfile for plugin: {1}".format(cmd['path'], plugin),
                exc_info=True
            )
def collect_debug(dispatcher):
    """Yield alert filter and emitter configuration dumps for a debug archive."""
    queries = (
        ('alert-filter-query', 'alert.filter.query'),
        ('alert-emitter-query', 'alert.emitter.query'),
    )
    for attachment_name, rpc_name in queries:
        yield AttachData(attachment_name, dumps(list(dispatcher.call_sync(rpc_name)), indent=4))
def save_config(conf_path, name_mod, entry):
    """Write *entry* as JSON to conf_path/.config-<name_mod>.json (UTF-8)."""
    target = os.path.join(conf_path, '.config-{0}.json'.format(name_mod))
    serialized = dumps(entry)
    with open(target, 'w', encoding='utf-8') as fh:
        fh.write(serialized)