def __handle_backup_result(self, str_result, notify_app=None, backup_duration=None, copy_to=None, copy_complete=None, copy_duration=None):
    """Build the result object for a finished backup and optionally notify an application.

    :param str_result: text for the returned plain result
    :param notify_app: application to notify; when None no notification is sent
    :param backup_duration: time spent creating the archive (or None)
    :param copy_to: destination the archive was copied to (or None)
    :param copy_complete: copy completion flag (or None)
    :param copy_duration: time spent copying the archive (or None)
    :return: WPlainCommandResult
    :raise RuntimeError: when no archiver was set before this call
    """
    archiver = self.archiver()
    if archiver is None:
        raise RuntimeError('Archiver must be set before call')
    if notify_app is None:
        return WPlainCommandResult(str_result)

    # fill notification metadata from the archiver and the call arguments
    options = WBackupMeta.BackupNotificationOptions
    meta_data = archiver.meta()
    meta_data[options.created_archive] = archiver.archive_path()
    meta_data[options.backup_duration] = backup_duration
    meta_data[options.total_archive_size] = os.stat(archiver.archive_path()).st_size
    meta_data[options.copy_to] = copy_to
    meta_data[options.copy_completion] = copy_complete
    meta_data[options.copy_duration] = copy_duration

    notify(
        meta_data,
        notify_app,
        encode_strict_cls=(WBackupMeta.Archive.MetaOptions, WBackupMeta.BackupNotificationOptions)
    )
    return WPlainCommandResult(str_result)
def result_template(self, *command_tokens, **command_env):
    """Extend the base template result with the global scheduler in its context.

    :return: template result, or an error result when the scheduler is not loaded
    """
    if WAppsGlobals.scheduler is None:
        return WPlainCommandResult.error('Scheduler was not loaded')
    template_result = WTemplateBrokerCommand.result_template(self, *command_tokens, **command_env)
    template_result.update_context(scheduler=WAppsGlobals.scheduler)
    return template_result
def submit_task(self):
    """Submit the configured schedule record to a matching task source.

    The task uid is registered in the broker calls registry when that registry
    is available; otherwise a registration error is logged and the task is
    submitted anyway.

    :return: WPlainCommandResult carrying the submitted task uid
    """
    scheduler_name = self.scheduler_instance()
    source_name = self.task_source()
    source = WAppsGlobals.scheduler.task_source(source_name, scheduler_name)
    if source is None:
        return WPlainCommandResult.error('Unable to find suitable scheduler. Command rejected')

    record = self.schedule_record()
    uid = record.task_uid()
    registry = WAppsGlobals.broker_calls
    if registry is None:
        WAppsGlobals.log.error('Unable to register task "%s" in calls registry' % uid)
    else:
        registry.add_task(uid, scheduler_name)
    source.add_record(record)
    return WPlainCommandResult('Task submitted. Task id: %s' % uid, broker_last_task=uid)
def exec(self, *command_tokens, **command_env):
    """Return an empty result whose context is the main context, optionally
    nested under the application name context.

    :raise RuntimeError: when any command tokens were supplied
    """
    if command_tokens:
        raise RuntimeError('Invalid tokens')
    context = WContext(self.__main_context)
    if self.__app_name is not None:
        context = WContext(self.__app_name, linked_context=context)
    return WPlainCommandResult('', command_context=context)
def result_template(self, *command_tokens, **command_env):
    """Extend the base template result with the scheduler history records.

    :return: template result, or an error result when the history is unavailable
    """
    if WAppsGlobals.scheduler_history is None:
        return WPlainCommandResult.error('Scheduler history is not available')
    template_result = WTemplateBrokerCommand.result_template(self, *command_tokens, **command_env)
    template_result.update_context(history_records=WAppsGlobals.scheduler_history)
    return template_result
def _exec(self, *command_tokens, **command_env):
    """Render help text: the general help header followed by a per-command summary.

    WCommandHelp instances are skipped so the help command does not list itself.

    :return: WPlainCommandResult with the assembled help text
    """
    # build the text with a single join instead of the original quadratic
    # string concatenation in the loop
    parts = [self.__help_info__]
    for command in self.__command_selector.commands():
        if isinstance(command, WCommandHelp):
            continue
        parts.append('Command "%s" is able to: %s\n' % (command.__command__, command.__description__))
        parts.append(command.command_help())
        parts.append('\n\n')
    return WPlainCommandResult(''.join(parts))
def _exec(self, command_arguments, **command_env):
    """Check integrity of the specified backup archive.

    :param command_arguments: parsed arguments; requires 'backup-archive',
    optionally 'io-read-rate'
    :return: WPlainCommandResult — OK when the recalculated hash matches the
    original one, an error result otherwise
    """
    archive = command_arguments['backup-archive']
    # dict.get replaces the original "'key' in d.keys()" test plus second lookup
    io_read_rate = command_arguments.get('io-read-rate')
    try:
        self.__checker = WArchiveIntegrityChecker(
            archive, self.logger(), stop_event=self.stop_event(), io_read_rate=io_read_rate
        )
        result, original_hash, calculated_hash = self.__checker.check_archive()
    finally:
        # drop the checker reference whether the check succeeded or raised
        self.__checker = None

    if result is True:
        return WPlainCommandResult('Archive "%s" is OK' % archive)
    return WPlainCommandResult.error(
        'Archive "%s" is corrupted. Calculated hash - "%s". Original hash - "%s"' %
        (archive, calculated_hash, original_hash)
    )
def exec_broker_command(self, *command_tokens, **command_env):
    """Select a command from the internal set and execute it, wrapping any
    execution failure into a WExceptionResult instead of propagating it."""
    selected_command = self.__internal_set.commands().select(*command_tokens, **command_env)
    if selected_command is None:
        return WPlainCommandResult.error('No suitable command found')
    try:
        return selected_command.exec(*command_tokens, **command_env)
    except Exception as e:
        return WExceptionResult('Command execution error', e, traceback.format_exc())
def __details(cls, task_uid, task, scheduler_name):
    """Build a human-readable status report for the selected task.

    Reports the scheduler the task was registered on, the most recent
    start/wait/drop history event (when one exists) and, when available,
    the task's own state details.
    """
    output_parts = [
        'Task with uid "%s" selected.\n' % task_uid,
        'Task was registered on scheduler: %s\n' % na_formatter(
            scheduler_name, none_value='<default instance>'
        )
    ]
    event_record = WAppsGlobals.scheduler_history.last_record(
        task_uid, WTrackerEvents.start, WTrackerEvents.wait, WTrackerEvents.drop
    )
    if event_record is not None:
        record_date = local_datetime_formatter(event_record.registered_at)
        record_type = event_record.record_type
        if record_type == WTrackerEvents.drop:
            output_parts.append('Task was dropped at %s\n' % record_date)
        elif record_type == WTrackerEvents.wait:
            output_parts.append('Task has been waited since %s\n' % record_date)
        elif record_type == WTrackerEvents.start:
            output_parts.append('Task has been started at %s\n' % record_date)
    task_status = task.state_details()
    if task_status is not None:
        output_parts.append('\n' + task_status)
    return WPlainCommandResult(''.join(output_parts))
def _exec(self, *command_tokens, command_context=None, **command_env):
    """Pop one level of command context: return an empty result whose context is
    the parent of the current context, or no context when none is set."""
    if command_context is None:
        return WPlainCommandResult('', command_context=None)
    return WPlainCommandResult('', command_context=command_context.linked_context())
def __stop(cls, task_uid, task, scheduler_name):
    """Request the given running task to stop and report that to the caller."""
    message = 'Task with uid "%s" selected.\n' % task_uid
    task.stop()
    return WPlainCommandResult(message + 'Task was requested to stop')
def exec(self, *command_tokens, broker_last_task=None, broker_selected_task=None, **command_env):
    """Execute a sub-command against a previously submitted task.

    command_tokens[1] is the task uid (the aliases 'last' and 'selected' are
    resolved through the broker environment). When the task is currently
    running, the optional command_tokens[2] selects a registered sub-command;
    otherwise the last scheduler history record for the task is reported.

    :raise RuntimeError: when tokens do not match this command or an unknown
    history record type is encountered
    """
    if self.match(*command_tokens, **command_env) is False:
        raise RuntimeError('Invalid tokens')

    uid = command_tokens[1]
    if uid == 'last':
        uid = broker_last_task
    elif uid == 'selected':
        uid = broker_selected_task

    try:
        scheduler_name = WAppsGlobals.broker_calls.get_scheduler(uid)
    except ValueError:
        # bugfix: the original never substituted the uid into this message
        # (the "%s" placeholder was shown verbatim)
        return WPlainCommandResult.error(
            'Invalid task "%s" selected. Type "help calls" for help information' % uid
        )

    header = 'Task with uid "%s" selected.\n' % uid
    scheduler = WAppsGlobals.scheduler.instance(scheduler_name)
    if scheduler is None:
        return WPlainCommandResult.error(
            header + 'Invalid task "%s" selected. Unable to find scheduler '
            'instance. Type "help calls" for help information' % uid
        )

    task = None
    for running_record in scheduler.running_records():
        if running_record.task_uid() == uid:
            task = running_record.task()
            break

    task_result = WAppsGlobals.broker_calls.get_result(uid)
    if task_result is not None:
        task_result = 'Task result: %s' % str(task_result)
    else:
        task_result = 'Task result is unavailable'

    if task is None:
        # the task is not running - report its last known history state instead
        if len(command_tokens) > 2:
            header += 'Unable to submit command "%s" ' % command_tokens[2]
            header += 'to the task for the following reason.\n'

        history_record = WAppsGlobals.scheduler_history.last_record(uid)
        if history_record is None:
            return WPlainCommandResult(
                header + 'Task have not been processed by a scheduler yet. '
                'Please wait and call this command again\n' + task_result
            )

        record_type = history_record.record_type
        record_date = local_datetime_formatter(history_record.registered_at)
        if record_type == WTrackerEvents.start:
            return WPlainCommandResult(
                header + 'Task was started at %s. Please wait and call this '
                'command again for detailed information\n' % record_date + task_result
            )
        elif record_type == WTrackerEvents.stop:
            return WPlainCommandResult(
                header + 'Task was completed and stopped at %s\n' % record_date + task_result
            )
        elif record_type == WTrackerEvents.termination:
            return WPlainCommandResult(
                header + 'Task was terminated at %s (Task may be incomplete)\n' % record_date +
                task_result
            )
        elif record_type == WTrackerEvents.exception:
            output = 'Task completion was terminated by an exception at %s.\n' % record_date
            output += 'Exception information: %s\n' % str(history_record.exception)
            output += history_record.exception_details
            return WPlainCommandResult(header + output)
        elif record_type == WTrackerEvents.wait:
            return WPlainCommandResult(
                header + 'Task was not started and have been postponed at %s. '
                'Please wait and call this command again' % record_date
            )
        elif record_type == WTrackerEvents.drop:
            # bugfix: the original never applied "% record_date", so the "%s"
            # placeholder leaked into the user-visible message
            return WPlainCommandResult(
                header + 'Task was not started and have been drop by a scheduler '
                'at %s. If you want to start this task again - call task command again' %
                record_date
            )
        raise RuntimeError('Invalid history record type spotted: %s' % str(record_type))

    command_key = command_tokens[2] if len(command_tokens) > 2 else None
    return self.__commands[command_key](uid, task, scheduler_name)
def exec(self, *command_tokens, **command_env):
    """Remember the task uid from command_tokens[2] as the currently selected task.

    :raise RuntimeError: when tokens do not match this command
    """
    if self.match(*command_tokens, **command_env) is False:
        raise RuntimeError('Invalid tokens')
    selected_uid = command_tokens[2]
    return WPlainCommandResult('Task "%s" was selected' % selected_uid, broker_selected_task=selected_uid)
def create_plain_result(construction_keys):
    """Reconstruct a WPlainCommandResult from serialized construction keys.

    Reads the result text from the '__str__' key and the environment mapping
    from the 'environment' key (both exposed through a '.value' attribute).
    """
    result_text = construction_keys['__str__'].value
    environment = construction_keys['environment'].value
    return WPlainCommandResult(result_text, **environment)
def _exec(self, *command_tokens, command_context=None, **command_env):
    """Reset the command context: always return an empty result with no context,
    regardless of the context the caller supplied."""
    return WPlainCommandResult('', command_context=None)
def exec(self, *command_tokens, **command_env):
    """Send the command through the messenger onion to the broker and return the response.

    The session flow serializes the request (compose -> json-pack -> encode),
    reports progress via console output layers, sends it, waits synchronously
    for a response and deserializes it back (decode -> json-unpack -> cast ->
    compose). On a response timeout the console feedback is undone, pending
    queue messages are discarded and an error result is returned.
    """
    broker = self.__console.broker()
    handler = broker.handler()
    receive_agent = broker.receive_agent()
    send_agent = broker.send_agent()

    stage = WMessengerOnionSessionFlowProto.IteratorInfo
    flow_stages = (
        # outgoing direction: command object -> encoded bytes
        stage(
            'com.binblob.wasp-general.composer-packer-layer',
            mode=WMessengerComposerLayer.Mode.decompose,
            composer_factory=self.__composer_factory
        ),
        stage(
            'com.binblob.wasp-general.json-packer-layer',
            mode=WMessengerOnionPackerLayerProto.Mode.pack
        ),
        stage(
            'com.binblob.wasp-general.encoding-layer',
            mode=WMessengerOnionCoderLayerProto.Mode.encode
        ),
        stage(
            'com.binblob.wasp-launcher.console-output-layer',
            feedback='Command is sending',
            refresh_window=True
        ),
        stage(
            'com.binblob.wasp-general.send-agent-layer',
            send_agent=send_agent,
            handler=handler
        ),
        stage(
            'com.binblob.wasp-launcher.console-output-layer',
            feedback='Response is awaiting',
            undo_previous=True,
            refresh_window=True
        ),
        stage(
            'com.binblob.wasp-general.sync-receive-agent-layer',
            receive_agent=receive_agent
        ),
        stage(
            'com.binblob.wasp-launcher.console-output-layer',
            undo_previous=True,
            refresh_window=True
        ),
        # incoming direction: encoded bytes -> command result object
        stage(
            'com.binblob.wasp-general.encoding-layer',
            mode=WMessengerOnionCoderLayerProto.Mode.decode
        ),
        stage(
            'com.binblob.wasp-general.json-packer-layer',
            mode=WMessengerOnionPackerLayerProto.Mode.unpack
        ),
        stage(
            'com.binblob.wasp-general.simple-casting-layer',
            from_envelope=WMessengerEnvelope,
            to_envelope=WMessengerDictEnvelope
        ),
        stage(
            'com.binblob.wasp-general.composer-packer-layer',
            mode=WMessengerComposerLayer.Mode.compose,
            composer_factory=self.__composer_factory
        )
    )

    session_flow = WMessengerOnionSessionFlow.sequence_flow(*flow_stages)
    session = WMessengerOnionSession(self.__onion, session_flow)
    try:
        request = WCommandRequest(*command_tokens, **command_env)
        response_envelope = session.process(WMessengerEnvelope(request))
        return response_envelope.message()
    except TimeoutError:
        self.__console_output_layer.undo_feedback()
        broker.discard_queue_messages()
        return WPlainCommandResult('Error. Command completion timeout expired')
def _exec(self, command_arguments, **command_env):
    """Apply a retention policy to backup archives at a network location.

    Archives matching the 'archive-selection' regular expression are dated via
    the configured age helper. Archives picked by the 'period-keep' filters -
    topped up to 'minimum-archives' with the youngest remaining ones - are
    kept; every other matching archive is removed. When 'notify-app' is set,
    the outcome is sent as a notification.

    :param command_arguments: parsed command arguments
    :return: WPlainCommandResult with removed/kept archive counters
    """
    location = command_arguments['backup-location']
    uri = WURI.parse(location)
    network_client = __default_client_collection__.open(uri)
    archives = network_client.request(WCommonNetworkClientCapability.list_dir)

    archive_selection_re = re.compile(command_arguments['archive-selection'])
    re_selected_archives = tuple(
        filter(lambda x: archive_selection_re.match(x) is not None, archives)
    )

    if command_arguments['timezone'] != 'local':
        tz = timezone(command_arguments['timezone'])
    else:
        tz = local_tz()
    now = datetime.now(tz=tz)

    age_helper = self.__age_helper(command_arguments, network_client, tz)
    archive_ages = [(x, age_helper(x)) for x in re_selected_archives]
    # archives dated "in the future" are excluded from retention handling
    archive_ages = [x for x in archive_ages if (now - x[1]).total_seconds() > 0]
    archive_ages.sort(key=lambda x: (now - x[1]).total_seconds())  # youngest first
    sorted_archives = [x[0] for x in archive_ages]

    archive_to_keep = set()
    for period_keep in command_arguments['period-keep']:
        archive_to_keep.update(
            filter(
                WRetentionBackupCommand.PeriodKeepFilter(now, tz, *period_keep),
                archive_ages
            )
        )
    keep_archives = [x[0] for x in archive_to_keep]
    # set mirror of keep_archives: O(1) membership instead of O(n) list scans
    kept_names = set(keep_archives)

    extra_archives_required = command_arguments['minimum-archives'] - len(keep_archives)
    if extra_archives_required > 0:
        # top up the kept set with the youngest archives not already kept
        for archive_name in sorted_archives:
            if archive_name not in kept_names:
                keep_archives.append(archive_name)
                kept_names.add(archive_name)
                extra_archives_required -= 1
                if extra_archives_required <= 0:
                    break

    files_to_remove = set(sorted_archives).difference(keep_archives)
    for file_name in files_to_remove:
        network_client.request(WCommonNetworkClientCapability.remove_file, file_name)

    if 'notify-app' in command_arguments:
        notify(
            {
                WBackupMeta.RetentionNotificationOptions.retention_location: location,
                WBackupMeta.RetentionNotificationOptions.kept_archives: list(keep_archives),
                WBackupMeta.RetentionNotificationOptions.removed_archives: list(files_to_remove)
            },
            command_arguments['notify-app'],
            # bugfix: the original "(cls)" without a trailing comma passed the bare
            # class, not a one-element tuple, unlike the tuple used for the backup
            # notification call elsewhere in this file
            encode_strict_cls=(WBackupMeta.RetentionNotificationOptions,)
        )
    return WPlainCommandResult(
        'Archives deleted - %i, archives kept - %i' % (
            len(files_to_remove),
            len(set(re_selected_archives).difference(files_to_remove))
        )
    )
def exec(self, *command_tokens, **command_env):
    """Return the static help text registered for the requested internal command.

    :raise RuntimeError: when tokens do not match this command
    """
    if self.match(*command_tokens, **command_env) is False:
        raise RuntimeError('Invalid tokens')
    help_topic = command_tokens[1]
    return WPlainCommandResult(WBrokerInternalCommandSet.__help_info__[help_topic])