def select_files(arguments):
    """Select the given directories/files for inclusion into a backup.

    Expects at least three CLI arguments: the host UUID, the backup
    (dataset) name, and one or more directories with the files.
    (The previous docstring described digest generation and was a
    copy-paste mistake.)
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the backup name, and at least one directory '
                  'with the files should be passed!')
    else:
        my_uuid, ds_name = (try_parse_uuid(arguments.popleft()),
                            arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)
        paths = __get_all_remaining_args(arguments)
        if not paths:
            cli_error('No paths passed!')
        else:
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage())
            ugroup_uuid = host_app.host.user.base_group.uuid
            # For every path: include everything ('f+': all),
            # exclude nothing ('f-': []), and capture its stat info.
            path_map = {k: {'f+': ['all'],
                            'f-': [],
                            'stat': safe_stat(k)}
                        for k in paths}
            host_app.select_paths_for_backup(ds_name=ds_name,
                                             ds_uuid=gen_uuid(),
                                             ugroup_uuid=ugroup_uuid,
                                             sync=False,
                                             paths_map=path_map)
def init_host(arguments):
    """Initialize the host and run the first ever authentication.

    Expects the listen port and the username as CLI arguments; the
    password may follow (see _get_password_from_arguments).
    """
    if len(arguments) < 2:
        cli_error('You must pass at least 2 arguments to this command.')
    else:
        my_listen_port_str = arguments.popleft()
        username = str(arguments.popleft())
        password = _get_password_from_arguments(arguments)

        try:
            my_listen_port = int(my_listen_port_str)
        except ValueError:
            cli_error('Not an integer port number: %r', my_listen_port_str)

        # Without both credentials no digest can be generated.
        if username is None or password is None:
            digest = None
        else:
            digest = crypto.generate_digest(username,
                                            password,
                                            common_settings.HTTP_AUTH_REALM_NODE)

        UHostApp.init_host(edition=uhost_settings.detect_edition(),
                           chunk_storage_cb=__create_chunk_storage,
                           proceed_func=proceed_with_host_uuid_cli,
                           on_end_func=__cli_handle_init_finish,
                           username=username,
                           digest=digest,
                           my_listen_port=my_listen_port)
def select_files(arguments):
    """Select paths (directories with files) for a new backup dataset.

    CLI arguments: host UUID, backup name, then one or more paths.
    The old docstring ("Generate digest for the username") did not
    match the behavior and has been corrected.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the backup name, and at least one directory '
                  'with the files should be passed!')
    else:
        my_uuid, ds_name = (try_parse_uuid(arguments.popleft()),
                            arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)
        paths = __get_all_remaining_args(arguments)
        if not paths:
            cli_error('No paths passed!')
        else:
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage())
            ugroup_uuid = host_app.host.user.base_group.uuid
            # Include-all / exclude-none filter plus stat info per path.
            path_map = {k: {'f+': ['all'],
                            'f-': [],
                            'stat': safe_stat(k)}
                        for k in paths}
            host_app.select_paths_for_backup(ds_name=ds_name,
                                             ds_uuid=gen_uuid(),
                                             ugroup_uuid=ugroup_uuid,
                                             sync=False,
                                             paths_map=path_map)
def print_cloud_stats(arguments):
    """Print the overall cloud statistics.

    Expects the host UUID as the single CLI argument; queries the node
    for cloud-wide stats and prints them, then terminates the host.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_cloud_stats_received(cloud_stats):
                """
                @type cloud_stats: col.Mapping
                """
                try:
                    _total_mb, _used_mb = (cloud_stats['total_mb'],
                                           cloud_stats['used_mb'])
                    # Multiply by 100.0: the value is rendered with a '%'
                    # suffix, so it must be a percentage, not a raw ratio
                    # (100.0 also prevents py2 integer-division truncation).
                    _used_pc = _used_mb * 100.0 / _total_mb
                    print('The following statistics is available: \n'
                          ' Total hosts count: {0:d}\n'
                          ' Alive hosts now: {1:d}\n'
                          ' Cloud size: {2:d} MiB\n'
                          ' Used cloud size: {3:d} MiB ({4: 5.2f}%)\n'
                          .format(cloud_stats['total_hosts_count'],
                                  cloud_stats['alive_hosts_count'],
                                  int(_total_mb),
                                  int(_used_mb),
                                  _used_pc))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_overall_cloud_stats(on_cloud_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.
        # Launch reactor
        host_app.start_reactor()
def request_info_dataset_files(arguments):
    """Request info on a particular dataset's files.

    Expects the host UUID and the dataset UUID as CLI arguments.
    """
    if len(arguments) < 2:
        cli_error('You must pass at least the host UUID '
                  'and the dataset UUID to this command.')
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        ds_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_datasets_received(ds_uuid, dataset_files_dict):
                """
                @type ds_uuid: UUID
                @type dataset_files_dict: dict
                """
                try:
                    print('The following files are present in the dataset {}:'
                              .format(ds_uuid))
                    row_fmt = u' {0:<36s} {1}'
                    print(row_fmt.format('File UUID', 'File path'))
                    print(row_fmt.format('-' * 36, '-' * len('File path')))
                    # Group the output by root directory.
                    for root_dir in sorted(dataset_files_dict.iterkeys()):
                        print(u" {}".format(root_dir))
                        for f in dataset_files_dict[root_dir]:
                            print(row_fmt.format(f.uuid, f.full_path))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_dataset_files(ds_uuid, on_datasets_received)

        # Launch the main host app; it starts for real only when the
        # reactor is launched as well.
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        host_app.start_reactor()
def request_info_dataset_files(arguments):
    """Request info on a particular dataset.

    CLI arguments: the host UUID and the dataset UUID.
    """
    if len(arguments) < 2:
        cli_error('You must pass at least the host UUID '
                  'and the dataset UUID to this command.')
    else:
        my_uuid, ds_uuid = (try_parse_uuid(arguments.popleft()),
                            try_parse_uuid(arguments.popleft()))
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_datasets_received(ds_uuid, dataset_files_dict):
                """
                @type ds_uuid: UUID
                @type dataset_files_dict: dict
                """
                try:
                    header = \
                        'The following files are present in the dataset {}:'
                    print(header.format(ds_uuid))
                    line = u' {0:<36s} {1}'
                    print(line.format('File UUID', 'File path'))
                    print(line.format('-' * 36, '-' * len('File path')))
                    # One section per root directory, files beneath it.
                    for root_dir in sorted(dataset_files_dict.iterkeys()):
                        print(u" {}".format(root_dir))
                        for f in dataset_files_dict[root_dir]:
                            print(line.format(f.uuid, f.full_path))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_dataset_files(ds_uuid, on_datasets_received)

        # Create the main host app; nothing runs until the reactor starts.
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        host_app.start_reactor()
def print_cloud_stats(arguments):
    """Print the overall cloud statistics.

    CLI arguments: the host UUID. Queries cloud-wide statistics from
    the node, prints them and shuts the host down.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_cloud_stats_received(cloud_stats):
                """
                @type cloud_stats: col.Mapping
                """
                try:
                    _total_mb, _used_mb = (cloud_stats['total_mb'],
                                           cloud_stats['used_mb'])
                    # The template renders this with a '%' sign, so scale
                    # the ratio to a percentage; 100.0 forces float math
                    # even on integer MiB counts under Python 2.
                    _used_pc = _used_mb * 100.0 / _total_mb
                    print(
                        'The following statistics is available: \n'
                        ' Total hosts count: {0:d}\n'
                        ' Alive hosts now: {1:d}\n'
                        ' Cloud size: {2:d} MiB\n'
                        ' Used cloud size: {3:d} MiB ({4: 5.2f}%)\n'.format(
                            cloud_stats['total_hosts_count'],
                            cloud_stats['alive_hosts_count'],
                            int(_total_mb),
                            int(_used_mb),
                            _used_pc))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_overall_cloud_stats(on_cloud_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.
        # Launch reactor
        host_app.start_reactor()
def print_data_stats(arguments):
    """Print the backup data statistics in the cloud.

    CLI arguments: the host UUID, the dataset UUID (or '*' for all),
    and the path (or '*' for all).
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the dataset UUID (or asterisk), '
                  'and the path (or asterisk)\n'
                  'should be passed as the arguments!')
    else:
        my_uuid, ds_uuid, path = (try_parse_uuid(arguments.popleft()),
                                  try_parse_uuid(arguments.popleft()),
                                  arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            def on_data_stats_received(ds_uuid, path, data_stats):
                try:
                    chunk_count = data_stats['chunk_count']
                    # A dataset may contain no chunks; report a 0.0 ratio
                    # instead of raising ZeroDivisionError (which would only
                    # dump a traceback without printing any stats).
                    data_stats['chunk_ratio'] = \
                        (float(data_stats['chunk_replicas_count'])
                             / chunk_count) if chunk_count else 0.0
                    __print_data_stats(data_stats, ds_uuid, path)
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            # '*' means "no filter" for both the dataset UUID and the path.
            app.query_data_replication_stats(ds_uuid if ds_uuid != '*'
                                                     else None,
                                             path if path != '*' else None,
                                             on_data_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.
        # Launch reactor
        host_app.start_reactor()
def logs_action(arguments):
    """Perform some action on the log files.

    CLI arguments: the host UUID and the action name (one of
    _LOG_ACTIONS).
    """
    # _LOG_ACTIONS is only read here, never assigned, so the previous
    # "global _LOG_ACTIONS" declaration was unnecessary and is removed.
    if len(arguments) < 2:
        cli_error('At least the host UUID and the action should be passed!')
    else:
        my_uuid, action = (try_parse_uuid(arguments.popleft()),
                           arguments.popleft())
        if action not in _LOG_ACTIONS:
            cli_error('Action %r unsupported, only the following actions '
                      'are supported: %s',
                      action, ', '.join(imap('{!r}'.format, _LOG_ACTIONS)))
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """
                @type app: UHostApp
                """

                @exceptions_logged(logger)
                def on_action_completed(result):
                    app.terminate_host()

                send_settings = uhost_settings.get_log_reporting_settings()
                app.action_with_error_logs(action=action,
                                           report_settings=send_settings,
                                           on_completed=on_action_completed)

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start)
            host_app.first_start()
            # Launch reactor
            host_app.start_reactor()
def print_data_stats(arguments):
    """Print the backup data statistics in the cloud.

    Expects the host UUID, the dataset UUID (or '*'), and the path
    (or '*') as CLI arguments; '*' disables the respective filter.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the dataset UUID (or asterisk), '
                  'and the path (or asterisk)\n'
                  'should be passed as the arguments!')
    else:
        my_uuid, ds_uuid, path = (try_parse_uuid(arguments.popleft()),
                                  try_parse_uuid(arguments.popleft()),
                                  arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            def on_data_stats_received(ds_uuid, path, data_stats):
                try:
                    chunk_count = data_stats['chunk_count']
                    # Guard against an empty dataset: with zero chunks the
                    # ratio is defined as 0.0 rather than letting a
                    # ZeroDivisionError abort the printout.
                    data_stats['chunk_ratio'] = \
                        (float(data_stats['chunk_replicas_count'])
                             / chunk_count) if chunk_count else 0.0
                    __print_data_stats(data_stats, ds_uuid, path)
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_data_replication_stats(
                ds_uuid if ds_uuid != '*' else None,
                path if path != '*' else None,
                on_data_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.
        # Launch reactor
        host_app.start_reactor()
def logs_action(arguments):
    """Perform some action on the log files.

    CLI arguments: the host UUID and one of the _LOG_ACTIONS names.
    """
    # Note: no "global _LOG_ACTIONS" needed — the module-level constant
    # is only read, and Python resolves reads without the declaration.
    if len(arguments) < 2:
        cli_error('At least the host UUID and the action should be passed!')
    else:
        my_uuid, action = (try_parse_uuid(arguments.popleft()),
                           arguments.popleft())
        if action not in _LOG_ACTIONS:
            cli_error(
                'Action %r unsupported, only the following actions '
                'are supported: %s', action,
                ', '.join(imap('{!r}'.format, _LOG_ACTIONS)))
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """
                @type app: UHostApp
                """

                @exceptions_logged(logger)
                def on_action_completed(result):
                    app.terminate_host()

                send_settings = uhost_settings.get_log_reporting_settings()
                app.action_with_error_logs(action=action,
                                           report_settings=send_settings,
                                           on_completed=on_action_completed)

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start)
            host_app.first_start()
            # Launch reactor
            host_app.start_reactor()
def set_setting(arguments):
    """Set some host setting to the value.

    CLI arguments: the host UUID, the setting name (must be one of
    Queries.Settings.ALL_SETTINGS) and the new value.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, setting name and the value '
                  'should be passed as the arguments!')
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        setting_name = arguments.popleft()
        setting_value = arguments.popleft()

        if setting_name not in Queries.Settings.ALL_SETTINGS:
            cli_error('Setting "%s" unsupported!', setting_name)
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """
                @type app: UHostApp
                """
                print('Modifying {!r} to {!r}'.format(setting_name,
                                                      setting_value))
                # Terminate as soon as the node confirms the change.
                app.set_setting(setting_name,
                                setting_value,
                                on_received=lambda x: app.terminate_host())

            # Launch the main host app; it will not start until the
            # reactor is launched as well. Message auto-sending is off.
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start,
                                do_send_messages=False)
            host_app.first_start()
            host_app.start_reactor()
def delete_dataset(arguments):
    """Delete the dataset from the Node.

    CLI arguments: the host UUID and the dataset UUID to delete.
    """
    # The former "global _LOG_ACTIONS" was dead code: _LOG_ACTIONS is
    # never referenced in this function (copy-paste leftover).
    if len(arguments) < 2:
        cli_error('At least the host UUID and the dataset UUID '
                  'should be passed!')
    else:
        my_uuid, ds_uuid = (try_parse_uuid(arguments.popleft()),
                            try_parse_uuid(arguments.popleft()))
        print('Trying to delete the dataset {}'.format(ds_uuid))
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_deleted(deleted_ds_uuids):
                print('Deleted the following backups successfully:')
                print('\n'.join(' {}'.format(u) for u in deleted_ds_uuids))
                app.terminate_host()

            app.delete_datasets_from_node(ds_uuids_to_delete=[ds_uuid],
                                          on_completed=on_datasets_deleted)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # Launch reactor
        host_app.start_reactor()
def launch_host(arguments):
    """Launch main host loop."""
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        # This MUST alter the arguments variable, not its copy
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        # Create the main host app; it does not actually run until the
        # reactor below is launched.
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage())
        host_app.first_start()
        host_app.start_reactor()
        logger.debug('Host reactor completed.')
def start_restore(arguments):
    """Start restore process.

    CLI arguments: the host UUID, optional stay-alive flags, the target
    directory, the dataset UUID, and one or more file paths to restore.
    """
    if len(arguments) < 4:
        cli_error("At least the host UUID, the target directory, "
                  "the dataset UUID\n"
                  "and at least one file full path from the dataset "
                  "should be passed!")
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Optional flags: keep the process alive after success/failure.
        if arguments and arguments[0] in _SAS_OPTIONS:
            arguments.popleft()
            stay_alive_on_success = True
        else:
            stay_alive_on_success = False
        if arguments and arguments[0] in _SAF_OPTIONS:
            arguments.popleft()
            stay_alive_on_failure = True
        else:
            stay_alive_on_failure = False

        target_dir, ds_uuid = (arguments.popleft(),
                               try_parse_uuid(arguments.popleft()))
        file_paths_to_restore = map(normpath_nodot,
                                    __get_all_remaining_args(arguments))

        if not file_paths_to_restore:
            cli_error('No files are given!')
        else:
            # Guarded by "else" (matching select_files) so a restore is
            # never attempted with an empty file list even if cli_error
            # were to return.
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """
                @type app: UHostApp
                """

                @exceptions_logged(logger)
                @contract_epydoc
                def on_restore_completed(restore_succeeded):
                    """
                    @param restore_succeeded: whether the restore attempt
                        has succeeded in overall.
                    @type restore_succeeded: bool
                    """
                    if restore_succeeded:
                        print('Restore completed successfully!')
                    else:
                        print('The node disallowed the restore.')
                    if (stay_alive_on_success if restore_succeeded
                                              else stay_alive_on_failure):
                        print("Stayin' alive. Stayin' alive.")
                    else:
                        app.terminate_host()

                app.start_restore(file_paths_to_restore=file_paths_to_restore,
                                  ds_uuid=ds_uuid,
                                  restore_directory=target_dir,
                                  on_completed=on_restore_completed)

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start,
                                do_auto_start_backup=False)
            host_app.first_start()
            # Launch reactor
            host_app.start_reactor()
def start_backup(arguments):
    """Start backup process.

    @requires: There is at least one incomplete dataset in the DB.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Optional stay-alive flags; each consumes its argument if present.
        stay_alive_on_success = bool(arguments
                                     and arguments[0] in _SAS_OPTIONS)
        if stay_alive_on_success:
            arguments.popleft()
        stay_alive_on_failure = bool(arguments
                                     and arguments[0] in _SAF_OPTIONS)
        if stay_alive_on_failure:
            arguments.popleft()

        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_backup_completed(backup_succeeded):
                """
                @param backup_succeeded: whether the backup attempt
                    has succeeded in overall.
                @type backup_succeeded: bool
                """
                if backup_succeeded:
                    print('Backup completed successfully!')
                else:
                    print('The node disallowed the backup.')
                if (stay_alive_on_success if backup_succeeded
                                          else stay_alive_on_failure):
                    print("Stayin' alive. Stayin' alive.")
                else:
                    app.terminate_host()

            with db.RDB() as rdbw:
                all_datasets = Queries.Datasets.get_just_datasets(my_uuid,
                                                                  rdbw)

            incomplete_datasets_exist, incomplete_datasets = \
                inonempty(ds for ds in all_datasets if not ds.completed)

            if not incomplete_datasets_exist:
                # No incomplete datasets to backup
                on_backup_completed(False)
            else:
                # Start the backup of the first dataset in the sequence.
                incomplete_dataset_to_start = incomplete_datasets.next()
                app.auto_start_backup = False
                app.start_backup(incomplete_dataset_to_start.uuid,
                                 on_backup_completed)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # Launch reactor
        host_app.start_reactor()
def request_info_all_datasets(arguments):
    """Request info on all datasets of a host.

    CLI arguments: the host UUID.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_received(datasets_list):
                try:
                    print('The following datasets are available:')
                    tbl_fmt = u' {0:<36} {1:<5} {2:<6} {3:<12} ' \
                              '{4:<17} {5:<17} {6:<12}'
                    print(tbl_fmt.format('Dataset UUID', 'Files', 'Chunks',
                                         'Total size', 'Time started',
                                         'Time completed', 'Dataset name'))
                    print(tbl_fmt.format('-' * 36, '-' * 5, '-' * 6,
                                         '-' * 12, '-' * 17, '-' * 17,
                                         '-' * 12))
                    _encoding = locale.getpreferredencoding()
                    for dataset in datasets_list:
                        # Completed time may be absent for a running backup.
                        if dataset.time_completed is None:
                            completed_str = 'Not completed yet'
                        else:
                            completed_str = dataset.time_completed.strftime(
                                                DATETIME_STRFTIME_FORMAT)
                        row = tbl_fmt.format(
                                  dataset.uuid,
                                  dataset.files_count(),
                                  dataset.chunks_count(),
                                  dataset.size(),
                                  dataset.time_started
                                         .strftime(DATETIME_STRFTIME_FORMAT),
                                  completed_str,
                                  dataset.name)
                        print(row.encode(_encoding))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_datasets(on_datasets_received)

        # Launch the main host app; it starts for real only once the
        # reactor below is running.
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        host_app.start_reactor()
def request_info_all_datasets(arguments):
    """Request info on datasets.

    Expects the host UUID as the single CLI argument and prints a
    table of every dataset known for that host.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_received(datasets_list):
                try:
                    print('The following datasets are available:')
                    columns = u' {0:<36} {1:<5} {2:<6} {3:<12} ' \
                              '{4:<17} {5:<17} {6:<12}'
                    header = columns.format('Dataset UUID', 'Files',
                                            'Chunks', 'Total size',
                                            'Time started', 'Time completed',
                                            'Dataset name')
                    ruler = columns.format('-' * 36, '-' * 5, '-' * 6,
                                           '-' * 12, '-' * 17, '-' * 17,
                                           '-' * 12)
                    print(header)
                    print(ruler)
                    _encoding = locale.getpreferredencoding()
                    for dataset in datasets_list:
                        completed = ('Not completed yet'
                                         if dataset.time_completed is None
                                         else dataset.time_completed.strftime(
                                                  DATETIME_STRFTIME_FORMAT))
                        row = columns.format(
                                  dataset.uuid,
                                  dataset.files_count(),
                                  dataset.chunks_count(),
                                  dataset.size(),
                                  dataset.time_started
                                         .strftime(DATETIME_STRFTIME_FORMAT),
                                  completed,
                                  dataset.name)
                        print(row.encode(_encoding))
                except Exception:
                    traceback.print_exc()
                finally:
                    app.terminate_host()

            app.query_datasets(on_datasets_received)

        # Launch the main host app; it is not actually running until
        # the reactor is launched as well.
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        host_app.start_reactor()
def start_backup(arguments):
    """Start backup process.

    @requires: There is at least one incomplete dataset in the DB.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Consume the optional "stay alive" flags, if present.
        stay_alive_on_success = False
        if arguments and arguments[0] in _SAS_OPTIONS:
            arguments.popleft()
            stay_alive_on_success = True
        stay_alive_on_failure = False
        if arguments and arguments[0] in _SAF_OPTIONS:
            arguments.popleft()
            stay_alive_on_failure = True

        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """
            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_backup_completed(backup_succeeded):
                """
                @param backup_succeeded: whether the backup attempt
                    has succeeded in overall.
                @type backup_succeeded: bool
                """
                if backup_succeeded:
                    print('Backup completed successfully!')
                else:
                    print('The node disallowed the backup.')
                stay_alive = (stay_alive_on_success if backup_succeeded
                                                    else stay_alive_on_failure)
                if stay_alive:
                    print("Stayin' alive. Stayin' alive.")
                else:
                    app.terminate_host()

            with db.RDB() as rdbw:
                all_datasets = Queries.Datasets.get_just_datasets(my_uuid,
                                                                  rdbw)

            incomplete_datasets_exist, incomplete_datasets = \
                inonempty(ds for ds in all_datasets if not ds.completed)

            if not incomplete_datasets_exist:
                # No incomplete datasets to backup
                on_backup_completed(False)
            else:
                # Start the backup of the first dataset in the sequence.
                incomplete_dataset_to_start = incomplete_datasets.next()
                app.auto_start_backup = False
                app.start_backup(incomplete_dataset_to_start.uuid,
                                 on_backup_completed)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # Launch reactor
        host_app.start_reactor()