def request_info_dataset_files(arguments):
    """Print the list of files present in a particular dataset.

    Consumes two CLI arguments: the host UUID and the dataset UUID.
    Starts a temporary host app and its reactor, queries the node for
    the dataset contents, prints them, then terminates the host.
    """
    if len(arguments) < 2:
        cli_error('You must pass at least the host UUID '
                  'and the dataset UUID to this command.')
    else:
        my_uuid, ds_uuid = (try_parse_uuid(arguments.popleft()),
                            try_parse_uuid(arguments.popleft()))
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the dataset files as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_datasets_received(ds_uuid, dataset_files_dict):
                """Print the received files grouped by root directory.

                @type ds_uuid: UUID
                @type dataset_files_dict: dict
                """
                try:
                    print('The following files are present in the dataset {}:'
                              .format(ds_uuid))
                    _format = u' {0:<36s} {1}'
                    print(_format.format('File UUID', 'File path'))
                    print(_format.format('-' * 36, '-' * len('File path')))
                    # Files are grouped under their root directories.
                    for root_dir in sorted(dataset_files_dict.iterkeys()):
                        print(u" {}".format(root_dir))
                        for f in dataset_files_dict[root_dir]:
                            print(_format.format(f.uuid, f.full_path))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_dataset_files(ds_uuid, on_datasets_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def request_info_dataset_files(arguments):
    """Print the list of files present in a particular dataset.

    Consumes two CLI arguments: the host UUID and the dataset UUID.
    Starts a temporary host app and its reactor, queries the node for
    the dataset contents, prints them, then terminates the host.
    """
    if len(arguments) < 2:
        cli_error('You must pass at least the host UUID '
                  'and the dataset UUID to this command.')
    else:
        my_uuid, ds_uuid = (try_parse_uuid(arguments.popleft()),
                            try_parse_uuid(arguments.popleft()))
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the dataset files as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_datasets_received(ds_uuid, dataset_files_dict):
                """Print the received files grouped by root directory.

                @type ds_uuid: UUID
                @type dataset_files_dict: dict
                """
                try:
                    print('The following files are present in the dataset {}:'
                              .format(ds_uuid))
                    _format = u' {0:<36s} {1}'
                    print(_format.format('File UUID', 'File path'))
                    print(_format.format('-' * 36, '-' * len('File path')))
                    # Files are grouped under their root directories.
                    for root_dir in sorted(dataset_files_dict.iterkeys()):
                        print(u" {}".format(root_dir))
                        for f in dataset_files_dict[root_dir]:
                            print(_format.format(f.uuid, f.full_path))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_dataset_files(ds_uuid, on_datasets_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def print_data_stats(arguments):
    """Print the backup data statistics in the cloud.

    Consumes three CLI arguments: the host UUID, the dataset UUID
    (or an asterisk for "any"), and the path (or an asterisk for "any").
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the dataset UUID (or asterisk), '
                  'and the path (or asterisk)\n'
                  'should be passed as the arguments!')
    else:
        my_uuid, ds_uuid, path = (try_parse_uuid(arguments.popleft()),
                                  try_parse_uuid(arguments.popleft()),
                                  arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the replication stats as soon as the reactor is up.

            @type app: UHostApp
            """

            def on_data_stats_received(ds_uuid, path, data_stats):
                """Derive the replication ratio, print, then stop the host."""
                try:
                    # Bug fix: guard against a zero chunk count (e.g. an
                    # empty dataset) — previously this died with a
                    # ZeroDivisionError traceback; report 0.0 instead.
                    _chunk_count = data_stats['chunk_count']
                    data_stats['chunk_ratio'] = \
                        (float(data_stats['chunk_replicas_count'])
                             / _chunk_count) if _chunk_count else 0.0
                    __print_data_stats(data_stats, ds_uuid, path)
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_data_replication_stats(
                ds_uuid if ds_uuid != '*' else None,
                path if path != '*' else None,
                on_data_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def print_data_stats(arguments):
    """Print the backup data statistics in the cloud.

    Consumes three CLI arguments: the host UUID, the dataset UUID
    (or an asterisk for "any"), and the path (or an asterisk for "any").
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the dataset UUID (or asterisk), '
                  'and the path (or asterisk)\n'
                  'should be passed as the arguments!')
    else:
        my_uuid, ds_uuid, path = (try_parse_uuid(arguments.popleft()),
                                  try_parse_uuid(arguments.popleft()),
                                  arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the replication stats as soon as the reactor is up.

            @type app: UHostApp
            """

            def on_data_stats_received(ds_uuid, path, data_stats):
                """Derive the replication ratio, print, then stop the host."""
                try:
                    # NOTE(review): raises ZeroDivisionError (printed via
                    # traceback below) when 'chunk_count' is 0 — confirm
                    # whether an empty dataset can reach this point.
                    data_stats['chunk_ratio'] = \
                        float(data_stats['chunk_replicas_count']) / \
                        data_stats['chunk_count']
                    __print_data_stats(data_stats, ds_uuid, path)
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            # An asterisk argument means "no filter" for that dimension.
            app.query_data_replication_stats(
                ds_uuid if ds_uuid != '*' else None,
                path if path != '*' else None,
                on_data_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def add_user(arguments):
    """Add a new user to the system, or add an existing user to a group.

    Usage: <username> [--to-group <group UUID>]
    """
    node_map = proceed_with_node()
    if node_map is not None:
        if not arguments:
            cli_error('No user name passed')
        else:
            username_str = arguments.popleft()
            if arguments and arguments[0] == '--to-group':
                # Consume the '--to-group' marker itself (the unused
                # "dummy" binding was removed).
                arguments.popleft()
                if not arguments:
                    cli_error('Attempt to add user to group, '
                              'but no group specified!')
                else:
                    username = str(username_str)
                    group_uuid = try_parse_uuid(arguments.popleft())
                    # Let's add the user to group
                    NodeApp.add_user_to_group(
                        username=username,
                        group_uuid=UserGroupUUID.safe_cast_uuid(group_uuid))
                    print(u'Added user "{}" to the group "{}"'
                              .format(username, group_uuid))
            else:
                # Let's add the user to the system
                __add_new_regular_user(username_str, arguments)
def save_connection_password(arguments):
    """Generate a digest for the connection password of the given host
    and store it in the database.

    Consumes the host UUID and, optionally, the password (otherwise it
    is prompted for).
    """
    if len(arguments) < 1:
        cli_error('The host UUID and (optionally) the password '
                  'should be passed as the arguments!')
    else:
        host_uuid = try_parse_uuid(arguments.popleft())
        password = _get_password_from_arguments(arguments, repeat=True)

        proceed_with_host_uuid_cli(host_uuid)

        with db.RDB() as rdbw:
            # The digest is bound to the user owning this host.
            host = Queries.Inhabitants.get_host_by_uuid(host_uuid, rdbw)
            username = host.username

            digest = crypto.generate_digest(
                         username,
                         password,
                         common_settings.HTTP_AUTH_REALM_NODE)
            Queries.Inhabitants.update_user_digest(username, digest, rdbw)

            print(u'For host {host} (with user {user}), '
                  u'saving the following digest: {digest}'
                      .format(host=host_uuid,
                              user=username,
                              digest=digest))
def select_files(arguments):
    """Select one or more directories for backup into a new dataset.

    Consumes the host UUID, the backup (dataset) name, and one or more
    directory paths whose files should be included.

    NOTE(review): the previous docstring ("Generate digest for the
    username.") did not match the code and looked copy-pasted.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the backup name, and at least one directory '
                  'with the files should be passed!')
    else:
        my_uuid, ds_name = (try_parse_uuid(arguments.popleft()),
                            arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)
        paths = __get_all_remaining_args(arguments)
        if not paths:
            cli_error('No paths passed!')
        else:
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage())
            ugroup_uuid = host_app.host.user.base_group.uuid
            # 'f+'/'f-' are presumably the include/exclude file filters
            # for each path ('all' includes everything) — confirm
            # against select_paths_for_backup().
            path_map = {k: {'f+': ['all'],
                            'f-': [],
                            'stat': safe_stat(k)}
                        for k in paths}
            host_app.select_paths_for_backup(ds_name=ds_name,
                                             ds_uuid=gen_uuid(),
                                             ugroup_uuid=ugroup_uuid,
                                             sync=False,
                                             paths_map=path_map)
def save_connection_password(arguments):
    """Generate a digest for the connection password of the given host
    and store it in the database.

    Consumes the host UUID and, optionally, the password (otherwise it
    is prompted for).
    """
    if len(arguments) < 1:
        cli_error('The host UUID and (optionally) the password '
                  'should be passed as the arguments!')
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        password = _get_password_from_arguments(arguments, repeat=True)

        proceed_with_host_uuid_cli(my_uuid)

        with db.RDB() as rdbw:
            # The digest is bound to the user owning this host.
            my_host = Queries.Inhabitants.get_host_by_uuid(my_uuid, rdbw)
            my_username = my_host.username

            _digest = \
                crypto.generate_digest(my_username,
                                       password,
                                       common_settings.HTTP_AUTH_REALM_NODE)
            Queries.Inhabitants.update_user_digest(my_username, _digest, rdbw)

            print(u'For host {host} (with user {user}), '
                  u'saving the following digest: {digest}'
                      .format(host=my_uuid,
                              user=my_username,
                              digest=_digest))
def select_files(arguments):
    """Select one or more directories for backup into a new dataset.

    Consumes the host UUID, the backup (dataset) name, and one or more
    directory paths whose files should be included.

    NOTE(review): the previous docstring ("Generate digest for the
    username.") did not match the code and looked copy-pasted.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, the backup name, and at least one directory '
                  'with the files should be passed!')
    else:
        my_uuid, ds_name = (try_parse_uuid(arguments.popleft()),
                            arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)
        paths = __get_all_remaining_args(arguments)
        if not paths:
            cli_error('No paths passed!')
        else:
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage())
            ugroup_uuid = host_app.host.user.base_group.uuid
            # 'f+'/'f-' are presumably the include/exclude file filters
            # for each path ('all' includes everything) — confirm
            # against select_paths_for_backup().
            path_map = {k: {'f+': ['all'],
                            'f-': [],
                            'stat': safe_stat(k)}
                        for k in paths}
            host_app.select_paths_for_backup(ds_name=ds_name,
                                             ds_uuid=gen_uuid(),
                                             ugroup_uuid=ugroup_uuid,
                                             sync=False,
                                             paths_map=path_map)
def __test_web_upload_remove(op_upload, arguments):
    """Common code for CLI operations for web upload/web remove.

    @param op_upload: if True, perform an upload; otherwise a removal.
    @param arguments: deque of CLI arguments: the user group UUID
        followed by one or more file paths.
    """
    op_name = 'upload' if op_upload else 'remove'
    if len(arguments) < 2:
        cli_error(u'You must pass at least 2 arguments to this command: '
                  u'the UUID of the group, and at least '
                  u'one filename to {op}.'
                      .format(op=op_name))
    else:
        group_uuid = UserGroupUUID.safe_cast_uuid(
                         try_parse_uuid(arguments.popleft()))
        # Everything remaining on the command line is a file path.
        file_paths = []
        while arguments:
            file_paths.append(arguments.popleft())

        print(u'{} file paths for group {}:\n{}'.format(
                  op_name.capitalize(), group_uuid, '\n'.join(file_paths)))

        # NOTE(review): unlike other node-side commands, the result of
        # proceed_with_node() is not checked for None here — confirm
        # whether a failed node setup should abort the operation.
        node_map = proceed_with_node()
        chunk_storage = ChunkStorageBigDB(bdbw_factory=ds.BDB)
        with db.RDB() as rdbw:
            __test_upload_remove_files(group_uuid, file_paths, chunk_storage,
                                       op_upload, rdbw)
def add_trusted_host(arguments):
    """Register a new Trusted Host (and its backing user) on the node.

    Consumes three CLI arguments: the user name, the password digest
    and the host UUID.
    """
    if len(arguments) < 3:
        cli_error('You must pass at least the user name, password digest '
                  'and the host UUID!')
    else:
        username = str(arguments.popleft())
        digest = str(arguments.popleft())
        host_uuid = try_parse_uuid(arguments.popleft())
        node_map = proceed_with_node()
        if node_map is not None:
            # Finally, add user.
            # (username is already a str above; the redundant second
            # str() cast was removed.)
            NodeApp.add_user(UserGroupUUID.safe_cast_uuid(gen_uuid()),
                             username=username,
                             digest=digest,
                             is_trusted=True)
            # A Trusted Host is used for storage, but not for restore.
            _for_storage = True
            _for_restore = False
            NodeApp.add_host(username=username,
                             hostname='Trusted: {}'.format(host_uuid),
                             host_uuid=host_uuid,
                             trusted_host_tuple=(_for_storage,
                                                 _for_restore))
            NodeApp.change_user(username, digest)
            print(u'Added Trusted Host {!r}'.format(username))
def print_cloud_stats(arguments):
    """Print the overall cloud statistics.

    Consumes one CLI argument: the host UUID.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the cloud stats as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_cloud_stats_received(cloud_stats):
                """Print the received statistics, then stop the host.

                @type cloud_stats: col.Mapping
                """
                try:
                    _total_mb, _used_mb = (cloud_stats['total_mb'],
                                           cloud_stats['used_mb'])
                    # Bug fix: the last value is rendered as a percent
                    # ("{4: 5.2f}%"), so the used/total ratio must be
                    # scaled by 100; multiplying by 100.0 also forces
                    # float division (this code may run under Python 2,
                    # where int / int truncates).
                    print('The following statistics is available: \n'
                          ' Total hosts count: {0:d}\n'
                          ' Alive hosts now: {1:d}\n'
                          ' Cloud size: {2:d} MiB\n'
                          ' Used cloud size: {3:d} MiB ({4: 5.2f}%)\n'
                              .format(cloud_stats['total_hosts_count'],
                                      cloud_stats['alive_hosts_count'],
                                      int(_total_mb),
                                      int(_used_mb),
                                      _used_mb * 100.0 / _total_mb))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_overall_cloud_stats(on_cloud_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def delete_dataset(arguments):
    """Delete the dataset from the Node.

    Consumes two CLI arguments: the host UUID and the dataset UUID.
    """
    # Fix: removed the spurious "global _LOG_ACTIONS" — this function
    # never touches _LOG_ACTIONS (likely copy-pasted from logs_action()).
    if len(arguments) < 2:
        cli_error('At least the host UUID and the dataset UUID '
                  'should be passed!')
    else:
        my_uuid, ds_uuid = (try_parse_uuid(arguments.popleft()),
                            try_parse_uuid(arguments.popleft()))
        print('Trying to delete the dataset {}'.format(ds_uuid))
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Issue the deletion request as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_deleted(deleted_ds_uuids):
                """Report the deleted datasets, then stop the host."""
                print('Deleted the following backups successfully:')
                print('\n'.join(' {}'.format(u) for u in deleted_ds_uuids))
                app.terminate_host()

            app.delete_datasets_from_node(ds_uuids_to_delete=[ds_uuid],
                                          on_completed=on_datasets_deleted)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()

        # Launch reactor
        host_app.start_reactor()
def print_cloud_stats(arguments):
    """Print the overall cloud statistics.

    Consumes one CLI argument: the host UUID.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the cloud stats as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_cloud_stats_received(cloud_stats):
                """Print the received statistics, then stop the host.

                @type cloud_stats: col.Mapping
                """
                try:
                    _total_mb, _used_mb = (cloud_stats['total_mb'],
                                           cloud_stats['used_mb'])
                    # Bug fix: the last value is rendered as a percent
                    # ("{4: 5.2f}%"), so the used/total ratio must be
                    # scaled by 100; multiplying by 100.0 also forces
                    # float division (this code may run under Python 2,
                    # where int / int truncates).
                    print('The following statistics is available: \n'
                          ' Total hosts count: {0:d}\n'
                          ' Alive hosts now: {1:d}\n'
                          ' Cloud size: {2:d} MiB\n'
                          ' Used cloud size: {3:d} MiB ({4: 5.2f}%)\n'
                              .format(cloud_stats['total_hosts_count'],
                                      cloud_stats['alive_hosts_count'],
                                      int(_total_mb),
                                      int(_used_mb),
                                      _used_mb * 100.0 / _total_mb))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_overall_cloud_stats(on_cloud_stats_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def logs_action(arguments):
    """Perform some action on the log files.

    Consumes two CLI arguments: the host UUID and the action name
    (which must be one of _LOG_ACTIONS).
    """
    global _LOG_ACTIONS
    if len(arguments) < 2:
        cli_error('At least the host UUID and the action should be passed!')
    else:
        my_uuid, action = (try_parse_uuid(arguments.popleft()),
                           arguments.popleft())
        if action not in _LOG_ACTIONS:
            cli_error('Action %r unsupported, only the following actions '
                      'are supported: %s',
                      action, ', '.join(imap('{!r}'.format, _LOG_ACTIONS)))
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """Launch the log action as soon as the reactor is up.

                @type app: UHostApp
                """

                @exceptions_logged(logger)
                def on_action_completed(result):
                    # The result itself is ignored; just stop the host.
                    app.terminate_host()

                send_settings = uhost_settings.get_log_reporting_settings()
                app.action_with_error_logs(action=action,
                                           report_settings=send_settings,
                                           on_completed=on_action_completed)

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start)
            host_app.first_start()

            # Launch reactor
            host_app.start_reactor()
def _test_take_snapshot(arguments):
    """Take a base directory snapshot for the given host (test command).

    Consumes two CLI arguments: the base host UUID and the directory
    path to snapshot.
    """
    if len(arguments) < 2:
        cli_error('Must pass the base host UUID and the directory path '
                  'to take snapshot!')
    my_uuid = try_parse_uuid(arguments.popleft())
    dir_path = arguments.popleft()
    print(u'Taking snapshot of {!r}'.format(dir_path))
    proceed_with_host_uuid_cli(my_uuid)
    with db.RDB() as rdbw:
        # Bug fix: this lookup used the undefined name "uuid" instead of
        # the just-parsed "my_uuid" (raising a NameError, or silently
        # passing the uuid module if it is imported at file level).
        host = Queries.Inhabitants.get_host_with_user_by_uuid(my_uuid, rdbw)
        UHostApp.take_base_directory_snapshot(dir_path,
                                              host.user.base_group.uuid)
def add_host(arguments):
    """Register the given host UUID on the node under the given user name."""
    if len(arguments) < 2:
        cli_error('You must pass at least the user name '
                  'and the host UUID!')
        return

    user_name = str(arguments.popleft())
    new_host_uuid = try_parse_uuid(arguments.popleft())

    # Without a working node mapping there is nothing to register on.
    if proceed_with_node() is None:
        return

    NodeApp.add_host(username=user_name,
                     hostname=str(new_host_uuid),
                     host_uuid=new_host_uuid,
                     trusted_host_tuple=None)
    print(u'Added host {} to "{}"'.format(new_host_uuid, user_name))
def test_web_download(arguments):
    """Download a single file from a dataset into the current directory.

    Consumes three CLI arguments: the dataset UUID, the base directory
    of the file, and the relative path of the file inside it.
    """
    node_map = proceed_with_node()
    if len(arguments) < 3:
        # Bug fix: this message previously ended with a stray
        # ".format(op=op_name)" — op_name is not defined in this
        # function (and the string has no {op} placeholder), so
        # reporting the usage error itself raised a NameError.
        cli_error(u'You must pass at least 3 arguments to this command: '
                  u'the dataset UUID , '
                  u'base directory of the file, '
                  u'and the relative path of the file.')
    else:
        ds_uuid = try_parse_uuid(arguments.popleft())
        base_dir = arguments.popleft()
        rel_path = arguments.popleft()

        edition = settings.get_common().edition

        print(u'Downloading file from dataset {}, base directory {}, '
              u'path {}'
                  .format(ds_uuid, base_dir, rel_path))

        with db.RDB() as rdbw, ds.BDB() as bdbw:
            cr = data_ops.get_cryptographer(ds_uuid, edition, rdbw)
            file_data = data_ops.download_file(ds_uuid, base_dir, rel_path,
                                               edition, rdbw)
            result_filename = os.path.basename(rel_path)
            logger.debug('Writing to file %r %i bytes long',
                         result_filename, file_data.size)
            with open(result_filename, 'wb') as fh:
                # Preallocate file
                if file_data.size:
                    fh.seek(file_data.size - 1)
                    fh.write('\x00')
                    fh.seek(0)
                # Now write the file contents.
                try:
                    for bl in data_ops.get_file_of_blocks_gen(
                                  file_data.blocks, cr, bdbw=bdbw):
                        fh.write(bl)
                except BDBQueries.Chunks.NoChunkException as e:
                    logger.error('Problem while downloading the file: %s', e)
                    print(u'Error: file {!r} cannot be created!'.format(
                              result_filename))
def logs_action(arguments):
    """Perform some action on the log files.

    Consumes two CLI arguments: the host UUID and the action name
    (which must be one of _LOG_ACTIONS).
    """
    global _LOG_ACTIONS
    if len(arguments) < 2:
        cli_error('At least the host UUID and the action should be passed!')
    else:
        my_uuid, action = (try_parse_uuid(arguments.popleft()),
                           arguments.popleft())
        if action not in _LOG_ACTIONS:
            cli_error('Action %r unsupported, only the following actions '
                      'are supported: %s',
                      action,
                      ', '.join(imap('{!r}'.format, _LOG_ACTIONS)))
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """Launch the log action as soon as the reactor is up.

                @type app: UHostApp
                """

                @exceptions_logged(logger)
                def on_action_completed(result):
                    # The result itself is ignored; just stop the host.
                    app.terminate_host()

                send_settings = uhost_settings.get_log_reporting_settings()
                app.action_with_error_logs(action=action,
                                           report_settings=send_settings,
                                           on_completed=on_action_completed)

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start)
            host_app.first_start()

            # Launch reactor
            host_app.start_reactor()
def print_all_settings(arguments):
    """Print every configured host setting with its value and update time."""
    if len(arguments) < 1:
        cli_host_uuid_error()
        return

    host_uuid = try_parse_uuid(arguments.popleft())
    proceed_with_host_uuid_cli(host_uuid)

    print('Configured settings:')
    # datetime.min: fetch every setting, no matter how old.
    all_settings = host_settings.get_all_settings_newer_than(datetime.min)
    for setting_name, (setting_value, upd_time) in all_settings.iteritems():
        # Binary values are summarized rather than dumped verbatim.
        if isinstance(setting_value, buffer):
            shown_value = '<binary {:d} bytes>'.format(len(setting_value))
        else:
            shown_value = setting_value
        print(u' {0:<20s}: {1!r} (last update on {2})'.format(
            '{!r}'.format(setting_name), shown_value, upd_time))
def set_setting(arguments):
    """Set some host setting to the value.

    Consumes three CLI arguments: the host UUID, the setting name
    (which must be one of Queries.Settings.ALL_SETTINGS) and the value.
    """
    if len(arguments) < 3:
        cli_error('The host UUID, setting name and the value '
                  'should be passed as the arguments!')
    else:
        (my_uuid, setting_name, setting_value) = \
            (try_parse_uuid(arguments.popleft()),
             arguments.popleft(),
             arguments.popleft())
        if setting_name not in Queries.Settings.ALL_SETTINGS:
            cli_error('Setting "%s" unsupported!', setting_name)
        else:
            proceed_with_host_uuid_cli(my_uuid)

            @exceptions_logged(logger)
            @contract_epydoc
            def on_reactor_start(app):
                """Push the new setting value as soon as the reactor is up.

                @type app: UHostApp
                """
                print('Modifying {!r} to {!r}'.format(setting_name,
                                                      setting_value))
                # Terminate the host as soon as the node acknowledges.
                app.set_setting(setting_name, setting_value,
                                on_received=lambda x: app.terminate_host())

            # Launch the main host app
            host_app = UHostApp(my_uuid,
                                uhost_settings.detect_edition(),
                                __create_chunk_storage(),
                                on_reactor_start=on_reactor_start,
                                do_send_messages=False)
            host_app.first_start()
            # But it is not yet started, until the reactor is launched
            # as well.

            # Launch reactor
            host_app.start_reactor()
def launch_host(arguments):
    """Launch main host loop."""
    if len(arguments) < 1:
        cli_host_uuid_error()
        return

    # This MUST alter the arguments variable, not its copy
    host_uuid = try_parse_uuid(arguments.popleft())
    proceed_with_host_uuid_cli(host_uuid)

    # Build the host app; it stays idle until the reactor is running.
    app = UHostApp(host_uuid,
                   uhost_settings.detect_edition(),
                   __create_chunk_storage())
    app.first_start()

    # Run the reactor; this blocks until the host shuts down.
    app.start_reactor()
    logger.debug('Host reactor completed.')
def print_all_settings(arguments):
    """Print all available settings together with their values.

    Consumes one CLI argument: the host UUID.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)
        print('Configured settings:')
        # datetime.min: fetch every setting, no matter how old.
        _settings = host_settings.get_all_settings_newer_than(datetime.min)
        for name, (value, time) in _settings.iteritems():
            # Binary (buffer) values are summarized rather than dumped.
            print(u' {0:<20s}: {1!r} (last update on {2})'
                      .format('{!r}'.format(name),
                              value if not isinstance(value, buffer)
                                    else '<binary {:d} bytes>'
                                             .format(len(value)),
                              time))
def start_restore(arguments):
    """Start restore process.

    Consumes: the host UUID, optional "stay alive" flags, the target
    directory, the dataset UUID, and one or more file paths (from the
    dataset) to restore.
    """
    if len(arguments) < 4:
        cli_error("At least the host UUID, the target directory, "
                  "the dataset UUID\n"
                  "and at least one file full path from the dataset "
                  "should be passed!")
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Optional flag: keep the host alive after a successful restore.
        if arguments and arguments[0] in _SAS_OPTIONS:
            arguments.popleft()
            stay_alive_on_success = True
        else:
            stay_alive_on_success = False

        # Optional flag: keep the host alive after a failed restore.
        if arguments and arguments[0] in _SAF_OPTIONS:
            arguments.popleft()
            stay_alive_on_failure = True
        else:
            stay_alive_on_failure = False

        target_dir, ds_uuid = (arguments.popleft(),
                               try_parse_uuid(arguments.popleft()))
        # Normalize the paths so they can match the stored dataset paths.
        file_paths_to_restore = map(normpath_nodot,
                                    __get_all_remaining_args(arguments))
        if not file_paths_to_restore:
            cli_error('No files are given!')

        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Kick off the restore as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_restore_completed(restore_succeeded):
                """
                @param restore_succeeded: whether the restore attempt
                    has succeeded in overall.
                @type restore_succeeded: bool
                """
                if restore_succeeded:
                    print('Restore completed successfully!')
                else:
                    print('The node disallowed the restore.')

                # Honor the appropriate stay-alive flag for the outcome.
                if (stay_alive_on_success if restore_succeeded
                                          else stay_alive_on_failure):
                    print("Stayin' alive. Stayin' alive.")
                else:
                    app.terminate_host()

            app.start_restore(file_paths_to_restore=file_paths_to_restore,
                              ds_uuid=ds_uuid,
                              restore_directory=target_dir,
                              on_completed=on_restore_completed)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start,
                            do_auto_start_backup=False)
        host_app.first_start()

        # Launch reactor
        host_app.start_reactor()
def start_backup(arguments):
    """Start backup process.

    Consumes the host UUID and optional "stay alive" flags.

    @requires: There is at least one incomplete dataset in the DB.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Optional flag: keep the host alive after a successful backup.
        if arguments and arguments[0] in _SAS_OPTIONS:
            arguments.popleft()
            stay_alive_on_success = True
        else:
            stay_alive_on_success = False

        # Optional flag: keep the host alive after a failed backup.
        if arguments and arguments[0] in _SAF_OPTIONS:
            arguments.popleft()
            stay_alive_on_failure = True
        else:
            stay_alive_on_failure = False

        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Find an incomplete dataset and start backing it up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_backup_completed(backup_succeeded):
                """
                @param backup_succeeded: whether the backup attempt
                    has succeeded in overall.
                @type backup_succeeded: bool
                """
                if backup_succeeded:
                    print('Backup completed successfully!')
                else:
                    print('The node disallowed the backup.')

                # Honor the appropriate stay-alive flag for the outcome.
                if (stay_alive_on_success if backup_succeeded
                                          else stay_alive_on_failure):
                    print("Stayin' alive. Stayin' alive.")
                else:
                    app.terminate_host()

            with db.RDB() as rdbw:
                all_datasets = Queries.Datasets.get_just_datasets(
                                   my_uuid, rdbw)
                incomplete_datasets_exist, incomplete_datasets = \
                    inonempty(ds for ds in all_datasets if not ds.completed)

                if not incomplete_datasets_exist:
                    # No incomplete datasets to backup
                    on_backup_completed(False)
                else:
                    # Start the backup of the first dataset
                    # in the sequence.
                    incomplete_dataset_to_start = incomplete_datasets.next()
                    app.auto_start_backup = False
                    app.start_backup(incomplete_dataset_to_start.uuid,
                                     on_backup_completed)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()

        # Launch reactor
        host_app.start_reactor()
def request_info_all_datasets(arguments):
    """Request info on datasets.

    Consumes one CLI argument: the host UUID. Prints a table of all
    datasets available for that host, then terminates.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the datasets as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_received(datasets_list):
                """Print the dataset table, then stop the host."""
                try:
                    print('The following datasets are available:')
                    _format = u' {0:<36} {1:<5} {2:<6} {3:<12} ' \
                              '{4:<17} {5:<17} {6:<12}'
                    print(_format.format('Dataset UUID', 'Files', 'Chunks',
                                         'Total size', 'Time started',
                                         'Time completed', 'Dataset name'))
                    print(_format.format('-' * 36, '-' * 5, '-' * 6,
                                         '-' * 12, '-' * 17, '-' * 17,
                                         '-' * 12))
                    # Dataset names may contain non-ASCII characters:
                    # encode each row with the locale encoding (Py2).
                    _encoding = locale.getpreferredencoding()
                    for dataset in datasets_list:
                        _formatted = \
                            _format.format(
                                dataset.uuid,
                                dataset.files_count(),
                                dataset.chunks_count(),
                                dataset.size(),
                                dataset.time_started
                                       .strftime(DATETIME_STRFTIME_FORMAT),
                                'Not completed yet'
                                    if dataset.time_completed is None
                                    else dataset.time_completed.strftime(
                                             DATETIME_STRFTIME_FORMAT),
                                dataset.name)
                        print(_formatted.encode(_encoding))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_datasets(on_datasets_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()
def start_backup(arguments):
    """Start backup process.

    Consumes the host UUID and optional "stay alive" flags.

    @requires: There is at least one incomplete dataset in the DB.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())

        # Optional flag: keep the host alive after a successful backup.
        if arguments and arguments[0] in _SAS_OPTIONS:
            arguments.popleft()
            stay_alive_on_success = True
        else:
            stay_alive_on_success = False

        # Optional flag: keep the host alive after a failed backup.
        if arguments and arguments[0] in _SAF_OPTIONS:
            arguments.popleft()
            stay_alive_on_failure = True
        else:
            stay_alive_on_failure = False

        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Find an incomplete dataset and start backing it up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            @contract_epydoc
            def on_backup_completed(backup_succeeded):
                """
                @param backup_succeeded: whether the backup attempt
                    has succeeded in overall.
                @type backup_succeeded: bool
                """
                if backup_succeeded:
                    print('Backup completed successfully!')
                else:
                    print('The node disallowed the backup.')

                # Honor the appropriate stay-alive flag for the outcome.
                if (stay_alive_on_success if backup_succeeded
                                          else stay_alive_on_failure):
                    print("Stayin' alive. Stayin' alive.")
                else:
                    app.terminate_host()

            with db.RDB() as rdbw:
                all_datasets = Queries.Datasets.get_just_datasets(my_uuid,
                                                                  rdbw)
                incomplete_datasets_exist, incomplete_datasets = \
                    inonempty(ds for ds in all_datasets if not ds.completed)

                if not incomplete_datasets_exist:
                    # No incomplete datasets to backup
                    on_backup_completed(False)
                else:
                    # Start the backup of the first dataset
                    # in the sequence.
                    incomplete_dataset_to_start = incomplete_datasets.next()
                    app.auto_start_backup = False
                    app.start_backup(incomplete_dataset_to_start.uuid,
                                     on_backup_completed)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()

        # Launch reactor
        host_app.start_reactor()
def request_info_all_datasets(arguments):
    """Request info on datasets.

    Consumes one CLI argument: the host UUID. Prints a table of all
    datasets available for that host, then terminates.
    """
    if len(arguments) < 1:
        cli_host_uuid_error()
    else:
        my_uuid = try_parse_uuid(arguments.popleft())
        proceed_with_host_uuid_cli(my_uuid)

        @exceptions_logged(logger)
        @contract_epydoc
        def on_reactor_start(app):
            """Query the datasets as soon as the reactor is up.

            @type app: UHostApp
            """

            @exceptions_logged(logger)
            def on_datasets_received(datasets_list):
                """Print the dataset table, then stop the host."""
                try:
                    print('The following datasets are available:')
                    _format = u' {0:<36} {1:<5} {2:<6} {3:<12} ' \
                              '{4:<17} {5:<17} {6:<12}'
                    print(_format.format('Dataset UUID', 'Files', 'Chunks',
                                         'Total size', 'Time started',
                                         'Time completed', 'Dataset name'))
                    print(_format.format('-' * 36, '-' * 5, '-' * 6,
                                         '-' * 12, '-' * 17, '-' * 17,
                                         '-' * 12))
                    # Dataset names may contain non-ASCII characters:
                    # encode each row with the locale encoding (Py2).
                    _encoding = locale.getpreferredencoding()
                    for dataset in datasets_list:
                        _formatted = \
                            _format.format(
                                dataset.uuid,
                                dataset.files_count(),
                                dataset.chunks_count(),
                                dataset.size(),
                                dataset.time_started
                                       .strftime(DATETIME_STRFTIME_FORMAT),
                                'Not completed yet'
                                    if dataset.time_completed is None
                                    else dataset.time_completed.strftime(
                                             DATETIME_STRFTIME_FORMAT),
                                dataset.name)
                        print(_formatted.encode(_encoding))
                except Exception:
                    traceback.print_exc()
                finally:
                    # Always stop the host, even if printing failed.
                    app.terminate_host()

            app.query_datasets(on_datasets_received)

        # Launch the main host app
        host_app = UHostApp(my_uuid,
                            uhost_settings.detect_edition(),
                            __create_chunk_storage(),
                            on_reactor_start=on_reactor_start)
        host_app.first_start()
        # But it is not yet started, until the reactor is launched as well.

        # Launch reactor
        host_app.start_reactor()