# Imports shared by the three variants of main() below. The grafana_backup.*
# module paths are assumptions inferred from the helper names used in this
# section; restore_components() (variants 2 and 3) is referenced but not shown
# here -- a sketch of it follows variant 2.
from glob import glob
import collections
import shutil
import sys
import tarfile
import tempfile
from grafana_backup.api_checks import main as api_checks
from grafana_backup.s3_download import main as s3_download
from grafana_backup.azure_storage_download import main as azure_storage_download
from grafana_backup.gcs_download import main as gcs_download
from grafana_backup.create_folder import main as create_folder
from grafana_backup.create_datasource import main as create_datasource
from grafana_backup.create_dashboard import main as create_dashboard
from grafana_backup.create_alert_channel import main as create_alert_channel
from grafana_backup.create_org import main as create_org
from grafana_backup.create_user import main as create_user


# Variant 1: restores from a local archive or S3, with the component loops
# inlined in main().
def main(args, settings):
    arg_archive_file = args.get('<archive_file>', None)
    arg_components = args.get('--components', False)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    (status, json_resp, api_version) = api_checks(settings)

    # Do not continue if the API is unavailable or the token is not valid
    if not status == 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Use a tar data stream if an S3 bucket name is specified
    if aws_s3_bucket_name:
        s3_data = s3_download(args, settings)
        try:
            tar = tarfile.open(fileobj=s3_data, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)
    else:
        # Probe the archive first; a missing or unreadable path raises IOError
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    with tempfile.TemporaryDirectory() as tmpdir:
        tar.extractall(tmpdir)
        tar.close()

        restore_functions = {'folder': create_folder,
                             'datasource': create_datasource,
                             'dashboard': create_dashboard,
                             'alert_channel': create_alert_channel,
                             'organization': create_org,
                             'user': create_user}

        if arg_components:
            arg_components_list = arg_components.split(',')

            # Restore only the components provided via the argument, and only
            # those that also exist in the extracted archive
            for ext in arg_components_list:
                # Plural argument names ('dashboards') map to singular file
                # extensions ('dashboard'), hence ext[:-1]
                for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext[:-1]), recursive=True):
                    print('restoring {0}: {1}'.format(ext, file_path))
                    restore_functions[ext[:-1]](args, settings, file_path)
        else:
            # Restore every component included in the extracted archive
            for ext in restore_functions.keys():
                for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext), recursive=True):
                    print('restoring {0}: {1}'.format(ext, file_path))
                    restore_functions[ext](args, settings, file_path)
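
# A minimal invocation sketch for variant 1. The dict shapes are assumptions:
# the '<archive_file>' and '--components' keys suggest docopt-style arguments,
# and settings is only read with .get(), so plain dicts stand in here. The
# archive path and component list are purely illustrative.
example_args = {'<archive_file>': '202101011200.tar.gz',
                '--components': 'folders,dashboards'}
example_settings = {'AWS_S3_BUCKET_NAME': None}  # falsy -> read the local file
# main(example_args, example_settings)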

# Variant 2: the component loops are factored out into restore_components(),
# and a Python 2 fallback (mkdtemp + shutil.rmtree) replaces the hard
# dependency on tempfile.TemporaryDirectory.
def main(args, settings):
    arg_archive_file = args.get('<archive_file>', None)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    (status, json_resp, api_version) = api_checks(settings)

    # Do not continue if the API is unavailable or the token is not valid
    if not status == 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Use a tar data stream if an S3 bucket name is specified
    if aws_s3_bucket_name:
        s3_data = s3_download(args, settings)
        try:
            tar = tarfile.open(fileobj=s3_data, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)
    else:
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    restore_functions = {'folder': create_folder,
                         'datasource': create_datasource,
                         'dashboard': create_dashboard,
                         'alert_channel': create_alert_channel,
                         'organization': create_org,
                         'user': create_user}

    if sys.version_info >= (3,):
        # The context manager removes tmpdir on exit, so no explicit cleanup
        # is needed (tmpdir is a plain str here, not an object with a
        # cleanup() method)
        with tempfile.TemporaryDirectory() as tmpdir:
            tar.extractall(tmpdir)
            tar.close()
            restore_components(args, settings, restore_functions, tmpdir)
    else:
        tmpdir = tempfile.mkdtemp()
        tar.extractall(tmpdir)
        tar.close()
        restore_components(args, settings, restore_functions, tmpdir)
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            print("Error: %s : %s" % (tmpdir, e.strerror))
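
# restore_components() is called by variants 2 and 3 but not defined in this
# section. A minimal sketch, assuming it simply factors out the glob loops of
# variant 1 (this body is a reconstruction, not the project's actual code):
def restore_components(args, settings, restore_functions, tmpdir):
    arg_components = args.get('--components', False)
    if arg_components:
        # Restore only the requested components that exist in the archive;
        # plural argument names map to singular file extensions via ext[:-1]
        for ext in arg_components.split(','):
            for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext[:-1]), recursive=True):
                print('restoring {0}: {1}'.format(ext, file_path))
                restore_functions[ext[:-1]](args, settings, file_path)
    else:
        # Restore every component included in the extracted archive
        for ext in restore_functions.keys():
            for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext), recursive=True):
                print('restoring {0}: {1}'.format(ext, file_path))
                restore_functions[ext](args, settings, file_path)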

# Variant 3: adds Azure Blob Storage and GCS as download sources, factors the
# tar opening for streamed downloads into a helper, and switches
# restore_functions to an OrderedDict so components are restored in a fixed
# order (folders before the dashboards that live in them, and so on).
def main(args, settings):
    def open_compressed_backup(compressed_backup):
        try:
            tar = tarfile.open(fileobj=compressed_backup, mode='r:gz')
            return tar
        except Exception as e:
            print(str(e))
            sys.exit(1)

    arg_archive_file = args.get('<archive_file>', None)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
    azure_storage_container_name = settings.get('AZURE_STORAGE_CONTAINER_NAME')
    gcs_bucket_name = settings.get('GCS_BUCKET_NAME')

    (status, json_resp, uid_support, paging_support) = api_checks(settings)

    # Do not continue if the API is unavailable or the token is not valid
    if not status == 200:
        sys.exit(1)

    # Use a tar data stream if a cloud storage location (S3, Azure or GCS) is
    # specified; otherwise fall back to the local archive file
    if aws_s3_bucket_name:
        print('Download archives from S3:')
        s3_data = s3_download(args, settings)
        tar = open_compressed_backup(s3_data)
    elif azure_storage_container_name:
        print('Download archives from Azure:')
        azure_storage_data = azure_storage_download(args, settings)
        tar = open_compressed_backup(azure_storage_data)
    elif gcs_bucket_name:
        print('Download archives from GCS:')
        gcs_storage_data = gcs_download(args, settings)
        tar = open_compressed_backup(gcs_storage_data)
    else:
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    restore_functions = collections.OrderedDict()
    restore_functions['folder'] = create_folder
    restore_functions['datasource'] = create_datasource
    restore_functions['dashboard'] = create_dashboard
    restore_functions['alert_channel'] = create_alert_channel
    restore_functions['organization'] = create_org
    restore_functions['user'] = create_user

    if sys.version_info >= (3,):
        with tempfile.TemporaryDirectory() as tmpdir:
            tar.extractall(tmpdir)
            tar.close()
            restore_components(args, settings, restore_functions, tmpdir)
    else:
        tmpdir = tempfile.mkdtemp()
        tar.extractall(tmpdir)
        tar.close()
        restore_components(args, settings, restore_functions, tmpdir)
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            print("Error: %s : %s" % (tmpdir, e.strerror))
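
# Illustrative settings for the download branches in variant 3 (keys are the
# ones read above; the values are hypothetical). The first truthy storage name
# in the if/elif chain wins; when all three are empty or missing, the local
# <archive_file> path is used instead.
example_cloud_settings = {'AWS_S3_BUCKET_NAME': '',
                          'AZURE_STORAGE_CONTAINER_NAME': 'grafana-backups',
                          'GCS_BUCKET_NAME': ''}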