def main(args, settings):
    """Back up Grafana components and optionally archive/upload the results.

    Args:
        args: docopt-style CLI argument dict; reads '--components' and
            '--no-archive'.
        settings: mutable settings dict; detected API capability flags are
            stored back into it.

    Exits the process with status 1 when the API check fails or an unknown
    component name is requested.
    """
    arg_components = args.get('--components', False)
    arg_no_archive = args.get('--no-archive', False)

    # Dispatch table: component name -> backup function.
    backup_functions = {
        'dashboards': save_dashboards,
        'datasources': save_datasources,
        'folders': save_folders,
        'alert-channels': save_alert_channels,
        'organizations': save_orgs,
        'users': save_users,
        'snapshots': save_snapshots,
        'versions': save_versions,
        'annotations': save_annotations,
    }

    (status, json_resp, dashboard_uid_support,
     datasource_uid_support, paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Record which API features the target Grafana instance supports.
    settings.update({'DASHBOARD_UID_SUPPORT': dashboard_uid_support})
    settings.update({'DATASOURCE_UID_SUPPORT': datasource_uid_support})
    settings.update({'PAGING_SUPPORT': paging_support})

    if arg_components:
        # Normalize underscores so e.g. 'alert_channels' matches 'alert-channels'.
        arg_components_list = arg_components.replace("_", "-").split(',')
        # Fail fast with a clear message instead of a KeyError mid-backup.
        unknown = [c for c in arg_components_list if c not in backup_functions]
        if unknown:
            print("unknown component(s): {0}. valid components are: {1}".format(
                ", ".join(unknown), ", ".join(backup_functions)))
            sys.exit(1)
        # Backup only the components that provided via an argument
        for component in arg_components_list:
            backup_functions[component](args, settings)
    else:
        # Backup every component
        for backup_function in backup_functions.values():
            backup_function(args, settings)

    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
    azure_storage_container_name = settings.get('AZURE_STORAGE_CONTAINER_NAME')
    gcs_bucket_name = settings.get('GCS_BUCKET_NAME')

    if not arg_no_archive:
        archive(args, settings)

    if aws_s3_bucket_name:
        print('Upload archives to S3:')
        s3_upload(args, settings)
    if azure_storage_container_name:
        print('Upload archives to Azure Storage:')
        azure_storage_upload(args, settings)
    if gcs_bucket_name:
        print('Upload archives to GCS:')
        gcs_upload(args, settings)
def main(args, settings):
    """Run backups for the selected Grafana components, then archive/upload."""
    components_arg = args.get('--components', False)
    skip_archive = args.get('--no-archive', False)

    # Dispatch table: component name -> backup routine.
    savers = {
        'dashboards': save_dashboards,
        'datasources': save_datasources,
        'folders': save_folders,
        'alert-channels': save_alert_channels,
        'organizations': save_orgs,
        'users': save_users,
    }

    status, json_resp, api_version = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'API_VERSION': api_version})

    # Either the comma-separated subset from the CLI, or every known component.
    selected = components_arg.split(',') if components_arg else list(savers)
    for name in selected:
        savers[name](args, settings)

    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    if not skip_archive:
        archive(args, settings)

    if aws_s3_bucket_name:
        print('Upload archives to S3:')
        s3_upload(args, settings)
def main(args, settings):
    """Back up Grafana components (optionally a subset) and archive them."""
    requested = args.get('--components', False)
    no_archive = args.get('--no-archive', False)

    # Component name -> backup function.
    handlers = {
        'dashboards': save_dashboards,
        'datasources': save_datasources,
        'folders': save_folders,
        'alert-channels': save_alert_channels,
    }

    # Backup either the requested components or all of them.
    names = requested.split(',') if requested else list(handlers)
    for name in names:
        handlers[name](args, settings)

    if not no_archive:
        archive(args, settings)