Code example #1
def main(args, settings):
    alerts_file = args.get('<alerts_filename>', None)
    print("got alerts_file {0}".format(alerts_file))

    (status, json_resp, dashboard_uid_support, datasource_uid_support,
     paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    debug = settings.get('DEBUG')
    verify_ssl = settings.get('VERIFY_SSL')
    client_cert = settings.get('CLIENT_CERT')
    grafana_url = settings.get('GRAFANA_URL')
    http_post_headers = settings.get('HTTP_POST_HEADERS')

    with open(alerts_file, 'r') as f:
        data = f.read()

    alerts = json.loads(data)
    print(alerts)

    for alert in alerts:
        if alert['state'] != 'paused':
            result = unpause_alert(alert['id'], grafana_url, http_post_headers,
                                   verify_ssl, client_cert, debug)
            if result[0] != 200:
                print("failed to unpause alert: {0} - {1} with {2}".format(
                    alert['id'], alert['name'], result[0]))
            print("unpausing alert: {0} - {1} with previous state: {2}".format(
                alert['id'], alert['name'], alert['state']))
        else:
            print("keeping alert {0} - {1} paused".format(
                alert['id'], alert['name']))
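
The examples in this listing never show how args and settings are constructed. Purely as an illustration, a call to the main() above might look like the sketch below; the keys mirror what this function reads, but the values (file path, URL, token) are placeholders, and api_checks() may read further settings that are not shown here.

# Hypothetical invocation sketch; all values are placeholders.
args = {'<alerts_filename>': 'alert_status/alerts.json'}
settings = {
    'DEBUG': False,
    'VERIFY_SSL': True,
    'CLIENT_CERT': None,
    'GRAFANA_URL': 'http://localhost:3000',
    'HTTP_POST_HEADERS': {'Authorization': 'Bearer <api-token>',
                          'Content-Type': 'application/json'},
}
main(args, settings)
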
Code example #2
def main(args, settings):
    arg_components = args.get('--components', False)

    delete_functions = {
        'dashboards': delete_dashboards,
        'datasources': delete_datasources,
        'folders': delete_folders,
        'alert-channels': delete_alert_channels,
        'snapshots': delete_snapshots,
        'annotations': delete_annotations
    }

    (status, json_resp, dashboard_uid_support, datasource_uid_support,
     paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'DASHBOARD_UID_SUPPORT': dashboard_uid_support})
    settings.update({'DATASOURCE_UID_SUPPORT': datasource_uid_support})
    settings.update({'PAGING_SUPPORT': paging_support})

    if arg_components:
        arg_components_list = arg_components.split(',')

        # Delete only the components that were provided via an argument
        for delete_function in arg_components_list:
            delete_functions[delete_function](args, settings)
    else:
        # delete every component
        for delete_function in delete_functions.keys():
            delete_functions[delete_function](args, settings)
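
As written, a component name passed via --components that is not a key of delete_functions raises a bare KeyError. A minimal sketch of one way to validate the list before the dispatch loop above (this guard is not part of the project's code):

        # Hypothetical guard: reject unknown component names up front.
        unknown = [c for c in arg_components_list if c not in delete_functions]
        if unknown:
            print("unknown component(s): {0}".format(", ".join(unknown)))
            sys.exit(1)
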
Code example #3
def main(args, settings):
    users_file = args.get('<users_filename>', None)
    print("got users_file {0}".format(users_file))

    (status, json_resp, uid_support, paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    debug = settings.get('DEBUG')
    verify_ssl = settings.get('VERIFY_SSL')
    client_cert = settings.get('CLIENT_CERT')
    grafana_url = settings.get('GRAFANA_URL')
    http_post_headers = settings.get('HTTP_POST_HEADERS')

    with open(users_file, 'r') as f:
        data = f.read()

    users = json.loads(data)
    print(users)

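    # Re-apply the Editor role to users whose saved role was Editor;
    # other roles in the saved file are left untouched.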
    for user in users:
        if user['role'] == 'Editor':
            (status, content) = set_user_role(user['userId'], 'Editor',
                                              grafana_url, http_post_headers,
                                              verify_ssl, client_cert, debug)
            print("changed user {0} to Editor".format(user['login']))

            if status != 200:
                print("changing role of user {0} failed with {1}".format(
                    user['login'], status))
Code example #4
def main(args, settings):
    arg_archive_file = args.get('<archive_file>', None)
    arg_components = args.get('--components', False)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    (status, json_resp, api_version) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Use tar data stream if S3 bucket name is specified
    if aws_s3_bucket_name:
        s3_data = s3_download(args, settings)
        try:
            tar = tarfile.open(fileobj=s3_data, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)
    else:
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    with tempfile.TemporaryDirectory() as tmpdir:
        tar.extractall(tmpdir)
        tar.close()

        restore_functions = {
            'folder': create_folder,
            'datasource': create_datasource,
            'dashboard': create_dashboard,
            'alert_channel': create_alert_channel,
            'organization': create_org,
            'user': create_user
        }

        if arg_components:
            arg_components_list = arg_components.split(',')

            # Restore only the components that were provided via an argument
            # and that also exist in the extracted archive
            for ext in arg_components_list:
                for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext[:-1]), recursive=True):
                    print('restoring {0}: {1}'.format(ext, file_path))
                    restore_functions[ext[:-1]](args, settings, file_path)
        else:
            # Restore every component included in the extracted archive
            for ext in restore_functions.keys():
                for file_path in glob('{0}/**/*.{1}'.format(tmpdir, ext), recursive=True):
                    print('restoring {0}: {1}'.format(ext, file_path))
                    restore_functions[ext](args, settings, file_path)
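
Note that the plural names passed via --components ('dashboards', 'datasources', ...) are singularized with ext[:-1] so that they match the file extensions written by the backup step. For example (the '/tmp/restore' path is an arbitrary placeholder):

>>> ext = 'dashboards'
>>> '{0}/**/*.{1}'.format('/tmp/restore', ext[:-1])
'/tmp/restore/**/*.dashboard'
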
Code example #5
def main(args, settings):
    arg_components = args.get('--components', False)
    arg_no_archive = args.get('--no-archive', False)

    backup_functions = {
        'dashboards': save_dashboards,
        'datasources': save_datasources,
        'folders': save_folders,
        'alert-channels': save_alert_channels,
        'organizations': save_orgs,
        'users': save_users,
        'snapshots': save_snapshots,
        'versions': save_versions,
        'annotations': save_annotations
    }

    (status, json_resp, dashboard_uid_support, datasource_uid_support,
     paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'DASHBOARD_UID_SUPPORT': dashboard_uid_support})
    settings.update({'DATASOURCE_UID_SUPPORT': datasource_uid_support})
    settings.update({'PAGING_SUPPORT': paging_support})

    if arg_components:
        arg_components_list = arg_components.replace("_", "-").split(',')

        # Back up only the components that were provided via an argument
        for backup_function in arg_components_list:
            backup_functions[backup_function](args, settings)
    else:
        # Back up every component
        for backup_function in backup_functions.keys():
            backup_functions[backup_function](args, settings)

    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
    azure_storage_container_name = settings.get('AZURE_STORAGE_CONTAINER_NAME')
    gcs_bucket_name = settings.get('GCS_BUCKET_NAME')

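    # Build the tar archive unless --no-archive was given; the cloud
    # uploads below still run whenever a bucket or container is configured.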
    if not arg_no_archive:
        archive(args, settings)

    if aws_s3_bucket_name:
        print('Upload archives to S3:')
        s3_upload(args, settings)

    if azure_storage_container_name:
        print('Upload archives to Azure Storage:')
        azure_storage_upload(args, settings)

    if gcs_bucket_name:
        print('Upload archives to GCS:')
        gcs_upload(args, settings)
Code example #6
def main(args, settings):
    arg_archive_file = args.get('<archive_file>', None)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    (status, json_resp, api_version) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Use tar data stream if S3 bucket name is specified
    if aws_s3_bucket_name:
        s3_data = s3_download(args, settings)
        try:
            tar = tarfile.open(fileobj=s3_data, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)
    else:
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    restore_functions = {
        'folder': create_folder,
        'datasource': create_datasource,
        'dashboard': create_dashboard,
        'alert_channel': create_alert_channel,
        'organization': create_org,
        'user': create_user
    }

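    # tempfile.TemporaryDirectory exists only on Python 3; on Python 2 fall
    # back to mkdtemp() and remove the directory manually with shutil.rmtree.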
    if sys.version_info >= (3, ):
        with tempfile.TemporaryDirectory() as tmpdir:
            tar.extractall(tmpdir)
            tar.close()
            restore_components(args, settings, restore_functions, tmpdir)
            # No explicit cleanup needed: the TemporaryDirectory context
            # manager removes tmpdir on exit (tmpdir here is the directory
            # path string, which has no cleanup() method anyway).
    else:
        tmpdir = tempfile.mkdtemp()
        tar.extractall(tmpdir)
        tar.close()
        restore_components(args, settings, restore_functions, tmpdir)
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            print("Error: %s : %s" % (tmpdir, e.strerror))
Code example #7
File: save.py  Project: xmj/grafana-backup-tool
def main(args, settings):
    arg_components = args.get('--components', False)
    arg_no_archive = args.get('--no-archive', False)

    backup_functions = {
        'dashboards': save_dashboards,
        'datasources': save_datasources,
        'folders': save_folders,
        'alert-channels': save_alert_channels,
        'organizations': save_orgs,
        'users': save_users
    }

    (status, json_resp, api_version) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'API_VERSION': api_version})

    if arg_components:
        arg_components_list = arg_components.split(',')

        # Back up only the components that were provided via an argument
        for backup_function in arg_components_list:
            backup_functions[backup_function](args, settings)
    else:
        # Back up every component
        for backup_function in backup_functions.keys():
            backup_functions[backup_function](args, settings)

    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')

    if not arg_no_archive:
        archive(args, settings)

    if aws_s3_bucket_name:
        print('Upload archives to S3:')
        s3_upload(args, settings)
Code example #8
def main(args, settings):
    (status, json_resp, uid_support, paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'UID_SUPPORT': uid_support})
    settings.update({'PAGING_SUPPORT': paging_support})

    debug = settings.get('DEBUG')
    timestamp = settings.get('TIMESTAMP')
    verify_ssl = settings.get('VERIFY_SSL')
    client_cert = settings.get('CLIENT_CERT')
    grafana_url = settings.get('GRAFANA_URL')
    pretty_print = settings.get('PRETTY_PRINT')
    http_post_headers = settings.get('HTTP_POST_HEADERS')

    folder_path = 'user_permissions/{0}'.format(timestamp)

    if not os.path.exists(folder_path):
        os.makedirs(folder_path)

    users = get_all_users(grafana_url, http_post_headers, verify_ssl,
                          client_cert, debug)
    file_path = save_json("users.json", users, folder_path, 'users',
                          pretty_print)
    print("users have been saved to {0}".format(file_path))

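    # Demote every non-Admin user to Viewer; users.json written above
    # records each user's previous role.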
    for user in users:
        if user['role'] != 'Admin':
            (status, content) = set_user_role(user['userId'], 'Viewer',
                                              grafana_url, http_post_headers,
                                              verify_ssl, client_cert, debug)
            print("changed user {0} to Viewer".format(user['login']))

            if status != 200:
                print("changing role of user {0} failed with {1}".format(
                    user['login'], status))
Code example #9
def main(args, settings):
    (status, json_resp, dashboard_uid_support, datasource_uid_support,
     paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    settings.update({'DASHBOARD_UID_SUPPORT': dashboard_uid_support})
    settings.update({'DATASOURCE_UID_SUPPORT': datasource_uid_support})
    settings.update({'PAGING_SUPPORT': paging_support})

    debug = settings.get('DEBUG')
    timestamp = settings.get('TIMESTAMP')
    verify_ssl = settings.get('VERIFY_SSL')
    client_cert = settings.get('CLIENT_CERT')
    grafana_url = settings.get('GRAFANA_URL')
    pretty_print = settings.get('PRETTY_PRINT')
    http_get_headers = settings.get('HTTP_POST_HEADERS')

    folder_path = 'alert_status/{0}'.format(timestamp)

    if not os.path.exists(folder_path):
        os.makedirs(folder_path)

    alerts = get_all_alerts(grafana_url, http_get_headers, verify_ssl,
                            client_cert, debug)
    file_path = save_json("alerts.json", alerts, folder_path, 'alerts',
                          pretty_print)
    print("alerts have been saved to {0}".format(file_path))

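    # Pause every alert; alerts.json written above preserves each alert's
    # previous state (compare the unpause logic in Code example #1).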
    for alert in alerts:
        (status, content) = pause_alert(alert['id'], grafana_url,
                                        http_get_headers, verify_ssl,
                                        client_cert, debug)
        if status != 200:
            print("pausing of alert {0} failed with {1}".format(
                alert['name'], status))
Code example #10
def main(args, settings):
    def open_compressed_backup(compressed_backup):
        try:
            tar = tarfile.open(fileobj=compressed_backup, mode='r:gz')
            return tar
        except Exception as e:
            print(str(e))
            sys.exit(1)

    arg_archive_file = args.get('<archive_file>', None)
    aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
    azure_storage_container_name = settings.get('AZURE_STORAGE_CONTAINER_NAME')
    gcs_bucket_name = settings.get('GCS_BUCKET_NAME')

    (status, json_resp, uid_support, paging_support) = api_checks(settings)

    # Do not continue if API is unavailable or token is not valid
    if status != 200:
        print("server status is not ok: {0}".format(json_resp))
        sys.exit(1)

    # Use tar data stream if S3 bucket name is specified
    if aws_s3_bucket_name:
        print('Download archives from S3:')
        s3_data = s3_download(args, settings)
        tar = open_compressed_backup(s3_data)

    elif azure_storage_container_name:
        print('Download archives from Azure:')
        azure_storage_data = azure_storage_download(args, settings)
        tar = open_compressed_backup(azure_storage_data)

    elif gcs_bucket_name:
        print('Download archives from GCS:')
        gcs_storage_data = gcs_download(args, settings)
        tar = open_compressed_backup(gcs_storage_data)

    else:
        try:
            tarfile.is_tarfile(name=arg_archive_file)
        except IOError as e:
            print(str(e))
            sys.exit(1)
        try:
            tar = tarfile.open(name=arg_archive_file, mode='r:gz')
        except Exception as e:
            print(str(e))
            sys.exit(1)

    restore_functions = collections.OrderedDict()
    restore_functions['folder'] = create_folder
    restore_functions['datasource'] = create_datasource
    restore_functions['dashboard'] = create_dashboard
    restore_functions['alert_channel'] = create_alert_channel
    restore_functions['organization'] = create_org
    restore_functions['user'] = create_user

    if sys.version_info >= (3, ):
        with tempfile.TemporaryDirectory() as tmpdir:
            tar.extractall(tmpdir)
            tar.close()
            restore_components(args, settings, restore_functions, tmpdir)
    else:
        tmpdir = tempfile.mkdtemp()
        tar.extractall(tmpdir)
        tar.close()
        restore_components(args, settings, restore_functions, tmpdir)
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            print("Error: %s : %s" % (tmpdir, e.strerror))
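
restore_functions is an OrderedDict, so components are restored in insertion order; presumably this is so that folders and datasources exist before the dashboards that reference them are created:

>>> list(restore_functions.keys())
['folder', 'datasource', 'dashboard', 'alert_channel', 'organization', 'user']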