# Example 1
def grafana_retrive_datasources(parent_local_folder):
    """Export every Grafana datasource definition to local JSON backup files.

    Fetches the datasource list from the local Grafana HTTP API
    (authenticated with the admin credentials held in ``odb.var``) and
    writes one JSON file per datasource under *parent_local_folder*.
    The per-file path comes from expanding
    ``BACKUP_INFO.grafana_export_file`` through ``RecursiveFormatter``.

    :param parent_local_folder: local directory receiving the backup
                                files (created if missing).
    """
    grafana_url = "localhost:3000"
    dat = EasyDict()
    com = RecursiveFormatter(dat, BACKUP_INFO)
    dat.grafana_type_export = "datasource"

    # Take a single timestamp so month and year cannot disagree when the
    # two reads would otherwise straddle a month/year boundary.
    now = datetime.datetime.now()
    dat.file_month = now.month
    dat.file_year = now.year
    with quiet():
        # TODO in case values are not found, prompt user for them !!!!, Maybe
        # generalize the procedure
        dash_json = run("curl http://{}:{}@{}/api/datasources/ ".format(
            odb.var.GRAFANA_ADMIN_NAME, odb.var.GRAFANA_ADMIN_PASS,
            grafana_url)).strip()
        print(dash_json)
        dasource = json.loads(dash_json)

    local("mkdir -p " + parent_local_folder)
    for datas_dict in dasource:
        # datasource_name feeds the path template, so compute the target
        # path after setting it for each datasource.
        dat.datasource_name = datas_dict["name"]

        datasource_bk_path = com.raw_(
            pjoin(parent_local_folder, BACKUP_INFO.grafana_export_file))
        # Drop the instance-specific id so the export can be re-imported
        # into a different Grafana instance.
        datas_dict.pop("id", None)

        with open(datasource_bk_path, "w") as das:
            json.dump(datas_dict, das)
# Example 2
def write_dashboard(get_path, parent_folder, isHome=False):
    """Download one Grafana dashboard and save it as an importable JSON file.

    Curls *get_path*, strips the instance-specific ``version``/``id``
    fields, wraps the dashboard in the ``{"dashboard": ..., "overwrite":
    True}`` envelope Grafana's import API expects, and writes it under
    *parent_folder* at the path produced by expanding
    ``BACKUP_INFO.grafana_export_file``.

    :param get_path: full URL of the dashboard's API endpoint.
    :param parent_folder: local directory receiving the JSON file.
    :param isHome: when truthy, tag the export as the home dashboard
                   (stored under the ``_HOME_`` name with an
                   ``{"isHome": True}`` meta block).
    """
    dat = EasyDict()
    com = RecursiveFormatter(dat, BACKUP_INFO)

    raw_dbdi = json.loads(run("curl " + get_path))
    the_db = raw_dbdi["dashboard"]
    # Remove instance-specific fields so the file can be re-imported
    # elsewhere; "overwrite" lets the import replace an existing copy.
    the_db.pop("version", None)
    the_db.pop("id", None)

    good_from = {"dashboard": the_db, "overwrite": True}

    dat.grafana_type_export = "dashboard"
    # One timestamp so month/year cannot straddle a boundary.
    now = datetime.datetime.now()
    dat.file_month = now.month
    dat.file_year = now.year
    dat.datasource_name = raw_dbdi["meta"]["slug"]

    if isHome:
        dat.datasource_name = "_HOME_"
        good_from["meta"] = {"isHome": True}

    datasource_bk_path = com.raw_(
        pjoin(parent_folder, BACKUP_INFO.grafana_export_file))

    with open(datasource_bk_path, "w") as dbo:
        json.dump(good_from, dbo)
# Example 3
def influxdb_export_points(database, backup_root, compress=True):
    """Export the points of one InfluxDB database via ``influx_inspect export``.

    Locates the InfluxDB data root inside the ``influxdb`` container
    (``/data`` first, then ``/var/lib/influxdb``), then runs one export
    per time section reported by ``influx_section_database_time`` and
    hands each result to ``influxdb_handle_exported``.

    :param database: name of the InfluxDB database to export.
    :param backup_root: passed through to ``influxdb_handle_exported``
                        as the destination root.  # NOTE(review): exact
                        semantics defined outside this view — confirm.
    :param compress: when True, export gzip-compressed (``.gz``),
                     otherwise plain text (``.txt``).
    """
    dat = EasyDict()
    com = RecursiveFormatter(dat, BACKUP_INFO)
    dat.database = database
    dat.cont_name = "influxdb"
    dat.extension = "gz" if compress == True else "txt"
    dat.compress = "-compress" if compress == True else ""
    dat.root_data = "/data"
    with quiet():
        # Probe the two known locations of the InfluxDB data directory;
        # fall back from /data to the stock package path.
        if run(com.raw_(
                "{dexec} {cont_name} test -d {root_data}/data")).failed:
            dat.root_data = "/var/lib/influxdb"
        if run(com.raw_(
                "{dexec} {cont_name} test -d {root_data}/data")).failed:
            print(
                red("Did not find root folder for influxdb ./data and ./wal"))
            return  # TODO Maybe something to improve
    # Command template; the {start_date_iso}/{end_date_iso} placeholders
    # are filled per section in the loop below.
    dat.export_influx = "{dexec} {cont_name} influx_inspect export " + \
                        " -database {database} -datadir {root_data}/data -waldir {root_data}/wal {compress} " + \
                        " -start {start_date_iso} -end {end_date_iso} -out  {influx_bk_container}/{influx_export_file} "

    strategy = "last_two" if odb.arg.last_two else "full_range"
    sections_info = influx_section_database_time(database, strategy)

    # for seq_ in sections_info:
    #     print()
    #     print( seq_.start)
    #     print( seq_.end)

    # return

    # Make sure both the host-side target and the in-container staging
    # directory exist before exporting.
    run(com.raw_("mkdir -p {influx_bk_target}"))
    run(com.raw_("{dexec} {cont_name} mkdir -p {influx_bk_container}  "))

    # show("stdout")
    for seq_ in sections_info:
        # influx_inspect expects ISO-8601 with an explicit Z (UTC) suffix.
        dat.start_date_iso = seq_.start.isoformat() + "Z"
        dat.end_date_iso = seq_.end.isoformat() + "Z"
        dat.backup_status = seq_.status if "status" in seq_ else "full"
        dat.file_month = seq_.start.month
        dat.file_year = seq_.start.year

        influxdb_handle_exported(com, backup_root)