def handleData(host, port, db, table):
    # Create the 1m -> 10m -> 1h -> 6h downsampling continuous queries for one table.
    client = conn(host, port, db)
    tables_query = 'show field keys on ' + db + ' from rp_1m.' + table + '_1m'
    tables_list = query(client, tables_query)
    sql = ""
    for _, fields in tables_list.items():
        for field in fields:
            field_name = field["fieldKey"]
            # Aggregate every field with sum() and keep its original name.
            sql = sql + 'sum("' + field_name + '") as "' + field_name + '",'
    cq_10m = 'create continuous query "cq_' + table + '_10m" on "' + db + \
        '" RESAMPLE EVERY 10m for 30m begin select ' + sql[:-1] + \
        ' into "rp_10m"."' + table + '_10m" from "rp_1m"."' + table + \
        '_1m" group by time(10m),* end'
    cq_1h = 'create continuous query "cq_' + table + '_1h" on "' + db + \
        '" RESAMPLE EVERY 1h for 2h begin select ' + sql[:-1] + \
        ' into "rp_1h"."' + table + '_1h" from "rp_10m"."' + table + \
        '_10m" group by time(1h),* end'
    cq_6h = 'create continuous query "cq_' + table + '_6h" on "' + db + \
        '" RESAMPLE EVERY 6h for 12h begin select ' + sql[:-1] + \
        ' into "rp_6h"."' + table + '_6h" from "rp_1h"."' + table + \
        '_1h" group by time(6h),* end'
    result_10m = query(client, cq_10m)
    result_1h = query(client, cq_1h)
    result_6h = query(client, cq_6h)
    print(result_10m, result_1h, result_6h)
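# The conn() and query() helpers used throughout these scripts are not shown in
# this section. Below is a minimal sketch of what they presumably wrap, assuming
# the influxdb-python client; the names and bodies here are assumptions, not the
# original implementation.
from influxdb import InfluxDBClient

def conn(host, port, db):
    # Open a client bound to the target database.
    return InfluxDBClient(host=host, port=port, database=db)

def query(client, q):
    # Run an InfluxQL statement; the returned ResultSet's .items() yields
    # ((series_name, tags), points) pairs, which is how the scripts iterate it.
    return client.query(q)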
def main(): q = r"SHOW CONTINUOUS QUERIES" c = conn(host, port, db) res = query(c, q) # print(res) for db_struct, res_list in res.items(): db_name, _ = db_struct if db_name == db: for item in res_list: print(item["name"])
def main():
    # Drop every measurement listed (one name per line) in ./meas_to_del.
    c = conn(host, port, db)
    with open("./meas_to_del") as f:
        for line in f.readlines():
            line = line.strip()
            print(line)
            res = c.drop_measurement(line)
            print(res)
def main(): q = r"SHOW MEASUREMENTS ON %s" c = conn(host, port, db) res = query(c, q % db) for db_struct, res_list in res.items(): print( list( set( map(lambda x: x.rsplit("_", 1)[0], [item["name"] for item in res_list]))))
import sys

def main():
    # Optional first CLI argument: skip resolutions smaller than this value so the
    # clean-up can be resumed part-way through the file.
    start = sys.argv[1] if len(sys.argv) > 1 else None
    c = conn(host, port, db)
    with open("./resoulution.log") as f:
        for line in f.readlines():
            line = line.strip()
            if start and int(line) < int(start):
                continue
            print(line)
            q_drop = "drop series from chaos_monitor_data where resolution='%s'" % line
            res = query(c, q_drop)
            print(res)
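# Usage sketch (the script name is an assumption): `python drop_series.py 300`
# resumes the clean-up at resolution 300, while running it without an argument
# drops the series for every resolution listed in the file.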
def main():
    # Drop every continuous query registered on the target database.
    q_drop = r"DROP CONTINUOUS QUERY %s ON %s"
    q = r"SHOW CONTINUOUS QUERIES"
    c = conn(host, port, db)
    res = query(c, q)
    for db_struct, res_list in res.items():
        # SHOW CONTINUOUS QUERIES returns one series per database.
        db_name, _ = db_struct
        if db_name == db:
            for item in res_list:
                cq = item["name"]
                print(query(c, q_drop % (cq, db)))
def handle_data(host, port, db, measurement):
    # Chain one continuous query per adjacent pair of retention policies in rp,
    # aggregating every field of the source measurement with sum().
    client = conn(host, port, db)
    tables_list = get_fields(db, measurement, DEFAULT_DURATION, postfix)
    select_sentence = ""
    for _, v in tables_list.items():
        select_sentence = ", ".join([
            'sum("' + item["fieldKey"] + '") as "' + item["fieldKey"] + '"'
            for item in v
        ])
    for i in range(1, len(rp)):
        source_duration = rp[i - 1]
        target_duration = rp[i]
        tables_query = TPL_CREATE_CQ % (
            '"cq_' + measurement + '_' + target_duration + '"',
            db,
            generate_resample(target_duration) if resample else "",
            select_sentence,
            generate_measurement_fullname(db, measurement, target_duration, postfix),
            generate_measurement_fullname(db, measurement, source_duration, postfix),
            target_duration)
        res = query(client, tables_query)
        print(res)
def get_fields(db, measurement, default_duration, post_fix=True):
    # Return the field keys of the source measurement at the default duration.
    fullname = generate_measurement_fullname(db, measurement, default_duration, post_fix)
    tables_query = TPL_SHOW_FIELD % (db, fullname)
    client = conn(host, port, db)
    return query(client, tables_query)
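# TPL_SHOW_FIELD, TPL_CREATE_CQ, generate_resample(), generate_measurement_fullname()
# and the module-level settings are defined elsewhere. The sketch below shows one
# plausible shape consistent with how they are called above; all of it is an
# assumption, not the original code.
DEFAULT_DURATION = "1m"
rp = ["1m", "10m", "1h", "6h"]  # retention-policy chain, coarsest last
postfix = True                  # append the duration to measurement names
resample = True                 # emit a RESAMPLE clause in each CQ

TPL_SHOW_FIELD = 'SHOW FIELD KEYS ON "%s" FROM %s'
TPL_CREATE_CQ = ('CREATE CONTINUOUS QUERY %s ON "%s" %s '
                 'BEGIN SELECT %s INTO %s FROM %s GROUP BY time(%s),* END')

def generate_resample(duration):
    # e.g. 'RESAMPLE EVERY 10m FOR 10m'; the length of the FOR window is a guess.
    return "RESAMPLE EVERY %s FOR %s" % (duration, duration)

def generate_measurement_fullname(db, measurement, duration, post_fix=True):
    # '"rp_<duration>"."<measurement>_<duration>"', matching the naming used in handleData().
    name = measurement + "_" + duration if post_fix else measurement
    return '"rp_%s"."%s"' % (duration, name)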