def query():
    print(request.json)
    if request.json['targets'][0]['type'] == 'table':
        series = request.json['targets'][0]['target']
        bodies = {
            'series A': [{
                "columns": [{"text": "Time", "type": "time"},
                            {"text": "Country", "type": "string"},
                            {"text": "Number", "type": "number"}],
                "rows": [[1234567, "SE", 123], [1234567, "DE", 231],
                         [1234567, "US", 321]],
                "type": "table"
            }],
            'series B': [{
                "columns": [{"text": "Time", "type": "time"},
                            {"text": "Country", "type": "string"},
                            {"text": "Number", "type": "number"}],
                "rows": [[1234567, "BE", 123], [1234567, "GE", 231],
                         [1234567, "PS", 321]],
                "type": "table"
            }]
        }
        body = dumps(bodies[series])
    else:
        body = []
        start, end = request.json['range']['from'], request.json['range']['to']
        for target in request.json['targets']:
            name = target['target']
            datapoints = create_data_points(FUNCTIONS[name], start, end)
            body.append({'target': name, 'datapoints': datapoints})
        body = dumps(body)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
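# A minimal sketch of the create_data_points() helper the handler above relies
# on, assuming FUNCTIONS[name] is a callable evaluated over Grafana's ISO-8601
# time range and that points are [value, epoch-ms] pairs. The helper is not
# shown in this file, so the signature and the fixed sampling step are
# assumptions, not the original implementation.
from datetime import datetime


def create_data_points(func, start, end, step_s=60):
    """Sample func(ts_ms) every step_s seconds between two ISO timestamps."""
    start_ms = int(datetime.fromisoformat(start.replace('Z', '+00:00')).timestamp() * 1000)
    end_ms = int(datetime.fromisoformat(end.replace('Z', '+00:00')).timestamp() * 1000)
    points = []
    ts = start_ms
    while ts <= end_ms:
        points.append([func(ts), ts])  # Grafana expects [value, epoch-ms]
        ts += step_s * 1000
    return points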
def update_object(path, meta):
    path = path.rstrip('/').lstrip('/')
    meta_fpath = os.path.join(config.shared_db, path).rstrip('/') + config.special_extension
    # merge the new keys into the stored metadata and write it back
    infos = loads(open(meta_fpath, 'rb').read())
    infos.update(meta)
    open(meta_fpath, 'wb').write(dumps(infos).encode())
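# Hypothetical usage of update_object(): merge a new description into the
# metadata stored next to an item. The path and the 'descr' key are
# illustrative only, not taken from this file.
update_object('docs/report.pdf', {'descr': 'Quarterly report, reviewed'})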
def query():
    start, end = request.json['range']['from'], request.json['range']['to']
    new_data = None
    for target in request.json['targets']:
        name = target['target']
        if name in sensor_list:
            new_data = convert_to_datapoints(data, name, start, end)
        if new_data is None and name in static_sensor_list:
            new_data = convert_to_datapoints(static_data, name, start, end)
    body = dumps(new_data)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
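# A rough sketch of the convert_to_datapoints() helper assumed above: filter a
# sensor's stored [value, timestamp-ms] samples to the requested range and wrap
# them in a Grafana time-series response. The layout of `data`/`static_data`
# and the return shape are assumptions; only the call signature comes from the
# handler above.
from datetime import datetime


def _iso_to_ms(iso_ts):
    return int(datetime.fromisoformat(iso_ts.replace('Z', '+00:00')).timestamp() * 1000)


def convert_to_datapoints(source, name, start, end):
    start_ms, end_ms = _iso_to_ms(start), _iso_to_ms(end)
    samples = [[value, ts] for value, ts in source.get(name, [])
               if start_ms <= ts <= end_ms]
    return [{'target': name, 'datapoints': samples}]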
def cb(path):
    log.debug('~ Listing %r', path)
    # TODO: session + permission mgmt
    bottle.response.set_header('Content-Type', 'application/json')
    obj = root_objects.list_children(path)
    if bottle.request.is_xhr:
        log.warning(obj)
        return dumps(obj)
    bottle.redirect('/')
def search():
    return HTTPResponse(body=dumps([
        'all', 'sh', 'uttil times', 'gntotal', 'sustotal', 'deathtotal',
        'curetotal', 'addcon', 'wjw_addsus', 'adddeath', 'addcure',
        'sh_value', 'sh_susNum', 'sh_cureNum', 'sh_deathNum',
        'cn_chart_value', 'cn_chart_susNum', 'cn_chart_cureNum',
        'cn_chart_deathNum', 'sh_chart_value', 'sh_chart_susNum',
        'sh_chart_cureNum', 'sh_chart_deathNum', 'sh uttil times'
    ]), headers={'Content-Type': 'application/json'})
def query():
    print(request.json)
    body = []
    parser = get_parser()
    args = parser.parse_args()
    specs = load_arg(args.access_key_id, args.access_secret, args.region_id)
    mon = MonitorEcsTop(specs)
    for target in request.json['targets']:
        name = target['target']
        if name == 'cpu_top_10':
            cpu_list = mon.query_cpu_top()
            for i in range(10):
                datapoints = [[cpu_list[i]['Average'], cpu_list[i]['timestamp']]]
                name = get_instance_name(cpu_list[i]['instanceId'])
                body.append({'target': name, 'datapoints': datapoints})
        elif name == 'mem_top_10':
            mem_list = mon.query_mem_top()
            for i in range(10):
                datapoints = [[mem_list[i]['Average'], mem_list[i]['timestamp']]]
                name = get_instance_name(mem_list[i]['instanceId'])
                body.append({'target': name, 'datapoints': datapoints})
        elif name == 'disk_top_10':
            disk_list = mon.query_disk_top()
            for i in range(10):
                datapoints = [[disk_list[i]['Average'], disk_list[i]['timestamp']]]
                name = get_instance_name(disk_list[i]['instanceId'])
                body.append({'target': name, 'datapoints': datapoints})
        elif name == 'cpu_top':
            cpu_list = mon.query_cpu_top()
            datapoints = [[cpu_list[0]['Average'], cpu_list[0]['timestamp']]]
            name = get_instance_name(cpu_list[0]['instanceId'])
            body.append({'target': name, 'datapoints': datapoints})
        elif name == 'mem_top':
            mem_list = mon.query_mem_top()
            datapoints = [[mem_list[0]['Average'], mem_list[0]['timestamp']]]
            name = get_instance_name(mem_list[0]['instanceId'])
            body.append({'target': name, 'datapoints': datapoints})
        elif name == 'disk_top':
            disk_list = mon.query_disk_top()
            datapoints = [[disk_list[0]['Average'], disk_list[0]['timestamp']]]
            name = get_instance_name(disk_list[0]['instanceId'])
            body.append({'target': name, 'datapoints': datapoints})
    body = dumps(body)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
def query():
    # print(request.json)
    body = []
    start, end = request.json['range']['from'], request.json['range']['to']
    for target in request.json['targets']:
        name = target['target']
        datapoints = create_data_points(name, start, end)
        body.append({'target': name, 'datapoints': datapoints})
    body = dumps(body)
    # print('body: ', body)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
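# How query()/search() handlers like the ones in this file are typically wired
# up with Bottle for the Grafana SimpleJSON datasource: '/' answers the health
# check, '/search' lists selectable targets and '/query' returns the data. A
# minimal sketch assuming one query/search pair per service; the app layout,
# host and port are assumptions, not taken from this file.
from bottle import Bottle, HTTPResponse
from json import dumps

app = Bottle()

app.post('/query')(query)
app.post('/search')(search)


@app.get('/')
def healthcheck():
    # Grafana only checks that the datasource URL answers with HTTP 200.
    return HTTPResponse(body=dumps({'status': 'ok'}),
                        headers={'Content-Type': 'application/json'})


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8081)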
def cb():
    bottle.response.set_header('Content-Type', 'application/json')
    log.debug("search")
    yield '['
    first = True
    # TODO: handle multi-page
    pages, results = search(bottle.request.POST['text'], results=100)
    # for item in root_objects.search_objects(bottle.request.POST['text'].encode('utf-8')):
    for item in results:
        if not first:
            yield ','
        else:
            first = False
        yield dumps(item)
    yield ']'
def query():
    print(request.json)
    body = dumps([])  # fallback so the response is valid JSON even on errors
    try:
        body = []
        start, end = request.json['range']['from'], request.json['range']['to']
        for target in request.json['targets']:
            name = target['target']
            parse()
            datapoints = create_data_points(DATA[name], start, end)
            print(datapoints)
            body.append({'target': name, 'datapoints': datapoints})
        body = dumps(body)
    except Exception as exc:  # avoid a bare except so real errors stay visible
        print("Something went wrong:", exc)
        body = dumps([])
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
def query():
    body = []
    try:
        parse()
        start, end = request.json['range']['from'], request.json['range']['to']
        for target in request.json['targets']:
            name = target['target']
            parse()
            # datapoints = create_data_points(DATA[name], start, end)
            datapoints = sorted(DATA[name], key=lambda x: x[1])
            body.append({'target': name, 'datapoints': datapoints})
        body = dumps(body)
    except Exception as e:
        print(e)
        body = dumps([])  # return valid (empty) JSON if parsing fails
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
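# For reference, a minimal sketch of what the parse()/DATA pair used above is
# assumed to provide: DATA maps a target name to [value, timestamp-ms] pairs,
# which is why the handler can sort on x[1] and hand the list straight to
# Grafana. The refresh logic and the sample values are hypothetical.
DATA = {}


def parse():
    # Hypothetical refresh: the real service would re-read its source
    # (file, sensor, API) here and rebuild DATA in place.
    DATA['example-target'] = [
        [12.5, 1580515200000],
        [13.1, 1580515260000],
    ]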
def query():
    body = []
    request_json = request.json
    max_points = request_json["maxDataPoints"]
    if request.json['targets'][0]['type'] == 'table':
        series = request.json['targets'][0]['target']
        body = [{
            "columns": [{"text": "Date", "type": "text"},
                        {"text": "Number of Users", "type": "number"}],
            "rows": get_all_rows(max_points)
        }]
    else:
        start, end = request_json['range']['from'], request_json['range']['to']
        for target in request.json['targets']:
            name = target['target']
            datapoints = create_data_points(start, end, max_points)
            body.append({'target': name, 'datapoints': datapoints})
    body = dumps(body)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
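# A hedged sketch of the get_all_rows() helper used by the table branch above,
# assuming the user counts live in an in-memory list of (date, count) samples
# named USER_COUNTS; the real storage backend is not shown in this file.
USER_COUNTS = []  # hypothetical store, e.g. [('2020-02-10', 42), ...]


def get_all_rows(max_points):
    # Most recent days first, truncated to Grafana's maxDataPoints.
    rows = [[day, count] for day, count in sorted(USER_COUNTS, reverse=True)]
    return rows[:max_points]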
def search():
    print('Static Sensors %s' % static_sensor_list)
    return HTTPResponse(body=dumps(sensor_list + static_sensor_list),
                        headers={'Content-Type': 'application/json'})
def search():
    return HTTPResponse(body=dumps(list(DATA.keys())),
                        headers={'Content-Type': 'application/json'})
def search():
    return HTTPResponse(body=dumps(tickers),
                        headers={'Content-Type': 'application/json'})
def cb(path='/'):
    path = _fix_path(path)
    log.debug('~ Listing %r', path)
    # TODO: session + permission mgmt
    bottle.response.set_header('Content-Type', 'application/json')
    return dumps(root_objects.list_children(path))
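# _fix_path() is not defined in this file; a plausible sketch, assuming it
# normalises the requested path and refuses directory traversal before the
# children are listed. Treat the exact behaviour as an assumption.
import posixpath


def _fix_path(path):
    # normpath collapses '.' and '..' segments against an absolute root,
    # so nothing can escape above the shared root.
    path = posixpath.normpath('/' + path.strip('/'))
    return path.lstrip('/') or '/'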
def query():
    print(request.json)
    body = []
    all_data = getDataSync()
    sh_data = getShDataSync()
    time_stamp = int(round(time.time() * 1000))
    if request.json['targets'][0]['type'] == 'table':
        rows = []
        for data in all_data['list']:
            row = [data['name'], data['value'], data['susNum'],
                   data['cureNum'], data['deathNum']]
            rows.append(row)
        sh_rows = []
        for data in sh_data['city']:
            row = [data['name'], data['conNum'], data['susNum'],
                   data['cureNum'], data['deathNum']]
            sh_rows.append(row)
        # Column headers: 省份 = province/district, 确诊 = confirmed,
        # 疑似 = suspected, 治愈 = cured, 死亡 = deaths.
        bodies = {
            'all': [{
                "columns": [{"text": "省份", "type": "name"},
                            {"text": "确诊", "type": "conNum"},
                            {"text": "疑似", "type": "susNum"},
                            {"text": "治愈", "type": "cureNum"},
                            {"text": "死亡", "type": "deathNum"}],
                "rows": rows,
                "type": "table"
            }],
            'sh': [{
                "columns": [{"text": "省份", "type": "name"},
                            {"text": "确诊", "type": "value"},
                            {"text": "疑似", "type": "susNum"},
                            {"text": "治愈", "type": "cureNum"},
                            {"text": "死亡", "type": "deathNum"}],
                "rows": sh_rows,
                "type": "table"
            }]
        }
        series = request.json['targets'][0]['target']
        body = dumps(bodies[series])
    else:
        for target in request.json['targets']:
            name = target['target']
            if name == 'gntotal':
                body.append({'target': 'gntotal',
                             'datapoints': [[all_data['gntotal'], time_stamp]]})
            if name == 'sustotal':
                body.append({'target': 'sustotal',
                             'datapoints': [[all_data['sustotal'], time_stamp]]})
            if name == 'curetotal':
                body.append({'target': 'curetotal',
                             'datapoints': [[all_data['curetotal'], time_stamp]]})
            if name == 'deathtotal':
                body.append({'target': 'deathtotal',
                             'datapoints': [[all_data['deathtotal'], time_stamp]]})
            if name == 'uttil times':
                body.append({'target': 'uttil times',
                             'datapoints': [[all_data['times'], time_stamp]]})
            if name == 'addcon':
                body.append({'target': 'addcon',
                             'datapoints': [[all_data['add_daily']['addcon'], time_stamp]]})
            if name == 'wjw_addsus':
                body.append({'target': 'wjw_addsus',
                             'datapoints': [[all_data['add_daily']['wjw_addsus'], time_stamp]]})
            if name == 'adddeath':
                body.append({'target': 'adddeath',
                             'datapoints': [[all_data['add_daily']['adddeath'], time_stamp]]})
            if name == 'addcure':
                body.append({'target': 'addcure',
                             'datapoints': [[all_data['add_daily']['addcure'], time_stamp]]})
            if name == 'cn_chart_value':
                historylist = all_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['cn_conNum'], timeStamp])
                body.append({'target': '确诊', 'datapoints': datapoints})
            if name == 'cn_chart_susNum':
                historylist = all_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['cn_susNum'], timeStamp])
                body.append({'target': '疑似', 'datapoints': datapoints})
            if name == 'cn_chart_cureNum':
                historylist = all_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['cn_cureNum'], timeStamp])
                body.append({'target': '治愈', 'datapoints': datapoints})
            if name == 'cn_chart_deathNum':
                historylist = all_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['cn_deathNum'], timeStamp])
                body.append({'target': '死亡', 'datapoints': datapoints})
            if name == 'sh uttil times':
                body.append({'target': 'uttil times',
                             'datapoints': [[sh_data['times'], time_stamp]]})
            if name == 'sh_value':
                body.append({'target': 'sh_value',
                             'datapoints': [[sh_data['contotal'], time_stamp]]})
            if name == 'sh_susNum':
                body.append({'target': 'sh_susNum',
                             'datapoints': [[sh_data['sustotal'], time_stamp]]})
            if name == 'sh_cureNum':
                body.append({'target': 'sh_cureNum',
                             'datapoints': [[sh_data['curetotal'], time_stamp]]})
            if name == 'sh_deathNum':
                body.append({'target': 'sh_deathNum',
                             'datapoints': [[sh_data['deathtotal'], time_stamp]]})
            if name == 'sh_chart_value':
                historylist = sh_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['conNum'], timeStamp])
                body.append({'target': '确诊', 'datapoints': datapoints})
            if name == 'sh_chart_susNum':
                historylist = sh_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['susNum'], timeStamp])
                body.append({'target': '疑似', 'datapoints': datapoints})
            if name == 'sh_chart_cureNum':
                historylist = sh_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['cureNum'], timeStamp])
                body.append({'target': '治愈', 'datapoints': datapoints})
            if name == 'sh_chart_deathNum':
                historylist = sh_data['historylist']
                datapoints = []
                for history in historylist:
                    his_time = '2020.{history}'.format(history=history['date'])
                    timeStamp = int(time.mktime(
                        datetime.strptime(his_time, '%Y.%m.%d').timetuple())) * 1000
                    datapoints.append([history['deathNum'], timeStamp])
                body.append({'target': '死亡', 'datapoints': datapoints})
        body = dumps(body)
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
def search():
    return HTTPResponse(body=dumps(['17dh0cf43jg77n', '17dh0cf43jg77j']),
                        headers={'Content-Type': 'application/json'})
def search():
    conn = sqlite3.connect('cms.db')
    cursor = conn.cursor()
    print(request.json)
    produce_type = request.json['target']
    if produce_type == '':
        return HTTPResponse(body=dumps([
            'cpu_top_10', 'mem_top_10', 'disk_top_10',
            'cpu_top', 'mem_top', 'disk_top'
        ]), headers={'Content-Type': 'application/json'})
    elif produce_type.startswith('all('):
        sql = 'select id from {0}'.format(
            re.findall(r'[(](.*?)[)]', produce_type.replace('\\', ''))[0])
        cursor.execute(sql)
        values = cursor.fetchall()
        result = []
        for val in values:
            result.append('{"instanceId":"' + val[0] + '"}')
        body = [','.join(result)]
    elif produce_type.startswith('num('):
        sql = 'select count(*) from {0}'.format(
            re.findall(r'[(](.*?)[)]', produce_type.replace('\\', ''))[0])
        cursor.execute(sql)
        values = cursor.fetchall()
        body = values[0]
    elif produce_type.startswith('ecs_ip('):
        cursor.execute(
            'select ip from ecs where name=?',
            (re.findall(r'[(](.*?)[)]', produce_type.replace('\\', ''))),
        )
        values = cursor.fetchall()
        body = values[0]
    elif '((' in produce_type:
        db_name = produce_type.split('((')[0]
        names = re.findall(r'[(][(](.*?)[)][)]',
                           produce_type.replace('\\', ''))[0].replace('|', '","')
        sql = 'select id from {0} where name in ("{1}")'.format(db_name, names)
        cursor.execute(sql)
        values = cursor.fetchall()
        result = []
        for val in values:
            result.append('{"instanceId":"' + val[0] + '"}')
        body = [','.join(result)]
    elif '(' in produce_type:
        sql = 'select id from {0} where name="{1}"'.format(
            produce_type.split('(')[0],
            re.findall(r'[(](.*?)[)]', produce_type.replace('\\', ''))[0])
        print(sql)
        cursor.execute(sql)
        values = cursor.fetchall()
        body = [str({"instanceId": values[0][0]})]
    else:
        cursor.execute('select name from {0}'.format(produce_type))
        values = cursor.fetchall()
        body = []
        for value in values:
            body.append(value[0])
    cursor.close()
    conn.close()
    return HTTPResponse(body=dumps(body),
                        headers={'Content-Type': 'application/json'})
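# Illustrative inputs for the template-variable mini-language handled above.
# The table/instance names and values are hypothetical; they only show which
# branch each form reaches and roughly what `body` holds before json dumps().
#   {"target": ""}                      -> ['cpu_top_10', 'mem_top_10', ...]
#   {"target": "all(ecs)"}              -> ['{"instanceId":"i-abc"},{"instanceId":"i-def"}']
#   {"target": "num(ecs)"}              -> (2,)
#   {"target": "ecs_ip(web-01)"}        -> ('10.0.0.12',)
#   {"target": "ecs((web-01|web-02))"}  -> joined ids for the named instances
#   {"target": "ecs(web-01)"}           -> ["{'instanceId': 'i-abc'}"]
#   {"target": "ecs"}                   -> ['web-01', 'web-02', ...]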
def search():
    return HTTPResponse(body=dumps(['users-vs-time', 'tracking-table']),
                        headers={'Content-Type': 'application/json'})
def get_refresh():
    parser = get_parser()
    args = parser.parse_args()
    refresh(args)
    return HTTPResponse(body=dumps(['refresh resource']),
                        headers={'Content-Type': 'application/json'})
def query():
    body = []
    request_json = request.json
    max_points = request_json["maxDataPoints"]
    if request.json['targets'][0]['type'] == 'table':
        series = request.json['targets'][0]['target']
        bodies = {
            '17dh0cf43jg77n': [{
                "columns": [{"text": "Date", "type": "time"},
                            {"text": "Humidity", "type": "number"},
                            {"text": "Longitude", "type": "number"},
                            {"text": "Latitude", "type": "number"},
                            {"text": "Device ID", "type": "number"}],
                "rows": get_all_rows(max_points, 357518080233232),
                "type": "table"
            }],
            '17dh0cf43jg77j': [{
                "columns": [{"text": "Date", "type": "time"},
                            {"text": "Humidity", "type": "number"},
                            {"text": "Longitude", "type": "number"},
                            {"text": "Latitude", "type": "number"},
                            {"text": "Device ID", "type": "number"}],
                "rows": get_all_rows(max_points, 357518080249493),
                "type": "table"
            }]
        }
        # else:
        #     start, end = request_json['range']['from'], request_json['range']['to']
        #     for target in request.json['targets']:
        #         name = target['target']
        #         datapoints = create_data_points(start, end, max_points)
        #         body.append({'target': name, 'datapoints': datapoints})
        body = dumps(bodies[series])
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
def query():
    body = []
    request_json = request.json
    max_points = request_json["maxDataPoints"]
    if request.json['targets'][0]['type'] == 'table':
        series = request.json['targets'][0]['target']
        # Every device shares the same column layout; only the device ID passed
        # to get_all_rows() differs per series name.
        columns = [
            {"text": "Date", "type": "time"},
            {"text": "Temperature", "type": "number"},
            {"text": "Humidity", "type": "number"},
            {"text": "Longitude", "type": "number"},
            {"text": "Latitude", "type": "number"},
            {"text": "PM 1.0", "type": "number"},
            {"text": "PM 2.5", "type": "number"},
            {"text": "PM 10", "type": "number"},
        ]
        device_ids = {
            '17dh0cf43jg77n': 357518080233232,
            '17dh0cf43jg77j': 357518080249493,
            '17dh0cf43jg77l': 357518080231574,
            '17dh0cf43jg783': 357518080249428,
            '17dh0cf43jg781': 357518080249352,
            '17dh0cf43jg6n4': 357518080231251,
            '17dh0cf43jg7ka': 357518080231095,
        }
        bodies = {
            name: [{
                "columns": columns,
                "rows": get_all_rows(max_points, device_id),
                "type": "table"
            }]
            for name, device_id in device_ids.items()
        }
        body = dumps(bodies[series])
    return HTTPResponse(body=body, headers={'Content-Type': 'application/json'})
def search():
    return HTTPResponse(body=dumps(['series A', 'series B']),
                        headers={'Content-Type': 'application/json'})
def get_object_from_path(path):
    """
    Returns metadata for an item from its path.

    List of supported metadata:
        - id
        - size
        - name
        - descr
        - mime

    :arg path: the path of the item
    :type path: str
    """
    # print('GET %s' % (path))
    # TODO: pure metadata reading // a metadata injector will act asynchronously
    path = path.rstrip('/').lstrip('/')
    fpath = os.path.join(config.shared_root, path).rstrip('/')
    meta_fpath = os.path.join(config.shared_db, path).rstrip('/') + config.special_extension
    infos = None
    try:
        # print('load %s' % meta_fpath)
        infos = loads(open(meta_fpath, 'rb').read())
    except (OSError, IOError, FileNotFoundError):
        if not os.path.exists(fpath):
            # or "choices" + "default", instead of link
            return {'error': True, 'message': 'File not found', 'link': path}
        else:
            file_type = guess_type(fpath)
    # We have to create an item; we don't have infos yet!
    if not infos:
        st = os.stat(fpath)
        if '/' in path:
            name = path.rsplit('/', 1)[1]
        else:
            name = path
        infos = {
            'size': st.st_size,
            'link': name,
            'title': (name.rsplit('.', 1)[0] if '.' in name else name)
                     .replace('-', ' ').replace('_', ' ').strip().title(),
            'descr': '',
            'mime': file_type,
        }
        # create the parent directory
        parent = os.path.dirname(meta_fpath)
        if not os.path.exists(parent):
            try:
                os.makedirs(parent)
            except (OSError, FileExistsError):
                pass
        # save it
        if path:  # do not save the root
            try:
                open(meta_fpath, 'wb').write(dumps(infos).encode())
            except (OSError, IOError, PermissionError) as e:
                log.error('Unable to save metadata as %r: %r', meta_fpath, e)
    return infos
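# Hedged usage sketch for the metadata helpers above: read an item's metadata
# (creating it on first access), then persist an updated description with
# update_object(). The path and the new 'descr' value are illustrative only.
meta = get_object_from_path('photos/cat.jpg')
if not meta.get('error'):
    update_object('photos/cat.jpg', {'descr': 'tabby, 2019'})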