def get_json_data_apdex_global(dumpdata):
    """Compute a global Apdex score (0-100) over all dumped HTTP records.

    Each item in *dumpdata* is a whitespace-separated record line where
    field 8 is the latency and field 9 is the HTTP status code
    (assumed from the indexing here -- TODO confirm against the dumper).

    Uses the module-level thresholds ``valvel`` (satisfied ceiling) and
    ``valveh`` (tolerating ceiling).

    Returns a dict with the single key "Component/Http/Global/Apdex".
    """
    global valvel
    global valveh
    url_satisfied_count = 0
    url_tolerating_count = 0
    url_frustrated_count = 0
    for item in dumpdata:
        iteminner = item.split()
        latency = int(iteminner[8])
        status = int(iteminner[9])
        # Error responses (4xx/5xx) are forced to a huge latency so they
        # always land in the "frustrated" bucket.
        if status >= 400:
            latency = 999999999
        if latency <= valvel:
            url_satisfied_count += 1
        elif latency > valveh:
            url_frustrated_count += 1
        else:
            url_tolerating_count += 1
    # Standard Apdex formula: (satisfied + tolerating/2) / total, as a percent.
    good = url_satisfied_count + url_tolerating_count / 2
    sumcount = url_satisfied_count + url_frustrated_count + url_tolerating_count
    rate = 0
    if sumcount > 0:
        rate = good / sumcount * 100
    # Build the result dict directly instead of eval()-ing a formatted
    # string (the old approach was both unsafe and rounded the value).
    resultdata = {"Component/Http/Global/Apdex": rate}
    common.jprint(
        '$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$4'
    )
    common.jprint(resultdata)
    return resultdata
def get_json_data_httplatency_global(dumpdata):
    """Compute the average HTTP latency over all dumped records.

    Each item in *dumpdata* is a whitespace-separated record line where
    field 8 is the latency (divided by 1e6 here, so presumably recorded
    in microseconds and reported in seconds -- TODO confirm units).

    Returns a dict with the single key "Component/Http/Global/Latency".
    """
    print('get_json_data_httplatency_global')
    url_latency = 0
    url_count = 0
    for item in dumpdata:
        iteminner = item.split()
        url_latency += int(iteminner[8])
        url_count += 1
    # Guard against an empty dump to avoid ZeroDivisionError.
    avglatency = 0.0
    if url_count > 0:
        avglatency = url_latency / url_count / 1000000
    # Build the result dict directly instead of eval()-ing a formatted
    # string (the old approach was both unsafe and rounded the value).
    resultdata = {"Component/Http/Global/Latency": avglatency}
    common.jprint(
        '----------------------------------------------------------------------------------------'
    )
    common.jprint(url_latency)
    common.jprint(resultdata)
    return resultdata
def parse_jsons(tm, refresh_time):
    """Collect all metric sources for the interval ending at *tm*, merge
    them into one payload, and post it to New Relic.

    Every load/format step is deliberately best-effort: a failure in one
    data source (missing file, bad record) is skipped so the remaining
    sources are still reported.  A failed load leaves its json_* variable
    undefined; the matching format step then raises NameError, which the
    per-step handler also absorbs.

    Returns the post_newrelic() result when posting succeeds, otherwise
    False (including in testing mode).
    """
    dictdata = {}

    # --- best-effort loads of the per-interval capture files ----------
    # NOTE(review): bare "except:" was narrowed to "except Exception:" so
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        json_tcp = load_json_file(tm, refresh_time, "tcp_5s")
    except Exception:
        pass
    try:
        json_tcp_connection_all = load_json_file(tm, refresh_time, "tcp_connection_all_5s")
    except Exception:
        pass
    try:
        json_tcp_timeout = load_json_file(tm, refresh_time, "tcp_timeout_5s")
    except Exception:
        pass
    try:
        json_http = load_json_file(tm, refresh_time, "http_5s")
    except Exception:
        pass
    try:
        json_http_status = load_json_file(tm, refresh_time, "http_status_5s")
    except Exception:
        pass
    try:
        json_npm_traffic = load_json_file(tm, refresh_time, "npm_traffic_all_5s")
    except Exception:
        pass
    # NOTE(review): the loads below are currently unused by the format
    # steps (those consume dumper output instead); kept for parity with
    # the original behavior.
    try:
        json_npm_port = load_json_file(tm, refresh_time, "npm_port_all_5s")
    except Exception:
        pass
    try:
        json_http_filter_status = load_json_file(tm, refresh_time, "http_filter_status_5s")
    except Exception:
        pass
    try:
        json_http_url_status = load_json_file(tm, refresh_time, "http_url_status_5s")
    except Exception:
        pass
    try:
        json_http_url_delay = load_json_file(tm, refresh_time, "http_url_5s")
    except Exception:
        pass
    try:
        json_http_apdex = load_json_file(tm, refresh_time, "http_url_5s")
    except Exception:
        pass
    try:
        json_http_url = load_json_file(tm, refresh_time, "http_url_5s")
    except Exception:
        pass

    # --- convert each loaded source to metric pairs and merge ---------
    try:
        pairdata = tcp.format_data_tcp(json_tcp, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = tcpconn.format_data_tcp_connection(json_tcp_connection_all, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = tcptimeout.format_data_tcp_timeout(json_tcp_timeout, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = http.format_data_http(json_http, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = httpstatus.format_data_http_status(json_http_status, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = traffic.format_data_npm_traffic(json_npm_traffic, refresh_time)
        dictdata.update(pairdata)
    except Exception:
        pass

    # HTTP-derived metrics are computed from the raw dump, not the json files.
    dumpdata = dumper.get_http_data(tm, refresh_time)
    try:
        pairdata = apdex.get_json_data_apdex(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = apdex.get_json_data_apdex_global(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = errorrate.get_json_data_errorrate(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = errorrate.get_json_data_errorrate_global(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = httplatency.get_json_data_httplatency(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass
    try:
        pairdata = httplatency.get_json_data_httplatency_global(dumpdata)
        dictdata.update(pairdata)
    except Exception:
        pass

    # --- assemble the payload and post it -----------------------------
    data = format_newrelicdata_from_dictdata(dictdata)
    common.jprint(data)
    try:
        if testing:
            # Testing mode never posts; falls through to return False.
            print("Testing")
        else:
            result = post_newrelic(data)
            return result
    except Exception:
        pass
    return False
def get_json_data_errorrate_global(dumpdata):
    """Compute the global HTTP error rate (percent) over all dumped records.

    Each item in *dumpdata* is a whitespace-separated record line where
    field 9 is the HTTP status code.  Statuses 4xx, 5xx and anything
    outside 100-599 count as errors.

    Returns a dict with the single key "Component/Http/Global/ErrorRate".
    """
    url_status_count_1xx = 0
    url_status_count_2xx = 0
    url_status_count_3xx = 0
    url_status_count_4xx = 0
    url_status_count_5xx = 0
    url_status_count_other = 0
    for item in dumpdata:
        iteminner = item.split()
        status = int(iteminner[9])
        if 100 <= status < 200:
            url_status_count_1xx += 1
        elif 200 <= status < 300:
            url_status_count_2xx += 1
        elif 300 <= status < 400:
            url_status_count_3xx += 1
        elif 400 <= status < 500:
            url_status_count_4xx += 1
        elif 500 <= status < 600:
            url_status_count_5xx += 1
        else:
            url_status_count_other += 1
    rate = 0
    error = url_status_count_4xx + url_status_count_5xx + url_status_count_other
    sumcount = (url_status_count_1xx + url_status_count_2xx +
                url_status_count_3xx + url_status_count_4xx +
                url_status_count_5xx + url_status_count_other)
    # Guard against an empty dump to avoid ZeroDivisionError.
    if sumcount > 0:
        rate = error / sumcount * 100
    # Build the result dict directly instead of eval()-ing a formatted
    # string (the old approach was both unsafe and rounded the value).
    resultdata = {"Component/Http/Global/ErrorRate": rate}
    common.jprint(
        '*****************************************************************************'
    )
    common.jprint(resultdata)
    return resultdata
# Extract today's weather summary from the OpenWeatherMap forecast response
# (presumably the /forecast endpoint, given the "list"/"city" schema --
# TODO confirm) and persist it as TodayWeather.json.
jsonResponse = response.json()
# First forecast entry is treated as "today".
entry = jsonResponse["list"][0]
todayWeather = {}
todayWeather["Description"] = entry["weather"][0]["description"]
todayWeather["Temp_min"] = entry["main"]["temp_min"]
todayWeather["Temp_max"] = entry["main"]["temp_max"]
todayWeather["Humidity"] = entry["main"]["humidity"]
todayWeather["Wind_speed"] = entry["wind"]["speed"]
# Sunrise/sunset arrive as Unix timestamps; keep the raw values and add
# human-readable local-time strings.
todayWeather["Sunrise"] = jsonResponse["city"]["sunrise"]
todayWeather["Sunset"] = jsonResponse["city"]["sunset"]
todayWeather["Str_sunrise"] = str(datetime.fromtimestamp(todayWeather["Sunrise"]))
todayWeather["Str_sunset"] = str(datetime.fromtimestamp(todayWeather["Sunset"]))
# Weather icon URL pattern: http://openweathermap.org/img/wn/[Icon].png
todayWeather["Icon"] = ("http://openweathermap.org/img/wn/"
                        + entry["weather"][0]["icon"] + ".png")
common.jprint(todayWeather)
# Use a context manager so the file handle is closed even if dump fails.
with open("TodayWeather.json", "w") as fout:
    json.dump(todayWeather, fout, sort_keys=True, indent=2)