def parse(self, output_file, file):
    """Normalize a raw CSV log into the parsed report format.

    Skips the input header, writes a fresh header to *output_file*,
    rewrites each row's response code and URL through Converters, and
    silently drops rows whose URL cannot be converted.
    """
    with open(file) as src:
        next(src)  # discard the raw file's header row
        with open(output_file, "w") as dst:
            dst.write("date,response_time,url,response_code,status,attr1,attr2\n")
            conv = Converters()
            for row in src:
                (stamp, elapsed, raw_url, code,
                 status, extra1, extra2) = row.split(",")
                code = conv.response_code_converter(code)
                mapped_url = conv.url_converter(raw_url)
                if mapped_url is not None:
                    # extra2 still carries the source row's trailing newline,
                    # so no explicit "\n" is appended here.
                    dst.write('%s,%s,%s,%s,%s,%s,%s' % (
                        stamp, elapsed, mapped_url, code,
                        status, extra1, extra2))
def check(file_path):
    """Summarize per-URL response times from a parsed log file.

    Only URLs present in the module-level ``max_time`` mapping are
    counted.  For each such URL the report states whether at least 90%
    of its requests finished within the allowed time with a 200 code.

    :param file_path: path to a parsed CSV log (see ``parse``)
    :return: multi-line, human-readable report string
    """
    output = ""
    urls = {}
    unsuccessfull = 0
    all_requests = 0
    # Hoisted out of the loop: one converter instance is enough.
    converters = Converters()
    # "with" guarantees the handle is closed even if a line raises.
    with open(file_path) as fp:
        for line in fp:
            try:
                timestamp, estimated_time, url, response_code, _, _, _ = line.split(
                    ",")
                url = converters.url_converter(url)
                # `in` replaces the Python-2-only dict.has_key().
                if url in max_time:
                    all_requests += 1
                    if url not in urls:
                        urls[url] = {'times': []}
                    urls[url]['times'].append([estimated_time, response_code])
                    if response_code != "200":
                        unsuccessfull += 1
            except Exception as e:
                # Best-effort: record the failure in the report, keep going.
                output += "Exception occured\n"
                output += str(e) + "\n"  # e.message was removed in Python 3
    for k in urls:
        count_succ = 0
        all = len(urls[k]['times'])
        for time, response_code in urls[k]['times']:
            if int(time) <= max_time[k] and response_code == "200":
                count_succ += 1
        # Share of total traffic this URL represents (percent).
        dist = (all * 100.0) / all_requests
        # "//" keeps the Python-2 integer-floor semantics on Python 3.
        if count_succ >= (all * 90) // 100:
            output += "%-50s %-50s prob = %.2f%%\n" % (
                k, "OK [no. requests = %s]" % all, dist)
        else:
            p = (count_succ * 100) // all
            output += "%-50s %-50s prob = %.2f%%\n" % (
                k, "NOT OK [all = %s, succ = %s (%s%%) ]" % (all, count_succ, p), dist)
    output += "--------------------------------------------------\n"
    output += "ALL = %s, UNSUCCESSFULL = %s\n" % (all_requests, unsuccessfull)
    return output
def check(file_path):
    """Summarize per-URL response times from a parsed log file.

    Counts only URLs known to the module-level ``max_time`` mapping and
    reports, per URL, whether at least 90% of its requests completed
    within the allowed time with a 200 response code.

    :param file_path: path to a parsed CSV log (see ``parse``)
    :return: multi-line, human-readable report string
    """
    output = ""
    urls = {}
    unsuccessfull = 0
    all_requests = 0
    converters = Converters()  # hoisted: no need to rebuild per line
    # "with" closes the file even when a line raises mid-loop.
    with open(file_path) as fp:
        for line in fp:
            try:
                timestamp, estimated_time, url, response_code, _, _, _ = line.split(",")
                url = converters.url_converter(url)
                # `in` replaces the Python-2-only dict.has_key().
                if url in max_time:
                    all_requests += 1
                    if url not in urls:
                        urls[url] = {'times': []}
                    urls[url]['times'].append([estimated_time, response_code])
                    if response_code != "200":
                        unsuccessfull += 1
            except Exception as e:
                # Best-effort: note the failure in the report and continue.
                output += "Exception occured\n"
                output += str(e) + "\n"  # e.message was removed in Python 3
    for k in urls:
        count_succ = 0
        all = len(urls[k]['times'])
        for time, response_code in urls[k]['times']:
            if int(time) <= max_time[k] and response_code == "200":
                count_succ += 1
        # Percentage of total traffic that hit this URL.
        dist = (all * 100.0) / all_requests
        # "//" preserves the Python-2 integer-floor division on Python 3.
        if count_succ >= (all * 90) // 100:
            output += "%-50s %-50s prob = %.2f%%\n" % (
                k, "OK [no. requests = %s]" % all, dist)
        else:
            p = (count_succ * 100) // all
            output += "%-50s %-50s prob = %.2f%%\n" % (
                k, "NOT OK [all = %s, succ = %s (%s%%) ]" % (all, count_succ, p), dist)
    output += "--------------------------------------------------\n"
    output += "ALL = %s, UNSUCCESSFULL = %s\n" % (all_requests, unsuccessfull)
    return output
def check(file_path):
    """Build a per-operation SLO report from a parsed log file.

    For every operation whose converted URL is known to the module-level
    ``max_time`` mapping, reports the success ratio, its share of total
    traffic, and whether the observed traffic distribution deviates more
    than 5% from the probability declared in ``WebInteractions``.

    :param file_path: path to a parsed CSV log (see ``parse``)
    :return: multi-line, table-formatted report string
    """
    output = ""
    urls = {}
    unsuccessfull = 0
    all_requests = 0
    converters = Converters()  # hoisted: one instance serves every line
    # "with" guarantees the handle is closed even if a line raises.
    with open(file_path) as fp:
        for line in fp:
            try:
                timestamp, estimated_time, operation, response_code, _, _, _ = line.split(",")
                url = converters.url_converter(operation)
                # `in` replaces the Python-2-only dict.has_key();
                # `is not None` is the idiomatic None test.
                if url is not None and url in max_time:
                    all_requests += 1
                    if url not in urls:
                        urls[url] = {"times": []}
                    urls[url]["times"].append([estimated_time, response_code])
                    if response_code != "200":
                        unsuccessfull += 1
            except Exception as e:
                # Best-effort: record the failure and keep processing.
                output += "Exception occured\n"
                output += str(e) + "\n"  # e.message was removed in Python 3
    dist_sum = 0
    web_interactions = WebInteractions()
    output += "%-40s %s %-20s %s %-20s %s %-25s %s %-20s %s %-20s %s %-20s %s %-20s %s %-20s\n" % (
        "operation", "|", "status", "|", "# all request", "|",
        "# successfull requests", "|", "% of successfull", "|",
        "% of operation", "|", "allowed deviation", "|",
        "actual deviation", "|", "deviation ok?",
    )
    # String repetition replaces the joined list comprehension over xrange.
    separator = "-" * len(output) + "\n"
    output += separator
    for k in urls:
        count_succ = 0
        all = len(urls[k]["times"])
        for time, response_code in urls[k]["times"]:
            if int(time) <= max_time[k] and response_code == "200":
                count_succ += 1
        # Share of total traffic this operation represents (percent).
        dist = (all * 100.0) / all_requests
        dist_sum = dist_sum + dist
        # Allowed deviation is 5% of the declared operation probability.
        allowed_deviation = 0.05 * web_interactions.get_probability(k)
        status = "NOT OK"
        # "//" preserves the Python-2 integer-floor division on Python 3.
        p = (count_succ * 100) // all
        if count_succ >= (all * 90) // 100:
            status = "OK"
            p = 0  # success percentage only shown for failing operations
        actual_deviation = get_actual_deviation(dist, k)
        deviation_ok = is_deviation_ok(dist, allowed_deviation, k)
        output += "%-40s %s %-20s %s %-20s %s %-25s %s %-20s %s %-20s %s %-20.3f %s %-20s %s %-20s\n" % (
            k, "|", status, "|", all, "|", count_succ, "|", p, "|",
            "%.3f%% (%.3f%%)" % (dist, web_interactions.get_probability(k)),
            "|", allowed_deviation, "|", actual_deviation, "|", deviation_ok,
        )
    output += separator
    output += "# ALL REQUESTS = %s, # UNSUCCESSFULL REQUESTS = %s, PROB SUM = %s\n" % (
        all_requests, unsuccessfull, dist_sum,
    )
    return output
def check(file_path):
    """Build a per-operation SLO report with a cumulative violation total.

    Like the other ``check`` variants, but additionally tracks the
    cumulative successful/total request counts and appends an overall
    SLO-violation percentage to the summary line.

    :param file_path: path to a parsed CSV log (see ``parse``)
    :return: multi-line, table-formatted report string
    """
    output = ""
    urls = {}
    unsuccessfull = 0
    all_requests = 0
    converters = Converters()  # hoisted: one instance serves every line
    # "with" guarantees the handle is closed even if a line raises.
    with open(file_path) as fp:
        for line in fp:
            try:
                timestamp, estimated_time, operation, response_code, _, _, _ = line.split(",")
                url = converters.url_converter(operation)
                # `in` replaces the Python-2-only dict.has_key().
                if url is not None and url in max_time:
                    all_requests += 1
                    if url not in urls:
                        urls[url] = {'times': []}
                    urls[url]['times'].append([estimated_time, response_code])
                    if response_code != "200":
                        unsuccessfull += 1
            except Exception as e:
                # Best-effort: record the failure and keep processing.
                output += "Exception occured\n"
                output += str(e) + "\n"  # e.message was removed in Python 3
    dist_sum = 0
    web_interactions = WebInteractions()
    output += "%-40s %s %-20s %s %-20s %s %-25s %s %-20s %s %-20s %s %-20s %s %-20s %s %-20s\n" % (
        "operation", "|", "status", "|", "# all request", "|",
        "# successfull requests", "|", "% of successfull", "|",
        "% of operation", "|", "allowed deviation", "|",
        "actual deviation", "|", "deviation ok?")
    # String repetition replaces the joined list comprehension over xrange.
    separator = "-" * len(output) + "\n"
    output += separator
    # Cumulative counters feed the overall SLO-violation percentage.
    # (The original also accumulated a never-read cummulative_violations
    # total; that dead local has been removed.)
    cummulative_successful = 0
    cummulative_all = 0
    for k in urls:
        count_succ = 0
        all = len(urls[k]['times'])
        for time, response_code in urls[k]['times']:
            if int(time) <= max_time[k] and response_code == "200":
                count_succ += 1
        cummulative_successful += count_succ
        cummulative_all += all
        # Share of total traffic this operation represents (percent).
        dist = (all * 100.0) / all_requests
        dist_sum = dist_sum + dist
        # Allowed deviation is 5% of the declared operation probability.
        allowed_deviation = 0.05 * web_interactions.get_probability(k)
        status = "NOT OK"
        # "//" preserves the Python-2 integer-floor division on Python 3.
        p = (count_succ * 100) // all
        if count_succ >= (all * 90) // 100:
            status = "OK"
        actual_deviation = get_actual_deviation(dist, k)
        deviation_ok = is_deviation_ok(dist, allowed_deviation, k)
        output += "%-40s %s %-20s %s %-20s %s %-25s %s %-20s %s %-20s %s %-20.3f %s %-20s %s %-20s\n" % (
            k, "|", status, "|", all, "|", count_succ, "|", p, "|",
            "%.3f%% (%.3f%%)" % (dist, web_interactions.get_probability(k)),
            "|", allowed_deviation, "|", actual_deviation, "|", deviation_ok)
    output += separator
    # Guard: with no matching requests the original divided by zero.
    if cummulative_all:
        slo_violations = ((cummulative_all - cummulative_successful) * 100) // cummulative_all
    else:
        slo_violations = 0
    output += "# ALL REQUESTS = %s, # UNSUCCESSFULL REQUESTS = %s, PROB SUM = %s, # SLO VIOLATIONS = %s%%\n" % (
        all_requests, unsuccessfull, dist_sum, slo_violations)
    return output