def initialize(self):
    """Load the source list and open the database connection.

    Returns:
        bool: True when sources were loaded and the DB connector was
        created; False when no source content was available.
    """
    fetched = Util.get_source_content()
    if fetched is None:
        return False
    self.db = SqlConnector()
    self.sources_cache = fetched
    print("started..")
    return True
def __init__(self, api_ids=None):
    """Load API metadata and decide which API ids to query.

    Args:
        api_ids: Optional explicit list of API ids to query. When None,
            the ids are taken from the first column of every row returned
            by SqlConnector().get_apis().
    """
    # Get api info
    self.__apis_info = SqlConnector().get_apis()
    # Build the list of ids to query
    if api_ids is not None:
        self.__ids_to_query = api_ids
    else:
        # Idiom fix: list comprehension instead of a manual append loop.
        # Each row's first element is assumed to be the API id -- TODO
        # confirm against the get_apis() schema.
        self.__ids_to_query = [row[0] for row in self.__apis_info]
class Responser(object):
    """Periodically times HTTP requests to a set of source URLs and stores
    each round of response times in the database.

    NOTE(review): instantiating this class immediately enters an infinite
    polling loop (``__init__`` -> ``run``) -- confirm callers expect that.
    """

    # Seconds to sleep between polling rounds.
    request_frequency_sec = 3
    db = None
    logging_path = sys.path[0] + "/logging/"
    userAgent = 'Your friendly neighborhood API-response-tracker - https://github.com/ckrag/responder'
    sources_cache = None

    def __init__(self):
        if self.initialize():
            self.run()

    def initialize(self):
        """Load the source list and open the DB connection.

        Returns:
            bool: True when sources were loaded, False otherwise.
        """
        sources = Util.get_source_content()
        if sources is not None:
            self.db = SqlConnector()
            self.sources_cache = sources
            print("started..")
            return True
        return False

    def time_url_opening(self, source):
        """Request ``source["url"]`` and measure how long it takes.

        Args:
            source: Mapping with at least a "url" key.

        Returns:
            dict with keys "time" (float seconds), "url" and "code"
            (HTTP status), or None when the request failed (the failure
            is appended to the request log instead).
        """
        url = source["url"]
        pretime = time.time()
        try:
            request_result = requests.get(url=url, data=None,
                                          headers={'Connection': 'close',
                                                   'user-agent': self.userAgent})
        # NOTE(review): requests raises requests.exceptions.ConnectionError,
        # which the *builtin* ConnectionError does NOT catch. Verify the
        # file-level imports bind ConnectionError/SocketError to the
        # requests/socket exception types; otherwise connection failures
        # will escape this handler.
        except ConnectionError as e:
            self.log_error(e, url)
            return None
        except SocketError as e:
            self.log_error(e, url)
            return None
        response_code = request_result.status_code
        request_result.close()
        request_time = time.time() - pretime  # fixed local typo: reques_time
        return {"time": request_time, "url": url, "code": response_code}

    def get_response_times(self):
        """Time all cached sources in parallel and persist one round of results."""
        pool = ThreadPool(4)
        current_time = int(time.time())
        results = pool.map(self.time_url_opening, self.sources_cache)
        pool.close()
        pool.join()
        # For now we simply ignore any collections with None values, since
        # None values aren't supported in the database.
        # Idiom fix: "None not in results" instead of "not None in results".
        if None not in results:
            self.db.store_data(results, current_time)

    def log_error(self, exception, url):
        """Append a timestamped request failure for ``url`` to the request log."""
        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        with open(self.logging_path + "request_log", 'a') as log_file:
            log_file.write("\n" + "Time: " + timestamp + "\n" + "Url: " + url
                           + "\n" + "Exception: " + str(exception) + "\n")

    def run(self):
        """Poll forever at ``request_frequency_sec`` intervals until Ctrl-C."""
        try:
            while True:
                self.get_response_times()
                time.sleep(self.request_frequency_sec)
        except KeyboardInterrupt:
            print("Script stopped, ..exiting")
class Responser(object):
    """Periodically times HTTP requests to a set of source URLs and stores
    each round of response times in the database.

    NOTE(review): this class definition appears twice in this SOURCE; one
    copy is likely redundant -- confirm which one the project imports.
    NOTE(review): instantiating this class immediately enters an infinite
    polling loop (``__init__`` -> ``run``).
    """

    # Seconds to sleep between polling rounds.
    request_frequency_sec = 3
    # SqlConnector instance, created lazily in initialize().
    db = None
    # Directory the request failure log is written into.
    logging_path = sys.path[0] + "/logging/"
    userAgent = 'Your friendly neighborhood API-response-tracker - https://github.com/ckrag/responder'
    # Source list loaded by initialize(); each entry must carry a "url" key.
    sources_cache = None

    def __init__(self):
        if self.initialize():
            self.run()

    def initialize(self):
        """Load the source list and open the DB connection.

        Returns:
            bool: True when sources were loaded, False otherwise.
        """
        sources = Util.get_source_content()
        if sources is not None:
            self.db = SqlConnector()
            self.sources_cache = sources
            print("started..")
            return True
        return False

    def time_url_opening(self, source):
        """Request ``source["url"]`` and measure how long it takes.

        Returns:
            dict with keys "time", "url" and "code", or None when the
            request failed (failure is appended to the request log).
        """
        url = source["url"]
        pretime = time.time()
        try:
            request_result = requests.get(url=url, data=None, headers={
                'Connection': 'close',
                'user-agent': self.userAgent
            })
        # NOTE(review): requests raises requests.exceptions.ConnectionError,
        # which the *builtin* ConnectionError does not catch -- verify the
        # file-level imports bind ConnectionError/SocketError appropriately.
        except ConnectionError as e:
            self.log_error(e, url)
            return None
        except SocketError as e:
            self.log_error(e, url)
            return None
        response_code = request_result.status_code
        request_result.close()
        reques_time = time.time() - pretime  # NOTE(review): typo, should be request_time
        return {"time": reques_time, "url": url, "code": response_code}

    def get_response_times(self):
        """Time all cached sources in parallel and persist one round of results."""
        pool = ThreadPool(4)
        current_time = int(time.time())
        results = pool.map(self.time_url_opening, self.sources_cache)
        pool.close()
        pool.join()
        # For now we simply ignore any collections with None values, since None values aren't supported in the database
        # NOTE(review): idiomatic form would be "if None not in results:".
        if not None in results:
            self.db.store_data(results, current_time)

    def log_error(self, exception, url):
        """Append a timestamped request failure for ``url`` to the request log."""
        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        with open(self.logging_path + "request_log", 'a') as log_file:
            log_file.write("\n" + "Time: " + timestamp + "\n" + "Url: " + url +
                           "\n" + "Exception: " + str(exception) + "\n")

    def run(self):
        """Poll forever at ``request_frequency_sec`` intervals until Ctrl-C."""
        try:
            while True:
                self.get_response_times()
                time.sleep(self.request_frequency_sec)
        except KeyboardInterrupt:
            print("Script stopped, ..exiting")
def get_data(self, sources=None):
    """Fetch graph data for the configured API ids and return it as a JSON string.

    Args:
        sources: Optional list parallel to ``self.__apis_info``; when
            given, each entry must carry "pretty_name" and "pretty_color"
            keys that are merged into the API metadata.

    Returns:
        str: JSON document with keys "api_data", "api_count" and
        "api_meta_data".
    """
    # Query data for ids.
    # NOTE(review): 40 is presumably the number of most-recent samples per
    # API -- confirm against get_graph_data's signature.
    data = SqlConnector().get_graph_data(self.__ids_to_query, 40)
    # Builds presentable data
    timestamp_collection = []
    """ returned format: { "timestamps" : [ { "timestamp" : 12345678, "apis_with_respondtime" : [ { "url" : "url", "respondtime" : 213214 } ] } ] } """
    # NOTE(review): the format sketch above is stale -- the code below
    # actually emits "timestamp"/"api_responsetimes" objects inside
    # "api_data".
    # We can trust that the number of sets returned from the graph_data-query is atleast the number of sets
    # returned from the apis-ids returned from the get_apis query,
    # Since the before mentioned query depends on the later
    unique_timestamps = []
    for index_i, item in enumerate(data):
        # Get unique timestamps
        # NOTE(review): loop variable shadows the builtin ``tuple``.
        for index_j, tuple in enumerate(item[1]):
            # Each sample tuple is assumed to be (timestamp, response_time,
            # response_code) -- TODO confirm against the DB schema.
            timestamp = data[index_i][1][index_j][0]
            if timestamp not in unique_timestamps:
                unique_timestamps.append(timestamp)
    # Build 'timestamps' json-objects, newest timestamp first.
    for index, timestamp in reversed(list(enumerate(unique_timestamps))):
        # For each timestamp, find the responsetime and url of each api, and map it
        apis_with_respondtime = {
            "timestamp": timestamp,
            "api_responsetimes": []
        }
        for index_j, api_data in enumerate(data):
            # NOTE(review): enumerate() makes time_tuple = (idx, sample);
            # the sample itself is always accessed via time_tuple[1].
            for time_tuple in enumerate(api_data[1]):
                if time_tuple[1][0] == timestamp:
                    apis_with_respondtime["api_responsetimes"].append({
                        "index_ref": self.__apis_info[index_j][0],
                        "response_time": time_tuple[1][1],
                        "response_code": time_tuple[1][2]
                    })
        timestamp_collection.append(apis_with_respondtime)
    # Per-API metadata; pretty fields only when ``sources`` was supplied.
    api_meta_data = []
    for index, api_info in enumerate(self.__apis_info):
        if sources is not None:
            api_meta_data.append({
                "request_url": api_info[1],
                "index": api_info[0],
                "pretty_name": sources[index]["pretty_name"],
                "pretty_color": sources[index]["pretty_color"]
            })
        else:
            api_meta_data.append({
                "request_url": api_info[1],
                "index": api_info[0]
            })
    # Present data, return json obj
    return json.dumps({
        "api_data": timestamp_collection,
        "api_count": len(data),
        "api_meta_data": api_meta_data
        #"api_meta" : self.get_meta_data()
    })