def test_limits_defined(self):
    """Verify Endpoint.limits_defined tracks whether any limits are known."""
    ep = Endpoint()
    # A freshly-created endpoint has no limit information yet.
    self.assertFalse(ep.limits_defined)
    # Response headers carry the limit definitions.
    ep.handle_response_headers(headers)
    self.assertTrue(ep.limits_defined)
    # Clearing the limits dict reverts the flag.
    ep.limits = {}
    self.assertFalse(ep.limits_defined)
def test_get_usage(self):
    """get_usage reports per-endpoint usage split into static/limited groups."""
    expected = {'static': {}, 'limited': {}}
    self.assertEqual(self.platform.get_usage(), expected)

    # A queued request with no headers yet reports 'No limits defined'.
    url = match_url_template.format(matchid=100)
    match_ep = Endpoint.identify_endpoint(url)
    expected['limited'][match_ep] = 'No limits defined'
    self.platform.add_data({'url': url})
    self.assertEqual(self.platform.get_usage(), expected)

    # Headers establish the limit; usage string becomes used:cap.
    self.platform.handle_response_headers(url, headers)
    expected['limited'][match_ep] = '1:270'
    self.assertEqual(self.platform.get_usage(), expected)

    # Pulling a request increments the used count.
    self.platform.get()
    expected['limited'][match_ep] = '2:270'
    self.assertEqual(self.platform.get_usage(), expected)

    # Static endpoint behaves the same way.
    static_ep = Endpoint.identify_endpoint(static_champions_url)
    self.platform.add_data({'url': static_champions_url})
    expected['static'][static_ep] = 'No limits defined'
    self.assertEqual(self.platform.get_usage(), expected)

    self.platform.get()
    new_headers = copy.copy(headers)
    new_headers['X-Method-Rate-Limit-Count'] = '1:60,2:120'
    new_headers['X-Method-Rate-Limit'] = '7:60,10:120'
    expected['static'][static_ep] = '1:7,2:10'
    self.platform.handle_response_headers(static_champions_url, new_headers)
def test_identify_endpoint(self):
    """identify_endpoint maps full request URLs to canonical endpoint names.

    Fix: replaced ``assertTrue(a == b)`` with ``assertEqual(a, b)`` so a
    failure shows both values instead of just 'False is not true'.
    """
    e = Endpoint.identify_endpoint(
        summoner_url_template.format(name=fake_name))
    self.assertEqual(e, 'lol/summoner/v3/summoners/by-name')
    e = Endpoint.identify_endpoint(match_url_template.format(matchid='3'))
    self.assertEqual(e, 'lol/match/v3/matches')
    e = Endpoint.identify_endpoint(
        match_url_template.format(matchid='324'))
    self.assertEqual(e, 'lol/match/v3/matches')
    e = Endpoint.identify_endpoint(static_champions_url)
    self.assertEqual(e, 'lol/static-data/v3/champions')
def parse_endpoints(file_stream):
    """Read endpoint descriptions from *file_stream* into the module globals.

    Each endpoint contributes a header line ``"<dc_latency> <num_caches>"``
    followed by ``num_caches`` lines of ``"<cache_id> <latency>"``.  Every
    parsed Endpoint is appended to the global ``endpoints`` list and
    registered with each cache it connects to.

    Fix: the original final check was ``assert len(endpoints), num_endpoints``
    — the comma makes ``num_endpoints`` the assertion *message*, so it only
    verified the list was non-empty.  It now compares the two values.
    """
    global num_endpoints, caches, endpoints
    for i in range(num_endpoints):
        endpoint_header = file_stream.readline().split(" ")
        datacenter_latency = int(endpoint_header[0])
        num_caches = int(endpoint_header[1])
        e = Endpoint(i, datacenter_latency)
        for _ in range(num_caches):
            endpoint_cache_line = file_stream.readline().split(" ")
            cache_id = int(endpoint_cache_line[0])
            latency = int(endpoint_cache_line[1])
            e.add_cache(cache_id, latency)
            # Register the endpoint with the cache for reverse lookup.
            caches[cache_id].add_endpoint(e)
        endpoints.append(e)
    assert len(endpoints) == num_endpoints
def read_file(file_name):
    """Parse the problem input file (Python 2 code: uses ``xrange``).

    Returns a ``(cache_servers, endpoints)`` pair.
    """
    with open(file_name, 'r') as f:
        servers = []
        (videos, endpoints, request_descriptions,
         caches, cache_size) = f.readline().split(' ')
        for server_id in xrange(0, int(caches)):
            servers.append(CacheServer(server_id, int(cache_size)))
        # Video sizes line is consumed but not returned.
        video_file_sizes = [int(x) for x in f.readline().split(' ')]
        endpoints = [Endpoint(_) for _ in xrange(int(endpoints))]
        # Endpoint configuration section.
        for idx in xrange(len(endpoints)):
            header = f.readline().split(' ')
            dc_latency = int(header[0])
            linked_caches = int(header[1])
            endpoints[idx].add_latency(dc_latency)
            for cache_index in xrange(linked_caches):
                # NOTE(review): column 0 (the cache id on this line) is
                # ignored and the loop index is used instead — confirm the
                # input always lists caches in 0..n-1 order.
                endpoints[idx].add_latency(
                    int(f.readline().split(' ')[1]), cache_index)
        # Video request section.
        for idx in xrange(int(request_descriptions)):
            video_id, endpoint_id, num_requests = (
                int(x) for x in f.readline().split(' '))
            endpoints[endpoint_id].add_request(video_id, num_requests)
        return servers, endpoints
def handle_response_headers(self, url, headers, code=200):
    """Apply rate-limit response headers to platform and endpoint state.

    Args:
        url: the request URL the response belongs to.
        headers: response header dict.
        code: HTTP status code of the response.

    Raises:
        Exception: if the URL maps to an endpoint that never made a request.
    """
    # A limit-type header or an error status means we must back off.
    if 'X-Rate-Limit-Type' in headers or (400 <= code <= 500):
        self._handle_delay(headers)

    # Re-verify the app-wide caps if the server sent them; verification
    # problems are logged, not fatal.
    if 'X-App-Rate-Limit' in headers:
        try:
            self._verify_limits(headers)
        except Exception as e:
            print('Platform - Exception verifying limits - %s' % e)

    # Same best-effort check for the app-wide usage counts.
    if 'X-App-Rate-Limit-Count' in headers:
        try:
            self._verify_counts(headers)
        except Exception as e:
            print('Platform - Exception verifying counts - %s' % e)

    # Route the headers on to the endpoint that issued the request.
    endpoint_str = Endpoint.identify_endpoint(url)
    if endpoint_str not in self.ordered_limited_endpoints:
        raise Exception('Invalid response URL: endpoint was not called')
    index = self.ordered_limited_endpoints.index(endpoint_str)
    endpoint = self.limited_endpoints[index]
    endpoint.handle_response_headers(headers, code)
    # Write back through the manager proxy so the change is shared.
    self.limited_endpoints.update([(index, endpoint)])
def readConfiguration(self):
    """Load the problem description from ``self.theFile``.

    Populates the numberOf* counters, ``self.videos``, ``self.endpoints``
    and ``self.requests``.

    Fix: the file is now opened with a context manager so the handle is
    closed even if parsing raises — the original called ``close()`` only on
    the success path.
    """
    with open(self.theFile) as theFile:
        # Header: videos, endpoints, request descriptions, caches, capacity.
        values = theFile.readline().split(' ')
        self.numberOfVideos = int(values[0])
        self.numberOfEndpoints = int(values[1])
        self.numberOfRequestDescription = int(values[2])
        self.numberOfCacheServer = int(values[3])
        self.cacheServerCapacity = int(values[4])

        # Second line: one size per video.
        values = theFile.readline().split(' ')
        self.videos = [Video(int(el)) for el in values]

        # Endpoint sections: latency line followed by n cache/latency pairs.
        for i in range(0, self.numberOfEndpoints):
            values = theFile.readline().split(' ')
            latency = int(values[0])
            n = int(values[1])
            theList = []
            for j in range(0, n):
                values = theFile.readline().split(' ')
                theList.append([int(values[0]), int(values[1])])
            self.endpoints.append(Endpoint(latency, theList))

        # Request descriptions: video id, endpoint id, request count.
        for i in range(0, self.numberOfRequestDescription):
            values = theFile.readline().split(' ')
            self.requests.append(
                EndpointRequests(int(values[2]), int(values[0]),
                                 int(values[1])))
def endpoint_creator(line_list):
    """Build Endpoint objects from pre-split input lines.

    NOTE(review): this function looks broken as written:
      * ``enpoint_data`` is assigned but never used;
      * ``endpoints_list`` is read (``endpoints_list[i][0]``) while still
        empty, so the first iteration raises IndexError;
      * nothing is ever appended to ``endpoints_list`` and nothing is
        returned.
    Presumably the loop bodies were meant to index ``enpoint_data`` instead
    of ``endpoints_list`` — TODO confirm against the input format before
    fixing.
    """
    enpoint_data = line_list[2:]
    endpoints_list = list()
    # line_list[0][1] is presumably the endpoint count — TODO confirm.
    for i in range(line_list[0][1]):
        new_endPoint = Endpoint(endpoints_list[i][0])
        for j in range(i + 1, endpoints_list[i][1]):
            new_endPoint._cache_servers.append(
                (endpoints_list[j][0], endpoints_list[j][1]))
def __init__(self, redsmin_prod):
    """Wire a local Redis client socket to the remote Redsmin endpoint."""
    # Create both socket wrappers.
    self.redisClient = RedisClient(config["uri"])
    self.endpoint = Endpoint(redsmin_prod)
    # Cross-wire each side's read handler so data is relayed to the other.
    self.redisClient.handle_read = self.sendDataFromRedisToEndPoint
    self.endpoint.handle_read = self.sendDataFromEndPointToRedis
    # Start the protocol handshake with the remote endpoint.
    self.endpoint.handshake()
class Proxy: def __init__(self, redsmin_prod): #creating both sockets self.redisClient = RedisClient(config["uri"]) self.endpoint = Endpoint(redsmin_prod) #binding data's transfer to the sockets self.redisClient.handle_read = self.sendDataFromRedisToEndPoint self.endpoint.handle_read = self.sendDataFromEndPointToRedis #calling the handshake self.endpoint.handshake() #transfer data from Redsmin to Redis (called when the sockets have something to read) def sendDataFromEndPointToRedis(self): data = self.endpoint.recv(8192) if(not self.endpoint.handshaken): print data self.redisClient.send(data) #transfer data from Redis to Redsmin (called when the sockets have something to read) def sendDataFromRedisToEndPoint(self): data = self.redisClient.recv(8192) self.endpoint.send(data)
def add_data(self, data, front=False):
    """Queue a request on the endpoint identified by its URL.

    Args:
        data: dict carrying at least the request 'url' (other info allowed).
        front: when True, push to the head of the endpoint's queue.
    """
    endpoint_str = Endpoint.identify_endpoint(data['url'])
    # First request for this endpoint: record its name in call order.
    if endpoint_str not in self.ordered_limited_endpoints:
        self.ordered_limited_endpoints.append(endpoint_str)
    index = self.ordered_limited_endpoints.index(endpoint_str)
    endpoint = self.limited_endpoints[index]
    endpoint.add_data(data, front)
    # Write back through the manager proxy so the change is shared.
    self.limited_endpoints.update([(index, endpoint)])
    self.limited_count += 1
def setup(f):
    """Parse an input file into request/latency matrices.

    Returns (REQ_matrix, VLAT_matrix, CLAT_matrix, video_stats, n_cache,
    cache_size).

    NOTE(review): several spots look suspect — confirm against the intended
    input format before relying on this:
      * ``caches.append((int(x) for x in ...))`` appends a *generator*, not
        a tuple; unless Endpoint consumes it immediately, later reads see an
        exhausted iterator.
      * request lines are indexed with ``req_line[4]`` / ``req_line[8]``
        although a request line appears to have only 3 fields.
      * ``CLAT_array`` is allocated ``[n_cache][n_endpoint]`` but indexed
        ``[endpoint][cache]`` before the transpose.
    """
    with open(f) as file:
        lines = file.readlines()
    # Header: counts and the per-cache capacity.
    n_video, n_endpoint, n_request, n_cache, cache_size = (
        int(param) for param in lines[0].strip().split(' '))
    video_sizes = [int(size) for size in lines[1].strip().split(' ')]
    idx = 2
    endpoints = []
    # Endpoint sections: latency header plus one line per connected cache.
    for i in range(n_endpoint):
        datacenter_latency, cache_count = (
            int(p) for p in lines[idx].strip().split(' '))
        caches = []
        for j in range(cache_count):
            idx += 1
            # NOTE(review): generator, not tuple — see docstring.
            caches.append((int(x) for x in lines[idx].strip().split(' ')))
        endpoints.append(Endpoint(i, datacenter_latency, caches))
        idx += 1
    # Requests matrix: only videos that can fit in a cache are recorded.
    REQ_array = [[0 for _ in range(n_endpoint)] for _ in range(n_video)]
    for i in range(idx, len(lines)):
        req_line = [int(x) for x in lines[i].strip().split(' ')]
        if video_sizes[req_line[4]] <= cache_size:
            REQ_array[req_line[4]][req_line[8]] = req_line[0]
    REQ_matrix = np.array(REQ_array)
    # Datacenter latency replicated per video, then transposed.
    VLAT_array = []
    for i in range(n_endpoint):
        VLAT_array.append(
            [endpoints[i].base_latency for _ in range(n_video)])
    VLAT_matrix = np.array(VLAT_array).T
    # Cache latency matrix — see index-order caveat in the docstring.
    CLAT_array = [[0 for _ in range(n_endpoint)] for _ in range(n_cache)]
    for i in range(n_endpoint):
        for key in endpoints[i].cache_list:
            CLAT_array[i][key] = endpoints[i].cache_list[key]
    CLAT_matrix = np.array(CLAT_array).T
    # Per-video [size, total requests] summary.
    video_stats = []
    for i in range(n_video):
        video_stats.append([video_sizes[i], sum(REQ_array[i])])
    return REQ_matrix, VLAT_matrix, CLAT_matrix, video_stats, n_cache, cache_size
def test_handle_response_headers(self):
    """Platform applies app-level limits and routes method limits onward."""
    url = match_url_template.format(matchid=1)
    endpoint_str = Endpoint.identify_endpoint(url)

    # Responses for endpoints that never made a request are rejected.
    self.assertRaises(Exception, self.platform.handle_response_headers,
                      (url, headers))

    self.platform.add_data({'url': url})
    self.assertEqual(self.platform.platform_limits, {})
    self.platform.handle_response_headers(url, headers)

    # Both app-level windows from the headers are now tracked.
    self.assertEqual(len(self.platform.platform_limits), 2)
    for key, cap, seconds in (("1", 20, 1), ("120", 100, 120)):
        app_limit = self.platform.platform_limits[key]
        self.assertEqual(app_limit.cap, cap)
        self.assertEqual(app_limit.seconds, seconds)
        self.assertEqual(app_limit.used, 1)

    self.assertEqual(len(self.platform.limited_endpoints), 1)
    method_limit = self.platform.limited_endpoints[endpoint_str].limits["60"]
    self.assertEqual(method_limit.cap, 270)
    self.assertEqual(method_limit.seconds, 60)
    self.assertEqual(method_limit.used, 1)

    # Changed limits in a later response replace the old ones entirely.
    new_headers = copy.copy(headers)
    new_headers['X-App-Rate-Limit'] = "1:1"
    new_headers['X-App-Rate-Limit-Count'] = "1:1"
    new_headers['X-Method-Rate-Limit'] = "5:1"
    new_headers['X-Method-Rate-Limit-Count'] = "1:1"
    self.platform.handle_response_headers(url, new_headers)

    self.assertEqual(len(self.platform.platform_limits), 1)
    app_limit = self.platform.platform_limits["1"]
    self.assertEqual(app_limit.cap, 1)
    self.assertEqual(app_limit.seconds, 1)
    self.assertEqual(app_limit.used, 1)

    self.assertEqual(len(self.platform.limited_endpoints), 1)
    method_limit = self.platform.limited_endpoints[endpoint_str].limits["1"]
    self.assertEqual(method_limit.cap, 5)
    self.assertEqual(method_limit.seconds, 1)
    self.assertEqual(method_limit.used, 1)
def __init__(self, manager, slug='', endpoints=0):
    """Shared-state platform container backed by a multiprocessing manager.

    Args:
        manager: multiprocessing manager supplying shared dict/list proxies.
        slug: platform identifier string.
        endpoints: number of Endpoint slots to pre-create, keyed by index.
    """
    self.slug = slug
    # Back-off / delay bookkeeping.
    self.delay = False
    self.delay_end = None
    self.default_retry_after = 1
    # Pre-create one shared Endpoint per slot.
    self.limited_endpoints = manager.dict()
    for slot in range(endpoints):
        self.limited_endpoints.update([(slot, Endpoint(manager))])
    self.limited_count = 0
    self.ordered_limited_endpoints = manager.list()
    self.last_limited_endpoint = ''
    self.platform_limits = manager.dict()
    print('Initializing Platform')
list_endpoint = [] list_cache = [] list_video = [] list_remaining_cache_size = [cache_size] * number_of_caches list_videoReq_ep_cache = [[] for i in range(number_of_endpoints)] list_dc = [] list_score = [] for i in range(0, number_of_caches): # puts all the cache objects into a list for easy access C = Cache(i, cache_size, number_of_caches, number_of_videos) list_cache.append(C) for i in range(0, number_of_endpoints): # puts all the endpoint objects into a list for easy access EP = Endpoint(i, ep_cache_list, number_of_endpoints, ep_to_dc_latency, cache_latency_ep, ep_to_cache_latency) list_endpoint.append(EP) for i in range(0, number_of_endpoints): # puts all the endpoints video details into a list list_video.append(list_endpoint[i].get_video_request_and_number_request(video_ep_request, video_size_desc)) for i in range(0, number_of_endpoints): for j in list_endpoint[i].get_number_of_caches(): req = 0 dcReq = 0 for x in range(0, len(list_video[i])): if list_endpoint[i].get_number_of_caches(): # if the end point has as cache linked to it add = list_cache[j].add_video_to_cache(list_video[i][x][0], list_video[i][x][1]) if add == True:
def get_rpi_data():
    """Return Raspberry Pi stats as a JSON response."""
    stats = Endpoint().rpi_stats()
    return jsonify(stats)
def sensors():
    """Render the sensors page with current sensor-module readings."""
    data = Endpoint().sensor_module_data()
    return render_template('sensors.html', sensor_data=data)
def node():
    """Render the node page with current node data."""
    data = Endpoint().node_data()
    return render_template('node.html', node_data=data)
class TestEndpoint(unittest.TestCase):
    """Unit tests for the Endpoint request queue and its rate limiting."""

    def setUp(self):
        # Fresh endpoint plus a canonical match-URL payload for each test.
        self.endpoint = Endpoint()
        self.default_data = {'url': match_url_template.format(matchid=1)}

    def test_identify_endpoint(self):
        """identify_endpoint maps request URLs to canonical endpoint names."""
        e = Endpoint.identify_endpoint(
            summoner_url_template.format(name=fake_name))
        self.assertTrue(e == 'lol/summoner/v3/summoners/by-name')
        e = Endpoint.identify_endpoint(match_url_template.format(matchid='3'))
        self.assertTrue(e == 'lol/match/v3/matches')
        e = Endpoint.identify_endpoint(
            match_url_template.format(matchid='324'))
        self.assertTrue(e == 'lol/match/v3/matches')
        e = Endpoint.identify_endpoint(static_champions_url)
        self.assertTrue(e == 'lol/static-data/v3/champions')

    def test_limits_defined(self):
        """limits_defined turns on with headers and off when limits clear."""
        endpoint = Endpoint()
        self.assertFalse(endpoint.limits_defined)
        endpoint.handle_response_headers(headers)
        self.assertTrue(endpoint.limits_defined)
        endpoint.limits = {}
        self.assertFalse(endpoint.limits_defined)

    def test_add_data(self):
        """add_data validates URLs, counts entries, and preserves order."""
        # Data without a 'url' key is rejected.
        self.assertRaises(Exception, self.endpoint.add_data, ({
            'other': 'thing'
        }, ))
        self.endpoint.add_data(self.default_data)
        self.assertEqual(self.endpoint.count, 1)
        # Endpoint should prevent adding data that doesn't match
        self.assertRaises(Exception, self.endpoint.add_data, ({
            'other': 'thing'
        }, ))
        self.assertRaises(
            Exception, self.endpoint.add_data, ({
                'url': summoner_url_template.format(name=fake_name)
            }, ))
        # Check order
        m2 = {'url': match_url_template.format(matchid=2)}
        m3 = {'url': match_url_template.format(matchid=3)}
        self.endpoint.add_data(m2)
        self.assertEqual(self.endpoint.count, 2)
        self.endpoint.add_data(m3)
        self.assertEqual(self.endpoint.count, 3)
        # FIFO by default.
        self.assertEqual(self.endpoint.get(), self.default_data)
        self.assertEqual(self.endpoint.get(), m2)
        self.assertEqual(self.endpoint.get(), m3)
        # Test adding data atFront
        self.assertEqual(self.endpoint.count, 0)
        self.endpoint.add_data(self.default_data, front=True)
        self.endpoint.add_data(m2, front=True)
        self.endpoint.add_data(m3, front=True)
        # LIFO when everything was pushed to the front.
        self.assertEqual(self.endpoint.get(), m3)
        self.assertEqual(self.endpoint.get(), m2)
        self.assertEqual(self.endpoint.get(), self.default_data)

    def test_available(self):
        """available() reflects queued data and any known rate limit."""
        self.assertFalse(self.endpoint.available())
        for i in range(1, 4):
            self.endpoint.add_data(
                {'url': match_url_template.format(matchid=i)})
        self.assertTrue(self.endpoint.available())
        self.assertTrue(self.endpoint.available())
        self.endpoint.get()
        self.assertTrue(
            self.endpoint.available())  # No limit set, still available
        self.endpoint.handle_response_headers(headers)
        self.endpoint.get()
        self.assertTrue(self.endpoint.available())
        self.endpoint.get()
        # Limit reached: no longer available.
        self.assertFalse(self.endpoint.available())
        self.assertFalse(self.endpoint.available())

    def test_next_ready(self):
        """next_ready reports now while under limit, else window expiry."""
        self.assertEqual(rtime(self.endpoint.next_ready()), rtime())
        self.endpoint.handle_response_headers(headers)
        self.endpoint.add_data(self.default_data)
        self.assertEqual(rtime(self.endpoint.next_ready()), rtime())
        count, seconds = headers['X-Method-Rate-Limit'].split(':')
        # Saturate the limit: used == cap.
        new_headers = copy.copy(headers)
        new_headers['X-Method-Rate-Limit-Count'] = '%s:%s' % (count, seconds)
        self.endpoint.handle_response_headers(new_headers)
        self.assertTrue(len(self.endpoint.limits) > 0)
        self.assertFalse(self.endpoint.available())
        self.assertEqual(rtime(self.endpoint.limits[seconds].start), rtime())
        self.assertEqual(rtime(self.endpoint.next_ready()),
                         rtime(time.time() + int(seconds)))

    def test_count(self):
        """count tracks queued items through add_data/get."""
        self.assertEqual(self.endpoint.count, 0)
        self.endpoint.add_data(self.default_data)
        self.assertEqual(self.endpoint.count, 1)
        self.endpoint.add_data(self.default_data)
        self.endpoint.add_data(self.default_data)
        self.endpoint.add_data(self.default_data)
        self.assertEqual(self.endpoint.count, 4)
        self.endpoint.get()
        self.assertEqual(self.endpoint.count, 3)

    def test_handle_delay(self):
        """Delay headers make the endpoint unavailable until they expire."""
        self.endpoint.add_data(self.default_data)
        self.assertTrue(self.endpoint.available())
        # No Retry-After header: the default retry interval applies.
        self.endpoint._handle_delay(get_date_header())
        self.assertFalse(self.endpoint.available())
        time.sleep(self.endpoint.default_retry_after)
        self.assertTrue(self.endpoint.available())
        # Explicit Retry-After via response headers.
        delay = 1
        new_headers = get_date_header()
        new_headers['X-Rate-Limit-Type'] = "Method"
        new_headers['X-Retry-After'] = '%s' % delay
        self.endpoint.add_data(self.default_data)
        self.endpoint.get()
        self.endpoint.handle_response_headers(new_headers)
        self.assertFalse(self.endpoint.available())
        time.sleep(delay)
        self.assertTrue(self.endpoint.available())

    def test_get_usage(self):
        """get_usage renders 'used:cap' pairs for each known limit window."""
        self.assertEqual(self.endpoint.get_usage(), 'No limits defined')
        self.endpoint.handle_response_headers(headers)
        count, seconds = headers['X-Method-Rate-Limit'].split(':')
        used, seconds = headers['X-Method-Rate-Limit-Count'].split(':')
        used = int(used)
        self.assertEqual(self.endpoint.get_usage(), '%s:%s' % (used, count))
        self.endpoint.add_data(self.default_data)
        self.endpoint.get()
        self.assertEqual(self.endpoint.get_usage(),
                         '%s:%s' % (used + 1, count))
        new_headers = copy.copy(headers)
        new_headers['X-Method-Rate-Limit-Count'] = '0:%s' % seconds
        self.endpoint.handle_response_headers(new_headers)
        self.assertEqual(self.endpoint.get_usage(),
                         '%s:%s' % (used + 1, count))
        # Limit assumes 0 is old data
        new_headers = copy.copy(headers)
        new_headers['X-Method-Rate-Limit'] = '10:1,100:5'
        new_headers['X-Method-Rate-Limit-Count'] = '1:1,5:5'
        self.endpoint.handle_response_headers(new_headers)
        self.assertEqual(self.endpoint.get_usage(), '1:10,5:100')
        self.endpoint.add_data({'url': match_url_template.format(matchid=1)})
        self.endpoint.get()
        self.assertEqual(self.endpoint.get_usage(), '2:10,6:100')
def setUp(self):
    """Create a fresh Endpoint and a default match-URL payload per test."""
    self.endpoint = Endpoint()
    match_url = match_url_template.format(matchid=1)
    self.default_data = {'url': match_url}
def testEndpoint():
    """Exercise Endpoint URL identification, queueing and rate limiting."""
    summoner_url = 'https://na1.api.riotgames.com/lol/summoner/v3/summoners/by-name/SomeSummonerName'
    endpoint_str = Endpoint.identify_endpoint(summoner_url)
    assert (endpoint_str == 'lol/summoner/v3/summoners/by-name')
    static_url = 'https://na1.api.riotgames.com/lol/static-data/v3/champions?locale=en_US&dataById=false'
    endpoint_str = Endpoint.identify_endpoint(static_url)
    assert (endpoint_str == 'lol/static-data/v3/champions')
    # There was an issue with id's 1 through 9 ending with a '/'
    match_example = 'https://na1.api.riotgames.com/lol/match/v3/matches/'
    for i in range(1, 50):
        url = '%s%s' % (match_example, i)
        assert (Endpoint.identify_endpoint(url) == 'lol/match/v3/matches')
    # A fresh endpoint: empty, unnamed, nothing available.
    endpoint = Endpoint()
    assert (endpoint.limits_defined == False)
    assert (endpoint.count == 0)
    assert (endpoint.available() == False)  # No urls
    assert (endpoint.name == '')
    # The first URL names the endpoint; get() drains FIFO.
    endpoint.addURL(summoner_url)
    assert (endpoint.count == 1)
    assert (endpoint.available())
    assert (endpoint.name == 'lol/summoner/v3/summoners/by-name')
    assert (endpoint.get() == summoner_url)
    assert (endpoint.count == 0)
    assert (endpoint.get() == None)
    # URLs for a different endpoint are rejected.
    assert_raises(endpoint.addURL, [static_url])
    endpoint.addURL(summoner_url)
    endpoint.addURL(summoner_url)
    assert (endpoint.count == 2)
    assert (endpoint.available())
    # Two windows: 1 per 0.1s and 10 per 1s, with 9 of the second used.
    headers = {
        'X-Method-Rate-Limit': '1:0.1,10:1',
        'X-Method-Rate-Limit-Count': '0:0.1,9:1'
    }
    assert (endpoint.limits_defined == False)
    endpoint.setLimit(headers)
    assert (endpoint.available())
    assert (endpoint.limits_defined)
    assert (endpoint.get() == summoner_url)
    assert (endpoint.get() == None)  # Exceeded limit, returned nothing
    assert (endpoint.getResetTime() > time.time() + 0.01)
    time.sleep(0.1)
    assert (endpoint.getResetTime() < time.time())
    endpoint.setCount(headers)
    assert (endpoint.available())
    endpoint.addURL(summoner_url)
    assert (endpoint.get() == summoner_url)
    assert (endpoint.available() == False)
    assert (endpoint.get() == None)  # Exceeded limit, returned nothing
    # Short window resets, but the 1-second window still blocks.
    time.sleep(0.1)
    assert (endpoint.available() == False)
    time.sleep(0.9)
    assert (endpoint.available())
    print('Endpoint tests pass')
final_map = {} # key: cache id, list video_ids for it in range(int(init_line[0])): video_map[it] = Video(it, int(video_size_line[it])) last_tmp = start_endpoint_line[len(start_endpoint_line) - 1] first_video_request_indx = last_tmp + int( fileContent[last_tmp].split(' ')[1]) + 1 for it in range(first_video_request_indx, len(fileContent)): video_tmp, endpoint_tmp, nr_requests_tmp = fileContent[it].split(' ') video_map[int(video_tmp)].addNrRequestToEnpoint(int(endpoint_tmp), int(nr_requests_tmp)) for it in range(int(init_line[1])): nr_lat = int(fileContent[start_endpoint_line[it]].split(' ')[1]) endpoint_map[it] = Endpoint( it, int(fileContent[start_endpoint_line[it]].split(' ')[0])) for it_lat in range((start_endpoint_line[it] + 1), (start_endpoint_line[it] + nr_lat + 1)): cache_id = int(fileContent[it_lat].split(' ')[0]) latency_tmp = int(fileContent[it_lat].split(' ')[1]) endpoint_map[it].addCacheEndpointLatency(cache_id, latency_tmp) for it in range(int(init_line[3])): cache_map[it] = Cache(it, int(init_line[4])) for video_id in range(len(video_map)): video = video_map[video_id] for endpoint_id in video.getNrRequestMapping(): num_requests = video.getNrRequestMapping()[endpoint_id] for cacheServerID in endpoint_map[endpoint_id].getLatencyMapping(): cache_map[cacheServerID].addEndpointList(endpoint_id)
def parse_data(self):
    """Populate self.endpoints with a hard-coded sample endpoint.

    The input file is opened but not parsed yet; one endpoint and its video
    requests are stubbed in directly while tracking the request total.
    """
    with open("kittens.in", "r+") as file:
        print(file)
        ep = Endpoint(1000, self, [(0, 100), (2, 200), (1, 200)])
        self.endpoints.append(ep)
        # Stubbed add_video calls; the first argument is also accumulated
        # into the running request total.
        for video_args in ((1500, 3, 30), (1000, 0, 50),
                           (1000, 1, 50), (500, 4, 110)):
            ep.add_video(*video_args)
            self.all_videos_requests_quantity += video_args[0]
        ep.estimate_for_point()
def get_sensor_data():
    """Return sensor-module readings as a JSON response."""
    readings = Endpoint().sensor_module_data()
    return jsonify(readings)
for prop in self.Field._fields: attribute[prop] = True if prop in value else False print(attribute) def main(): tick = Ticket(subject='Hello World', comment='description') print(tick.serialize()) if __name__ == '__main__': main() attributes = { 'id': Endpoint.Field(True, False, False, False, False), # Automatically assigned when creating tickets 'url': Endpoint.Field(True, False, False, False, False), # The API url of this ticket 'external_id': Endpoint.Field( False, False, False, True, True ), # A unique external id, you can use this to link Zendesk tickets to local records 'type': Endpoint.Field( False, False, False, True, True ), # The type of this ticket, i.e. "problem", "incident", "question" or "task" 'subject': Endpoint.Field(False, False, True, True, True), # The value of the subject field for this ticket 'description': Endpoint.Field(True, False, False, False, False), # The first comment on the ticket 'comment': Endpoint.Field(False, True, True, True,
def get_node_data():
    """Return node data as a JSON response."""
    data = Endpoint().node_data()
    return jsonify(data)
def rpi():
    """Render the Raspberry Pi stats page."""
    stats = Endpoint().rpi_stats()
    return render_template('rpi.html', rpi_data=stats)