def current_function(self):
    """
    Thread that reads the current requests and generates the prefetch requests
    that determine the next segment for all the currently fetched bitrates.
    We use a separate prefetch queue to ensure that prefetching does not affect
    the performance of the current requests.
    """
    config_cdash.LOG.info('Current Thread: Started thread. Stop value = {}'.format(self.stop.is_set()))
    while not self.stop.is_set():
        try:
            # Use a finite timeout so the stop flag is re-checked periodically;
            # with timeout=None the get() would block forever and Queue.Empty
            # could never be raised.
            current_request, username, session_id = self.current_queue.get(timeout=1)
            config_cdash.LOG.info('Retrieved the file: {}'.format(current_request))
        except Queue.Empty:
            config_cdash.LOG.error('Current Thread: Thread GET returned Empty value')
            current_request = None
            continue
        # Determine the next bitrates and add them to the prefetch list
        if current_request:
            if config_cdash.PREFETCH_SCHEME == 'SMART':
                throughput = get_throughput_info(username, session_id, config_cdash.LIMIT, config_cdash.SCHEME)
                config_cdash.LOG.info('Average throughput = {}'.format(throughput))
                prefetch_request, prefetch_bitrate = get_prefetch(current_request, config_cdash.PREFETCH_SCHEME,
                                                                  throughput)
            else:
                prefetch_request, prefetch_bitrate = get_prefetch(current_request, config_cdash.PREFETCH_SCHEME, None)
            if not segment_exists(prefetch_request):
                config_cdash.LOG.info('Segment not in cache: {}'.format(prefetch_request))
                if check_content_server(prefetch_request):
                    config_cdash.LOG.info('Current Thread: Current segment: {}, Next segment: {}'.format(
                        current_request, prefetch_request))
                    self.prefetch_queue.put(prefetch_request)
                    config_cdash.LOG.info('Pre-fetch queue count = {}'.format(self.prefetch_queue.qsize()))
                else:
                    config_cdash.LOG.info('Current Thread: Current segment: {}, Invalid next segment: {}'.format(
                        current_request, prefetch_request))
            else:
                config_cdash.LOG.info('Segment already in cache: {}'.format(prefetch_request))
        else:
            config_cdash.LOG.warning('Current Thread: terminated')
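# A minimal usage sketch of the producer/consumer pattern above, assuming the
# surrounding cache-manager object exposes current_queue and the stop event as
# in the thread body (the names manager and run_current_thread are
# illustrative, not part of the original code):
import threading

def run_current_thread(manager):
    """Start the prefetch-decision worker and later stop it cleanly."""
    worker = threading.Thread(target=manager.current_function)
    worker.daemon = True
    worker.start()
    # ... do_GET keeps feeding manager.current_queue while serving clients ...
    manager.stop.set()  # request loop exit; the finite get() timeout makes this take effect
    worker.join()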
def current_function(self):
    """
    Thread that reads the current requests and generates the prefetch requests
    that determine the next segment for all the currently fetched bitrates.
    We use a separate prefetch queue to ensure that prefetching does not affect
    the performance of the current requests.
    """
    config_cdash.LOG.info('Current Thread: Started thread. Stop value = {}'.format(self.stop.is_set()))
    while not self.stop.is_set():
        try:
            current_request, username, session_id = self.current_queue.get(timeout=1)
            config_cdash.LOG.info('Retrieved the file: {}'.format(current_request))
        except Queue.Empty:
            config_cdash.LOG.error('Current Thread: Thread GET returned Empty value')
            current_request = None
            continue
        # Determine the next bitrates and add them to the prefetch list
        if current_request:
            if config_cdash.PREFETCH_SCHEME == 'SMART':
                # Smoothing (a) and trend (d) constants for the trend-adjusted
                # exponential smoothing forecast below
                a = 0.8
                d = 0.2
                config_cdash.LOG.info('Prefetch scheme: SMART')
                throughput_client = self.get_throughput_client(username, session_id)
                if throughput_client is None:
                    # No client-side measurement yet: fall back to the server-side
                    # average and initialise the forecast and trend to zero
                    config_cdash.LOG.info('Client throughput: None')
                    throughput = self.get_throughput_info(username, session_id, config_cdash.LIMIT,
                                                          config_cdash.SCHEME)
                    config_cdash.LOG.info('Average throughput = {}'.format(throughput))
                    forecast_throughput = throughput
                    self.insert_forecast(username, session_id, 0.0)
                    self.insert_trend(username, session_id, 0.0)
                    config_cdash.LOG.info('Forecast throughput equals throughput: {}'.format(forecast_throughput))
                    prefetch_request, prefetch_bitrate = get_prefetch(current_request, config_cdash.PREFETCH_SCHEME,
                                                                      forecast_throughput)
                else:
                    config_cdash.LOG.info('Client throughput: {}'.format(throughput_client))
                    # Trend-adjusted exponential smoothing:
                    #   F_t   = FIT_{t-1} + a * (A_{t-1} - FIT_{t-1})
                    #   T_t   = T_{t-1}   + d * (F_t - FIT_{t-1})
                    #   FIT_t = F_t + T_t
                    At_1 = float(throughput_client)
                    config_cdash.LOG.info('Type of At_1: {}'.format(type(At_1)))
                    FIT_t_1 = self.get_previous_forecast(username, session_id)
                    config_cdash.LOG.info('Previous forecast: {} ({})'.format(FIT_t_1, type(FIT_t_1)))
                    Tt_1 = self.get_previous_trend(username, session_id)
                    config_cdash.LOG.info('Previous trend: {} ({})'.format(Tt_1, type(Tt_1)))
                    Ft = FIT_t_1 + a * (At_1 - FIT_t_1)
                    Tt = Tt_1 + d * (Ft - FIT_t_1)
                    FIT_t = Ft + Tt
                    self.insert_forecast(username, session_id, FIT_t)
                    self.insert_trend(username, session_id, Tt)
                    config_cdash.LOG.info('Forecast throughput: {}'.format(FIT_t))
                    prefetch_request, prefetch_bitrate = get_prefetch(current_request, config_cdash.PREFETCH_SCHEME,
                                                                      FIT_t)
            else:
                config_cdash.LOG.info('Prefetch scheme: not SMART')
                prefetch_request, prefetch_bitrate = get_prefetch(current_request, config_cdash.PREFETCH_SCHEME, None)
            if not segment_exists(prefetch_request):
                config_cdash.LOG.info('Segment not in cache: {}'.format(prefetch_request))
                if check_content_server(prefetch_request):
                    config_cdash.LOG.info('Type saved in queue: {}'.format(type(prefetch_request)))
                    config_cdash.LOG.info('Current Thread: Current segment: {}, Next segment: {}'.format(
                        current_request, prefetch_request))
                    self.prefetch_queue.put(prefetch_request)
                    self.backup_prefetch_queue.put(prefetch_request)
                    config_cdash.LOG.info('Pre-fetch queue count = {}'.format(self.prefetch_queue.qsize()))
                else:
                    config_cdash.LOG.info('Current Thread: Current segment: {}, Invalid next segment: {}'.format(
                        current_request, prefetch_request))
            else:
                config_cdash.LOG.info('Segment already in cache: {}'.format(prefetch_request))
        else:
            config_cdash.LOG.warning('Current Thread: terminated')
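# The SMART branch above is trend-adjusted (double) exponential smoothing,
# also known as Holt's method. A minimal, self-contained sketch of the same
# update rule; the constants match the a and d used above, but the function
# name holt_forecast is illustrative and not part of the original code:
def holt_forecast(measured, prev_forecast, prev_trend, a=0.8, d=0.2):
    """One smoothing step: returns (new_forecast, new_trend).

    F_t   = FIT_{t-1} + a * (A_{t-1} - FIT_{t-1})   # level, pulled toward the measurement
    T_t   = T_{t-1}   + d * (F_t - FIT_{t-1})       # trend, pulled toward the level change
    FIT_t = F_t + T_t                               # trend-adjusted forecast
    """
    level = prev_forecast + a * (measured - prev_forecast)
    trend = prev_trend + d * (level - prev_forecast)
    return level + trend, trend

# Example: previous forecast 4.0 Mbps, previous trend 0.0, new measurement
# 5.0 Mbps gives level = 4.8, trend = 0.16, forecast = 4.96 Mbps.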
def do_GET(self):
    """Handle a GET request."""
    start_time = time.time()
    config_cdash.LOG.info('T2 = {}'.format(start_time))
    entry_id = username = session_id = request_id = "NULL"
    request_size = throughput = request_time = request_t = time_c = "NULL"
    segment_size = seg_time = "NULL"
    try:
        username = self.headers['Username']
        config_cdash.LOG.info("username {}".format(username))
        session_id = self.headers['Session-ID']
        config_cdash.LOG.info("Session-ID {}".format(session_id))
        if self.headers['Time']:
            time_c = self.headers['Time']
        else:
            time_c = 0
        config_cdash.LOG.info("time_c {}".format(time_c))
        global client_throughput
        client_throughput = self.headers['Throughput']
        if client_throughput is None or client_throughput == 'NULL':
            client_throughput = 0.0
        config_cdash.LOG.info("Client Throughput {}".format(client_throughput))
        segment_size = self.headers['segment_size']
        config_cdash.LOG.info("segment_size {}".format(segment_size))
        seg_time = self.headers['seg_time']
        config_cdash.LOG.info("seg_time {}".format(seg_time))
    except KeyError:
        config_cdash.LOG.warning('Could not find the username or session-ID for request from host: {}'.format(
            self.client_address))
    global MPD_DICT
    request = self.path.strip("/").split('?')[0]
    config_cdash.LOG.info("Received request {}".format(request))
    # Check if the requested MPD file is already in the cache server (dictionary)
    if request in MPD_DICT:
        config_cdash.LOG.info('Found MPD in MPD_DICT')
        request_path = request.replace('/', os.path.sep)
        make_sure_path_exists(config_cdash.MPD_FOLDER)
        local_mpd_path = os.path.join(config_cdash.MPD_FOLDER, request_path)
        for header in self.headers:
            config_cdash.LOG.info(header)
        self.send_response(HTTP_OK)
        for header, header_value in MPD_DICT[request]['http_headers'].items():
            self.send_header(header, header_value)
        self.end_headers()
        with open(local_mpd_path, 'rb') as request_file:
            self.wfile.write(request_file.read())
        # Elapsed time in seconds
        T3 = time.time()
        request_t = T3 - start_time
        config_cdash.LOG.info('T3 = {}'.format(T3))
        config_cdash.LOG.info('Served the MPD file from the cache server')
        request_size = stat(local_mpd_path).st_size
    elif request in config_cdash.MPD_SOURCE_LIST:
        config_cdash.LOG.info("MPD {}: not in cache. Retrieving from content server".format(request))
        # If the MPD is on the content server, save it on the cache server
        # and record it in MPD_DICT and the JSON file
        mpd_headers = None
        if "Big" in request:
            mpd_url = config_cdash.CONTENT_SERVER + config_cdash.SERVER[0] + request
        elif "Elephants" in request:
            mpd_url = config_cdash.CONTENT_SERVER + config_cdash.SERVER[1] + request
        elif "OfForest" in request:
            mpd_url = config_cdash.CONTENT_SERVER + config_cdash.SERVER[2] + request
        elif "Tears" in request:
            mpd_url = config_cdash.CONTENT_SERVER + config_cdash.SERVER[3] + request
        else:
            # Fall back to the plain content-server URL for any other title
            mpd_url = config_cdash.CONTENT_SERVER + request
        try:
            content_server_response = urllib2.urlopen(mpd_url)
            mpd_headers = content_server_response.headers
            config_cdash.LOG.info('Fetching MPD from {}'.format(mpd_url))
        except urllib2.HTTPError as http_error:
            config_cdash.LOG.error('Unable to fetch MPD file from the content server url {}. HTTPError: {}'.format(
                mpd_url, http_error.code))
            return  # nothing was fetched, so there is nothing to serve
        request_path = request.replace('/', os.path.sep)
        local_mpd_path = os.path.join(config_cdash.MPD_FOLDER, request_path)
        make_sure_path_exists(os.path.dirname(local_mpd_path))
        with open(local_mpd_path, 'wb') as local_mpd_file:
            shutil.copyfileobj(content_server_response, local_mpd_file)
        config_cdash.LOG.info('Downloaded the MPD: {} to {}'.format(content_server_response, local_mpd_path))
        self.send_response(HTTP_OK)
        for header, header_value in mpd_headers.items():
            self.send_header(header, header_value)
        self.end_headers()
        with open(local_mpd_path, 'rb') as request_file:
            self.wfile.write(request_file.read())
        # Elapsed time in seconds
        T3 = time.time()
        request_t = T3 - start_time
        config_cdash.LOG.info('T3 = {}'.format(T3))
        # File size in bytes
        request_size = stat(local_mpd_path).st_size
        config_cdash.LOG.info('Served MPD file: {}'.format(local_mpd_path))
        mpd_headers_dict = dict(mpd_headers)
        config_cdash.LOG.info('Parsing MPD file')
        parse_mpd(local_mpd_path, request, mpd_headers_dict, (username, session_id))
    elif 'm4s' in request:
        # Check if it is a valid request
        config_cdash.LOG.info('Request for m4s {}'.format(request))
        if check_content_server(request):
            local_file_path, http_headers = cache_manager.fetch_file(request, username, session_id)
            T3 = time.time()
            request_t = T3 - start_time
            config_cdash.LOG.info('T3 = {}'.format(T3))
            config_cdash.LOG.debug('M4S request: local {}, http_headers: {}'.format(local_file_path, http_headers))
            self.send_response(HTTP_OK)
            for header, header_value in http_headers.items():
                self.send_header(header, header_value)
            self.end_headers()
            with open(local_file_path, 'rb') as request_file:
                self.wfile.write(request_file.read())
            request_size = stat(local_file_path).st_size
            # A valid request was served: record the throughput sample
            entry_id = datetime.datetime.now()
            if time_c == 'NULL':
                time_c = 0
                Client_transfer = 0
            else:
                Client_transfer = abs(float(time_c) - start_time)
            config_cdash.LOG.info('Sent-header time {}'.format(time_c))
            config_cdash.LOG.info('Start time {}'.format(start_time))
            config_cdash.LOG.info('Client transfer time {}'.format(Client_transfer))
            config_cdash.LOG.info('Request time {}'.format(request_t))
            if request_t == 'NULL':
                request_t = 0.0
            if request_size == 'NULL':
                request_size = 0.0
            # Convert bytes to bits
            request_size = float(request_size) * 8
            config_cdash.LOG.info('Transfer size: {}'.format(request_size))
            # Total transfer time: server-side elapsed time plus the
            # client-reported offset in both directions
            request_time = float(request_t + (Client_transfer * 2))
            # Guard against a zero transfer time when no timing info is available
            throughput = float(request_size) / request_time if request_time else 0.0
            cache_manager.list_data.append((entry_id, username, session_id, request_size, request_time,
                                            throughput, client_throughput, None, None))
            cache_manager.current_queue.put((request, username, session_id))
        else:
            config_cdash.LOG.warning('Invalid video file request: {}'.format(request))
    else:
        self.send_response(HTTP_NOT_FOUND)
        config_cdash.LOG.warning('Could not find file {}'.format(request))
        return
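# The throughput sample computed above is (size in bits) divided by the
# server-side elapsed time plus twice the client-reported clock offset. A
# hypothetical helper mirroring that computation, with illustrative numbers
# (not taken from the original code):
def estimated_throughput(size_bytes, server_elapsed, client_offset):
    """Return the estimated throughput in bits per second."""
    size_bits = float(size_bytes) * 8
    total_time = server_elapsed + 2 * client_offset
    return size_bits / total_time if total_time else 0.0

# e.g. estimated_throughput(2000000, 1.5, 0.25) == 8000000.0 bit/s (8 Mbit/s):
# 16,000,000 bits over 1.5 + 2 * 0.25 = 2.0 seconds.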
def do_GET(self):
    """Handle a GET request."""
    entry_id = username = session_id = request_id = "NULL"
    request_size = throughput = request_time = "NULL"
    try:
        username = self.headers['Username']
        session_id = self.headers['Session-ID']
    except KeyError:
        config_cdash.LOG.warning('Could not find the username or session-ID for request from host: {}'.format(
            self.client_address))
    global MPD_DICT
    request = self.path.strip("/").split('?')[0]
    # Check if the requested MPD file is already in the cache server (dictionary)
    if request in MPD_DICT:
        config_cdash.LOG.info('Found MPD in MPD_DICT')
        request_path = request.replace('/', os.path.sep)
        make_sure_path_exists(config_cdash.MPD_FOLDER)
        local_mpd_path = os.path.join(config_cdash.MPD_FOLDER, request_path)
        for header in self.headers:
            config_cdash.LOG.info(header)
        self.send_response(HTTP_OK)
        for header, header_value in MPD_DICT[request]['http_headers'].items():
            self.send_header(header, header_value)
        self.end_headers()
        with open(local_mpd_path, 'rb') as request_file:
            start_time = timeit.default_timer()
            self.wfile.write(request_file.read())
        # Elapsed time in seconds
        request_time = timeit.default_timer() - start_time
        config_cdash.LOG.info('Served the MPD file from the cache server')
        request_size = stat(local_mpd_path).st_size
    elif request in config_cdash.MPD_SOURCE_LIST:
        config_cdash.LOG.info("MPD {}: not in cache. Retrieving from content server".format(request))
        # If the MPD is on the content server, save it on the cache server
        # and record it in MPD_DICT and the JSON file
        mpd_headers = None
        mpd_url = config_cdash.CONTENT_SERVER + request
        try:
            content_server_response = urllib2.urlopen(mpd_url)
            mpd_headers = content_server_response.headers
            config_cdash.LOG.info('Fetching MPD from {}'.format(mpd_url))
        except urllib2.HTTPError as http_error:
            config_cdash.LOG.error('Unable to fetch MPD file from the content server url {}. HTTPError: {}'.format(
                mpd_url, http_error.code))
            return  # nothing was fetched, so there is nothing to serve
        request_path = request.replace('/', os.path.sep)
        local_mpd_path = os.path.join(config_cdash.MPD_FOLDER, request_path)
        make_sure_path_exists(os.path.dirname(local_mpd_path))
        with open(local_mpd_path, 'wb') as local_mpd_file:
            shutil.copyfileobj(content_server_response, local_mpd_file)
        config_cdash.LOG.info('Downloaded the MPD: {} to {}'.format(content_server_response, local_mpd_path))
        self.send_response(HTTP_OK)
        for header, header_value in mpd_headers.items():
            self.send_header(header, header_value)
        self.end_headers()
        with open(local_mpd_path, 'rb') as request_file:
            start_time = timeit.default_timer()
            self.wfile.write(request_file.read())
        # Elapsed time in seconds
        request_time = timeit.default_timer() - start_time
        # File size in bytes
        request_size = stat(local_mpd_path).st_size
        config_cdash.LOG.info('Served MPD file: {}'.format(local_mpd_path))
        mpd_headers_dict = dict(mpd_headers)
        config_cdash.LOG.info('Parsing MPD file')
        parse_mpd(local_mpd_path, request, mpd_headers_dict, (username, session_id))
    elif 'm4s' in request:
        # Check if it is a valid request
        config_cdash.LOG.info('Request for m4s {}'.format(request))
        if check_content_server(request):
            local_file_path, http_headers = cache_manager.fetch_file(request, username, session_id)
            config_cdash.LOG.debug('M4S request: local {}, http_headers: {}'.format(local_file_path, http_headers))
            self.send_response(HTTP_OK)
            for header, header_value in http_headers.items():
                self.send_header(header, header_value)
            self.end_headers()
            with open(local_file_path, 'rb') as request_file:
                start_time = timeit.default_timer()
                self.wfile.write(request_file.read())
            request_time = timeit.default_timer() - start_time
            request_size = stat(local_file_path).st_size
        else:
            config_cdash.LOG.warning('Invalid video file request: {}'.format(request))
            return  # skip the throughput record for invalid requests
    else:
        self.send_response(HTTP_NOT_FOUND)
        config_cdash.LOG.warning('Could not find file {}'.format(request))
        return
    # A valid request was served: record the throughput sample
    entry_id = datetime.datetime.now()
    cursor = TH_CONN.cursor()
    throughput = float(request_size) / request_time
    config_cdash.LOG.info('Adding row to Throughput database: '
                          'INSERT INTO THROUGHPUTDATA VALUES ({}, {}, {}, {}, {}, {}, {});'.format(
                              entry_id, username, session_id, request_id, request_size, request_time, throughput))
    cursor.execute('INSERT INTO THROUGHPUTDATA(ENTRYID, USERNAME, SESSIONID, REQUESTSIZE, REQUESTTIME, THROUGHPUT) '
                   'VALUES (?, ?, ?, ?, ?, ?);',
                   (entry_id, username, session_id, request_size, request_time, throughput))
    TH_CONN.commit()
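# The insert above assumes a THROUGHPUTDATA table already exists on TH_CONN.
# A minimal sqlite3 schema consistent with the columns used here; the column
# types and database path are assumptions, not taken from this file:
import sqlite3

TH_CONN = sqlite3.connect('throughput.db')  # illustrative path
TH_CONN.execute('CREATE TABLE IF NOT EXISTS THROUGHPUTDATA ('
                'ENTRYID TEXT, USERNAME TEXT, SESSIONID TEXT, '
                'REQUESTSIZE REAL, REQUESTTIME REAL, THROUGHPUT REAL);')
TH_CONN.commit()
# Note: entry_id is a datetime.datetime; sqlite3's default adapter stores it
# as an ISO-formatted string, so a TEXT column works for ENTRYID.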