def start_playback_smart(dp_object, domain, playback_type=None, download=False,
                         video_segment_duration=None, duration=120):
    """ Module that downloads the MPD file and all of the representations of
        the MPEG-DASH media.
        Example: start_playback_smart(dp_object, domain, "SMART", DOWNLOAD, video_segment_duration)
        :param dp_object: The DASH-playback object
        :param domain: The domain name of the server (The segment URLS are domain + relative_address)
        :param playback_type: The type of playback
                            1. 'BASIC' - The basic adaptation scheme
                            2. 'SARA' - Segment Aware Rate Adaptation
                            3. 'NETFLIX' - Buffer based adaptation used by Netflix
        :param download: Set to True if the segments are to be stored locally (Boolean). Default False
        :param video_segment_duration: Playback duration of each segment
        :return:
    """
    global PLAYER
    print("START_PLAYBACK_SMART:DOMAIN=" + domain)
    # Initialize the DASH buffer
    dash_player = dash_buffer.DashPlayer(dp_object.playback_duration, video_segment_duration)
    PLAYER = dash_player
    global PLAYBACK_TIME
    PLAYBACK_TIME = duration
    dash_event_logger.init(0, PLAYER, 'unknown', MPD, 'AStream', 'standard')
    dash_event_logger.setBufferLevelProvider()
    dash_player.start()
    # A folder to save the segments in
    file_identifier = id_generator()
    config_dash.LOG.info("The segments are stored in %s" % file_identifier)
    dp_list = defaultdict(defaultdict)
    # Creating a dictionary that holds the URLs for each segment at the different bitrates
    for bitrate in dp_object.video:
        # Getting the URL list for each bitrate
        dp_object.video[bitrate] = read_mpd.get_url_list(dp_object.video[bitrate], video_segment_duration,
                                                         dp_object.playback_duration, bitrate)
        if "$Bandwidth$" in dp_object.video[bitrate].initialization:
            dp_object.video[bitrate].initialization = dp_object.video[bitrate].initialization.replace(
                "$Bandwidth$", str(bitrate))
        media_urls = [dp_object.video[bitrate].initialization] + dp_object.video[bitrate].url_list
        for segment_count, segment_url in enumerate(media_urls, dp_object.video[bitrate].start):
            # segment_duration = dp_object.video[bitrate].segment_duration
            dp_list[segment_count][bitrate] = segment_url
    bitrates = sorted(dp_object.video.keys())
    average_dwn_time = 0
    segment_files = []
    # For basic adaptation
    previous_segment_times = []
    recent_download_sizes = []
    weighted_mean_object = None
    current_bitrate = bitrates[0]
    previous_bitrate = None
    total_downloaded = 0
    # Delay in terms of the number of segments
    delay = 0
    segment_duration = 0
    segment_size = segment_download_time = None
    # Netflix Variables
    average_segment_sizes = netflix_rate_map = None
    netflix_state = "INITIAL"
    # Start playback of all the segments
    # dash_event_logger.startupDelay(time.time() - PLAYER.actual_start_time)
    for segment_number, segment in enumerate(dp_list, dp_object.video[current_bitrate].start):
        config_dash.LOG.info(" {}: Processing the segment {}".format(playback_type.upper(), segment_number))
        write_json()
        if not previous_bitrate:
            previous_bitrate = current_bitrate
        if SEGMENT_LIMIT:
            if not dash_player.segment_limit:
                dash_player.segment_limit = int(SEGMENT_LIMIT)
            if segment_number > int(SEGMENT_LIMIT):
                config_dash.LOG.info("Segment limit reached")
                break
        if segment_number == dp_object.video[bitrate].start:
            current_bitrate = bitrates[0]
        else:
            if playback_type.upper() == "BASIC":
                current_bitrate, average_dwn_time = basic_dash2.basic_dash2(segment_number, bitrates,
                                                                            average_dwn_time,
                                                                            recent_download_sizes,
                                                                            previous_segment_times,
                                                                            current_bitrate)
                if dash_player.buffer.qsize() > config_dash.BASIC_THRESHOLD:
                    delay = dash_player.buffer.qsize() - config_dash.BASIC_THRESHOLD
                config_dash.LOG.info("Basic-DASH: Selected {} for the segment {}".format(current_bitrate,
                                                                                         segment_number + 1))
            elif playback_type.upper() == "SMART":
                if not weighted_mean_object:
                    weighted_mean_object = WeightedMean(config_dash.SARA_SAMPLE_COUNT)
                    config_dash.LOG.debug("Initializing the weighted Mean object")
                # Checking that the segment number is in the acceptable range
                if segment_number < len(dp_list) - 1 + dp_object.video[bitrate].start:
                    try:
                        current_bitrate, delay = weighted_dash.weighted_dash(
                            bitrates, dash_player, weighted_mean_object.weighted_mean_rate,
                            current_bitrate, get_segment_sizes(dp_object, segment_number + 1))
                    except IndexError as e:
                        config_dash.LOG.error(e)
            elif playback_type.upper() == "NETFLIX":
                config_dash.LOG.info("Playback is NETFLIX")
                # Calculate the average segment sizes for each bitrate
                if not average_segment_sizes:
                    average_segment_sizes = get_average_segment_sizes(dp_object)
                if segment_number < len(dp_list) - 1 + dp_object.video[bitrate].start:
                    try:
                        if segment_size and segment_download_time:
                            segment_download_rate = segment_size / segment_download_time
                        else:
                            segment_download_rate = 0
                        current_bitrate, netflix_rate_map, netflix_state = netflix_dash.netflix_dash(
                            bitrates, dash_player, segment_download_rate, current_bitrate,
                            average_segment_sizes, netflix_rate_map, netflix_state)
                        config_dash.LOG.info("NETFLIX: Next bitrate = {}".format(current_bitrate))
                    except IndexError as e:
                        config_dash.LOG.error(e)
                else:
                    config_dash.LOG.critical("Completed segment playback for Netflix")
                    break
                # If the buffer is full wait till it gets empty
                if dash_player.buffer.qsize() >= config_dash.NETFLIX_BUFFER_SIZE:
                    delay = (dash_player.buffer.qsize() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration
                    config_dash.LOG.info("NETFLIX: delay = {} seconds".format(delay))
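# The file_identifier above comes from an id_generator() helper that is not shown in
# this section.  A minimal sketch of such a helper, assuming it only builds a random
# folder name for the downloaded segments; the exact alphabet, length, and prefix are
# assumptions, not the original implementation.
import random
import string


def id_generator(size=6):
    """Return a random folder name used to store the downloaded segments."""
    return 'TEMP_' + ''.join(random.choice(string.ascii_letters + string.digits)
                             for _ in range(size))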
def start_playback_smart(dp_object, domain, playback_type=None, download=False, video_segment_duration=None, connection_type="", JUMP_SCENARIO=""): """ Module that downloads the MPD-FIle and download all the representations of the Module to download the MPEG-DASH media. Example: start_playback_smart(dp_object, domain, "SMART", DOWNLOAD, video_segment_duration) :param dp_object: The DASH-playback object :param domain: The domain name of the server (The segment URLS are domain + relative_address) :param playback_type: The type of playback 1. 'BASIC' - The basic adapataion scheme 2. 'SARA' - Segment Aware Rate Adaptation 3. 'NETFLIX' - Buffer based adaptation used by Netflix :param download: Set to True if the segments are to be stored locally (Boolean). Default False :param video_segment_duration: Playback duratoin of each segment :return: """ # Initialize the DASH buffer dash_player = dash_buffer.DashPlayer(dp_object.playback_duration, video_segment_duration, connection_type) dash_player.start() # A folder to save the segments in file_identifier = 'URLLIB_' #id_generator() config_dash.LOG.info("The segments are stored in %s" % file_identifier) dp_list = defaultdict(defaultdict) # Creating a Dictionary of all that has the URLs for each segment and different bitrates for bitrate in dp_object.video: # Getting the URL list for each bitrate dp_object.video[bitrate] = read_mpd.get_url_list( dp_object.video[bitrate], video_segment_duration, dp_object.playback_duration, bitrate) if "$Bandwidth$" in dp_object.video[bitrate].initialization: dp_object.video[bitrate].initialization = dp_object.video[ bitrate].initialization.replace("$Bandwidth$", str(bitrate)) media_urls = [dp_object.video[bitrate].initialization ] + dp_object.video[bitrate].url_list for segment_count, segment_url in enumerate( media_urls, dp_object.video[bitrate].start): # segment_duration = dp_object.video[bitrate].segment_duration dp_list[segment_count][bitrate] = segment_url # print segment_count,bitrate,segment_url bitrates = dp_object.video.keys() bitrates.sort() average_dwn_time = 0 segment_files = [] # For basic adaptation previous_segment_times = [] recent_download_sizes = [] weighted_mean_object = None current_bitrate = bitrates[0] previous_bitrate = None total_downloaded = 0 # Delay in terms of the number of segments delay = 0 segment_duration = 0 segment_size = segment_download_time = None # Netflix Variables average_segment_sizes = netflix_rate_map = None netflix_state = "INITIAL" sb = None global JUMP_BUFFER_COUNTER JUMP_BUFFER_COUNTER = 0 # Start playback of all the segments """ for segment1 in dp_list.keys(): for bitrate1 in dp_list[segment1]: print segment1, bitrate1, dp_list[segment1][bitrate1] """ if (CURL or QUIC): # CURL or QUIC client """ CURL or QUIC client Module to download the segment """ if CURL: CMD = config_dash.CURL_CLIENT_CMD print CMD if QUIC: CMD = config_dash.QUIC_CLIENT_CMD print CMD sb = Popen(CMD, shell=True, stdout=PIPE, stdin=PIPE, stderr=STDOUT) while True: out = non_block_read( sb.stdout) # will return '' instead of hanging for ever if "started" in out: print out break max_jump_count = 0 current_jump_index = 0 if JUMP: JUMP_SCENARIO_ARR = JUMP_SCENARIO.split(',') max_jump_count = len(JUMP_SCENARIO_ARR) total_segment_count = len(dp_list) segment_number = 1 while segment_number <= total_segment_count: config_dash.LOG.info("*************** segment_number:" + str(segment_number) + "*********************") config_dash.LOG.info(" {}: Processing the segment {}".format( playback_type.upper(), 
segment_number)) write_json() if not previous_bitrate: previous_bitrate = current_bitrate if SEGMENT_LIMIT: if not dash_player.segment_limit: dash_player.segment_limit = int(SEGMENT_LIMIT) if segment_number > int(SEGMENT_LIMIT): config_dash.LOG.info("Segment limit reached") break if segment_number == dp_object.video[bitrate].start: current_bitrate = bitrates[0] else: if playback_type.upper() == "BASIC": current_bitrate, average_dwn_time = basic_dash2.basic_dash2( segment_number, bitrates, average_dwn_time, recent_download_sizes, previous_segment_times, current_bitrate) if dash_player.buffer.qsize() > config_dash.BASIC_THRESHOLD: delay = dash_player.buffer.qsize( ) - config_dash.BASIC_THRESHOLD config_dash.LOG.info( "Basic-DASH: Selected {} for the segment {}".format( current_bitrate, segment_number + 1)) elif playback_type.upper() == "SMART": if not weighted_mean_object: weighted_mean_object = WeightedMean( config_dash.SARA_SAMPLE_COUNT) config_dash.LOG.debug( "Initializing the weighted Mean object") # Checking the segment number is in acceptable range if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: config_dash.LOG.info("JUMP_BUFFER_COUNTER: %s", str(JUMP_BUFFER_COUNTER)) current_bitrate, delay, JUMP_BUFFER_COUNTER = weighted_dash.weighted_dash( bitrates, dash_player, weighted_mean_object.weighted_mean_rate, current_bitrate, get_segment_sizes(dp_object, segment_number + 1), JUMP_BUFFER_COUNTER) except IndexError, e: config_dash.LOG.error(e) elif playback_type.upper() == "NETFLIX": config_dash.LOG.info("Playback is NETFLIX") # Calculate the average segment sizes for each bitrate if not average_segment_sizes: average_segment_sizes = get_average_segment_sizes( dp_object) if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: if segment_size and segment_download_time: segment_download_rate = segment_size / segment_download_time else: segment_download_rate = 0 config_dash.LOG.info("JUMP_BUFFER_COUNTER: %s", str(JUMP_BUFFER_COUNTER)) current_bitrate, netflix_rate_map, netflix_state, JUMP_BUFFER_COUNTER = netflix_dash.netflix_dash( bitrates, dash_player, segment_download_rate, current_bitrate, average_segment_sizes, netflix_rate_map, netflix_state, JUMP_BUFFER_COUNTER) config_dash.LOG.info( "NETFLIX: Next bitrate = {}".format( current_bitrate)) except IndexError, e: config_dash.LOG.error(e) else: config_dash.LOG.critical( "Completed segment playback for Netflix") break # If the buffer is full wait till it gets empty if dash_player.buffer.qsize( ) >= config_dash.NETFLIX_BUFFER_SIZE: delay = (dash_player.buffer.qsize() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration config_dash.LOG.info( "NETFLIX: delay = {} seconds".format(delay))
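# The CURL/QUIC startup loop above polls the external client's stdout with a
# non_block_read() helper that is not shown here.  A minimal POSIX-only sketch of
# that helper, assuming the common fcntl-based recipe (the body is an assumption;
# only the name and the "return '' instead of hanging" behaviour come from the code
# above).
import fcntl
import os


def non_block_read(output):
    """Read whatever is currently available from a pipe without blocking."""
    fd = output.fileno()
    # Switch the pipe to non-blocking mode so read() returns immediately.
    fl = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
    try:
        return output.read() or ''
    except (IOError, OSError):
        # No data available yet
        return ''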
def start_playback_smart(dp_object, domain, playback_type=None, download=False, video_segment_duration=None, retrans=False): """ Module that downloads the MPD-FIle and download all the representations of the Module to download the MPEG-DASH media. Example: start_playback_smart(dp_object, domain, "SMART", DOWNLOAD, video_segment_duration) :param dp_object: The DASH-playback object :param domain: The domain name of the server (The segment URLS are domain + relative_address) :param playback_type: The type of playback 1. 'BASIC' - The basic adapataion scheme 2. 'SARA' - Segment Aware Rate Adaptation 3. 'NETFLIX' - Buffer based adaptation used by Netflix 4. 'VLC' - VLC adaptation scheme :param download: Set to True if the segments are to be stored locally (Boolean). Default False :param video_segment_duration: Playback duratoin of each segment :return: """ # Initialize the DASH buffer video_segment_duration = 2 dash_player = dash_buffer.DashPlayer(dp_object.playback_duration, video_segment_duration) start_dload_time = timeit.default_timer() dash_player.start() # A folder to save the segments in file_identifier = id_generator() config_dash.LOG.info("The segments are stored in %s" % file_identifier) dp_list = defaultdict(defaultdict) # Creating a Dictionary of all that has the URLs for each segment and different bitrates for bitrate in dp_object.video: # Getting the URL list for each bitrate dp_object.video[bitrate] = read_mpd.get_url_list( dp_object.video[bitrate], video_segment_duration, dp_object.playback_duration, bitrate) if "$Bandwidth$" in dp_object.video[bitrate].initialization: dp_object.video[bitrate].initialization = dp_object.video[ bitrate].initialization.replace("$Bandwidth$", str(bitrate)) media_urls = [dp_object.video[bitrate].initialization ] + dp_object.video[bitrate].url_list for segment_count, segment_url in enumerate( media_urls, dp_object.video[bitrate].start): # segment_duration = dp_object.video[bitrate].segment_duration dp_list[segment_count][bitrate] = segment_url bitrates = dp_object.video.keys() bitrates.sort() average_dwn_time = 0 segment_files = [] # For basic adaptation global segment_w_chunks init_dl_start_time = timeit.default_timer() segment_w_chunks = [] previous_segment_times = [] recent_download_sizes = [] bitrate_history = [] #segment_dl_rates = [] weighted_mean_object = None current_bitrate = bitrates[0] previous_bitrate = None total_downloaded = 0 bitrate_holder = 0 dl_rate_history = [] # Delay in terms of the number of segments delay = 0 segment_duration = 0 segment_size = segment_download_time = None # Netflix Variables average_segment_sizes = netflix_rate_map = None netflix_state = "INITIAL" RETRANSMISSION_SWITCH = False retransmission_delay = 0 retransmission_delay_switch = False # Start playback of all the segments #for segment_number, segment in enumerate(dp_list, dp_object.video[current_bitrate].start): #for segment_number in dp_list:s segment_number = 1 original_segment_number = 1 while segment_number < len(dp_list): if retransmission_delay_switch == True: #segment_number = original_segment_number retransmission_delay_switch = False segment = segment_number #print len(dp_list) #print "dp_list" #print segment #print segment_number #print "++++++++++++" config_dash.LOG.info(" {}: Processing the segment {}".format( playback_type.upper(), segment_number)) write_json() if not previous_bitrate: previous_bitrate = current_bitrate if SEGMENT_LIMIT: if not dash_player.segment_limit: dash_player.segment_limit = int(SEGMENT_LIMIT) if segment_number > 
int(SEGMENT_LIMIT): config_dash.LOG.info("Segment limit reached") break if segment_number == dp_object.video[bitrate].start: current_bitrate = bitrates[0] else: if playback_type.upper() == "BASIC": current_bitrate, average_dwn_time = basic_dash2.basic_dash2( segment_number, bitrates, average_dwn_time, recent_download_sizes, previous_segment_times, current_bitrate) # if dash_player.buffer.qsize() > config_dash.BASIC_THRESHOLD: if dash_player.buffer.__len__( ) > config_dash.BASIC_THRESHOLD: #MZ # delay = dash_player.buffer.qsize() - config_dash.BASIC_THRESHOLD delay = dash_player.buffer.__len__( ) - config_dash.BASIC_THRESHOLD #MZ config_dash.LOG.info( "Basic-DASH: Selected {} for the segment {}".format( current_bitrate, segment_number + 1)) elif playback_type.upper() == "SMART": if not weighted_mean_object: weighted_mean_object = WeightedMean( config_dash.SARA_SAMPLE_COUNT) config_dash.LOG.debug( "Initializing the weighted Mean object") # Checking the segment number is in acceptable range segment_download_rate = segment_size / segment_download_time if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: current_bitrate, delay = weighted_dash.weighted_dash( bitrates, dash_player, weighted_mean_object.weighted_mean_rate, current_bitrate, segment_number, segment_size, segment_download_time, get_segment_sizes(dp_object, segment_number + 1)) except IndexError, e: config_dash.LOG.error(e) #with open('sara-dash-chosen-rate.txt', 'a') as sara: #sara.write(str(current_bitrate) + '\t' + str(segment_download_rate) + '\n') if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate".split( ",") stats = ((timeit.default_timer() - start_dload_time), str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) else: header_row = None stats = ((timeit.default_timer() - start_dload_time), str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) str_stats = [str(i) for i in stats] with open(download_log_file, "ab") as log_file_handle: result_writer = csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) elif playback_type.upper() == "NETFLIX": config_dash.LOG.info("Playback is NETFLIX") # Calculate the average segment sizes for each bitrate if not average_segment_sizes: average_segment_sizes = get_average_segment_sizes( dp_object) if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: if segment_size and segment_download_time: segment_download_rate = segment_size / segment_download_time else: segment_download_rate = 0 current_bitrate, netflix_rate_map, netflix_state = netflix_dash.netflix_dash( bitrates, dash_player, segment_download_rate, current_bitrate, average_segment_sizes, netflix_rate_map, netflix_state) config_dash.LOG.info( "NETFLIX: Next bitrate = {}".format( current_bitrate)) except IndexError, e: config_dash.LOG.error(e) else: config_dash.LOG.critical( "Completed segment playback for Netflix") break if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate".split( ",") stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) else: header_row = None stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) str_stats = [str(i) for i in stats] with open(download_log_file, "ab") as log_file_handle: result_writer = 
csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) # If the buffer is full wait till it gets empty # if dash_player.buffer.qsize() >= config_dash.NETFLIX_BUFFER_SIZE: if dash_player.buffer.__len__( ) >= config_dash.NETFLIX_BUFFER_SIZE: #MZ # delay = (dash_player.buffer.qsize() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration delay = (dash_player.buffer.__len__() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration #MZ config_dash.LOG.info( "NETFLIX: delay = {} seconds".format(delay))
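# The CSV stats logging above (check for the log file, build header_row and stats,
# open the file and write) is repeated verbatim in several adaptation branches.  A
# minimal sketch of a helper the repeated block could be factored into; the name
# log_download_stats and the keyword default are assumptions, the column order and
# the "write the header only on first creation" behaviour come from the code above.
# Text mode "a" is used so csv.writer also works on Python 3.
import csv
import os


def log_download_stats(download_log_file, stats,
                       header="EpochTime,CurrentBufferSize,Bitrate,DownloadRate"):
    """Append one row of download statistics, writing the header on first use."""
    write_header = not os.path.exists(download_log_file)
    with open(download_log_file, "a") as log_file_handle:
        result_writer = csv.writer(log_file_handle, delimiter=",")
        if write_header:
            result_writer.writerow(header.split(","))
        result_writer.writerow([str(i) for i in stats])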
def start_playback_smart(dp_object, domain, playback_type=None, download=False,
                         video_segment_duration=None):
    """ Module that downloads the MPD file and all of the representations of
        the MPEG-DASH media.
        Example: start_playback_smart(dp_object, domain, "SMART", DOWNLOAD, video_segment_duration)
        :param dp_object: The DASH-playback object
        :param domain: The domain name of the server (The segment URLS are domain + relative_address)
        :param playback_type: The type of playback
                            1. 'BASIC' - The basic adaptation scheme
                            2. 'SARA' - Segment Aware Rate Adaptation
                            3. 'NETFLIX' - Buffer based adaptation used by Netflix
        :param download: Set to True if the segments are to be stored locally (Boolean). Default False
        :param video_segment_duration: Playback duration of each segment
        :return:
    """
    # Initialize the DASH buffer
    dash_player = dash_buffer.DashPlayer(dp_object.playback_duration, video_segment_duration)
    dash_player.start()
    # A folder to save the segments in
    file_identifier = id_generator()
    config_dash.LOG.info("The segments are stored in %s" % file_identifier)
    dp_list = defaultdict(defaultdict)
    # Creating a dictionary that holds the URLs for each segment at the different bitrates
    for bitrate in dp_object.video:
        # Getting the URL list for each bitrate
        dp_object.video[bitrate] = read_mpd.get_url_list(dp_object.video[bitrate], video_segment_duration,
                                                         dp_object.playback_duration, bitrate)
        if "$Bandwidth$" in dp_object.video[bitrate].initialization:
            dp_object.video[bitrate].initialization = dp_object.video[bitrate].initialization.replace(
                "$Bandwidth$", str(bitrate))
        media_urls = [dp_object.video[bitrate].initialization] + dp_object.video[bitrate].url_list
        for segment_count, segment_url in enumerate(media_urls, dp_object.video[bitrate].start):
            # segment_duration = dp_object.video[bitrate].segment_duration
            dp_list[segment_count][bitrate] = segment_url
    bitrates = list(dp_object.video.keys())
    bitrates.sort()
    average_dwn_time = 0
    segment_files = []
    # For basic adaptation
    previous_segment_times = []
    recent_download_sizes = []
    weighted_mean_object = None
    current_bitrate = bitrates[0]
    previous_bitrate = None
    total_downloaded = 0
    # Delay in terms of the number of segments
    delay = 0
    segment_duration = 0
    segment_size = segment_download_time = None
    # Netflix Variables
    average_segment_sizes = netflix_rate_map = None
    netflix_state = "INITIAL"
    # Start playback of all the segments
    for segment_number, segment in enumerate(dp_list, dp_object.video[current_bitrate].start):
        config_dash.LOG.info(" {}: Processing the segment {}".format(playback_type.upper(), segment_number))
        write_json()
        if not previous_bitrate:
            previous_bitrate = current_bitrate
        if SEGMENT_LIMIT:
            if not dash_player.segment_limit:
                dash_player.segment_limit = int(SEGMENT_LIMIT)
            if segment_number > int(SEGMENT_LIMIT):
                config_dash.LOG.info("Segment limit reached")
                break
        print("segment_number = {}".format(segment_number))
        print("dp_object.video[bitrate].start = {}".format(dp_object.video[bitrate].start))
        if segment_number == dp_object.video[bitrate].start:
            current_bitrate = bitrates[0]
        else:
            if playback_type.upper() == "BASIC":
                current_bitrate, average_dwn_time = basic_dash2.basic_dash2(segment_number, bitrates,
                                                                            average_dwn_time,
                                                                            recent_download_sizes,
                                                                            previous_segment_times,
                                                                            current_bitrate)
                if dash_player.buffer.qsize() > config_dash.BASIC_THRESHOLD:
                    delay = dash_player.buffer.qsize() - config_dash.BASIC_THRESHOLD
                config_dash.LOG.info("Basic-DASH: Selected {} for the segment {}".format(current_bitrate,
                                                                                         segment_number + 1))
            elif playback_type.upper() == "SMART":
                if not weighted_mean_object:
                    weighted_mean_object = WeightedMean(config_dash.SARA_SAMPLE_COUNT)
                    config_dash.LOG.debug("Initializing the weighted Mean object")
                # Checking that the segment number is in the acceptable range
                if segment_number < len(dp_list) - 1 + dp_object.video[bitrate].start:
                    try:
                        current_bitrate, delay = weighted_dash.weighted_dash(
                            bitrates, dash_player, weighted_mean_object.weighted_mean_rate,
                            current_bitrate, get_segment_sizes(dp_object, segment_number + 1))
                    except IndexError as e:
                        config_dash.LOG.error(e)
            elif playback_type.upper() == "NETFLIX":
                config_dash.LOG.info("Playback is NETFLIX")
                # Calculate the average segment sizes for each bitrate
                if not average_segment_sizes:
                    average_segment_sizes = get_average_segment_sizes(dp_object)
                if segment_number < len(dp_list) - 1 + dp_object.video[bitrate].start:
                    try:
                        if segment_size and segment_download_time:
                            segment_download_rate = segment_size / segment_download_time
                        else:
                            segment_download_rate = 0
                        current_bitrate, netflix_rate_map, netflix_state = netflix_dash.netflix_dash(
                            bitrates, dash_player, segment_download_rate, current_bitrate,
                            average_segment_sizes, netflix_rate_map, netflix_state)
                        config_dash.LOG.info("NETFLIX: Next bitrate = {}".format(current_bitrate))
                    except IndexError as e:
                        config_dash.LOG.error(e)
                else:
                    config_dash.LOG.critical("Completed segment playback for Netflix")
                    break
                # If the buffer is full wait till it gets empty
                if dash_player.buffer.qsize() >= config_dash.NETFLIX_BUFFER_SIZE:
                    delay = (dash_player.buffer.qsize() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration
                    config_dash.LOG.info("NETFLIX: delay = {} seconds".format(delay))
            else:
                config_dash.LOG.error("Unknown playback type: {}. Continuing with basic playback".format(
                    playback_type))
                current_bitrate, average_dwn_time = basic_dash.basic_dash(segment_number, bitrates,
                                                                          average_dwn_time,
                                                                          segment_download_time,
                                                                          current_bitrate)
        segment_path = dp_list[segment][current_bitrate]
        segment_url = urljoin(domain, segment_path)
        config_dash.LOG.info("{}: Segment URL = {}".format(playback_type.upper(), segment_url))
        if delay:
            delay_start = time.time()
            config_dash.LOG.info("SLEEPING for {} seconds".format(delay * segment_duration))
            while time.time() - delay_start < (delay * segment_duration):
                time.sleep(1)
            delay = 0
            config_dash.LOG.debug("SLEPT for {} seconds".format(time.time() - delay_start))
        start_time = timeit.default_timer()
        try:
            segment_size, segment_filename = download_segment(segment_url, file_identifier)
            config_dash.LOG.info("{}: Downloaded segment {}".format(playback_type.upper(), segment_url))
        except IOError as e:
            config_dash.LOG.error("Unable to save segment %s" % e)
            return None
        segment_download_time = timeit.default_timer() - start_time
        previous_segment_times.append(segment_download_time)
        recent_download_sizes.append(segment_size)
        # Updating the JSON information
        segment_name = os.path.split(segment_url)[1]
        if "segment_info" not in config_dash.JSON_HANDLE:
            config_dash.JSON_HANDLE["segment_info"] = list()
        config_dash.JSON_HANDLE["segment_info"].append((segment_name, current_bitrate, segment_size,
                                                        segment_download_time))
        total_downloaded += segment_size
        config_dash.LOG.info("{} : The total downloaded = {}, segment_size = {}, segment_number = {}".format(
            playback_type.upper(), total_downloaded, segment_size, segment_number))
        if playback_type.upper() == "SMART" and weighted_mean_object:
            weighted_mean_object.update_weighted_mean(segment_size, segment_download_time)
        segment_info = {'playback_length': video_segment_duration,
                        'size': segment_size,
                        'bitrate': current_bitrate,
                        'data': segment_filename,
                        'URI': segment_url,
                        'segment_number': segment_number}
        segment_duration = segment_info['playback_length']
        dash_player.write(segment_info)
        segment_files.append(segment_filename)
        config_dash.LOG.info("Downloaded %s. Size = %s in %s seconds" % (segment_url, segment_size,
                                                                         str(segment_download_time)))
        if previous_bitrate:
            if previous_bitrate < current_bitrate:
                config_dash.JSON_HANDLE['playback_info']['up_shifts'] += 1
            elif previous_bitrate > current_bitrate:
                config_dash.JSON_HANDLE['playback_info']['down_shifts'] += 1
            previous_bitrate = current_bitrate
    # Waiting for the player to finish playing
    while dash_player.playback_state not in dash_buffer.EXIT_STATES:
        time.sleep(1)
    write_json()
    if not download:
        clean_files(file_identifier)
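# The SMART and NETFLIX branches above rely on per-segment size information taken
# from the MPD via get_segment_sizes() and get_average_segment_sizes(), which are not
# shown in this section.  Minimal sketches of the two helpers, assuming each
# representation in dp_object.video carries a segment_sizes list parsed from the MPD;
# the attribute name and the error handling are assumptions.
def get_segment_sizes(dp_object, segment_number):
    """Return {bitrate: size of the given segment} across all representations."""
    return dict((bitrate, dp_object.video[bitrate].segment_sizes[segment_number])
                for bitrate in dp_object.video)


def get_average_segment_sizes(dp_object):
    """Return {bitrate: average segment size} over each whole representation."""
    average_segment_sizes = dict()
    for bitrate in dp_object.video:
        sizes = [float(size) for size in dp_object.video[bitrate].segment_sizes]
        average_segment_sizes[bitrate] = sum(sizes) / len(sizes) if sizes else 0
    return average_segment_sizes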
def start_playback_smart(dp_object, domain, playback_type=None, download=False, video_segment_duration=None, retrans=False): """ Module that downloads the MPD-FIle and download all the representations of the Module to download the MPEG-DASH media. Example: start_playback_smart(dp_object, domain, "SMART", DOWNLOAD, video_segment_duration) :param dp_object: The DASH-playback object :param domain: The domain name of the server (The segment URLS are domain + relative_address) :param playback_type: The type of playback 1. 'BASIC' - The basic adapataion scheme 2. 'SARA' - Segment Aware Rate Adaptation 3. 'NETFLIX' - Buffer based adaptation used by Netflix 4. 'VLC' - VLC adaptation scheme :param download: Set to True if the segments are to be stored locally (Boolean). Default False :param video_segment_duration: Playback duratoin of each segment :return: """ # Initialize the DASH buffer video_segment_duration = 2 dash_player = dash_buffer.DashPlayer(dp_object.playback_duration, video_segment_duration) start_dload_time = timeit.default_timer() dash_player.start() # A folder to save the segments in file_identifier = id_generator() config_dash.LOG.info("The segments are stored in %s" % file_identifier) dp_list = defaultdict(defaultdict) # Creating a Dictionary of all that has the URLs for each segment and different bitrates for bitrate in dp_object.video: # Getting the URL list for each bitrate dp_object.video[bitrate] = read_mpd.get_url_list( dp_object.video[bitrate], video_segment_duration, dp_object.playback_duration, bitrate) if "$Bandwidth$" in dp_object.video[bitrate].initialization: dp_object.video[bitrate].initialization = dp_object.video[ bitrate].initialization.replace("$Bandwidth$", str(bitrate)) media_urls = [dp_object.video[bitrate].initialization ] + dp_object.video[bitrate].url_list for segment_count, segment_url in enumerate( media_urls, dp_object.video[bitrate].start): # segment_duration = dp_object.video[bitrate].segment_duration dp_list[segment_count][bitrate] = segment_url bitrates = sorted(dp_object.video.keys()) average_dwn_time = 0 segment_files = [] # For basic adaptation global segment_w_chunks init_dl_start_time = timeit.default_timer() segment_w_chunks = [] previous_segment_times = [] recent_download_sizes = [] bitrate_history = [] #segment_dl_rates = [] weighted_mean_object = None current_bitrate = bitrates[0] previous_bitrate = None total_downloaded = 0 bitrate_holder = 0 dl_rate_history = [] # Delay in terms of the number of segments delay = 0 segment_duration = 0 segment_size = segment_download_time = None # Netflix Variables average_segment_sizes = netflix_rate_map = None netflix_state = "INITIAL" RETRANSMISSION_SWITCH = False retransmission_delay = 0 retransmission_delay_switch = False # Start playback of all the segments #for segment_number, segment in enumerate(dp_list, dp_object.video[current_bitrate].start): #for segment_number in dp_list:s segment_number = 1 original_segment_number = 1 while segment_number < len(dp_list): if retransmission_delay_switch == True: #segment_number = original_segment_number retransmission_delay_switch = False segment = segment_number #print len(dp_list) #print "dp_list" #print segment #print segment_number #print "++++++++++++" config_dash.LOG.info(" {}: Processing the segment {}".format( playback_type.upper(), segment_number)) write_json() if not previous_bitrate: previous_bitrate = current_bitrate if SEGMENT_LIMIT: if not dash_player.segment_limit: dash_player.segment_limit = int(SEGMENT_LIMIT) if segment_number > 
int(SEGMENT_LIMIT): config_dash.LOG.info("Segment limit reached") break if segment_number == dp_object.video[bitrate].start: current_bitrate = bitrates[0] else: if playback_type.upper() == "BASIC": current_bitrate, average_dwn_time = basic_dash2.basic_dash2( segment_number, bitrates, average_dwn_time, recent_download_sizes, previous_segment_times, current_bitrate) # if dash_player.buffer.qsize() > config_dash.BASIC_THRESHOLD: if dash_player.buffer.__len__( ) > config_dash.BASIC_THRESHOLD: #MZ # delay = dash_player.buffer.qsize() - config_dash.BASIC_THRESHOLD delay = dash_player.buffer.__len__( ) - config_dash.BASIC_THRESHOLD #MZ config_dash.LOG.info( "Basic-DASH: Selected {} for the segment {}".format( current_bitrate, segment_number + 1)) elif playback_type.upper() == "SMART": if not weighted_mean_object: weighted_mean_object = WeightedMean( config_dash.SARA_SAMPLE_COUNT) config_dash.LOG.debug( "Initializing the weighted Mean object") # Checking the segment number is in acceptable range segment_download_rate = segment_size / segment_download_time if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: current_bitrate, delay = weighted_dash.weighted_dash( bitrates, dash_player, weighted_mean_object.weighted_mean_rate, current_bitrate, segment_number, segment_size, segment_download_time, get_segment_sizes(dp_object, segment_number + 1)) except IndexError as e: config_dash.LOG.error(e) #with open('sara-dash-chosen-rate.txt', 'a') as sara: #sara.write(str(current_bitrate) + '\t' + str(segment_download_rate) + '\n') if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate".split( ",") stats = ((timeit.default_timer() - start_dload_time), str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) else: header_row = None stats = ((timeit.default_timer() - start_dload_time), str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) str_stats = [str(i) for i in stats] with open(download_log_file, "a") as log_file_handle: result_writer = csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) elif playback_type.upper() == "NETFLIX": config_dash.LOG.info("Playback is NETFLIX") # Calculate the average segment sizes for each bitrate if not average_segment_sizes: average_segment_sizes = get_average_segment_sizes( dp_object) if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: if segment_size and segment_download_time: segment_download_rate = segment_size / segment_download_time else: segment_download_rate = 0 current_bitrate, netflix_rate_map, netflix_state = netflix_dash.netflix_dash( bitrates, dash_player, segment_download_rate, current_bitrate, average_segment_sizes, netflix_rate_map, netflix_state) config_dash.LOG.info( "NETFLIX: Next bitrate = {}".format( current_bitrate)) except IndexError as e: config_dash.LOG.error(e) else: config_dash.LOG.critical( "Completed segment playback for Netflix") break if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate".split( ",") stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) else: header_row = None stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) str_stats = [str(i) for i in stats] with open(download_log_file, "ab") as log_file_handle: result_writer = 
csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) # If the buffer is full wait till it gets empty # if dash_player.buffer.qsize() >= config_dash.NETFLIX_BUFFER_SIZE: if dash_player.buffer.__len__( ) >= config_dash.NETFLIX_BUFFER_SIZE: #MZ # delay = (dash_player.buffer.qsize() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration delay = (dash_player.buffer.__len__() - config_dash.NETFLIX_BUFFER_SIZE + 1) * segment_duration #MZ config_dash.LOG.info( "NETFLIX: delay = {} seconds".format(delay)) elif playback_type.upper() == "VLC": config_dash.LOG.info( "Unknown playback type:{}. Continuing with basic playback". format(playback_type)) config_dash.LOG.info("VLC: Current Bitrate %d" % current_bitrate) # current_bitrate = basic_dash.basic_dash(segment_number, bitrates, segment_download_time, current_bitrate, dash_player.buffer.qsize(), segment_size) current_bitrate = basic_dash.basic_dash( segment_number, bitrates, segment_download_time, current_bitrate, dash_player.buffer.__len__(), segment_size) #MZ with open('vlc-dash-chosen-rate.txt', 'a') as vlc: vlc.write(str(current_bitrate) + '\n') # if dash_player.buffer.qsize() >= (config_dash.NETFLIX_BUFFER_SIZE): if dash_player.buffer.__len__() >= ( config_dash.NETFLIX_BUFFER_SIZE): #MZ delay = 1 else: delay = 0 if segment_number < len( dp_list) - 1 + dp_object.video[bitrate].start: try: if segment_size and segment_download_time: segment_download_rate = segment_size / segment_download_time else: segment_download_rate = 0 except IndexError as e: config_dash.LOG.error(e) if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate".split( ",") stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) else: header_row = None stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate) str_stats = [str(i) for i in stats] with open(download_log_file, "ab") as log_file_handle: result_writer = csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) elif playback_type.upper() == "EMPIRICAL": buffer_upper = config_dash.NETFLIX_BUFFER_SIZE * BUFFER_THRESHOLD_UPPER buffer_lower = config_dash.NETFLIX_BUFFER_SIZE * BUFFER_THRESHOLD_LOWER #segment_sizes_test = get_segment_sizes(dp_object,segment_number) #print "================" #print segment_sizes_test #print segment_number #print "================" if segment_size == 0: curr_rate = 0 else: curr_rate = (segment_size * 8) / segment_download_time #segment_dl_rates.append(curr_rate) average_segment_sizes = get_average_segment_sizes(dp_object) dl_rate_history.append(curr_rate) #print "-----------!!!!!!!!" 
#print dl_rate_history #print "!!!!!!!------------" if len(dl_rate_history) > 10: dl_rate_history.pop(0) # current_bitrate = empirical_dash.empirical_dash(average_segment_sizes, segment_number, bitrates, segment_download_time, current_bitrate, dash_player.buffer.qsize(), segment_size, get_segment_sizes(dp_object,segment_number-2), video_segment_duration, dl_rate_history, bitrate_history, segment_w_chunks, DOWNLOAD_CHUNK) emp_func_time = timeit.default_timer() current_bitrate = empirical_dash.empirical_dash( average_segment_sizes, segment_number, bitrates, segment_download_time, current_bitrate, dash_player.buffer.__len__(), segment_size, get_segment_sizes(dp_object, segment_number - 2), video_segment_duration, dl_rate_history, bitrate_history, segment_w_chunks, DOWNLOAD_CHUNK) #MZ with open("/dev/SQUAD/http1_seg_time", 'a') as seg_time: seg_time.write("{},{}\n".format( segment_number, timeit.default_timer() - emp_func_time)) bitrates = [float(i) for i in bitrates] if len(segment_w_chunks) > 10: #segment_w_chunks = numpy.delete(segment_w_chunks, (0), axis=0) segment_w_chunks.pop(0) print("deleted elements!") #print bitrates # if dash_player.buffer.qsize() >= buffer_upper and segment_number > 10: if segment_number > 10: if current_bitrate <= bitrate_history[ -1] or dash_player.buffer.__len__() < buffer_lower: print( "current_bitrate <= bitrate_history[-1] or dash_player.buffer.__len__() < buffer_lower" ) if dash_player.buffer.__len__() >= buffer_upper: #MZ with open('empirical-buffer-holder.txt', 'a') as buh: buh.write( str(segment_number) + '\t' + '1' + '\n') if bitrate_holder == 1: #print "bitrate holder: ON" #print "bitrate_history[-1]: " + str(bitrate_history[-1]) #print bitrate_history current_bitrate = bitrate_history[-1] elif len(bitrates ) > 1 and current_bitrate < bitrates[int( bitrates.index(bitrate_history[-1]) - 2)]: #print "current_rate! : " + str(current_bitrate) #current_bitrate = bitrate_history[-1] next_bitrate = int( round(bitrate_history[-1] + current_bitrate) / 2) current_bitrate = min( bitrates, key=lambda x: abs(x - next_bitrate)) #next_q_layer = int(round((bitrates.index(bitrate_history[-1]) + bitrates.index(current_bitrate)) / 2)) #print "next_q_layer! : " + str(next_q_layer) #current_bitrate = bitrates[next_q_layer] #print "changed current_rate! : " + str(current_bitrate) bitrate_holder = 1 #elif (current_bitrate > bitrates[int(bitrates.index(bitrate_history[-1]) - 2)]) and (current_bitrate < bitrates[int(bitrates.index(bitrate_history[-1]))]): elif len(bitrates) > 1 and ( current_bitrate >= bitrates[int( bitrates.index(bitrate_history[-1]) - 2)]) and (current_bitrate < bitrate_history[-1]): #print "holding bitrate!" 
#print bitrate_history current_bitrate = bitrate_history[-1] elif bitrate_holder == 0 and current_bitrate < bitrates[ -1] and (current_bitrate == bitrates[int( bitrates.index(bitrate_history[-1]) + 1)]): current_bitrate = bitrate_history[-1] elif bitrate_holder == 0: print("go ahead!") elif bitrate_holder == 1 and dash_player.buffer.__len__( ) >= buffer_lower: #print "bitrate holder: ON; buffer > lower_bound" #print bitrate_history #print "bitrate_history[-1]: " + str(bitrate_history[-1]) current_bitrate = bitrate_history[-1] if dash_player.buffer.__len__() < buffer_lower: #print "buffer < lower" #print dash_player.buffer.__len__() #print buffer_lower bitrate_holder = 0 if current_bitrate != bitrates[-1] and bitrate_history[ -1] < bitrates[-1] and ( current_bitrate == bitrates[int( bitrates.index(bitrate_history[-1]) + 1)]): current_bitrate = bitrate_history[-1] elif current_bitrate > bitrate_history[ -1] and dash_player.buffer.__len__( ) >= buffer_upper: #print "current_bitrate > bitrate_history[-1] and dash_player.buffer.__len__() >= buffer_lower" #print current_bitrate #print bitrate_history[-1] #print buffer_lower #print "current_bitrate > bitrate_history[-1] and dash_player.buffer.__len__() >= buffer_lower" bitrate_holder = 0 #if (bitrates.index(current_bitrate) - bitrates.index(bitrate_history[-1])) <= 2 and (bitrates.index(current_bitrate) - bitrates.index(bitrate_history[-1])) >= 0: # current_bitrate = bitrate_history[-1] #current_bitrate = bitrates[bitrates.index(bitrate_history[-1])/2] print("---------------current_bitrate: " + str(current_bitrate)) bitrate_actual_time = timeit.default_timer( ) - init_dl_start_time # if dash_player.buffer.qsize() >= (config_dash.NETFLIX_BUFFER_SIZE): if segment_size and segment_download_time: segment_download_rate = segment_size / segment_download_time #with open('/dev/SQUAD/http2_read_seg_size_rate.txt', 'a') as rate_f: #rate_f.write(str(segment_size)+'\t'+str(segment_download_rate)+'\n') else: segment_download_rate = 0 RETRANS_OFFSET = False #original_segment_number = segment_number if segment_number > 10 and retrans: print('++++++++++++++++++++++++++') print(dash_player.buffer.__len__()) print(RETRANS_THRESHOLD_UPPER * config_dash.NETFLIX_BUFFER_SIZE) print(RETRANSMISSION_SWITCH) print('++++++++++++++++++++++++++') if dash_player.buffer.__len__() >= ( RETRANS_THRESHOLD_UPPER * config_dash.NETFLIX_BUFFER_SIZE ) or RETRANSMISSION_SWITCH == True: with open('empirical-retrans.txt', 'a') as retrans: retrans.write( str(segment_number) + '\t' + '2' + '\n') with open('empirical-debug.txt', 'a') as emp: emp.write("!!!!!!!RETRANSMISSION!!!!!!!!" 
+ '\n') print("RETRANSMISSION_SWITCH = True !") RETRANSMISSION_SWITCH = True original_segment_number = segment_number original_current_bitrate = current_bitrate current_bitrate, segment_number = retransmission.retransmission( dp_object, current_bitrate, segment_number, dash_player.buffer, bitrates, segment_download_rate, config_dash.NETFLIX_BUFFER_SIZE, video_segment_duration) if dash_player.buffer.__len__() < ( RETRANS_THRESHOLD_LOWER * config_dash.NETFLIX_BUFFER_SIZE): RETRANSMISSION_SWITCH = False #dl_rate based retransmission: #if segment_number != original_segment_number and (curr_rate - current_bitrate >= original_current_bitrate): if segment_number != original_segment_number: retransmission_delay_switch = True seg_num_offset = -(original_segment_number - segment_number + 1) bitrate_history.pop(seg_num_offset) bitrate_history.insert(seg_num_offset, current_bitrate) RETRANS_OFFSET = True retransmission_delay += 1 with open('empirical-debug.txt', 'a') as emp: #for item in bitrate_history: # emp.write("%s " % item) emp.write('\n' + str(segment_number) + '\t' + str(bitrate_actual_time) + '\t' + str(current_bitrate) + '\t' + str(dash_player.buffer.__len__()) + 'retr' + '\n') #print "###########" with open('empirical-dash-chosen-rate.txt', 'a') as emp: emp.write( str(segment_number) + '\t' + str(bitrate_actual_time) + '\t' + str(segment_download_rate * 8) + '\t' + str(current_bitrate) + '\t' + str(dash_player.buffer.__len__()) + '\n') if dash_player.buffer.__len__() >= ( config_dash.NETFLIX_BUFFER_SIZE): #MZ delay = 1 else: delay = 0 #print segment_number #print "==============" #print dp_object.video[current_bitrate] #print "==============" if RETRANS_OFFSET == False: bitrate_history.append(current_bitrate) #segment_number -= retransmission_delay #retransmission_delay_done = True print("-------------+++++++++++++") print(dp_list[segment][current_bitrate]) print(urllib.parse.urljoin(domain, segment_path)) print("-------------+++++++++++++") if not os.path.exists(download_log_file): header_row = "EpochTime, CurrentBufferSize, Bitrate, DownloadRate, SegmentNumber,SegmentSize,SegmentDownloadTime".split( ",") stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate, segment_number, segment_size, segment_download_time) else: header_row = None stats = (timeit.default_timer() - start_dload_time, str(dash_player.buffer.__len__()), current_bitrate, segment_download_rate, segment_number, segment_size, segment_download_time) str_stats = [str(i) for i in stats] with open(download_log_file, "a") as log_file_handle: result_writer = csv.writer(log_file_handle, delimiter=",") if header_row: result_writer.writerow(header_row) result_writer.writerow(str_stats) segment_path = dp_list[segment][current_bitrate] segment_url = urllib.parse.urljoin(domain, segment_path) #print "+++++++++++++" #print segment_path #print segment_url #print dp_list[segment] #print "+++++++++++++" config_dash.LOG.info("{}: Segment URL = {}".format( playback_type.upper(), segment_url)) if delay: delay_start = time.time() config_dash.LOG.info("SLEEPING for {}seconds ".format( delay * segment_duration)) while time.time() - delay_start < (delay * segment_duration): time.sleep(1) delay = 0 config_dash.LOG.debug("SLEPT for {}seconds ".format(time.time() - delay_start)) start_time = timeit.default_timer() try: config_dash.LOG.info("{}: Started downloading segment {}".format( playback_type.upper(), segment_url)) segment_size, segment_filename, segment_w_chunks = 
download_segment( segment_url, file_identifier) config_dash.LOG.info("{}: Finished Downloaded segment {}".format( playback_type.upper(), segment_url)) except IOError as e: config_dash.LOG.error("Unable to save segment: {0}".format( e.filename)) return None segment_download_time = timeit.default_timer() - start_time segment_download_rate = segment_size / segment_download_time #with open('/dev/SQUAD/tst_hyper_http1_read_mod_chunk_seg_time_rate.txt', 'a') as rate_f: # rate_f.write(str(segment_size)+'\t'+str(segment_download_time)+'\t'+str(segment_download_rate*8)+'\n') previous_segment_times.append(segment_download_time) recent_download_sizes.append(segment_size) # Updating the JSON information segment_name = os.path.split(segment_url)[1] if "segment_info" not in config_dash.JSON_HANDLE: config_dash.JSON_HANDLE["segment_info"] = list() config_dash.JSON_HANDLE["segment_info"].append( (segment_name, current_bitrate, segment_size, segment_download_time)) total_downloaded += segment_size config_dash.LOG.info( "{} : The total downloaded = {}, segment_size = {}, segment_number = {}" .format(playback_type.upper(), total_downloaded, segment_size, segment_number)) if playback_type.upper() == "SMART" and weighted_mean_object: weighted_mean_object.update_weighted_mean(segment_size, segment_download_time) segment_info = { 'playback_length': video_segment_duration, 'size': segment_size, 'bitrate': current_bitrate, 'data': segment_filename, 'URI': segment_url, 'segment_number': segment_number, 'segment_layer': bitrates.index(current_bitrate) } segment_duration = segment_info['playback_length'] dash_player.write(segment_info) segment_files.append(segment_filename) segment_number += 1 if retransmission_delay_switch == True: segment_number = original_segment_number #if segment_number > 10: # if original_segment_number != segment_number: # print "!!!!!!!!! not equal !!!!!!!!!!!!" # print "segment_number " + str(segment_number) # print "original segment number : " + str(original_segment_number) # retransmission_delay_switch = True # #original_segment_number += 1 config_dash.LOG.info( "Download info: segment URL: %s. Size = %s in %s seconds" % (segment_url, segment_size, str(segment_download_time))) if previous_bitrate: if previous_bitrate < current_bitrate: config_dash.JSON_HANDLE['playback_info']['up_shifts'] += 1 elif previous_bitrate > current_bitrate: config_dash.JSON_HANDLE['playback_info']['down_shifts'] += 1 previous_bitrate = current_bitrate # waiting for the player to finish playing while dash_player.playback_state not in dash_buffer.EXIT_STATES: time.sleep(1) write_json() if not download: clean_files(file_identifier)
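# The retransmission trigger in the EMPIRICAL branch above amounts to a hysteresis
# check on the playback buffer: retransmission turns on when the buffer occupancy
# reaches RETRANS_THRESHOLD_UPPER * config_dash.NETFLIX_BUFFER_SIZE and turns off
# again once it drains below RETRANS_THRESHOLD_LOWER * config_dash.NETFLIX_BUFFER_SIZE.
# A standalone sketch of that decision, distilled from the code above; the function
# name and argument names are assumptions, not part of the original module.
def should_retransmit(buffer_len, retrans_switch, buffer_size,
                      upper_fraction, lower_fraction):
    """Return the updated retransmission switch using simple buffer hysteresis."""
    if buffer_len >= upper_fraction * buffer_size:
        # Enough buffered playback time to spend bandwidth on upgrading
        # already-downloaded segments.
        return True
    if buffer_len < lower_fraction * buffer_size:
        # Buffer is draining; go back to fetching new segments only.
        return False
    # Between the two thresholds, keep the previous decision.
    return retrans_switch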