Example #1
0
    def log(self):
        """Log torrent transfer-rate statistics (and download time, once
        the download has finished) to the console and the stats file."""
        dl_mean = stats.avg(self.download_rates)
        dl_std = stats.std(self.download_rates)
        ul_mean = stats.avg(self.upload_rates)
        ul_std = stats.std(self.upload_rates)

        logger.log("--*--Torrent statistics--*--")
        logger.log("Download rate (KiB/s) - mean: %f" % dl_mean)
        logger.log("Download rate (KiB/s) - standard deviation: %f" % dl_std)
        logger.log("Upload rate (KiB/s) - mean: %f" % ul_mean)
        logger.log("Upload rate (KiB/s) - standard deviation: %f" % ul_std)

        # File output uses CSV-style "name, value" rows with CRLF endings.
        for fmt, value in (
                ("download_rate_mean, %f\r\n", dl_mean),
                ("download_rate_stdev, %f\r\n", dl_std),
                ("upload_rate_mean, %f\r\n", ul_mean),
                ("upload_rate_stdev, %f\r\n", ul_std)):
            logger.log_to_file(fmt % value)

        if not self.download_finished:
            # -1 marks an unfinished download in the stats file.
            logger.log_to_file("download_time, %d\r\n" % -1)
        else:
            logger.log("Download time (s): %d" % self.download_time)
            logger.log_to_file("download_time, %d\r\n" % self.download_time)

        self.buffer_manager.log()
Example #2
0
def main():
    """Parse every .dat file in opts['data_dir'] and write a JSON timing
    summary (means and standard deviations of two time deltas) to
    <data_dir>/summary.
    """
    opts = parse_args(sys.argv)

    # Only the top level of data_dir is scanned (break after first walk
    # entry).  Initialising the list first avoids a NameError when the
    # directory does not exist, and the comprehension (unlike filter())
    # yields a real list on both Python 2 and 3.
    filenames = []
    for (_root, _dirnames, names) in os.walk(opts['data_dir']):
        filenames = [name for name in names if name.endswith('.dat')]
        break

    data = [parse_file(opts['data_dir'] + '/' + filename) for filename in filenames]

    # Convert cycle counts to seconds using the CPU frequency (MHz -> Hz);
    # hoisted out of the comprehensions since it is loop-invariant.
    hz = opts['comp_mhz'] * 1000000
    delta_t1 = [float(datum['end'] - datum['start']) / hz for datum in data]
    delta_t2 = [float(datum['register'] - datum['start']) / hz for datum in data]

    result = {}
    result['qemu_delta_t'] = avg(delta_t1)
    result['qemu_delta_t_stddev'] = stddev(delta_t1)
    result['total_delta_t'] = avg(delta_t2)
    # NOTE(review): 'sttdev' looks like a typo for 'stddev', but downstream
    # consumers of the summary file may rely on this key, so it is kept.
    result['total_delta_t_sttdev'] = stddev(delta_t2)
    result['hostname'] = socket.gethostname()
    result['comp_mhz'] = opts['comp_mhz']
    result['override_clean_check'] = True
    result['git_rev'] = get_git_rev(result['override_clean_check'])

    result_filename = opts['data_dir'] + '/' + 'summary'
    # Context manager guarantees the file is closed even if json.dump raises.
    with open(result_filename, 'w+') as f:
        json.dump(result, f, indent=4)
        f.write("\n")

    print('Wrote summary to %s' % result_filename)
Example #3
0
def main():
    """Aggregate per-run CPU (mpstat) and GPU samples from opts['data_dir']
    and write a JSON summary to <data_dir>/summary.
    """
    global sample_cutoff, gpu_sample_cutoff, sample_cutoff_last

    opts = parse_args(sys.argv)
    sample_cutoff = opts.get('sample_cutoff', sample_cutoff)
    sample_cutoff_last = opts.get('sample_cutoff_last', sample_cutoff_last)

    # Count runs by probing for consecutively numbered sample files.
    num_runs = 1
    while os.path.exists(sample_filename(opts['data_dir'], num_runs)):
        num_runs += 1
    num_runs -= 1

    runs = []
    for i in range(1, num_runs + 1):
        samples_file = sample_filename(opts['data_dir'], i)
        samples = parse_mpstat(samples_file)
        gpu_samples_file = gpu_sample_filename(opts['data_dir'], i)
        gpu_samples = parse_gpu_samples(gpu_samples_file)

        # NOTE(review): looks like leftover debug output — kept to preserve
        # existing stdout behaviour; consider removing.
        print(samples)

        run = {}
        run['avg'] = avg(samples)
        run['stddev'] = stddev(samples)
        # GPU frame samples are optional (absent for non-encoding runs).
        if gpu_samples:
            run['avg_frames'] = avg(gpu_samples)
            run['stddev_frames'] = stddev(gpu_samples)
        runs.append(run)

    result = {}
    result['sample_cutoff'] = sample_cutoff
    result['sample_cutoff_last'] = sample_cutoff_last
    result['gpu_sample_cutoff'] = gpu_sample_cutoff
    result['git_rev'] = get_git_rev(override_clean_check=True)
    result['override_clean_check'] = True
    result['runs'] = runs
    # 'in' replaces the Python-2-only dict.has_key(); behaviour is identical.
    result['enc'] = 'avg_frames' in runs[0]

    run_avgs = [run['avg'] for run in runs]
    run_stddevs = [run['stddev'] for run in runs]
    result['run_avg'] = avg(run_avgs)
    result['run_stddev'] = stddev(run_avgs)
    # 'run_var' is the worst-case per-run spread, not a variance proper.
    result['run_var'] = max(run_stddevs)

    if result['enc']:
        run_avg_frames = [run['avg_frames'] for run in runs]
        run_stddev_frames = [run['stddev_frames'] for run in runs]
        result['run_avg_frames'] = avg(run_avg_frames)
        result['run_stddev_frames'] = stddev(run_avg_frames)
        result['run_var_frames'] = max(run_stddev_frames)

    result_filename = opts['data_dir'] + '/' + 'summary'
    # Context manager guarantees the file is closed even if json.dump raises.
    with open(result_filename, 'w+') as f:
        json.dump(result, f, indent=4)
        f.write('\n')

    print('Wrote summary to %s' % result_filename)
Example #4
0
def centroid(*points):
    """Calculate the centroid point of a points set in a 2-dimensional space"""
    # Two running-average coroutines, one per axis (Python 2 generator API).
    cx, cy = stats.avg(), stats.avg()
    # Prime the coroutines so they are ready to accept .send() values.
    cx.next()
    cy.next()
    for px, py in points:
        cx.send(float(px))
        cy.send(float(py))
    # A final .next() yields each accumulated mean.
    return cx.next(), cy.next()
Example #5
0
def centroid(*points):
    """Calculate the centroid point of a points set in a 2-dimensional space.

    Each item of *points* is an (x, y) pair.  Uses two stats.avg()
    running-average coroutines (Python 2 generator API: .next()/.send())
    and returns a (mean_x, mean_y) tuple.
    """
    cx = stats.avg()
    cy = stats.avg()
    # Prime the coroutines so they are ready to accept .send() values.
    cx.next()
    cy.next()
    for x, y in points:
        x = float(x)
        y = float(y)
        cx.send(x)
        cy.send(y)
    # The final .next() presumably yields each accumulated mean -- TODO
    # confirm against the stats.avg implementation.
    return cx.next(), cy.next()
Example #6
0
    def log(self):
        """Log playback-buffering statistics to the console and stats file."""
        interruptions = len(self.buffering_time) - 1

        # The initial wait is only known once the player has left its first
        # buffering state (or has already been interrupted at least once);
        # -1 marks "still in initial buffering".
        if interruptions <= 0 and self.is_buffering:
            initial_wait = -1
        else:
            initial_wait = self.buffering_time[0]

        # Drop the initial-buffering sample and, if still buffering, the
        # incomplete trailing one.
        samples = self.buffering_time[1:]
        if self.is_buffering:
            samples = samples[:-1]

        mean_time = stats.avg(samples)
        std_time = stats.std(samples)

        logger.log("--*--Buffer statistics--*--")
        logger.log("Time to start playback (s): %d" % initial_wait)
        logger.log("Number of interruptions: %d" % interruptions)
        logger.log("Interruption time (s) - mean: %f" % mean_time)
        logger.log("Interruption time (s) - standard deviation: %f" % std_time)
        logger.log("Interruptions (s): %r" % samples)

        logger.log_to_file("playback_start_time, %d\r\n" % initial_wait)
        logger.log_to_file("interruptions, %d\r\n" % interruptions)
        logger.log_to_file("interruption_time_mean, %f\r\n" % mean_time)
        logger.log_to_file("interruption_time_stdev, %f\r\n" % std_time)
    def log(self):
        """Log playback-buffering statistics to the console and stats file."""
        # buffering_time[0] is the initial buffering; the rest are interruptions.
        interruptions = len(self.buffering_time) - 1

        # Checking if player is on initial buffering state; -1 marks an
        # initial wait that is not yet known.
        if interruptions > 0 or not self.is_buffering:
            initial_wait = self.buffering_time[0]
        else:
            initial_wait = -1

        # Removing invalid samples: drop the initial-buffering entry and,
        # if still buffering, the incomplete trailing one.
        buffering_time = self.buffering_time[1:]
        if self.is_buffering:
            buffering_time = buffering_time[:-1]

        # Calculating statistics
        mean_time = stats.avg(buffering_time)
        std_time = stats.std(buffering_time)

        # Logging
        logger.log("--*--Buffer statistics--*--")
        logger.log("Time to start playback (s): %d" % initial_wait)
        logger.log("Number of interruptions: %d" % interruptions)
        logger.log("Interruption time (s) - mean: %f" % mean_time)
        logger.log("Interruption time (s) - standard deviation: %f" % std_time)
        logger.log("Interruptions (s): %r" % buffering_time)

        logger.log_to_file("playback_start_time, %d\r\n" % initial_wait)
        logger.log_to_file("interruptions, %d\r\n" % interruptions)
        logger.log_to_file("interruption_time_mean, %f\r\n" % mean_time)
        logger.log_to_file("interruption_time_stdev, %f\r\n" % std_time)
Example #8
0
def write_travel_times(fileName="output.csv"):
	outputFile = open(fileName, "w")
	result = get_rides_fastest()
	print "**** Writing results to file ****"
	length = len(result.keys())
	for i, key in enumerate(result.keys()):
		outputFile.write(str(key[0]) + ";" + str(key[1]) + ",")
		outputFile.write("%.02f" % avg(result[key]) + "," + "%.02f" % stddev(result[key]) + "," + "%.02f" % stderr(result[key]) + "," + str(len(result[key])))
		# outputFile.write(",")
		# outputFile.write(",".join([str(element) for element in result[key]]))
		outputFile.write("\n")
		if i % 400000 == 0:
			progress = 100.0*i/length
			if progress > 105:
				break
			if i > 0:
				sys.stdout.write('\r')
			sys.stdout.write("Progress: " + "%.01f" % progress + "%" + " completed.")
			sys.stdout.flush()
	sys.stdout.write("\rProgress: " + "%.01f" % 100.0 + "%" + " completed.\n")
	sys.stdout.flush()
	outputFile.close()
	return None
				short_list.append(short_corr)
			
				print ticker + " - Processed " + str((i / float(len(earnings_list))) * 100) + '%'
			else:
				print "Warning: could not get historical prices for ticker: " + ticker
				earnings_list.remove(earnings_info)
			
			i += 1	
		
		corr_change = stats.minus(short_list, long_list)
		# corr_change = [math.fabs(a) for a in corr_change]
		
		print len(corr_change)
		print len(earnings_list)
		
		change_avg = stats.avg(corr_change)	
		change_sigma = stats.sigma(corr_change)
		
		price_changes = []
			
		for i in range(0, len(earnings_list)):
			earnings_list[i].append(long_list[i])
			earnings_list[i].append(short_list[i])
			
			prev_date = yfDate(getPrevWeekday(earnings_date))
			next_date = yfDate(getNextWeekday(earnings_date))
			
			ticker = earnings_list[i][1]
			
			before_quote = yf.get_historical_prices(ticker, prev_date, prev_date)
			before_quote = yfutils.get_open_as_float(before_quote)