import csv
import math
import os

# FOLDER_NAME, FILENAME, PSNR, SSIM, IGNORE_LINE_NAMES and FRAMES_IN_SEG are
# module-level constants defined elsewhere, as are avg() and Quality
# (both sketched just below).


def get_qualities(layer):
    qualities = []
    with open(os.path.join('data', FOLDER_NAME, 'quality',
                           FILENAME.format(layer))) as f:
        psnrs = []
        ssims = []
        # Skip the two header lines before the tab-separated records.
        next(f)
        next(f)
        reader = csv.reader(f, delimiter='\t')
        for row in reader:
            if row[0] == "Avg:":
                # Sanity-check the file's own averages against ours.
                if not math.isclose(float(row[PSNR]), avg(psnrs),
                                    abs_tol=0.01):
                    raise Exception(
                        "Wrong PSNR average for layer {}".format(layer))
                if not math.isclose(float(row[SSIM]), avg(ssims),
                                    abs_tol=1e-05):
                    raise Exception(
                        "Wrong SSIM average for layer {}".format(layer))
            elif row[0] not in IGNORE_LINE_NAMES:
                psnrs.append(float(row[PSNR]))
                ssims.append(float(row[SSIM]))
    # Group the per-frame metrics into fixed-size segments.
    for i in range(0, len(psnrs), FRAMES_IN_SEG):
        psnr_subset = psnrs[i:i + FRAMES_IN_SEG]
        ssim_subset = ssims[i:i + FRAMES_IN_SEG]
        qualities.append(Quality(psnr_subset, ssim_subset))
    return qualities
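# Nearly every snippet in this file leans on a small avg() helper, and
# get_qualities() on a Quality container; neither definition appears in this
# excerpt. A minimal sketch of both, assuming avg() is a plain arithmetic
# mean and Quality just bundles the two metric lists:
from typing import NamedTuple, Sequence


def avg(values):
    # Arithmetic mean of a non-empty iterable of numbers.
    values = list(values)
    return sum(values) / len(values)


class Quality(NamedTuple):
    # Per-segment PSNR and SSIM samples collected by get_qualities().
    psnrs: Sequence[float]
    ssims: Sequence[float]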
from datetime import datetime

import networkx as nx

# ApproximateDistanceOracles, timeit, average_difference and draw_graph come
# from the surrounding project (timeit is sketched just below).


def run(G, k, iterations=5):
    total_average = 0.0
    max_average = 0.0
    min_average = float('inf')
    average_query_time = 0.0
    average_dijkstra_time = 0.0
    with open(f'results/{k}_{G.name[:-9]}.log', 'w') as output:
        print(f'Running algorithm on {G.name}, k={k}', file=output)
        print(f'Nodes: {len(G)}, Edges: {len(G.edges)}', file=output)
        # draw_graph.draw(G)  # how to draw the graph with its weights

        algo = ApproximateDistanceOracles(G, k=k)
        time = {}
        timeit(algo.pre_processing, output=time)()
        print('Pre-processing time:', time['pre_processing'] / 1000,
              file=output)

        print('Running algorithm', file=output)
        for i in range(iterations):
            start = datetime.now()
            # Iterate over each node and its exact shortest-path distances.
            for source_node, dijkstra_distances in \
                    nx.all_pairs_dijkstra_path_length(G):
                average_dijkstra_time += (datetime.now() -
                                          start).total_seconds()

                # Query and time our algorithm for every target node.
                times = {}
                algo_distances = [
                    timeit(algo.compute_distance,
                           log_name=f'{source_node, target_node}',
                           output=times)(source_node, target_node)
                    for target_node in G
                ]

                # Compare the oracle's results against Dijkstra's.
                node_stretch = average_difference(
                    algo_distances, dijkstra_distances.values())
                min_average = min(min_average, node_stretch)
                max_average = max(max_average, node_stretch)
                total_average += node_stretch
                average_query_time += avg(times.values())
                start = datetime.now()

        d = len(G) * iterations
        total_average /= d
        average_query_time /= d
        average_dijkstra_time /= d
        print(f'Total average stretch: {total_average}',
              f'Average query time: {average_query_time}',
              f'Average dijkstra time: {average_dijkstra_time}',
              f'Max stretch value: {max_average}',
              f'Min stretch value: {min_average}',
              sep='\n',
              file=output)
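# timeit() above is not the stdlib module but, by the look of the call
# sites, a small wrapper that runs a callable and records its elapsed time
# (apparently in milliseconds, given the /1000 when printing) into a dict
# under log_name or the function's name. A minimal sketch under those
# assumptions:
from datetime import datetime


def timeit(func, log_name=None, output=None):
    def wrapper(*args, **kwargs):
        start = datetime.now()
        result = func(*args, **kwargs)
        elapsed_ms = (datetime.now() - start).total_seconds() * 1000
        if output is not None:
            output[log_name or func.__name__] = elapsed_ms
        return result

    return wrapper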
def run_timing(read_end, write_end):
    dts = []
    res1 = Channel()
    res2 = Channel()
    for i in range(100):
        N = 1000
        print(f" Run {i}:", end="")
        run_CSP(writer_timed(N, write_end, res1.write),
                reader_timed(N, read_end, res2.write),
                get_res(N, res1.read, res2.read, dts))
    print(" -- min {:.3f} avg {:.3f} max {:.3f} ".format(
        min(dts), avg(dts), max(dts)))
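# Channel, run_CSP, writer_timed, reader_timed and get_res come from the
# surrounding CSP benchmark code (PyCSP/aPyCSP-style). get_res presumably
# collects the timestamps the writer and reader publish on res1/res2 and
# appends a per-operation time to dts, analogous to the explicit (t2 - t1)
# computation in the variant below; its exact shape and units are not shown
# in this excerpt.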
def day_highest_osd_avg(self, day, action):
    # Return the OSD with the highest average duration for `action` on `day`.
    day_highest = {}
    day_actions = self.day_osd_actions(day, action)
    if day_actions:
        for osd in day_actions:
            a = avg([s['end'] - s['start'] for s in day_actions[osd]])
            if not day_highest or day_highest['avg'] < a:
                day_highest = {'osd': osd, 'avg': a}
    return day_highest
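# Illustrative only: day_osd_actions() is assumed to map each OSD id to a
# list of {'start': ..., 'end': ...} records, so the method above averages
# the per-record durations and keeps the OSD with the largest mean. E.g.:
#
#   day_actions = {
#       'osd.3': [{'start': 0.0, 'end': 2.0}, {'start': 5.0, 'end': 6.0}],
#       'osd.7': [{'start': 1.0, 'end': 1.5}],
#   }
#   # -> {'osd': 'osd.3', 'avg': 1.5}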
import time


def run_timing(read_end, write_end):
    dts = []
    for i in range(100):
        N = 1000
        print(f" Run {i}:", end="")
        t1 = time.time()  # time the Parallel run; t1/t2 feed dt_ms below
        Parallel(writer_timed(N, write_end),
                 reader_timed(N, read_end))
        t2 = time.time()
        dt_ms = (t2 - t1) * 1000
        dt_op_us = (dt_ms / N) * 1000
        print(f" DT = {dt_ms:8.3f} ms per op: {dt_op_us:8.3f} us")
        dts.append(dt_op_us)
    print(" -- min {:.3f} avg {:.3f} max {:.3f} ".format(
        min(dts), avg(dts), max(dts)))
# load branch lengths from the simulated tree via nw_distance (the matching
# ref_* lists are loaded the same way above this excerpt)
sim_int_bl = [
    float(i) for i in check_output(
        [args.nw_distance, '-mp', '-si', args.sim]).split()
]
sim_pen_bl = [
    float(i) for i in check_output(
        [args.nw_distance, '-mp', '-sf', args.sim]).split()
]
sim_tot_bl = sim_int_bl + sim_pen_bl
sim_root2tip = [
    float(i) for i in check_output(
        [args.nw_distance, '-mr', '-sf', args.sim]).split()
]

# perform analyses
print("Analysis\tReference Tree\tSimulated Tree\tTest Statistic\tp-value")
print("Average Branch Length\t%g\t%g\tNA\tNA" %
      (avg(ref_tot_bl), avg(sim_tot_bl)))
print("Standard Deviation Branch Length\t%g\t%g\tNA\tNA" %
      (std(ref_tot_bl), std(sim_tot_bl)))
print("Kolmogorov-Smirnov Test Branch Length\tNA\tNA\t%g\t%g" %
      ks_2samp(ref_tot_bl, sim_tot_bl))
print("Average Internal Branch Length\t%g\t%g\tNA\tNA" %
      (avg(ref_int_bl), avg(sim_int_bl)))
print("Standard Deviation Internal Branch Length\t%g\t%g\tNA\tNA" %
      (std(ref_int_bl), std(sim_int_bl)))
print("Kolmogorov-Smirnov Test Internal Branch Length\tNA\tNA\t%g\t%g" %
      ks_2samp(ref_int_bl, sim_int_bl))
print("Average Terminal Branch Length\t%g\t%g\tNA\tNA" %
      (avg(ref_pen_bl), avg(sim_pen_bl)))
print("Standard Deviation Terminal Branch Length\t%g\t%g\tNA\tNA" %
      (std(ref_pen_bl), std(sim_pen_bl)))
print("Kolmogorov-Smirnov Test Terminal Branch Length\tNA\tNA\t%g\t%g" %
      ks_2samp(ref_pen_bl, sim_pen_bl))
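# ks_2samp above is presumably scipy.stats.ks_2samp; its return value is
# tuple-like (statistic, pvalue), which is why it can be fed straight into
# the "%g\t%g" formatting. For example:
#
#   from scipy.stats import ks_2samp
#   stat, p = ks_2samp([0.1, 0.2, 0.3], [0.15, 0.25, 0.45])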
    # (tail of an if/else choosing facet measures; ds_v/ds_t/ds_b are the
    # vertical/top/bottom exterior facet measures on an extruded mesh)
    dEF = ds_v + ds_t + ds_b
else:
    dIF = dS
    dEF = ds

# should probably scale these with order?
# alpha = Constant(4.0)
# gamma = Constant(8.0)
# ddx = CellSize(mesh)
# ddx_avg = (ddx('+') + ddx('-'))/2
# penalty_int = alpha/ddx_avg
# penalty_ext = gamma/ddx

aV = inner(grad(u), grad(v)) * dx  # volume term
aIF = (inner(jump(u, n), jump(v, n)) * penalty_int -
       inner(avg(grad(u)), jump(v, n)) -
       inner(avg(grad(v)), jump(u, n))) * dIF  # interior facet term
aEF = (inner(u, v) * penalty_ext -
       inner(grad(u), v * n) -
       inner(grad(v), u * n)) * dEF  # exterior facet term
a = aV + aEF + aIF

Rlhs = action(a, x)
Rrhs = u * rhsexpr * dx

# create solvers
J = derivative(Rlhs - Rrhs, x)
problem = NonlinearVariationalProblem(Rlhs - Rrhs, x, J=J, Jp=J, bcs=[])
problem._constant_jacobian = True
solver = NonlinearVariationalSolver(problem,
                                    options_prefix='linsys_',
                                    solver_parameters={'snes_type': 'ksponly'})
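# Note: taken together, a = aV + aIF + aEF is a symmetric interior penalty
# (SIP) discretisation of a Laplace-type operator: a jump penalty plus
# consistency/symmetry terms on interior facets, and Nitsche-style weakly
# imposed boundary terms on exterior facets. Also note that avg() and jump()
# here are UFL's facet operators, not the arithmetic-mean helper used by the
# other snippets in this file.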
# parse user args
parser = argparse.ArgumentParser(
    description=__doc__,
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-r', '--ref', required=True, type=str,
                    help="Reference Contact Network")
parser.add_argument('-s', '--sim', required=True, type=str,
                    help="Simulated Contact Network")
args, unknown = parser.parse_known_args()
assert isfile(args.ref), "ERROR: Invalid file: %s" % args.ref
assert isfile(args.sim), "ERROR: Invalid file: %s" % args.sim

# load data from contact networks
ref_degrees = degrees(open(args.ref))
sim_degrees = degrees(open(args.sim))

# perform analyses
print("Analysis\tReference Contact Network\tSimulated Contact Network\t"
      "Test Statistic\tp-value")
print("Average Degree\t%g\t%g\tNA\tNA" % (avg(ref_degrees), avg(sim_degrees)))
print("Standard Deviation Degree\t%g\t%g\tNA\tNA" %
      (std(ref_degrees), std(sim_degrees)))
print("Kolmogorov-Smirnov Test Degree\tNA\tNA\t%g\t%g" %
      ks_2samp(ref_degrees, sim_degrees))
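# degrees() is defined elsewhere; a minimal sketch under the assumption that
# the contact network file is a tab-separated edge list with lines of the
# form "EDGE<TAB>u<TAB>v<TAB>..." (the exact format is an assumption of this
# illustration):
from collections import Counter


def degrees(f):
    # Count how many edges touch each node and return the degree sequence.
    counts = Counter()
    for line in f:
        parts = line.strip().split('\t')
        if len(parts) >= 3 and parts[0] == 'EDGE':
            counts[parts[1]] += 1
            counts[parts[2]] += 1
    return list(counts.values())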
def date_avg(self):
    # Average each date's aggregated values and store (date, average) pairs.
    for d in self.aggrs_by_date:
        a = avg([v[1] for v in self.aggrs_by_date[d]])
        self.date_avgs.append((d, a))
collection.parse()
osds = list(collection.osd_stats.keys())
osds = sorted(osds, key=lambda s: float(s.partition('.')[2]))
print("Slow request stats for %s OSDs" % len(osds))
print("Total slow requests: %s" % collection.total_slow_requests())
for osd in osds:
    delays = [e[1] for e in collection.osd_stats[osd]['slow_requests']]
    m = min(delays)
    collection.keep_top_mins(osd, m)
    collection.osd_stats[osd]['min'] = m
    m = max(delays)
    collection.keep_top_maxs(osd, m)
    collection.osd_stats[osd]['max'] = m
    a = avg(delays)
    collection.keep_top_avgs(osd, a)
    collection.osd_stats[osd]['avg'] = a
    collection.aggregate(osd)
collection.date_avg()
collection.date_max()

# group the per-date averages by month
_collection = {}
for a in collection.date_avgs:
    d = str(a[0].month)
    if d not in _collection:
        _collection[d] = [a[1]]
    else:
        _collection[d].append(a[1])
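# A plausible follow-up (illustrative, not part of the original excerpt):
# collapse each month's list of daily averages into a single monthly figure
# using the same avg() helper.
monthly = {month: avg(values) for month, values in _collection.items()}
for month in sorted(monthly, key=int):
    print("Month %s: average slow-request delay %.3f" % (month, monthly[month]))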