def simple_test(raid_level, test_recovery=True):
    """Smoke-test a RAID implementation end to end.

    Writes a small payload, reads it back, asserts the round trip is
    lossless, and optionally exercises single-disk recovery.

    :param raid_level: RAID class to instantiate (e.g. RAID4/RAID5/RAID6);
        it is called with ``4`` — presumably the disk count, TODO confirm
        against the class constructor.
    :param test_recovery: when True, also rebuild disk index 2 via
        ``raid.recover``.
    """
    init_logger()
    raid = raid_level(4)
    data_fname = 'good.dat'
    original_content = 'good_morning_sir'
    size = len(original_content)
    raid.write(original_content, data_fname)
    raid_content = raid.read(data_fname, size)
    # repr() is the idiomatic spelling of x.__repr__()
    print(repr(raid_content))
    assert raid_content == original_content
    if test_recovery:
        error_index = 2  # disk index to corrupt/rebuild
        raid.recover(data_fname, error_index)
get_logger().warning("testing recover_d_p") error_indexes = [0, r6.N - 2] size = SIZE / (r6.N - 2) _corrupt2(data_fname, error_indexes, size) r6.recover_d_p(data_fname, error_indexes[0]) r6.detect_corruption(data_fname) def test_from_content(r6): get_logger().warning("testing from content") original_content = b'good_morning\x03_sir_yes_great\x01\x02' # original_content = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13' data_fname = 'my.dat' # r6.write(original_content, data_fname) # r6.recover_d_or_p(data_fname, error_index) # r6.recover_d_p(data_fname, 1) # r6.recover_2d(data_fname, 0, 1) # r6_content = r6.read(data_fname, len(original_content)) # assert r6_content == original_content r6.detect_corruption(data_fname) if __name__ == '__main__': # utils.simple_test(RAID6, False) init_logger() r6 = RAID6(10) # test_from_data_file(r6) # test_from_content(r6) data_name = "doge.png" r6.recover_d_p(data_name, 3)
def starter():
    """Generate the random input files used by the benchmark below.

    Creates ``config.root`` if needed, then writes one text and one
    binary file of ``SIZE`` bytes each via ``gen_rnd_file``.

    :return: None
    """
    # makedirs(..., exist_ok=True) avoids the isdir()/mkdir() race
    # (the directory could appear between the check and the create).
    os.makedirs(config.root, exist_ok=True)
    gen_rnd_file('data1', SIZE, 'text')
    gen_rnd_file('data2', SIZE, 'bin')


# Size in bytes of each generated data file.
SIZE = 3276800
# Number of disks — NOTE(review): unused in the visible code; verify callers.
N_DISK = 8

if __name__ == '__main__':
    init_logger()
    starter()
    # NOTE: switch back to the generated files to benchmark synthetic data:
    # for fname in ['data1', 'data2']:
    for fname in ['doge.png']:
        fpath = os.path.join(config.root, fname)
        with open(fpath, 'rb') as fh:
            content = fh.read()
        for raid_type in [RAID4, RAID5, RAID6]:
            raid = raid_type(10)
            start_time = time.time()
            raid.write(content, fname)
            print("{:10.4f}s during 'write' for raid={} against data={}, size={}".format(
                time.time() - start_time, raid.__class__.__name__, fname, SIZE))
            size = len(content)
            start_time = time.time()
            content_raid = raid.read(fname, size)
for l in gzip_data: edges = [int(e) for e in l.split()] assert len(edges) == 2 G.add_edge(*edges) with open(pickle_file, "wb") as pickle_data: pickle.dump(G, pickle_data) return cls(G) @classmethod def from_ego(cls, name, ego_list): if utils.graph_directness[name]: G = nx.DiGraph() else: G = nx.Graph() for ego_id in ego_list: ego_graph = ego.Ego(name, ego_id) ego_graph.graph_generator() graph = ego_graph.graph G.add_nodes_from(graph.nodes()) G.add_edges_from(graph.edges()) return cls(G) if __name__ == "__main__": log_helper.init_logger() name = "facebook" ego_list = utils.collect_ego_list(name) gzip_fname = utils.get_gzip_fname(name) social = Network.from_combined(name, gzip_fname) g_info = graph_info.GraphInfo(social.graph)
summary_pattern = """------------ Theme: {} Total tests count: {} Passed tests count: {} Failed tests count: {} Passed: {} % """ email_footer = """ Python-generated email with the CI test results spreadsheet. If you want to unsubscribe, please, click |HERE| or just email to [email protected]. Happy {}! """.format(datetime.today().strftime('%A')) logger = init_logger() def create_brief_summary_for_theme(path_to_theme, total_trx, failed_trx): try: theme = path_to_theme.split('\\')[-4] logger.info("Started brief summary creation for {}".format(theme)) passed_trx = total_trx - failed_trx if total_trx: passed_percent = "%.2f" % ((passed_trx / total_trx) * 100) parser.brief_summary.append( [theme, total_trx, passed_trx, failed_trx, passed_percent]) else: logger.warning( "Found {} test results for theme {}. Summary wasn't created".