def analyze():
    """Load the precomputed field, covers and statistics, then run DataProcessing.

    Reads every artifact path from the module-level ``conf`` dict:
    'field', 'covers', 'apl', 'ans_len', 'walked_spear', 'field_min_max'.

    NOTE(review): a large block of commented-out generation/dump code
    (template loading, cover computation, statistics dumps) was removed
    here; recover it from VCS history if the generation step is needed
    again.  A stray unmatched triple-quote at the end of the original
    was dropped as well.
    """
    with T('Load'):
        t_field = Field.load_by_json(conf['field'])
        covers = pickle_cover.load(conf['covers'])
        # The four values below are loaded but not used further in this
        # function -- presumably kept for interactive inspection; confirm
        # before deleting.
        apl = pickle_cover.load(conf['apl'])
        ans_len = pickle_cover.load(conf['ans_len'])
        walked_spear = pickle_cover.load(conf['walked_spear'])
        field_min_max = pickle_cover.load(conf['field_min_max'])

    with T('work'):
        dp = DataProcessing(t_field, covers)
        # Bare attribute accesses: presumably lazy properties whose
        # evaluation triggers the actual work -- TODO confirm on the
        # DataProcessing class.
        dp.solver
        dp.alternative_path_len
def dump_current_field_covers(path_, i_):
    """Compute covers for base field *i_* and pickle interned tables for them.

    Loads ``<path_>/base_<i_>.json``, computes its covers via
    ``pickle_cover.to_cover`` and writes a dict with the keys
    'base', 'periods', 'cells', 'paths', 'covers', 'cover_paths' to
    ``<path_>/covers/case_<i_>.dump``.  Cells, periods and paths are
    deduplicated ("interned") into flat lists, and the covers/paths refer
    to them by index, keeping the dump compact.
    """

    def unique_append(list_, item):
        # Return the index of item in list_, appending it first when unseen.
        # EAFP single scan; the original tested `item not in list_` and then
        # called `.index` -- two O(n) scans for the same answer.
        try:
            return list_.index(item)
        except ValueError:
            list_.append(item)
            return len(list_) - 1

    pr_periods = []
    pr_cells = []
    pr_paths = []
    pr_covers = []
    pr_cover_paths = []

    base = Field.load_by_json('%s/base_%i.json' % (path_, i_))
    with T():
        path_and_spears_group = pickle_cover.to_cover(
            base, max_spear=20, max_answer=50000)

    for path, spears in path_and_spears_group:
        new_cover = []
        for spear in spears:
            # A spear encodes (cell, period) as one flat tuple:
            # first two components are the cell, the rest the period.
            cell, period = spear[:2], spear[2:]
            cell_id = unique_append(pr_cells, cell)
            period_id = unique_append(pr_periods, period)
            new_cover.append((cell_id, period_id))
        pr_covers.append(tuple(new_cover))

        # path[0] is the sequence of cells along the path -- TODO confirm
        # against pickle_cover.to_cover's return shape.
        new_path = [unique_append(pr_cells, cell) for cell in path[0]]
        pr_cover_paths.append(unique_append(pr_paths, tuple(new_path)))

    dump_dict = {
        'base': base.take_json(),
        'periods': pr_periods,
        'cells': pr_cells,
        'paths': pr_paths,
        'covers': pr_covers,
        'cover_paths': pr_cover_paths,
    }
    # 'wb', not 'w': pickle streams are binary data; text mode corrupts
    # them on platforms with newline translation.
    with open('%s/covers/case_%i.dump' % (path_, i_), 'wb') as f:
        cPickle.dump(dump_dict, f)
def show_count_paths(path_, i_): base = Field.load_by_json('/home/anosov/data/hard_base/base_%i.json' % (i_, )) with T(): res = solver.solve(base) print len(res)