def generate_file_stats(in_dir_path, out_dir_path, G_name, req):
    """Run the C++ matcher on one graph file and emit LaTeX statistics.

    in_dir_path  -- directory containing the graph file
    out_dir_path -- directory where matchings and statistics are written
    G_name       -- file name of the graph inside in_dir_path
    req          -- collection of requested matching codes ('H', 'S', 'P', 'M')
    """
    G_path = os.path.join(os.path.abspath(in_dir_path), G_name)

    # strip vertices that have an empty preference list (edits the file in place)
    subprocess.run(
        ['sed', '-i', '-E', '/^[[:lower:][:upper:][:digit:]]+ : ;/d', G_path],
        check=True)

    # compute only the matchings that were requested
    matchings = {}
    variants = (('H', '-h', sea.HRLQ_HHEURISTIC),
                ('S', '-s', sea.STABLE),
                ('P', '-p', sea.MAX_CARD_POPULAR),
                ('M', '-m', sea.POP_AMONG_MAX_CARD))
    for mdesc, cppopt, seadesc in variants:
        if mdesc not in req:
            continue
        mpath = os.path.join(os.path.abspath(out_dir_path),
                             '{}_{}'.format(mdesc, G_name))
        subprocess.run([os.path.join(CPPCODE_DIR, 'graphmatching'),
                        '-A', cppopt, '-i', G_path, '-o', mpath],
                       check=True)
        matchings[seadesc] = sea.read_matching(mpath)

    # write the LaTeX statistics for this graph
    sea.generate_hr_tex(graph_parser.read_graph(G_path), matchings,
                        out_dir_path, G_name)
def main():
    """Plot hospital- and resident-side average-rank curves for each file."""
    dirpath = ('/media/amitrawt/5073c06b-f306-4e3a-9e18-23d2c9921453/'
               'ms_project/MIN_BP/couples/Experiment3')
    files = ['100-10-10-100-3-5-false-5-5-Iteration1.txt',
             '100-10-10-100-3-5-false-5-5-Iteration2.txt',
             '100-10-10-100-3-5-false-5-5-Iteration3.txt']
    for filename in files:
        G = graph_parser.read_graph(os.path.join(dirpath, filename))

        # hospital-side average ranks (B vertices ranking A vertices)
        avg_h = avg_rank(G.B, rank_matrix(G, G.B, G.A))
        generate_line_plot('H', 'avg rank', range(len(avg_h)),
                           [rank for _, rank in avg_h],
                           'H_{}.png'.format(filename))

        # resident-side average ranks (A vertices ranking B vertices)
        avg_r = avg_rank(G.A, rank_matrix(G, G.A, G.B))
        generate_line_plot('R', 'avg rank', range(len(avg_r)),
                           [rank for _, rank in avg_r],
                           'R_{}.png'.format(filename))
def main():
    """Generate average-rank line plots for both sides of each test graph."""
    dirpath = ('/media/amitrawt/5073c06b-f306-4e3a-9e18-23d2c9921453'
               '/ms_project/MIN_BP/couples/Experiment3')
    filenames = ('100-10-10-100-3-5-false-5-5-Iteration{}.txt'.format(i)
                 for i in (1, 2, 3))
    for filename in filenames:
        filepath = os.path.join(dirpath, filename)
        G = graph_parser.read_graph(filepath)

        # one plot per side: ('H', hospitals) and ('R', residents)
        for label, side, other in (('H', G.B, G.A), ('R', G.A, G.B)):
            ranks = avg_rank(side, rank_matrix(G, side, other))
            generate_line_plot(label, 'avg rank', range(len(ranks)),
                               [rank for _, rank in ranks],
                               '{}_{}.png'.format(label, filename))
def main():
    """Parse command-line arguments and emit LaTeX statistics for matchings.

    Reads the bipartite graph plus any subset of matching files supplied on
    the command line, then writes either heuristic statistics (when -H is
    given) or the standard HR statistics to the output directory.
    """
    parser = argparse.ArgumentParser(
        description='Generate statistics in latex format given a bipartite graph and matchings')
    parser.add_argument('-G', dest='G', help='Bipartite graph',
                        required=True, metavar='')
    parser.add_argument('-S', dest='S', help='Stable matching in the graph',
                        metavar='')
    parser.add_argument('-P', dest='P',
                        help='Max-cardinality popular matching in the graph',
                        metavar='')
    parser.add_argument('-M', dest='M',
                        help='Popular among max-cardinality matchings in the graph',
                        metavar='')
    parser.add_argument('-H', dest='H',
                        help='Hospital proposing HRLQ heuristic in the graph',
                        metavar='')
    parser.add_argument('-R', dest='R',
                        help='Resident proposing HRLQ heuristic in the graph',
                        metavar='')
    parser.add_argument('-O', dest='O',
                        help='Directory where the statistics should be stored',
                        metavar='')
    args = parser.parse_args()

    G = graph_parser.read_graph(args.G)

    # read only the matching files that were actually supplied
    supplied = ((STABLE, args.S), (MAX_CARD_POPULAR, args.P),
                (POP_AMONG_MAX_CARD, args.M), (HRLQ_HHEURISTIC, args.H),
                (HRLQ_RHEURISTIC, args.R))
    matchings = {mdesc: read_matching(mfile)
                 for mdesc, mfile in supplied if mfile is not None}

    basename = os.path.basename(args.G)
    if args.H:
        # heuristic statistics when an HRLQ heuristic matching was given
        generate_heuristic_tex(G, matchings, args.O, basename)
    else:
        # statistics for M_s, M_p, and M_m
        generate_hr_tex(G, matchings, args.O, basename)
def main():
    """Print lower-quota statistics for every '1000_*.txt' graph in a directory.

    Usage: script <dirpath>. For each matching graph file, prints the number
    of lower-quota hospitals and the sum of their lower quotas.
    """
    dirpath = sys.argv[1]
    for entry in os.scandir(dirpath):
        # fix: require the '.txt' extension exactly; the previous check
        # (endswith('txt')) also matched names merely ending in the letters
        # 'txt' without a dot
        if entry.name.startswith('1000_') and entry.name.endswith('.txt'):
            G = graph_parser.read_graph(entry.path)
            count, lq_sum = lq_count(G)
            print(entry.name, '# lq: {}, lq sum: {}'.format(count, lq_sum))
def main():
    """Compute and print stable and popular matchings for a marriage instance."""
    import sys

    import graph_parser
    import matching_algos

    if len(sys.argv) < 2:
        print('usage: {} <graph file>'.format(sys.argv[0]))
        return

    G = graph_parser.read_graph(sys.argv[1])
    # run each algorithm on its own copy so neither mutates G
    stable = matching_algos.stable_matching_man_woman(copy_graph(G))
    popular = matching_algos.popular_matching_man_woman(copy_graph(G))
    print(to_easy_format(G, stable), to_easy_format(G, popular), sep='\n')
def generate_stats(dirpath):
    """Recursively compute envy-free matching statistics under dirpath.

    For every graph file that already has a matching file on disk, the
    envy-free matching is derived and its statistics are printed to the
    corresponding stats path; sub-directories are handled recursively.
    """
    for entry in os.scandir(dirpath):
        if entry.is_dir():
            generate_stats(entry.path)
        elif entry.is_file() and is_graph_file(entry):
            mpath, statpath = corr_matching_and_stats(entry)
            # skip graphs for which no matching has been generated yet
            if not os.path.isfile(mpath):
                continue
            M = sea.read_matching(mpath)
            G = graph_parser.read_graph(entry.path)
            print_matching_stats(G, envy_free_matching(G, M), statpath)
def main():
    """Compute stable and popular HR matchings and write them to files."""
    if len(sys.argv) < 4:
        print('usage: {} <graph-file> <stable-file> <popular-file>'.format(sys.argv[0]))
        return

    import graph_parser
    import matching_stats

    gfile, sfile, pfile = sys.argv[1:4]
    G = graph_parser.read_graph(gfile)
    # each algorithm gets its own copy so G stays pristine for the stats step
    M_stable = stable_matching_hospital_residents(graph.copy_graph(G))
    M_popular = popular_matching_hospital_residents(graph.copy_graph(G))
    matching_stats.print_matching(G, M_stable, sfile)
    matching_stats.print_matching(G, M_popular, pfile)
def main():
    """Build the CLI, read the supplied matchings, and write LaTeX stats."""
    parser = argparse.ArgumentParser(
        description='Generate statistics in latex format given a bipartite graph and matchings')
    # (flag, help text); every option shares dest=flag-letter and metavar=''
    options = (('S', 'Stable matching in the graph'),
               ('P', 'Max-cardinality popular matching in the graph'),
               ('M', 'Popular among max-cardinality matchings in the graph'),
               ('H', 'Hospital proposing HRLQ heuristic in the graph'),
               ('R', 'Resident proposing HRLQ heuristic in the graph'),
               ('O', 'Directory where the statistics should be stored'))
    parser.add_argument('-G', dest='G', help='Bipartite graph',
                        required=True, metavar='')
    for flag, helptext in options:
        parser.add_argument('-' + flag, dest=flag, help=helptext, metavar='')
    args = parser.parse_args()

    G = graph_parser.read_graph(args.G)

    matchings = {}
    for mdesc, mfile in ((STABLE, args.S), (MAX_CARD_POPULAR, args.P),
                         (POP_AMONG_MAX_CARD, args.M),
                         (HRLQ_HHEURISTIC, args.H),
                         (HRLQ_RHEURISTIC, args.R)):
        if mfile is not None:
            matchings[mdesc] = read_matching(mfile)

    if args.H:
        # heuristic statistics when an HRLQ heuristic matching was supplied
        generate_heuristic_tex(G, matchings, args.O, os.path.basename(args.G))
    else:
        # statistics for M_s, M_p, and M_m
        generate_hr_tex(G, matchings, args.O, os.path.basename(args.G))
def main():
    """Write stable and popular hospital-residents matchings to output files."""
    if len(sys.argv) < 4:
        print('usage: {} <graph-file> <stable-file> <popular-file>'.format(
            sys.argv[0]))
        return

    import graph_parser
    import matching_stats

    gfile = sys.argv[1]
    sfile = sys.argv[2]
    pfile = sys.argv[3]
    G = graph_parser.read_graph(gfile)
    # work on copies so the algorithms cannot mutate the original graph
    matching_stats.print_matching(
        G, stable_matching_hospital_residents(graph.copy_graph(G)), sfile)
    matching_stats.print_matching(
        G, popular_matching_hospital_residents(graph.copy_graph(G)), pfile)
def generate_file_stats(in_dir_path, out_dir_path, G_name, req):
    """Produce requested matchings for one graph and write LaTeX statistics.

    The graph file is first cleaned of vertices with empty preference lists,
    then the external 'graphmatching' binary computes each matching whose
    code appears in req, and finally sea.generate_hr_tex writes the stats.
    """
    G_path = os.path.join(os.path.abspath(in_dir_path), G_name)

    # delete lines for vertices whose preference list is empty (in-place sed)
    subprocess.run(['sed', '-i', '-E',
                    '/^[[:lower:][:upper:][:digit:]]+ : ;/d', G_path],
                   check=True)

    out_abs = os.path.abspath(out_dir_path)
    matcher = os.path.join(CPPCODE_DIR, 'graphmatching')

    matchings = {}
    for mdesc, cppopt, seadesc in (('H', '-h', sea.HRLQ_HHEURISTIC),
                                   ('S', '-s', sea.STABLE),
                                   ('P', '-p', sea.MAX_CARD_POPULAR),
                                   ('M', '-m', sea.POP_AMONG_MAX_CARD)):
        if mdesc in req:
            mpath = os.path.join(out_abs, '{}_{}'.format(mdesc, G_name))
            subprocess.run([matcher, '-A', cppopt, '-i', G_path, '-o', mpath],
                           check=True)
            matchings[seadesc] = sea.read_matching(mpath)

    # emit the statistics file for this graph
    sea.generate_hr_tex(graph_parser.read_graph(G_path), matchings,
                        out_dir_path, G_name)
def main():
    """Apply the h-reduction to the graph named on the command line and print it."""
    G = graph_parser.read_graph(sys.argv[1])
    # reduce a copy so the parsed graph itself is not modified
    reduced = hreduction(graph.copy_graph(G))
    print(graph.graph_to_UTF8_string(reduced))
def main():
    """Read the graph file named on the command line (parser smoke test).

    The parsed graph is discarded; this only checks that parsing succeeds.
    """
    import sys

    if len(sys.argv) < 2:
        print('usage: {} <graph file>'.format(sys.argv[0]))
        return
    graph_parser.read_graph(sys.argv[1])