def main():
    """Script entry point.

    Parses argv (jobs url1 url2 [nr_cycles [working_dir [comments_file]]]),
    loads the comment list, then compares url1 against url2 -- either inline
    or fanned out over a process pool -- and exits with the error status.
    """
    argc = len(sys.argv)
    if argc < 4 or argc > 7:
        print("Usage: __name__ jobs url1 url2 [nr_cycles [working_dir [comments_file]]]")
        print("   Example: __name__ 4 http://127.0.0.1:8090 http://127.0.0.1:8091 [ 20 my_comments_data_dir [comments]]")
        print("   by default: nr_cycles = 3; set nr_cycles to 0 if you want to use all comments )")
        print("   set jobs to 0 if you want use all processors")
        print("   url1 is reference url for list_comments")
        exit()

    # Shared with the worker / callback helpers defined elsewhere in this file.
    global wdir
    global errors
    global nr_cycles

    jobs = int(sys.argv[1])
    if jobs <= 0:  # a non-positive job count selects every available CPU
        import multiprocessing
        jobs = multiprocessing.cpu_count()

    url1, url2 = sys.argv[2], sys.argv[3]
    if argc > 4:
        nr_cycles = int(sys.argv[4])
    if argc > 5:
        wdir = Path(sys.argv[5])
    comments_file = sys.argv[6] if argc > 6 else ""

    if comments_file != "":
        try:
            with open(comments_file, "rt") as fh:
                comments = fh.readlines()
        except:
            exit("Cannot open file: " + comments_file)
    else:
        # No file supplied: pull the comment list from the reference service.
        comments = list_comments(url1)

    total = len(comments)
    if total == 0:
        exit("There are no any comment!")

    create_wdir()
    print(str(total) + " comments")

    jobs = min(jobs, total)  # never spawn more workers than comments

    print("setup:")
    print(f"  jobs: {jobs}")
    print(f"  url1: {url1}")
    print(f"  url2: {url2}")
    print(f"  wdir: {wdir}")
    print(f"  comments_file: {comments_file}")

    if jobs > 1:
        # Contiguous chunks of total // jobs comments each; the final worker
        # also absorbs the remainder of the integer division.
        chunk = total // jobs
        start = 0
        with ProcessPoolExecutor(max_workers=jobs) as pool:
            for _ in range(jobs - 1):
                job = pool.submit(compare_results, url1, url2,
                                  comments[start:start + chunk])
                job.add_done_callback(future_end_cb)
                start += chunk
            job = pool.submit(compare_results, url1, url2, comments[start:total])
            job.add_done_callback(future_end_cb)
    else:
        errors = (compare_results(url1, url2, comments) == False)

    exit(errors)
def main():
    """Command-line entry point: compare comment results between two
    service URLs, optionally fanned out across worker processes.

    Expected argv: jobs url1 url2 [nr_cycles [working_dir [comments_file]]]
    Exits the process with a truthy status when any comparison failed.
    """
    if len(sys.argv) < 4 or len(sys.argv) > 7:
        print("Usage: __name__ jobs url1 url2 [nr_cycles [working_dir [comments_file]]]")
        print("   Example: __name__ 4 http://127.0.0.1:8090 http://127.0.0.1:8091 [ 20 my_comments_data_dir [comments]]")
        print("   by default: nr_cycles = 3; set nr_cycles to 0 if you want to use all comments )")
        print("   set jobs to 0 if you want use all processors")
        print("   url1 is reference url for list_comments")
        sys.exit()

    # These module-level settings are shared with the worker / callback
    # helpers defined elsewhere in this file.
    global wdir
    global errors
    global nr_cycles

    jobs = int(sys.argv[1])
    if jobs <= 0:
        # 0 (or negative) means "use every available processor".
        import multiprocessing
        jobs = multiprocessing.cpu_count()

    url1 = sys.argv[2]
    url2 = sys.argv[3]
    if len(sys.argv) > 4:
        nr_cycles = int(sys.argv[4])
    if len(sys.argv) > 5:
        wdir = Path(sys.argv[5])
    comments_file = sys.argv[6] if len(sys.argv) > 6 else ""

    if comments_file != "":
        # Was a bare `except:` (which also swallows KeyboardInterrupt /
        # SystemExit); narrowed to OSError, the family open()/readlines()
        # actually raise for I/O failures.
        try:
            with open(comments_file, "rt") as file:
                comments = file.readlines()
        except OSError:
            sys.exit("Cannot open file: " + comments_file)
    else:
        # No file supplied: pull the comment list from the reference service.
        comments = list_comments(url1)

    length = len(comments)
    if length == 0:
        sys.exit("There are no any comment!")

    create_wdir()
    print(str(length) + " comments")

    if jobs > length:
        jobs = length  # no point in more workers than comments

    print("setup:")
    print("  jobs: {}".format(jobs))
    print("  url1: {}".format(url1))
    print("  url2: {}".format(url2))
    print("  wdir: {}".format(wdir))
    print("  comments_file: {}".format(comments_file))

    if jobs > 1:
        # Split the comment list into `jobs` contiguous chunks; the last
        # submit also takes the remainder of the integer division.
        first = 0
        last = length
        comments_per_job = length // jobs
        with ProcessPoolExecutor(max_workers=jobs) as executor:
            for i in range(jobs - 1):
                future = executor.submit(
                    compare_results, url1, url2,
                    comments[first:first + comments_per_job])
                # future_end_cb presumably folds failures into the global
                # `errors` -- TODO confirm against its definition.
                future.add_done_callback(future_end_cb)
                first = first + comments_per_job
            future = executor.submit(compare_results, url1, url2,
                                     comments[first:last])
            future.add_done_callback(future_end_cb)
    else:
        # Single job: run inline. Was `(... == False)`; truthy `errors`
        # means the comparison failed.
        errors = not compare_results(url1, url2, comments)

    # NOTE(review): on the jobs > 1 path `errors` must have been initialised
    # at module level (the callback path sets it) -- confirm in the full file.
    sys.exit(errors)