def main():
    """Parse the command line, generate proxies, and persist them to the
    configured SQLite database. Returns 0.
    """
    # Build the CLI parser and evaluate the arguments.
    cli_parser = arguments.create_argument_parser()
    args = cli_parser.parse_args(sys.argv[1:])
    arguments.process_arguments(args, cli_parser)

    # Echo the parsed arguments when verbose mode is enabled.
    if args.verbose:
        arguments.print_arguments(args)

    # Open the database that will receive the generated proxies.
    connection, cursor = database.initialize_database(args.database_file)

    try:
        # Persist each proxy as soon as it is generated.
        for proxy in parser.generate_proxy(args):
            database.insert_in_database(cursor, proxy)
    except KeyboardInterrupt:
        # A blank line keeps the warning off the "^C" echo in verbose mode.
        if args.verbose:
            print('')
        print('[warn] received interruption signal')

    # Flush pending rows and release the database handle.
    connection.commit()
    connection.close()
    return 0
def main():
    """Entry point: parse the command line, generate proxies, and write
    them either to a SQLite database or to a plain-text file.

    The text file takes priority when both destinations are supplied.
    Returns 0 on success, or an error-message string when no output
    destination was specified (useful with ``sys.exit(main())``, where a
    string message yields a non-zero exit status).
    """
    # Build the CLI parser and evaluate the arguments.
    arg_parser = arguments.create_argument_parser()
    args = arg_parser.parse_args(sys.argv[1:])
    arguments.process_arguments(args, arg_parser)

    # If the verbose mode is on, we display the arguments.
    if args.verbose:
        arguments.print_arguments(args)

    if args.database_file is not None and args.text_file is None:
        # We open the database file where the proxies will be stored.
        connection, cursor = database.initialize_database(args.database_file)
        try:
            # We generate the proxies and store them in the database.
            for proxy in parser.generate_proxy(args):
                database.insert_in_database(cursor, proxy)
        except KeyboardInterrupt:
            # Blank line keeps the warning off the "^C" echo in verbose mode.
            if args.verbose:
                print('')
            print('[warn] received interruption signal')
        # We save the changes made to the database, and close the file.
        connection.commit()
        connection.close()
        return 0
    elif args.text_file is not None:
        # Write to text file with priority. (Original condition repeated
        # `args.text_file is not None`; `X or (X and Y)` reduces to `X`.)
        with open(args.text_file, 'w') as tf:
            try:
                for proxy in parser.generate_proxy(args):
                    # Assumes proxy is (host, port, scheme) — TODO confirm
                    # against parser.generate_proxy.
                    tf.write(proxy[2].lower() + '://' + str(
                        proxy[0]) + ':' + str(proxy[1]) + '\n')
            except KeyboardInterrupt:
                # Mirror the database branch: warn instead of a traceback;
                # the `with` block still closes the file cleanly.
                if args.verbose:
                    print('')
                print('[warn] received interruption signal')
        # Explicit success status (previously fell through returning None).
        return 0
    else:
        # Neither --database-file nor --text-file was given.
        return 'Please specify output file!'
# model name model_name = '{}_nlayer{}_{}_lr{}_batch{}_momentum{}_schedule{}_nepoch{}_{}'.format( args.name, args.nlayer, args.optimizer, args.lr, args.batch_size, args.momentum, args.schedule, args.nepochs, args.arch ) logger = make_logger(log_file=model_name) logger.info("saved model name "+model_name) arguments.print_arguments(args, logger) multi_gpu = args.multi_gpu set_gpu_num = args.gpu_num if torch.cuda.is_available(): print("gpu", torch.cuda.current_device(), torch.cuda.get_device_name()) else: print("cpu") #----- model ----- if args.arch =='hrnet': model = get_pose_hrnet() else: if args.flatten: