def test_that_fails_in_case_arg_not_in_schema(self):
    """Passing -l when only 'p' exists in the schema must raise ValueError."""
    spec = {'p': 'flag'}
    argv = ['-l']
    with self.assertRaises(ValueError):
        parse_args(spec, argv)
def get_input(app_data):
    """Parse the command line and load every option into ``app_data``.

    Numeric knobs are coerced to float/int, on/off switches to bool;
    ``mode`` and ``timer`` are stored untouched.
    """
    cli = parse_args()
    app_data['app_args'] = cli
    # (app_data key, attribute on parsed args, coercion or None for as-is)
    spec = (
        ('mode', 'mode', None),
        ('threshold', 'threshold', float),
        ('weight', 'weight', int),
        ('runs', 'runs', int),
        ('graph_gen', 'graph_gen', bool),
        ('timer', 'timer', None),
        ('optimize_storage', 'optimize_storage', bool),  # storage optimization
        ('early_free', 'early_free', bool),  # early freeing of allocated arrays
        ('pool_alloc', 'pool_alloc', bool),  # pool allocate option
        ('inline', 'inline', bool),
        # NOTE: hyphenated key is intentional — existing consumers read it
        ('multi-level-tiling', 'multi_level_tiling', bool),
        ('dpfuse', 'dpfuse', bool),
        ('logdpchoices', 'logdpchoices', bool),
        ('logmaxchildren', 'logmaxchildren', int),
    )
    for key, attr, coerce in spec:
        raw = getattr(cli, attr)
        app_data[key] = raw if coerce is None else coerce(raw)
def main():
    """Start a log-listener process and worker processes, then shut both down.

    Workers are stopped (or interrupted via Ctrl-C) before the logger so that
    their shutdown can still be logged through the shared queue.
    """
    # Parse arguments
    from arg_parser import parse_args
    try:
        args = parse_args()
    except SystemExit:
        # argparse exits on --help / invalid args; return instead of dying
        return
    # Multiprocessing
    multiprocessing.set_start_method('spawn')
    queue = multiprocessing.Queue(-1)  # -1 => unbounded log queue
    stop_logger = multiprocessing.Event()
    stop_workers = multiprocessing.Event()
    logger = get_logger(queue=queue, conf_file=args.mplog,
                        stop_event=stop_logger)
    workers = get_workers(queue=queue, conf_file=args.conf, url=args.url,
                          stop_event=stop_workers, num_workers=args.nproc)
    try:
        # presumably joins/awaits the workers — TODO confirm term_workers semantics
        term_workers(workers)
    except KeyboardInterrupt:
        # Ctrl-C: signal workers to stop, then wait for them again
        stop_workers.set()
        term_workers(workers)
    finally:
        # always stop the logger last, after workers are done
        stop_logger.set()
        term_logger(logger)
def main():
    """Dispatch one cloud-storage operation (ls/dl/ul/rm) from CLI args."""
    args = parse_args()
    exit_msg = None
    try:
        # choose the storage backend
        client = GDriveWrapper() if args.storage == "gdrive" else YaDiskWrapper()
        op = args.operation
        if op == "ls":
            client.lsdir(args.remote_file, order_key=args.order_by)
        elif op == "dl":
            remote = client.get_file(args.remote_file)
            client.download(remote,
                            local_destination=Path(args.destination),
                            ov=args.overwrite)
            exit_msg = DOWNLOAD_COMPLETED_MSG
        elif op == "ul":
            client.upload(Path(args.local_file), args.destination)
            exit_msg = UPLOAD_COMPLETED_MSG
        elif op == "rm":
            client.remove(args.remote_file, permanently=args.permanently)
        if exit_msg:
            print(exit_msg)
    except (ApiResponseException, FileExistsError, FileNotFoundError,
            PermissionError, CredentialsNotFoundException) as e:
        print(e)
        sys.exit(1)
    except KeyboardInterrupt:
        print("Interrupted by user.")
        sys.exit(1)
def get_input(app_data):
    """Parse the command line and load multigrid run options into app_data."""
    cli = parse_args()
    app_data['app_args'] = cli
    # (key, attribute, coercion or None for as-is)
    spec = (
        ('mode', 'mode', None),
        ('cycle', 'cycle', None),
        ('nit', 'nit', int),
        ('runs', 'runs', int),
        ('graph_gen', 'graph_gen', bool),
    )
    for key, attr, coerce in spec:
        raw = getattr(cli, attr)
        app_data[key] = raw if coerce is None else coerce(raw)
    # derived label, e.g. cycle 'V' -> 'Vcycle'
    app_data['cycle_name'] = app_data['cycle'] + "cycle"
    app_data['timer'] = cli.timer
    # on/off optimization switches
    for key in ('optimize_storage', 'early_free', 'pool_alloc', 'multipar'):
        app_data[key] = bool(getattr(cli, key))
def test_that_invalid_integer_raises_value_error(self):
    """A non-numeric value for an int option must raise ValueError."""
    spec = {'p': 'int'}
    argv = ['-p', 'pi']
    with self.assertRaises(ValueError):
        parse_args(spec, argv)
def get_input(app_data):
    """Parse the command line and load multigrid run options into app_data."""
    cli = parse_args()
    app_data['app_args'] = cli
    # (key, attribute, coercion or None for as-is)
    spec = (
        ('mode', 'mode', None),
        ('cycle', 'cycle', None),
        ('nit', 'nit', int),
        ('runs', 'runs', int),
        ('graph_gen', 'graph_gen', bool),
    )
    for key, attr, coerce in spec:
        raw = getattr(cli, attr)
        app_data[key] = raw if coerce is None else coerce(raw)
    # derived label, e.g. cycle 'V' -> 'Vcycle'
    app_data['cycle_name'] = app_data['cycle'] + "cycle"
    app_data['timer'] = cli.timer
    # on/off optimization switches
    for key in ('optimize_storage', 'early_free', 'pool_alloc',
                'multipar', 'naive'):
        app_data[key] = bool(getattr(cli, key))
def test_that_arguments_have_digit_as_name_and_value(self):
    """An option whose name is a digit still parses its int value."""
    result = parse_args({'1': 'int'}, ['-1', '2'])
    self.assertEqual(result['1'], 2)
def test_that_flags_can_have_digits_as_names(self):
    """A flag named '1' is set to True by passing -1."""
    result = parse_args({'1': 'flag'}, ['-1'])
    self.assertTrue(result['1'])
def test_that_integer_param_is_parsed_to_int(self):
    """'-p 8080' with an int schema yields the integer 8080."""
    result = parse_args({'p': 'int'}, ['-p', '8080'])
    self.assertEqual(result['p'], 8080)
def test_that_string_values_get_parsed(self):
    """A str option keeps its raw value."""
    result = parse_args({'s': 'str'}, ['-s', 'abc'])
    self.assertEqual(result['s'], 'abc')
def test_that_integer_is_0_if_value_not_present(self):
    """An int option that was not passed defaults to 0."""
    result = parse_args({'p': 'int', 'l': 'flag'}, ['-l'])
    self.assertEqual(result['p'], 0)
def test_that_negative_ints_are_parsed_as_ints(self):
    """'-3' after an int option is its (negative) value, not a new option."""
    result = parse_args({'p': 'int'}, ['-p', '-3'])
    self.assertEqual(result['p'], -3)
def test_that_double_digit_negative_int_is_parsed(self):
    """Multi-digit negative values such as -13 parse correctly."""
    result = parse_args({'p': 'int'}, ['-p', '-13'])
    self.assertEqual(result['p'], -13)
def test_that_negative_num_args_have_digits_as_name(self):
    """A digit-named int option can take a negative value."""
    result = parse_args({'1': 'int'}, ['-1', '-12'])
    self.assertEqual(result['1'], -12)
def test_not_provided_str_arg_defaults_to_empty_string(self):
    """A str option that was not passed defaults to ''."""
    result = parse_args({'s': 'str'}, [])
    self.assertEqual(result['s'], '')
def test_that_schema_validation_works_with_numeric_arg_names(self):
    """'-3' is not declared in the schema, so parsing must raise."""
    spec = {'1': 'flag', '2': 'flag'}
    argv = ['-1', '-2', '-3']
    with self.assertRaises(ValueError):
        parse_args(spec, argv)
def test_that_flag_is_false_when_other_flag_passed(self):
    """Passing -l must not set the unrelated flag 'p'."""
    result = parse_args({'p': 'flag', 'l': 'flag'}, ['-l'])
    self.assertFalse(result['p'])
def test_that_a_flag_followed_by_a_positional_is_collected(self):
    """A flag takes no value, so it stays True with a trailing token."""
    result = parse_args({'s': 'flag'}, ['-s', '1'])
    self.assertTrue(result['s'])
def test_that_a_positional_that_follows_int_arg_is_collected(self):
    """After '-i 2' consumes its value, '1' is left as a positional."""
    result = parse_args({'i': 'int'}, ['-i', '2', '1'])
    self.assertEqual(result.positional, ['1'])
def test_that_positional_args_that_follow_a_flag_are_collected(self):
    """A token after a value-less flag lands in the positionals."""
    result = parse_args({'s': 'flag'}, ['-s', '1'])
    self.assertEqual(result.positional, ['1'])
def test_that_positional_args_are_collected(self):
    """Bare tokens are collected even with an empty schema."""
    result = parse_args({}, ['1'])
    self.assertEqual(result.positional, ['1'])
def test_that_unknown_options_are_detected_after_options_that_take_arguments(
        self):
    """An undeclared -l appearing after '-p 3' must raise."""
    spec = {'p': 'int'}
    argv = ['-p', '3', '-l']
    with self.assertRaises(ValueError):
        parse_args(spec, argv)
def test_that_other_letter_works(self):
    """The parser handles arbitrary flag letters, not only -l."""
    result = parse_args({'p': 'flag'}, ['-p'])
    self.assertTrue(result['p'])
def test_that_flag_is_true_if_passed(self):
    """A flag present on the command line parses to True."""
    result = parse_args({'l': 'flag'}, ['-l'])
    self.assertTrue(result['l'])
def test_that_flag_is_false_if_not_passed(self):
    """A flag absent from the command line parses to False."""
    result = parse_args({'l': 'flag'}, [])
    self.assertFalse(result['l'])
def main() -> None:
    """Merge two log files into one and report the elapsed time.

    Paths for the two inputs and the merged output come from the
    command-line arguments.
    """
    args = parse_args()
    log_1_path = Path(args.log1_file)
    log_2_path = Path(args.log2_file)
    output_path = Path(args.merged_log_file)
    t0 = time.time()
    _merge_logs(log_1_path, log_2_path, output_path)
    # BUG FIX: the format spec was "{:0f}" — a typo missing the dot, which
    # printed six decimals with a meaningless zero-pad flag. "{:.0f}" gives
    # the intended whole-second reading.
    print(f"finished in {time.time() - t0:.0f} sec")
def main(argv):
    """Render a dot graph of the database model selected by the CLI options."""
    opts, _ = parse_args(argv[0], argv[1:])
    options = get_options(opts['configfile'])
    warhammerdb = options['classes']['warhammerdb']
    engine = engine_utils.create_engine(opts['dbtype'], options, warhammerdb)
    classes = get_classes_model(options, MetaData())
    generate_dot(engine, classes)
def main():
    """Train or evaluate a model according to the parsed CLI arguments.

    mode == 'train': train from restarting_epoch, checkpointing every
    save_frequency epochs. mode == 'test'/'testtrain': evaluate either one
    model or every checkpoint found in args.reload_dir.
    """
    args = parse_args()
    # seed both RNGs for reproducibility
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    logging.info('Reading dataset metadata')
    train_loader, val_loader = get_data_loaders(args)
    # class weights are taken from the training dataset
    args.classification_weights = train_loader.dataset.classification_weights
    logging.info('Constructing model')
    model, loss, restarting_epoch = get_model_and_loss(args)
    if args.mode == 'train':
        if not args.data_parallel:
            optimizer = model.optimizer()
        else:
            # data-parallel path uses a plain Adam over all parameters
            optimizer = torch.optim.Adam(model.parameters(), lr=args.base_lr)
        for i in range(restarting_epoch, args.epochs):
            train_one_epoch(model, loss, optimizer, train_loader, i + 1, args)
            # checkpoint every save_frequency epochs (epoch index is 1-based
            # in the filename)
            if i % args.save_frequency == 0:
                torch.save(
                    model.state_dict(),
                    os.path.join(args.save,
                                 'model_state_{:02d}.pytar'.format(i + 1)))
    elif args.mode == 'test' or args.mode == 'testtrain':
        if args.mode == 'testtrain':
            # 'testtrain' evaluates on the training set instead of validation
            val_loader = train_loader
        if args.reload_dir is not None:
            # evaluate every checkpoint in reload_dir, in ascending epoch order
            all_saved_models = [
                f for f in os.listdir(args.reload_dir) if f.endswith('.pytar')
            ]
            # filenames look like 'model_state_NN.pytar'; extract NN
            all_indices = [
                f.split('_')[-1].replace('.pytar', '') for f in all_saved_models
            ]
            int_indices = [int(f) for f in all_indices]
            int_indices.sort()
            for epoch in int_indices:
                args.reload = os.path.join(
                    args.reload_dir, 'model_state_{:02d}.pytar'.format(epoch))
                # rebuild the model so it loads the selected checkpoint
                model, loss, restarting_epoch = get_model_and_loss(args)
                test_one_epoch(model, loss, val_loader, epoch, args)
        else:
            test_one_epoch(model, loss, val_loader, 0, args)
    else:
        raise NotImplementedError("Unsupported mode {}".format(args.mode))
def main(argv):
    """Create the model table if needed, then load the CSV data into it."""
    opts, _ = parse_args(argv[0], argv[1:])
    options = get_options(opts['configfile'])
    warhammerdb = options['classes']['warhammerdb']
    engine = engine_utils.create_engine(opts['dbtype'], options, warhammerdb)
    metadata = MetaData(bind=engine)
    # the CSV model module is configured by name and imported dynamically
    csv_model = importlib.import_module(options['classes']['csv_model'])
    table = csv_model.get_classes_model(options, metadata)
    # create the table if it doesn't exist yet
    metadata.create_all(engine)
    load_csv(engine, table, options, opts)
def main():
    """Run the training loop for args.epochs epochs, saving models on exit.

    Optionally runs an initial validation pass, applies linear LR warmup
    over the first 500 iterations, and always attempts to save models in
    the finally block when args.save is set.
    """
    args = arg_parser.parse_args()
    if args.debug:
        # no tensorboard logging in debug runs
        train_logger = None
        val_logger = None
    else:
        train_logger = tensorboard_logger.Logger(
            os.path.join(args.tensorboard_dir, "train"))
        val_logger = tensorboard_logger.Logger(
            os.path.join(args.tensorboard_dir, "val"))
    solver: BaseSolver = args.solver(args, train_logger, val_logger)
    curr_iteration = 1
    try:
        if args.test_first:
            print("Running initial Val")
            solver.reset_epoch()
            solver.run_val()
        starting_lr = solver.adjust_learning_rate()
        while solver.epoch < args.epochs:
            solver.reset_epoch()
            print("Running Train")
            for ii in tqdm.tqdm(range(solver.iterations_per_epoch)):
                if args.use_warmup:
                    # linear LR warmup over the first 500 iterations
                    if curr_iteration <= 500:
                        lr_scale = min(1.0, curr_iteration / 500.0)
                        new_lr = lr_scale * starting_lr
                        for pg in solver.optimizer.param_groups:
                            pg["lr"] = new_lr
                        print("new lr", new_lr)
                    curr_iteration += 1
                # BUG FIX: return value was bound to an unused local
                solver.run_train_iteration()
            print("Running Val")
            solver.run_val()
            solver.epoch += 1
    except Exception:
        # BUG FIX: was a bare `except:` that also swallowed KeyboardInterrupt
        # and SystemExit without re-raising. Catch only Exception so Ctrl-C
        # still propagates — the finally block below saves models either way.
        traceback.print_exc()
    finally:
        if args.save:
            print("Saving models")
            solver.save()
def get_input(app_data):
    """Parse the command-line arguments and record run options in app_data."""
    cli = parse_args()
    app_data['app_args'] = cli
    app_data['mode'] = cli.mode
    app_data['runs'] = int(cli.runs)
    app_data['graph_gen'] = bool(cli.graph_gen)
    app_data['timer'] = cli.timer
    # on/off switches: storage optimization, early frees, pool allocation
    for key in ('optimize_storage', 'early_free', 'pool_alloc'):
        app_data[key] = bool(getattr(cli, key))
def get_input(app_data):
    """Parse the command-line arguments and record run options in app_data."""
    cli = parse_args()
    app_data['app_args'] = cli
    app_data['mode'] = cli.mode
    # threshold / weight parsing is currently disabled:
    # app_data['threshold'] = float(cli.threshold)
    # app_data['weight'] = int(cli.weight)
    app_data['runs'] = int(cli.runs)
    app_data['graph_gen'] = bool(cli.graph_gen)
    app_data['timer'] = cli.timer
    # on/off switches: storage optimization, early frees, pool allocation
    for key in ('optimize_storage', 'early_free', 'pool_alloc'):
        app_data[key] = bool(getattr(cli, key))