Esempio n. 1
0
def run(argv_str=None):
    """Build detectron2 args from the project config and launch training.

    Parses an empty argv so every value comes from the config file, then
    spawns workers via detectron2's ``launch``.
    """
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    # Select the sub-config matching the chosen command.
    myargs.config = getattr(myargs.config, cli_args.command)

    # Start from detectron2 defaults, then overlay config-file values.
    det_args = default_argument_parser().parse_args(args=[])
    det_args = config2args(myargs.config.args, det_args)
    det_args.opts += ['OUTPUT_DIR', cli_args.outdir + '/detectron2']
    print("Command Line Args:", det_args)

    # Presumably strips non-picklable state from myargs before multi-process launch.
    myargs = D2Utils.unset_myargs_for_multiple_processing(
        myargs, num_gpus=det_args.num_gpus)

    launch(
        main,
        det_args.num_gpus,
        num_machines=det_args.num_machines,
        machine_rank=det_args.machine_rank,
        dist_url=det_args.dist_url,
        args=(det_args, myargs),
    )
Esempio n. 2
0
def run(argv_str=None):
    """Set up config, output directory, and logging, then run NAO training."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    # Optionally stage datasets (e.g. on ModelArts) before training starts.
    if hasattr(myargs.config, 'datasets'):
        prepare_dataset(myargs.config.datasets, cfg=myargs.config)

    # Parse with an empty argv, then overlay values from the config file.
    nao_args = config2args(myargs.config.args, build_parser().parse_args([]))

    out_dir = os.path.join(myargs.args.outdir, 'NAO')
    os.makedirs(out_dir, exist_ok=True)
    nao_args.output_dir = out_dir

    logging.basicConfig(stream=sys.stdout,
                        level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d %I:%M:%S %p')

    main(nao_args, myargs)
Esempio n. 3
0
def run(argv_str=None):
    """Configure and launch a robust_darts search run."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    if hasattr(myargs.config, 'datasets'):
        prepare_dataset(myargs.config.datasets, cfg=myargs.config)

    save_dir = os.path.join(myargs.args.outdir, 'robust_darts')
    helper = Helper(save_dir=save_dir, args_list=[])
    darts_args = config2args(myargs.config.args, helper.config)
    # NOTE(review): cutout is forced off whenever drop-path is disabled —
    # confirm this coupling is intended.
    if darts_args.drop_path_prob == 0:
        darts_args.cutout = False

    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d %I:%M:%S %p')
    # Route the root logger through the handlers already attached to myargs.
    logging.getLogger().handlers = myargs.logger.handlers

    search_space = spaces_dict[darts_args.space]
    main(search_space, args=darts_args, helper=helper, myargs=myargs)
Esempio n. 4
0
def run(argv_str=None):
    """Parse the config into the CLI namespace and hand off to build_start."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    # Overlay the whole sub-config directly onto the parsed CLI namespace.
    cfg = config2args(myargs.config, cli_args)
    build_start(cfg=cfg, myargs=myargs)
Esempio n. 5
0
def run(argv_str=None):
  """Build args from the config and run main().

  Relies on a module-level ``parser`` for the argument definitions.
  """
  from template_lib.utils.config import parse_args_and_setup_myargs, config2args
  from template_lib.utils.modelarts_utils import prepare_dataset

  script = os.path.relpath(__file__, os.getcwd())
  cli_args, myargs, _ = parse_args_and_setup_myargs(
      argv_str, run_script=script, start_tb=False)
  myargs.args = cli_args
  myargs.config = getattr(myargs.config, cli_args.command)

  if hasattr(myargs.config, 'datasets'):
    prepare_dataset(myargs.config.datasets, cfg=myargs.config)

  # NOTE(review): `parser` is not defined in this function — it must exist at
  # module level; confirm.
  run_args = config2args(myargs.config.args, parser.parse_args([]))

  # Resolve the configured (relative) path under the experiment output dir.
  run_args.path = os.path.join(myargs.args.outdir, run_args.path)
  main(run_args, myargs)
Esempio n. 6
0
def run(argv_str=None):
  """Build test options from the config and run main()."""
  from template_lib.utils.config import parse_args_and_setup_myargs, config2args
  from template_lib.utils.modelarts_utils import prepare_dataset

  script = os.path.relpath(__file__, os.getcwd())
  cli_args, myargs, _ = parse_args_and_setup_myargs(
      argv_str, run_script=script, start_tb=False)
  myargs.args = cli_args
  myargs.config = getattr(myargs.config, cli_args.command)

  if hasattr(myargs.config, 'datasets'):
    prepare_dataset(myargs.config.datasets, cfg=myargs.config)

  # Defaults come from TestOptions; config-file values override them.
  test_opt = TestOptions().parse(print_options=False)
  test_opt = config2args(myargs.config.args, test_opt)
  main(test_opt, myargs)
Esempio n. 7
0
def run(argv_str=None):
    """Convert a dataset to COCO-style annotations, dispatching on args.dataset."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args

    cli_args, myargs, _ = parse_args_and_setup_myargs(argv_str, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    conv_args = config2args(myargs.config, parse_args())

    # Expand ~ so downstream file operations see real paths.
    conv_args.datadir = os.path.expanduser(conv_args.datadir)
    conv_args.outdir = os.path.expanduser(conv_args.outdir)

    if conv_args.dataset == "cityscapes_instance_only":
        convert_cityscapes_instance_only(conv_args.datadir, conv_args.outdir)
    elif conv_args.dataset == "cocostuff":
        convert_coco_stuff_mat(conv_args.datadir, conv_args.outdir)
    else:
        print("Dataset not supported: %s" % conv_args.dataset)
Esempio n. 8
0
def run1(argv_str=None):
  """Prepare args from the config and delegate to run() with a plain dict."""
  from template_lib.utils.config import parse_args_and_setup_myargs, config2args
  from template_lib.utils.modelarts_utils import prepare_dataset

  script = os.path.relpath(__file__, os.getcwd())
  cli_args, myargs, _ = parse_args_and_setup_myargs(
      argv_str, run_script=script, start_tb=False)
  myargs.args = cli_args
  myargs.config = getattr(myargs.config, cli_args.command)

  # prepare_dataset(myargs.config.dataset)

  # Parse an empty argv so all values come from the config file.
  gan_args = config2args(myargs.config, prepare_parser().parse_args([]))
  gan_args.data_root = os.path.expanduser(gan_args.data_root)

  print(gan_args)
  run(vars(gan_args), stdout=myargs.stdout)
Esempio n. 9
0
def run(argv_str=None):
    """Set up args from the config (with a random-seed fallback) and run main()."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    if hasattr(myargs.config, 'datasets'):
        prepare_dataset(myargs.config.datasets, cfg=myargs.config)

    # Note: real argv is parsed here (no []), unlike the other run() variants.
    run_args = build_parser().parse_args()
    # Draw a fresh seed when none (or a negative one) was supplied.
    if run_args.rand_seed is None or run_args.rand_seed < 0:
        run_args.rand_seed = random.randint(1, 100000)

    run_args = config2args(config=myargs.config.args, args=run_args)
    run_args.save_dir = os.path.join(cli_args.outdir, '201')
    main(run_args, myargs)
Esempio n. 10
0
def run(argv_str=None):
    """Build a small out/path/n_worker namespace from the config and run main()."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    if hasattr(myargs.config, 'datasets'):
        prepare_dataset(myargs.config.datasets, cfg=myargs.config)

    # argv is deliberately empty: every value is taken from the config file.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--out', type=str)
    arg_parser.add_argument('--n_worker', type=int, default=8)
    arg_parser.add_argument('--path', type=str)
    ds_args = config2args(myargs.config.args, arg_parser.parse_args([]))

    main(ds_args, myargs)
Esempio n. 11
0
def main(myargs):
    """Train a maskrcnn_benchmark detection model driven by ``myargs.config``.

    An empty argv is parsed so every option comes from ``config2args``.
    Handles optional multi-GPU (torch.distributed) setup, merges and freezes
    the global ``cfg``, trains, and runs a final test unless ``skip_test``.
    """
    parser = argparse.ArgumentParser(description="PyTorch Object Detection Training")
    parser.add_argument(
        "--config-file",
        default="",
        metavar="FILE",
        help="path to config file",
        type=str,
    )
    parser.add_argument("--local_rank", type=int, default=0)
    parser.add_argument(
        "--skip-test",
        dest="skip_test",
        help="Do not test the final model",
        action="store_true",
    )
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )

    # Parse an empty argv: all values are supplied through the config file.
    args = parser.parse_args([])
    args = config2args(myargs.config, args)
    # NOTE(review): OUTPUT_DIR is assigned before cfg.merge_from_file below,
    # so the config file could overwrite it — confirm this ordering is intended.
    cfg.OUTPUT_DIR = os.path.join(myargs.args.outdir, 'maskrcnn_benchmark')

    # WORLD_SIZE is set by the distributed launcher; default to a single GPU.
    num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
    args.distributed = num_gpus > 1

    if args.distributed:
        # Presumably prepares myargs for use across worker processes.
        myargs = setup_myargs_for_multiple_processing(myargs)
        torch.cuda.set_device(args.local_rank)
        torch.distributed.init_process_group(
            backend="nccl", init_method="env://"
        )
        synchronize()

    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # Apply extra private overrides when present on the namespace.
    if 'opts_private' in args:
        cfg.merge_from_list(args.opts_private)
    cfg.freeze()

    output_dir = cfg.OUTPUT_DIR
    if output_dir:
        mkdir(output_dir)

    logger = setup_logger("maskrcnn_benchmark", output_dir, get_rank())
    logger.info("Using {} GPUs".format(num_gpus))
    logger.info(args)

    if comm.is_main_process():
        # Note: some of our scripts may expect the existence of
        # config.yaml in output directory
        path = os.path.join(output_dir, "config.yaml")
        with open(path, "w") as f:
            f.write(cfg.dump())
        logger.info("Full config saved to {}".format(os.path.abspath(path)))

    logger.info("Collecting env info (might take some time)")
    logger.info("\n" + collect_env_info())

    logger.info("Loaded configuration file {}".format(args.config_file))
    with open(args.config_file, "r") as cf:
        config_str = "\n" + cf.read()
        logger.info(config_str)
    logger.info("Running with config:\n{}".format(cfg))

    model = train(cfg, args.local_rank, args.distributed, myargs=myargs)

    if not args.skip_test:
        test(cfg, model, args.distributed)

    # Sync results (presumably back to ModelArts/OBS) and block until done.
    modelarts_sync_results(args=myargs.args, myargs=myargs, join=True, end=True)
Esempio n. 12
0
def run(argv_str=None):
    """Build Progressive-GAN training args from the config and run main()."""
    from template_lib.utils.config import parse_args_and_setup_myargs, config2args
    from template_lib.utils.modelarts_utils import prepare_dataset

    script = os.path.relpath(__file__, os.getcwd())
    cli_args, myargs, _ = parse_args_and_setup_myargs(
        argv_str, run_script=script, start_tb=False)
    myargs.args = cli_args
    myargs.config = getattr(myargs.config, cli_args.command)

    if hasattr(myargs.config, 'datasets'):
        prepare_dataset(myargs.config.datasets, cfg=myargs.config)

    pg_parser = argparse.ArgumentParser(description='Progressive Growing of GANs')
    pg_parser.add_argument('--path', type=str, help='path of specified dataset')
    pg_parser.add_argument('--phase', type=int, default=600_000,
                           help='number of samples used for each training phases')
    pg_parser.add_argument('--lr', default=0.001, type=float,
                           help='learning rate')
    pg_parser.add_argument('--sched', action='store_true',
                           help='use lr scheduling')
    pg_parser.add_argument('--init_size', default=8, type=int,
                           help='initial image size')
    pg_parser.add_argument('--max_size', default=1024, type=int,
                           help='max image size')
    pg_parser.add_argument('--ckpt', default=None, type=str,
                           help='load from previous checkpoints')
    pg_parser.add_argument('--no_from_rgb_activate', action='store_true',
                           help='use activate in from_rgb (original implementation)')
    pg_parser.add_argument('--mixing', action='store_true',
                           help='use mixing regularization')
    pg_parser.add_argument('--loss', type=str, default='wgan-gp',
                           choices=['wgan-gp', 'r1'],
                           help='class of gan loss')

    # Parse an empty argv, then fill all values from the config file.
    gan_args = config2args(myargs.config.args, pg_parser.parse_args([]))

    main(gan_args, myargs)
def main(myargs):
    """Run maskrcnn_benchmark inference on every dataset in ``cfg.DATASETS.TEST``.

    An empty argv is parsed so options come from ``config2args``. Optionally
    initializes distributed inference, merges and freezes ``cfg``, loads model
    weights via DetectronCheckpointer, and evaluates each test split, writing
    results under ``cfg.OUTPUT_DIR/inference/<dataset_name>``.
    """
    parser = argparse.ArgumentParser(
        description="PyTorch Object Detection Inference")
    parser.add_argument(
        "--config-file",
        default=
        "/private/home/fmassa/github/detectron.pytorch_v2/configs/e2e_faster_rcnn_R_50_C4_1x_caffe2.yaml",
        metavar="FILE",
        help="path to config file",
    )
    parser.add_argument("--local_rank", type=int, default=0)
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )

    # Parse an empty argv: all values are supplied through the config file.
    args = parser.parse_args([])
    args = config2args(myargs.config, args)

    # WORLD_SIZE is set by the distributed launcher; default to a single GPU.
    num_gpus = int(
        os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
    distributed = num_gpus > 1

    if distributed:
        torch.cuda.set_device(args.local_rank)
        torch.distributed.init_process_group(backend="nccl",
                                             init_method="env://")
        synchronize()

    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # Redirect all outputs into the experiment directory before freezing.
    cfg.OUTPUT_DIR = myargs.args.outdir
    cfg.freeze()

    output_dir = cfg.OUTPUT_DIR
    if output_dir:
        mkdir(output_dir)

    save_dir = output_dir
    logger = setup_logger("maskrcnn_benchmark", save_dir, get_rank())
    logger.info("Using {} GPUs".format(num_gpus))
    logger.info(cfg)

    logger.info("Collecting env info (might take some time)")
    logger.info("\n" + collect_env_info())

    model = build_detection_model(cfg)
    model.to(cfg.MODEL.DEVICE)

    # Restore trained weights; the returned checkpoint dict is not needed here.
    checkpointer = DetectronCheckpointer(cfg, model, save_dir=output_dir)
    _ = checkpointer.load(cfg.MODEL.WEIGHT)

    # Evaluate boxes always; masks/keypoints only when the model has those heads.
    iou_types = ("bbox", )
    if cfg.MODEL.MASK_ON:
        iou_types = iou_types + ("segm", )
    if cfg.MODEL.KEYPOINT_ON:
        iou_types = iou_types + ("keypoints", )
    output_folders = [None] * len(cfg.DATASETS.TEST)
    dataset_names = cfg.DATASETS.TEST
    if cfg.OUTPUT_DIR:
        # One inference sub-folder per test dataset.
        for idx, dataset_name in enumerate(dataset_names):
            output_folder = os.path.join(cfg.OUTPUT_DIR, "inference",
                                         dataset_name)
            mkdir(output_folder)
            output_folders[idx] = output_folder
    data_loaders_val = make_data_loader(cfg,
                                        is_train=False,
                                        is_distributed=distributed)
    for output_folder, dataset_name, data_loader_val in zip(
            output_folders, dataset_names, data_loaders_val):
        inference(
            model,
            data_loader_val,
            dataset_name=dataset_name,
            iou_types=iou_types,
            box_only=False if cfg.MODEL.RETINANET_ON else cfg.MODEL.RPN_ONLY,
            device=cfg.MODEL.DEVICE,
            expected_results=cfg.TEST.EXPECTED_RESULTS,
            expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,
            output_folder=output_folder,
        )
        # Wait for all ranks before starting the next dataset.
        synchronize()