Example no. 1
def setup(args):
    """
    Create configs and perform basic setups.
    """
    cfg_file = args['config']
    print('Det2 config file : {}'.format(cfg_file))
    model_weights = args['weights']
    positive_thresh = args['thresh']
    cfg = get_cfg()
    add_tridentnet_config(cfg)
    cfg.merge_from_file(cfg_file)
    # Override the score threshold and model weights with the values passed in args.
    print('Det2 threshold : {}'.format(positive_thresh))
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = positive_thresh
    print('Det2 model loaded from : {}'.format(model_weights))
    cfg.MODEL.WEIGHTS = model_weights
    cfg.freeze()
    # register_coco_instances("Ships", {},"ships-lite.json","")
    # register_coco_instances("Ships", {},"ships-lite.json","Ships")
    # register_coco_instances("Ships", {},"config/ships-lite.json","Ships")
    # metadata = MetadataCatalog.get("Ships")
    
    classes_path = args['classes_path']
    with open(classes_path) as f:
        class_names = f.readlines()
    class_names = [c.strip() for c in class_names]

    return cfg, class_names
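
This first example returns both the frozen config and the class names, so it plugs straight into single-image inference. A minimal usage sketch with a DefaultPredictor; the args values and image path below are placeholders for illustration, not taken from the original source:

import cv2
from detectron2.engine import DefaultPredictor

args = {
    'config': 'configs/tridentnet_fast_R_50_C4_1x.yaml',  # placeholder config path
    'weights': 'model_final.pth',                         # placeholder checkpoint
    'thresh': 0.5,
    'classes_path': 'classes.txt',                        # one class name per line
}
cfg, class_names = setup(args)
predictor = DefaultPredictor(cfg)
outputs = predictor(cv2.imread('example.jpg'))  # BGR image, as DefaultPredictor expects
print([class_names[i] for i in outputs['instances'].pred_classes.tolist()])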
Example no. 2
def setup(args):
    """
    Create configs and perform basic setups.
    """
    cfg = get_cfg()
    add_tridentnet_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    default_setup(cfg, args)
    return cfg
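
Example no. 2 is the stock TridentNet setup: args is normally produced by detectron2's default_argument_parser and the function is called from a launch()-driven main. A sketch of that surrounding scaffolding, mirroring the usual train_net.py template rather than anything shown in this snippet:

from detectron2.engine import default_argument_parser, launch

def main(args):
    cfg = setup(args)
    # ... build a trainer here and train or evaluate with cfg ...
    return cfg

if __name__ == '__main__':
    args = default_argument_parser().parse_args()
    launch(main, args.num_gpus, num_machines=args.num_machines,
           machine_rank=args.machine_rank, dist_url=args.dist_url, args=(args,))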
Example no. 3
def setup(args):
    """
    Create configs and perform basic setups.
    """

    cfg = get_cfg()
    add_tridentnet_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)

    # if args.eval_only:
    #     cfg.MODEL.WEIGHTS = "/root/detectron2/projects/TridentNet/log_80_20/model_0029999.pth"
    cfg.freeze()
    default_setup(cfg, args)
    return cfg
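
The commented-out block hard-codes evaluation weights; since merge_from_list is already in place, the same override can be passed through args.opts as KEY VALUE pairs instead of editing the function. A sketch using a plain Namespace, where the config and checkpoint paths are placeholder assumptions:

from argparse import Namespace

args = Namespace(
    config_file='configs/tridentnet_fast_R_101_C4_3x.yaml',  # placeholder config
    opts=['MODEL.WEIGHTS', 'log_80_20/model_0029999.pth'],   # override without editing setup()
    eval_only=True,
)
cfg = setup(args)
print(cfg.MODEL.WEIGHTS)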
Example no. 4
def setup(args):
    """
    Create configs and perform basic setups.
    """
    cfg = get_cfg()
    add_tridentnet_config(cfg)
    cfg.merge_from_file(
        "/data/mostertrij/tridentnet/detectron2/projects/TridentNet/configs/my_tridentnet_fast_R_101_C4_3x.yaml"
    )
    DATASET_NAME = "LGZ_v10"
    cfg.DATASETS.TRAIN = (f"{DATASET_NAME}_train", )
    cfg.DATASETS.VAL = (f"{DATASET_NAME}_val", )
    cfg.DATASETS.TEST = (f"{DATASET_NAME}_test", )
    #cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    default_setup(cfg, args)
    return cfg
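
This variant pins its own config file and refers to the LGZ_v10 splits purely by name, so those datasets must already be registered before setup() is called. One way to do that, assuming COCO-format annotations; the file and directory paths below are placeholders, not from the original project:

from detectron2.data.datasets import register_coco_instances

for split in ('train', 'val', 'test'):
    register_coco_instances(
        f'LGZ_v10_{split}', {},
        f'/path/to/LGZ_v10/annotations_{split}.json',  # placeholder annotation file
        '/path/to/LGZ_v10/images',                     # placeholder image root
    )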
Example no. 5
def setup(args):
    """
    Create configs and perform basic setups.
    """
    # cfg = get_cfg()
    dataset_path = '/raid/cesar_workspace/cesar_workspace/Object_Detection/Detectron2/detectron2/detectron2/data/Datasets/'
    train = dataset_path + "up_trees_train_2021"
    val = dataset_path + "up_trees_val_2021"
    train_dataset = train  # cfg.DATASETS.TRAIN
    val_dataset = val  # cfg.DATASETS.TEST
    # print(cfg.DATASETS.TRAIN, 'eee')
    dic_marks_path = dataset_path + "up_trees_labels.json"
    datasets_dic = {'train': train_dataset, 'val': val_dataset}
    dic_marks = {'0': 'up_tree'}
    #     with open(dic_marks_path, 'w') as out:
    #         json.dump(dic_marks, out)
    with open(dic_marks_path, 'r') as out:
        dic_marks = json.load(out)
    # cat_ids = {label:key for key, label in dic_marks.items()}
    classes = [label for key, label in dic_marks.items()]

    def get_board_dicts(imgdir):
        json_file = imgdir + '.json'  # Fetch the json file
        print(json_file)
        with open(json_file) as f:
            dataset_dicts = json.load(f)
        for i in dataset_dicts:
            filename = i["file_name"]
            for j in i["annotations"]:
                # Setting the required Box Mode
                j["bbox_mode"] = BoxMode.XYWH_ABS
                j["category_id"] = int(j["category_id"])
        return dataset_dicts

    # Registering the Dataset

    for d in ['val', 'train']:
        # print(datasets_dic[d])
        dataset_name = os.path.basename(datasets_dic[d])
        print(dataset_name)

        # The default argument d=d binds the current split name, so each lambda
        # loads its own dataset rather than the loop's final value.
        DatasetCatalog.register(dataset_name,
                                lambda d=d: get_board_dicts(datasets_dic[d]))
        MetadataCatalog.get(dataset_name).set(thing_classes=classes)

    train_name = os.path.basename(datasets_dic['train'])
    val_name = os.path.basename(datasets_dic['val'])
    print(train_name, val_name)
    board_metadata = MetadataCatalog.get(train_name)
    dataset_dicts = get_board_dicts(train_dataset)
    n_imgs = len(dataset_dicts)
    dataset_dicts = get_board_dicts(val_dataset)
    n_imgs_val = len(dataset_dicts)
    print('Number of images in the training set:', n_imgs)
    cfg = get_cfg()
    add_tridentnet_config(cfg)

    #     cfg.DATALOADER.NUM_WORKERS = 2

    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)

    cfg.DATASETS.TRAIN = (train_name, )
    cfg.DATASETS.TEST = (val_name, )
    cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 128

    # Evaluate on the validation set roughly once per epoch (iterations per epoch = images / batch size).
    cfg.TEST.EVAL_PERIOD = (n_imgs // cfg.SOLVER.IMS_PER_BATCH)

    cfg.MODEL.ROI_HEADS.NUM_CLASSES = len(classes)

    path = '/raid/cesar_workspace/cesar_workspace/Object_Detection/Detectron/detectron2/ImageNetPretrained/MSRA/'
    os.makedirs(path, exist_ok=True)
    backbone = os.path.basename(cfg.MODEL.WEIGHTS)
    print('Number of images in the training / validation sets:', n_imgs, n_imgs_val)
    backbone += '.pkl' if '.pkl' not in backbone else ''
    weight = path + backbone
    print(weight)
    if not os.path.isfile(weight):
        print("Downloading ImageNet weights")
        url_weights = weights_catalog[backbone] + backbone
        urllib.request.urlretrieve(url_weights, weight)

    cfg.MODEL.WEIGHTS = weight
    print(weight)

    # cfg.OUTPUT_DIR = './output_%s_X-101_b/'%accr
    print(cfg, '~~ I dedicate this to Shadow Moon ~~')

    default_setup(cfg, args)
    cfg.freeze()
    return cfg
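
Example no. 5 registers the tree datasets and fetches the ImageNet backbone itself, so the returned cfg can drive a standard training loop directly. A minimal driver sketch using detectron2's DefaultTrainer; the TridentNet project normally supplies its own Trainer subclass, so this stand-in is an assumption:

from detectron2.engine import DefaultTrainer, default_argument_parser

if __name__ == '__main__':
    args = default_argument_parser().parse_args()
    cfg = setup(args)
    trainer = DefaultTrainer(cfg)
    trainer.resume_or_load(resume=args.resume)
    trainer.train()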