Example #1
import torch
from torch.utils.data import DataLoader
# COCODataset and Model are this project's own dataset and Mask R-CNN modules;
# their import paths, as well as the config values (train_data_dir, val_data_dir,
# batch_size, n_classes, mode, scales, p4_box_size, device, save_path,
# load_weight, train_part, train_bn), are defined elsewhere in the script.
##########
# Prepare
##########
# Get train data
train_data = COCODataset(train_data_dir, img_shape)
train_loader = DataLoader(train_data, batch_size=batch_size, shuffle=True)

# Get val data
val_data = COCODataset(val_data_dir, img_shape)
val_loader = DataLoader(val_data, batch_size=batch_size, shuffle=False)

# Load
mrcnn = Model.MRCNN(img_shape,
                    n_classes=n_classes,
                    mode=mode,
                    pretrain=True,
                    scales=scales,
                    p4_box_size=p4_box_size)
# mrcnn.half()
mrcnn.to(device)
# mrcnn = torch.nn.DataParallel(mrcnn, device_ids=[2, 3])
if load_weight:
    mrcnn.load_state_dict(torch.load(save_path, map_location=device))

min_loss = 100
patience_now = 0

########
# Train
########
Model.set_trainable(mrcnn, train_part, train_bn=train_bn)
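
The excerpt stops right after Model.set_trainable, so the optimization loop itself is not shown. Below is a minimal sketch of such a loop, assuming a standard PyTorch setup over the variables prepared above (train_loader, val_loader, min_loss, patience_now, save_path); n_epochs, patience and compute_loss() are hypothetical placeholders, since the loss interface of Model.MRCNN is not visible in this excerpt.

optimizer = torch.optim.SGD(
    [p for p in mrcnn.parameters() if p.requires_grad],
    lr=0.001, momentum=0.9, weight_decay=0.0001)

for epoch in range(n_epochs):  # n_epochs: hypothetical config value
    # Train for one epoch.
    mrcnn.train()
    for images, class_ids, rois, boxs in train_loader:
        images = images.to(torch.float32).to(device)
        class_ids = class_ids.to(torch.int32).to(device)
        rois = rois.to(torch.float32).to(device)
        boxs = boxs.to(torch.float32).to(device)

        loss = compute_loss(mrcnn, images, class_ids, rois, boxs)  # placeholder
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

    # Validate and early-stop on the mean validation loss.
    mrcnn.eval()
    val_losses = []
    with torch.no_grad():
        for images, class_ids, rois, boxs in val_loader:
            images = images.to(torch.float32).to(device)
            class_ids = class_ids.to(torch.int32).to(device)
            rois = rois.to(torch.float32).to(device)
            boxs = boxs.to(torch.float32).to(device)
            val_losses.append(compute_loss(mrcnn, images, class_ids, rois, boxs).item())
    val_loss = sum(val_losses) / len(val_losses)

    if val_loss < min_loss:
        min_loss = val_loss
        patience_now = 0
        torch.save(mrcnn.state_dict(), save_path)
    else:
        patience_now += 1
        if patience_now >= patience:  # patience: hypothetical config value
            break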
Example #2
import torch
from torch.utils.data import DataLoader
# COCODataset, Model and Observer come from this project's own modules.
# A torch.device addresses a single GPU; multi-GPU use goes through DataParallel.
device = torch.device("cuda:2" if torch.cuda.is_available() else "cpu")
# device = torch.device("cuda:3" if torch.cuda.is_available() else "cpu")
# device = torch.device("cpu")
# save_path = r'/home/yuruiqi/PycharmProjects/Mask_RCNN/save/try_coco2.pkl'
save_path = r'/home/yuruiqi/PycharmProjects/Mask_RCNN/save/try_coco.pkl'
# test_data_dir = r'/home/yuruiqi/PycharmProjects/COCOData_mrcnn/train2017_cat_dog'
test_data_dir = r'/home/yuruiqi/PycharmProjects/COCOData_mrcnn/train_val2017_cat_dog'
# test_data_dir = r'/home/yuruiqi/PycharmProjects/COCOData_mrcnn/val2017_cat_dog'
img_shape = [800, 800]

# Get test data
test_data = COCODataset(test_data_dir, img_shape)
test_loader = DataLoader(test_data, batch_size=16, shuffle=True)

# Load
mrcnn = Model.MRCNN(img_shape, n_classes=2, mode='inference', pretrain=False,
                    scales=(32, 64, 128, 256, 512), p4_box_size=224.0)
mrcnn.to(device)
mrcnn.load_state_dict(torch.load(save_path, map_location=device))

mrcnn.eval()
test_loss_list = []
with torch.no_grad():
    for images, class_ids, rois, boxs in test_loader:
        # Data
        images = images.to(torch.float32).to(device)
        class_ids = class_ids.to(torch.int32).to(device)
        rois = rois.to(torch.float32).to(device)
        boxs = boxs.to(torch.float32).to(device)

        observer = Observer(images, boxs, class_ids, rois, '/home/yuruiqi/visualization')
        # observer.show_dataset()
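
The device string originally used in this example ("cuda:2, 3") names two GPUs at once, which torch.device does not accept; one device is addressed and multi-GPU execution is handled by DataParallel, as the commented-out line in Example #1 hints. A minimal sketch, assuming at least four visible GPUs so that indices 2 and 3 exist:

device = torch.device("cuda:2" if torch.cuda.is_available() else "cpu")
mrcnn.to(device)  # the module must live on device_ids[0] before wrapping
if torch.cuda.is_available() and torch.cuda.device_count() >= 4:
    # Replicates the module onto GPUs 2 and 3 and splits each batch between them;
    # outputs are gathered back on device_ids[0] (cuda:2).
    mrcnn = torch.nn.DataParallel(mrcnn, device_ids=[2, 3])

Note that the state dict of a DataParallel-wrapped model stores its keys with a "module." prefix, so saving mrcnn.module.state_dict() keeps the checkpoint loadable by the unwrapped model.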