# Example #1
        head = k[:7]
        if head == 'module.':
            name = k[7:] # remove `module.`
        else:
            name = k
        new_state_dict[name] = v
    net.load_state_dict(new_state_dict)

cudnn.benchmark = True

# Freeze every existing parameter so that only the layers added next
# (the re-initialized heads) will receive gradient updates.
for param in net.parameters():
    param.requires_grad_(False)

# Re-initialize the detection heads so they train from scratch on the new task.
# BUG FIX: the bbox head previously used fpn_num=5 while the class head used 3;
# both heads must cover the same number of FPN levels, and every other head
# construction in this file uses fpn_num=3 — unified to 3.
net.ClassHead = net._make_class_head(fpn_num=3, inchannels=cfg['out_channel'])
net.BboxHead = net._make_bbox_head(fpn_num=3, inchannels=cfg['out_channel'])
# Dump every parameter's name and shape (the freshly created head parameters
# default to requires_grad=True, so only they will be trained).
for name, param in net.named_parameters():
    print(name, param.shape)

# Collect the parameters that remain trainable; these are what the optimizer
# should be given to update.
Plist = [p for p in net.parameters() if p.requires_grad]

# With the parameter set finalized, move the network to GPU (data-parallel
# across devices) when more than one GPU is available and GPU training is on.
if num_gpu > 1 and gpu_train:
    net = torch.nn.DataParallel(net).cuda()
# Example #2
# Hyper-parameters and paths taken from the CLI arguments.
initial_lr = args.lr
gamma = args.gamma
training_dataset = args.training_dataset
validation_dataset = args.validation_dataset
save_folder = args.save_folder

net = RetinaFace(cfg=cfg)
print("Printing net...")

# Freeze the first 219 parameters (the pretrained backbone); every parameter
# from index 219 onward is left trainable.
for i, (name, params) in enumerate(net.named_parameters()):
    if i < 219:
        params.requires_grad = False
    print(i, name, params.requires_grad)

# BUG FIX: these three head re-initializations used to sit INSIDE the loop
# above, so they executed once per parameter and mutated the module while its
# named_parameters() generator was being iterated.  They belong here, after
# the freeze pass, and run exactly once.  The fresh heads are created with
# requires_grad=True, so only they will be trained.
net.ClassHead = net._make_class_head(fpn_num=3,
                                     inchannels=cfg['out_channel'])
net.BboxHead = net._make_bbox_head(fpn_num=3,
                                   inchannels=cfg['out_channel'])
net.LandmarkHead = net._make_landmark_head(fpn_num=3,
                                           inchannels=cfg['out_channel'])

# Debug helper: dump parameter index 219 — the first one left trainable by the
# earlier `i < 219` freeze — so its weights can be eyeballed before/after the
# head re-initialization.
print("printing the previous ones 220 pe jo hain")
i = 0
for name, params in net.named_parameters():
    i += 1
    if i - 1 == 219:
        print(i - 1, name, params)
print(
    "===================================================================\nprinting the new ones ones 220 pe jo hain"
)