Code Example #1
File: dense201.py  Project: hyb1234hi/emci
    def __init__(self, pretrained=True, num_classes=212, ckpt=None, **kwargs):
        super(Dense201, self).__init__(num_init_features=64,
                                       growth_rate=32,
                                       block_config=(6, 12, 48, 32),
                                       num_classes=num_classes,
                                       **kwargs)
        if pretrained:
            pretrained_dict = dict(
                model_zoo.load_url(densenet.model_urls['densenet201']))
            print(pretrained_dict.keys())
            # drop the 1000-way ImageNet classifier head; this model uses
            # num_classes outputs instead
            del pretrained_dict['classifier.weight']
            del pretrained_dict['classifier.bias']
            model_dict = self.state_dict()
            # old torchvision checkpoints store keys such as
            # 'denselayer1.norm.1.weight'; rename them to the
            # 'denselayer1.norm1.weight' layout expected by the current modules
            pattern = re.compile(
                r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$'
            )
            for key in list(pretrained_dict.keys()):
                res = pattern.match(key)
                if res:
                    new_key = res.group(1) + res.group(2)
                    pretrained_dict[new_key] = pretrained_dict[key]
                    del pretrained_dict[key]

            model_dict.update(pretrained_dict)
            self.load_state_dict(model_dict)
        self.sigmoid = nn.Sigmoid()
        if ckpt is not None:
            saver = Saver(ckpt, 'model')
            saver.load_last_ckpt(self)
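A minimal usage sketch for the class above (not part of the original file); the 224x224 input resolution and the random test tensor are assumptions:

import torch

# Hypothetical usage: Dense201 and num_classes=212 come from the snippet above,
# the input size is an assumption.
net = Dense201(pretrained=True, num_classes=212)
net.eval()
with torch.no_grad():
    out = net(torch.randn(1, 3, 224, 224))
print(out.shape)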
Code Example #2
#PATH = './ckpt'
a = AlignDataset(
    '/data/icme/data/picture',
    '/data/icme/data/landmark',
    '/data/icme/data/pred_landmark',
    '/data/icme/valid',
    Align(
        'cache/mean_landmarks.pkl',
        (224, 224),
        (0.15, 0.1),
    ),  # idx=list(range(51, 66))),
    phase='eval',
    # ldmk_ids=list(range(51, 66))
)
#Saver.dir=PATH
saver = Saver('backup', 'model')
current = None
net.eval()

epoch_size = len(a)
metrics = Metrics().add_nme().add_auc()
model_name = 'align-jitter227-model-120000.pth'
saver.load(net, model_name)

all_pr = []
all_gt = []
save_dir = '/data/icme/data/pred_landmark_align'
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
batch_iterator = iter(DataLoader(a, batch_size=1, shuffle=False,
                                 num_workers=4))
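The excerpt stops right after the iterator is built. A minimal sketch of how the evaluation loop might continue, assuming each batch yields an (image, landmarks) pair and the model runs on CPU here; none of these lines appear in the original snippet:

with torch.no_grad():
    for image, gt in batch_iterator:                # assumed batch layout: (image, landmarks)
        pr = net(image)                             # forward pass with the model in eval mode
        all_pr.append(pr.squeeze(0).cpu().numpy())  # batch_size=1, so drop the batch dimension
        all_gt.append(gt.squeeze(0).cpu().numpy())
# all_pr / all_gt can then drive the NME/AUC metrics and be written out under save_dir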
Code Example #3
File: train.py  Project: hyb1234hi/emci
    #                  '/data/icme/data/landmark',
    #                  '/data/icme/train',
    #                  Align('./cache/mean_landmarks.pkl', (224, 224), (0.2, 0.1),
    #                        ),# idx=list(range(51, 66))),
    #                  flip=True,
    #                  max_jitter=3,
    #                  max_radian=0
    #                  # ldmk_ids=list(range(51, 66))
    #                  )
    batch_iterator = iter(DataLoader(a, batch_size=cfg.batch_size, shuffle=True, num_workers=4))

    criterion = loss.get_criterion(cfg.loss)
    # criterion = WingLoss(10, 2)
    optimizer = optim.Adam(net.parameters(), lr=0, weight_decay=cfg.weight_decay)
    # optimizer = optim.SGD(net.parameters(), lr=0, weight_decay=cfg.weight_decay, momentum=0.9)
    saver = Saver(os.path.join(cfg.root, 'ckpt'), 'model', 10)
    last = saver.last_ckpt()
    # resume from the iteration encoded in the checkpoint filename,
    # e.g. 'model-120000.pth' -> 120000
    start_iter = 0 if last is None else int(last.split('.')[0].split('-')[-1])
    if start_iter > 0:
        saver.load(net, last)
        lr_gen.set_global_step(start_iter)

    running_loss = 0.0
    batch_size = cfg.batch_size
    epoch_size = len(a) // batch_size
    epoch = start_iter // epoch_size + 1
    for iteration in range(start_iter, cfg.max_iter + 1):
        if iteration % epoch_size == 0:
            # create batch iterator
            batch_iterator = iter(
                DataLoader(a, batch_size, shuffle=True, num_workers=4))
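The loop body is cut off after the iterator is refreshed. A minimal sketch of a typical continuation, assuming the dataset yields (image, landmark) tensor pairs, that training runs on GPU, and that lr_gen drives the actual learning rate behind the lr=0 placeholder; none of these lines are in the original excerpt:

            epoch += 1

        # one optimisation step (hypothetical continuation)
        image, landmark = next(batch_iterator)      # assumed batch layout
        out = net(image.cuda())
        loss_value = criterion(out, landmark.cuda())
        optimizer.zero_grad()
        loss_value.backward()
        optimizer.step()
        running_loss += loss_value.item()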
Code Example #4
File: train_ba.py  Project: hyb1234hi/emci
            (0.15, 0.1),
        ),  # idx=list(range(51, 66))),
        shape=(256, 256),
        flip=True,
        # ldmk_ids=list(range(51, 66))
    )
    batch_iterator = iter(
        DataLoader(a, batch_size=args.batch_size, shuffle=True, num_workers=4))

    criterion = WingLoss(10, 2)
    # optimizer = optim.Adam(net.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    optimizer = optim.SGD(net.parameters(),
                          lr=args.lr,
                          weight_decay=args.weight_decay,
                          momentum=0.9)
    saver = Saver('../ckpt', 'model', 10)
    last = saver.last_ckpt()
    start_iter = 0 if last is None else int(last.split('.')[0].split('-')[-1])
    if start_iter > 0:
        saver.load(net, last)
    running_loss = 0.0
    batch_size = args.batch_size
    epoch_size = len(a) // batch_size
    epoch = start_iter // epoch_size + 1
    for iteration in range(start_iter, 300001):
        if iteration % epoch_size == 0:
            # create batch iterator
            batch_iterator = iter(
                DataLoader(a, batch_size, shuffle=True, num_workers=4))
            epoch += 1
Code Example #5
a = AlignDataset(
    '/data/icme/data/picture',
    '/data/icme/data/landmark',
    '/data/icme/data/pred_landmark',
    '/data/icme/valid',
    Align(
        '../cache/mean_landmarks.pkl',
        (256, 256),
        (0.15, 0.1),
    ),  # idx=list(range(51, 66))),
    phase='eval',
    shape=(256, 256),
    # ldmk_ids=list(range(51, 66))
)
batch_iterator = iter(DataLoader(a, batch_size=4, shuffle=True, num_workers=4))
#Saver.dir=PATH
saver = Saver('../ckpt', 'model')
current = None
net.eval()
batch_size = 4
epoch_size = len(a) // batch_size
writer = SummaryWriter('logs/BA/valid')
metrics = Metrics().add_nme(0.9).add_auc(decay=0.9).add_loss(decay=0.9)
while True:
    if current == saver.last_ckpt():
        time.sleep(1)
    else:
        last = saver.last_ckpt()
        current_iter = 0 if last is None else int(
            last.split('.')[0].split('-')[-1])
        while True:
            try:
Code Example #6
a = BBoxDataset('/data/icme/data/picture',
                '/data/icme/data/landmark',
                '/data/icme/bbox',
                '/data/icme/valid', phase='eval')
# a = AlignDataset('/data/icme/data/picture',
#                  '/data/icme/data/landmark',
#                  '/data/icme/data/pred_landmark',
#                  '/data/icme/valid',
#                  Align('../cache/mean_landmarks.pkl', (224, 224), (0.2, 0.1),
#                        ), # idx=list(range(51, 66))),
#                  phase='eval',
#                  # ldmk_ids=list(range(51, 66))
#                  )
batch_iterator = iter(DataLoader(a, batch_size=4, shuffle=True, num_workers=4))
#Saver.dir=PATH
saver = Saver(os.path.join(cfg.root, 'ckpt'), 'model')
saver2 = Saver(os.path.join(cfg.root, 'snapshot'), 'model', max_keep=10)
current = None
net.eval()
batch_size = 4
epoch_size = len(a) // batch_size
writer = SummaryWriter(os.path.join(cfg.root, 'logs/valid'))
metrics = Metrics().add_nme(0.9).add_auc(decay=0.9).add_loss(decay=0.9)
while True:
    if current == saver.last_ckpt():
        time.sleep(1)
    else:
        last = saver.last_ckpt()
        current_iter = 0 if last is None else int(last.split('.')[0].split('-')[-1])
        while True:
            try:
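Both this excerpt and Code Example #5 break off inside the inner while/try block. A minimal sketch of how such a checkpoint-polling validation loop typically continues, assuming the new checkpoint has been loaded into net with saver.load(net, last) (as in Code Example #2) and that each batch yields an (image, landmarks) pair; the Metrics/SummaryWriter bookkeeping is project-specific and only indicated by a comment:

                image, gt = next(batch_iterator)    # assumed batch layout
            except StopIteration:
                break                               # finished one pass over the validation set
            with torch.no_grad():
                pr = net(image)                     # forward pass with the model in eval mode
            # update the NME/AUC/loss metrics and the SummaryWriter here (project-specific)
        current = last                              # remember which checkpoint was just evaluated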