Example #1
import multiprocessing as mp  # used by the commented-out DataLoader variant below
import os

import pandas as pd
import torch
import torchvision.transforms as T
from sklearn import model_selection as ms

from classifier import Classifier
from learner import Learner
# Project-local modules; these import paths are assumptions - adjust them to
# wherever CassavaLeafDiseaseDataset and LRFinder live in this repo.
from dataset import CassavaLeafDiseaseDataset
from lr_finder import LRFinder

if __name__ == '__main__':
    device = torch.device("cpu")

    net = Classifier('tf_efficientnet_b4_ns', 5, pretrained=True)

    transform = T.Compose([
        T.ToTensor(),
        T.Resize((380, 380)),
        T.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
    ])

    data_root = "/home/namnd/personal-workspace/cassava-leaf-disease-classification"
    df = pd.read_csv(os.path.join(data_root, 'train.csv'))
    # train_df, val_df = ms.train_test_split(df, test_size=0.2, random_state=42, stratify=df.label.values)
    #
    # train_dataset = CassavaLeafDiseaseDataset(data_root, df=train_df, transform=transform)
    # val_dataset = CassavaLeafDiseaseDataset(data_root, df=val_df, transform=transform)
    #
    # train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=8, shuffle=True, num_workers=mp.cpu_count())
    # val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=4, shuffle=False, num_workers=mp.cpu_count())

    # The full dataset is passed straight to the Learner here (no train/val
    # split); the commented-out lines above show the DataLoader variant.
    dataset = CassavaLeafDiseaseDataset(data_root, df, transform=transform)
    learner = Learner(net, dataset, device)
    lr_finder = LRFinder(learner)
    lr_finder.find()
    lr_finder.plot()
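
The Classifier used above is project-local and its implementation is not part of this example. A minimal sketch of such a wrapper, assuming it builds the tf_efficientnet_b4_ns backbone with timm and swaps in a 5-class head (the module and attribute names below are assumptions), could look like this:

import timm
import torch.nn as nn

class Classifier(nn.Module):
    """Hypothetical wrapper: a timm backbone with a fresh n_classes head."""
    def __init__(self, model_name, n_classes, pretrained=False):
        super().__init__()
        self.backbone = timm.create_model(model_name, pretrained=pretrained)
        # timm EfficientNets expose their classification head as .classifier
        in_features = self.backbone.classifier.in_features
        self.backbone.classifier = nn.Linear(in_features, n_classes)

    def forward(self, x):
        return self.backbone(x)
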
Example #2

import numpy as np
from keras.models import Model  # or tensorflow.keras, depending on the setup
from keras import optimizers
from keras.callbacks import Callback
# Project-local LR finder; this import path is an assumption.
from lr_finder import LRFinder

# Model: define inputs and outputs. The layer tensors (in_1, in_2, in_3,
# out_vals), bin_loss, lrate, find_lr, batch_size, out_dir and the training
# arrays are defined earlier in the original script.
model = Model(inputs=[in_1, in_2, in_3], outputs=out_vals)
opt = optimizers.Adam(
    clipnorm=1.,
    lr=lrate)  # remove clipnorm and add a loss penalty - clipnorm works better
model.compile(loss=bin_loss, optimizer=opt)

if find_lr:
    lr_finder = LRFinder(model)

    X_train = [X1_train, X2_train, X3_train]
    lr_finder.find(X_train,
                   y_train,
                   start_lr=0.00000001,
                   end_lr=1,
                   batch_size=batch_size,
                   epochs=1)
    losses = lr_finder.losses
    lrs = lr_finder.lrs
    l_l = np.asarray([lrs, losses])
    np.savetxt(out_dir + 'lrs_losses.txt', l_l)
    num_epochs = 0
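
    # A minimal sketch, assuming lrs and losses are the arrays collected
    # above: a common heuristic is to pick the learning rate where the loss
    # falls fastest, i.e. the most negative slope of loss vs. log10(lr).
    def suggest_lr(lrs, losses):
        lrs, losses = np.asarray(lrs), np.asarray(losses)
        slopes = np.gradient(losses, np.log10(lrs))  # d(loss)/d(log10 lr)
        return lrs[np.argmin(slopes)]                # steepest-descent point

    # e.g. suggest_lr(lrs, losses) could then be used to set lrate for the
    # real training run.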


# LR schedule
class LRschedule(Callback):
    '''LR schedule according to the one-cycle policy.
    '''
    def __init__(self, interval=1):
        super(LRschedule, self).__init__()