# Example #1
    # NOTE(review): orphan indented line — the guarding `if`/`else` (a CUDA
    # availability check, presumably) sits above this excerpt and is not visible.
    print("CUDA won't be used")

# Command-line arguments: dataset location and directory holding saved models.
data_path = sys.argv[1]
models_rep = sys.argv[2]

# Inference hyper-parameters. HEIGHT/WIDTH presumably describe the input image
# size — TODO confirm; they are not used in the visible lines.
BATCH_SIZE = 100
HEIGHT = 80
WIDTH = 80

print(f"Loading dataset from {data_path}...")
# NOTE(review): data_path is printed but CustomDataSetSimple is built from
# synthetic parameters only — verify the dataset actually uses data_path.
ds = CustomDataSetSimple(nb_digit=5, nb_samples=100)
print(f"...dataset loaded")
# No shuffling: keep a deterministic order for evaluation.
dataloader = DataLoader(ds, batch_size=BATCH_SIZE, shuffle=False)
model = to_best_device(SimpleModel())

# Load trained weights; exit_on_error=True aborts the script if they are missing.
do_load_model(models_rep, model, exit_on_error=True)

model.eval()  # inference mode: disables dropout / batch-norm updates
start = time.time()
results = []

def from_target_labels(target: torch.Tensor) -> str:
    """Convert a 1-D tensor of character indices into a digit string.

    :param target: tensor of shape (n), each element holding the integer
        index of one character in the sequence
    :return: the indices concatenated as their decimal string forms,
        e.g. tensor([1, 2, 3]) -> "123"
    """
    indices = target.cpu().numpy().astype(int).tolist()
    return ''.join(map(str, indices))
# Example #2
print(f"Loading dataset ...")
# Full labelled dataset read from disk; split 80/20 into train/validation.
ds = CustomRawDataSet(root_dir=data_path)
len_train = int(len(ds) * 0.8)
train_set, val_set = torch.utils.data.random_split(ds, [len_train, len(ds) - len_train])
#imshow(train_set[5][0])
#exit()
print(f"...dataset loaded")
# Shuffle only the training loader; validation order stays deterministic.
dataloader = DataLoader(train_set, batch_size=BATCH_SIZE, shuffle=True)
dataloader_val = DataLoader(val_set, batch_size=BATCH_SIZE, shuffle=False)
# `model` is trained in place; `best_model` presumably receives the
# best-scoring weights during training — TODO confirm (not visible here).
model = to_best_device(CRNN())
best_model = to_best_device(CRNN())


# Either resume from saved weights or start from a fresh initialization.
if load_model:
    if not do_load_model(models_rep, model):
        # Loading failed (e.g. no checkpoint yet): fall back to fresh weights.
        model.initialize_weights()
else:
    if not os.path.exists(models_rep):
        os.makedirs(models_rep)
    model.initialize_weights()

model.train()  # training mode: enables dropout / batch-norm updates
# CTC loss with blank index 0; zero_infinity=True zeroes out infinite losses
# from alignments that are impossible for short inputs.
loss = to_best_device(nn.CTCLoss(blank=0, zero_infinity=True, reduction="mean"))
#optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
#scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', cooldown=0, verbose=True, patience=10)
# Adadelta adapts its own per-parameter step size, hence no explicit lr here.
optimizer = torch.optim.Adadelta(model.parameters())
#scheduler = torch.optim.lr_scheduler.OneCycleLR(optimizer,
#                                          max_lr=MAX_LR,
#                                          steps_per_epoch=len(ds),
#                                          epochs=NUM_EPOCHS,