Example no. 1
def lrcn_test(num_classes, time_steps, parameter):
    # Build the LRCN model and print its structure.
    model = lrcn(num_classes, time_steps)
    print(model)
    # Optionally list every parameter tensor with its element count,
    # marking the ones that will receive gradients.
    if parameter:
        for name, param in model.named_parameters():
            if param.requires_grad:
                print('gradient ' + name, end="")
                print(":", param.numel())
            else:
                print(name, end="")
                print(":", param.numel())
    # Total number of parameters in the model.
    total_params = sum(param.numel() for param in model.parameters())
    print("total_params:", total_params)
    print(type(model))
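
A minimal call sketch (the argument values below are hypothetical; they depend on the lrcn constructor and the data at hand):

# Hypothetical values: a 7-class model over 60-frame sequences, printing
# per-parameter element counts as well as the total.
lrcn_test(num_classes=7, time_steps=60, parameter=True)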
Example no. 2
# Reshape the collected samples into (N, timesteps, WIDTH, HEIGHT, 3) frame
# sequences and (N, timesteps, 7) label sequences.
data = np.array([i[1] for i in train_data]).reshape(-1, timesteps, WIDTH, HEIGHT, 3)
label = np.array([i[2] for i in train_data]).reshape(-1, timesteps, 7)

# Simple split: the first 2500 sequences for training, the rest for testing.
X_train = data[0:2500, :]
y_train = label[0:2500]

#y_train = to_categorical(y_train)

X_test = data[2500:, :]
y_test = label[2500:]

#y_test = to_categorical(y_test)

model = lrcn(WIDTH, HEIGHT, 1, LR, output=7, model_name=MODEL_NAME)

model.summary()

#print(X_train.shape[1:])
##model=Sequential();                          

##model.add(TimeDistributed(Convolution2D(32, (3,3), strides =  (3,3), border_mode='same', input_shape=X_train.shape[1:])))
##model.add(TimeDistributed(Activation('relu')))
##model.add(TimeDistributed(Convolution2D(32, (3,3), strides = (3,3), activation = 'relu')))
##model.add(TimeDistributed(Activation('relu')))
##model.add(TimeDistributed(MaxPooling2D(pool_size=(2, 2))))
##model.add(TimeDistributed(Dropout(0.25)))

##model.add(TimeDistributed(Flatten()))
##model.add(TimeDistributed(Dense(512)))
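
The commented-out block above uses the Keras 1 API (Convolution2D, border_mode). A minimal sketch of the same TimeDistributed stack in the Keras 2 / tf.keras API could look like the following; the name alt_model is hypothetical and the sketch is illustrative only, not the lrcn model actually used above:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (TimeDistributed, Conv2D, MaxPooling2D,
                                     Dropout, Flatten, Dense)

# Same stack as the commented-out code, with Keras 2 names:
# Convolution2D -> Conv2D, border_mode -> padding.
alt_model = Sequential()
alt_model.add(TimeDistributed(Conv2D(32, (3, 3), strides=(3, 3), padding='same',
                                     activation='relu'),
                              input_shape=X_train.shape[1:]))
alt_model.add(TimeDistributed(Conv2D(32, (3, 3), strides=(3, 3),
                                     activation='relu')))
alt_model.add(TimeDistributed(MaxPooling2D(pool_size=(2, 2))))
alt_model.add(TimeDistributed(Dropout(0.25)))
alt_model.add(TimeDistributed(Flatten()))
alt_model.add(TimeDistributed(Dense(512)))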
Example no. 3
def main(args, result_dir, seq, flag_it, image_threshold, label_threshold,
         verbose, gpu):
    print('loading best model and test_dataloader')
    file_path = Path(__file__)
    outputs_path = (file_path / '..' / '..' / 'outputs').resolve()
    output_path = outputs_path / result_dir
    best_model_paths = glob.glob(str(output_path / 'best_model*'))
    best_model_path = sorted(best_model_paths,
                             key=lambda x: int(x.split('epoch')[1]))[-1]
    num_classes_path = str(output_path / 'num_classes.joblib')
    label_data_path = str(output_path / 'label_data.joblib')
    with open(label_data_path, mode="rb") as f:
        label_data = joblib.load(f)
    with open(num_classes_path, mode="rb") as f:
        num_classes = joblib.load(f)
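    # Pick the architecture from the result-directory name; the more specific
    # names (e.g. 'densenet121_e', 'resnet3d_e_m') must be checked before
    # their shorter prefixes.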
    if 'mycnn' in result_dir:
        model = mycnn(num_classes)
        model.load_state_dict(torch.load(best_model_path))
    elif 'densenet121_e' in result_dir:
        model = densenet_121(num_classes, expansion=True)
        model.load_state_dict(torch.load(best_model_path))
    elif 'densenet121' in result_dir:
        model = densenet_121(num_classes)
        model.load_state_dict(torch.load(best_model_path))
    elif 'densenet161_e' in result_dir:
        model = densenet_161(num_classes, expansion=True)
        model.load_state_dict(torch.load(best_model_path))
    elif 'densenet161' in result_dir:
        model = densenet_161(num_classes)
        model.load_state_dict(torch.load(best_model_path))
    elif 'resnet3d_e_m' in result_dir:
        model = resnet3d(num_classes, expansion=True, maxpool=True)
        model.load_state_dict(torch.load(best_model_path))
    elif 'resnet3d_m' in result_dir:
        model = resnet3d(num_classes, expansion=False, maxpool=True)
        model.load_state_dict(torch.load(best_model_path))
    elif 'resnet3d_e' in result_dir:
        model = resnet3d(num_classes, expansion=True)
        model.load_state_dict(torch.load(best_model_path))
    elif 'resnet3d' in result_dir:
        model = resnet3d(num_classes)
        model.load_state_dict(torch.load(best_model_path))
    elif 'lrcn' in result_dir:
        image_num_limit_path = str(output_path / 'image_num_limit.joblib')
        with open(image_num_limit_path, mode="rb") as f:
            image_num_limit = joblib.load(f)
        model = lrcn(num_classes, image_num_limit)
        model.load_state_dict(torch.load(best_model_path))
    else:
        exit(1)
    test_dataloader_path = str(output_path / 'test_dataloader.joblib')
    with open(test_dataloader_path, mode="rb") as f:
        test_dataloader = joblib.load(f)

    print("cuda settings")
    if gpu:
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    else:
        device = torch.device("cpu")
    model = model.to(device)

    print('start testing')
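    # Encode the image/label thresholds in the test output directory name and
    # append an index if a directory from a previous run already exists.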
    if flag_it:
        test_output_path = outputs_path / ('test-' + result_dir + '_ith-' +
                                           str(image_threshold) + '_lth-' +
                                           str(label_threshold))
    else:
        test_output_path = outputs_path / ('test-' + result_dir + '_ith-' +
                                           'none' + '_lth-' +
                                           str(label_threshold))
    output_dir_index = 0
    while os.path.isdir(test_output_path):
        output_dir_index += 1
        if flag_it:
            test_output_path = outputs_path / (
                'test-' + result_dir + '_ith-' + str(image_threshold) +
                '_lth-' + str(label_threshold) + '-' + str(output_dir_index))
        else:
            test_output_path = outputs_path / (
                'test-' + result_dir + '_ith-' + 'none' + '_lth-' +
                str(label_threshold) + '-' + str(output_dir_index))
    os.makedirs(test_output_path, exist_ok=True)
    test_result_txt_path = str(test_output_path / 'test_result.txt')
    test_result_dict_path = str(test_output_path / 'test_result.joblib')
    test_model(model=model,
               test_dataloader=test_dataloader,
               num_classes=num_classes,
               label_data=label_data,
               seq=seq,
               flag_it=flag_it,
               image_threshold=image_threshold,
               label_threshold=label_threshold,
               verbose=verbose,
               device=device,
               output_path=output_path,
               test_result_txt_path=test_result_txt_path,
               test_result_dict_path=test_result_dict_path)
Example no. 4
    pyautogui.hotkey('d')
    
def strafe_right():
    pyautogui.hotkey('c')
    
def strafe_left():
    pyautogui.hotkey('z')
    
def no_keys():
    pass


# Build the LRCN architecture; the object is replaced below by the saved
# Keras model loaded from MODEL_NAME.
model = lrcn(WIDTH, HEIGHT, 1, LR, output=7, model_name='our')
MODEL_NAME = 'bebop_model.h5'

#model.load(MODEL_NAME)

model = load_model(MODEL_NAME)

print('We have loaded a previous model!!!!')

def main():

    new_hook = pyxhook.HookManager()
    new_hook.KeyDown = OnKeyPress
    new_hook.HookKeyboard()
    new_hook.start()
    screen = test()
Example no. 5
def main(args, epoch, result_dir, seq):
    print('loading last epoch model and dataloaders')
    file_path = Path(__file__)
    outputs_path = (file_path / '..' / '..' / 'outputs').resolve()
    output_path = outputs_path / result_dir
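    # Assumes a single 'model-epoch*' checkpoint; glob order is not guaranteed,
    # so [0] only picks the last-epoch model if exactly one file matches.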
    last_epoch_model_path = glob.glob(str(output_path / 'model-epoch*'))[0]
    last_epoch = int(last_epoch_model_path.split('epoch')[1])
    best_model_paths = glob.glob(str(output_path / 'best_model*'))
    best_model_path = sorted(best_model_paths,
                             key=lambda x: int(x.split('epoch')[1]))[-1]
    best_epoch = int(best_model_path.split('epoch')[1])
    num_classes_path = str(output_path / 'num_classes.joblib')
    with open(num_classes_path, mode="rb") as f:
        num_classes = joblib.load(f)
    if 'mycnn' in result_dir:
        model = mycnn(num_classes)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'densenet121_e' in result_dir:
        model = densenet_121(num_classes, expansion=True)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'densenet121' in result_dir:
        model = densenet_121(num_classes)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'densenet161_e' in result_dir:
        model = densenet_161(num_classes, expansion=True)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'densenet161' in result_dir:
        model = densenet_161(num_classes)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'resnet3d_e_m' in result_dir:
        model = resnet3d(num_classes, expansion=True, maxpool=True)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'resnet3d_m' in result_dir:
        model = resnet3d(num_classes, expansion=False, maxpool=True)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'resnet3d_e' in result_dir:
        model = resnet3d(num_classes, expansion=True)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'resnet3d' in result_dir:
        model = resnet3d(num_classes)
        model.load_state_dict(torch.load(last_epoch_model_path))
    elif 'lrcn' in result_dir:
        image_num_limit_path = str(output_path / 'image_num_limit.joblib')
        with open(image_num_limit_path, mode="rb") as f:
            image_num_limit = joblib.load(f)
        model = lrcn(num_classes, image_num_limit)
        model.load_state_dict(torch.load(last_epoch_model_path))
    else:
        exit(1)
    dataloaders_path = str(output_path / 'dataloaders.joblib')
    with open(dataloaders_path, mode="rb") as f:
        dataloaders = joblib.load(f)

    print("cuda settings")
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    model = model.to(device)

    print('optimizer and criterion setting')
    optimizer = optim.Adam(model.parameters())
    weight = torch.ones([num_classes]).to(device)
    criterion = nn.BCEWithLogitsLoss(weight=weight)

    print('start retraining')
    train_result_txt_path = str(output_path / 'train_result.txt')
    train_result_dict_path = str(output_path / 'train_result.joblib')
    model, hist = retrain_model(model=model, dataloaders=dataloaders, criterion=criterion, optimizer=optimizer,
                                num_classes=num_classes, num_epochs=epoch, last_epoch=last_epoch, best_epoch=best_epoch,
                                device=device, output_path=output_path,
                                train_result_txt_path=train_result_txt_path,
                                train_result_dict_path=train_result_dict_path)
    print(hist)
Example no. 6
def main(args,
         batch_size,
         epoch,
         dataset_name,
         model_name,
         seq=False,
         image_num_limit=60):
    print('loading dataset')
    file_path = Path(__file__)
    datasets_path = (file_path / '..' / '..' / 'datasets').resolve()
    dataset_path = datasets_path / dataset_name

    outputs_path = (file_path / '..' / '..' / 'outputs').resolve()
    output_dir = model_name + '-' + dataset_name
    output_path = outputs_path / output_dir
    output_dir_index = 0
    while os.path.isdir(output_path):
        output_dir_index += 1
        output_path = outputs_path / (output_dir + '-' + str(output_dir_index))
    os.makedirs(output_path, exist_ok=True)
    if not os.path.isdir(dataset_path):
        exit(1)

    shutil.copy(str(dataset_path / 'label_data.joblib'),
                str(output_path / 'label_data.joblib'))

    dataset_file = str(dataset_path / 'image_label.joblib')
    num_classes_path = str(dataset_path / 'num_classes.joblib')
    with open(num_classes_path, mode="rb") as f:
        num_classes = joblib.load(f)
    print('class num: ', num_classes)
    num_classes_path = str(output_path / 'num_classes.joblib')
    with open(num_classes_path, mode="wb") as f:
        joblib.dump(num_classes, f, compress=3)

    print('creating dataloaders')
    dataloaders_path = str(output_path / 'dataloaders.joblib')
    test_dataloader_path = str(output_path / 'test_dataloader.joblib')

    if seq:
        # Sequence input: per-frame transforms for the clip-based models.
        if 'resnet3d' in model_name:
            # 3D ResNet: 128x128 frames, clip-level normalization,
            # channel-first stacking.
            channel_first = True
            transform = transforms.Compose(
                [transforms.Resize((128, 128)),
                 transforms.ToTensor()])
            transform_3d = transforms.Compose([
                Normalize(mean=[0.485, 0.456, 0.406],
                          std=[0.229, 0.224, 0.225])
            ])
        elif model_name == 'lrcn':
            # LRCN: 256x256 frames, no clip-level normalization,
            # channel-last stacking.
            channel_first = False
            transform = transforms.Compose(
                [transforms.Resize((256, 256)),
                 transforms.ToTensor()])
            transform_3d = None
        dataloaders = dataloader(dataset_file=dataset_file,
                                 dataloaders_path=dataloaders_path,
                                 test_dataloader_path=test_dataloader_path,
                                 transform=transform,
                                 transform_3d=transform_3d,
                                 batch_size=batch_size,
                                 image_num_limit=image_num_limit,
                                 seq=seq,
                                 channel_first=channel_first)
    else:
        transform = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])
        ])
        # limit = 50000
        dataloaders = dataloader(dataset_file=dataset_file,
                                 dataloaders_path=dataloaders_path,
                                 test_dataloader_path=test_dataloader_path,
                                 transform=transform,
                                 batch_size=batch_size,
                                 seq=seq)
    print("creating model")
    model = None
    if model_name == 'mycnn':
        model = mycnn(num_classes)
    elif model_name == 'densenet_121':
        model = densenet_121(num_classes)
    elif model_name == 'densenet_121_expansion':
        model = densenet_121(num_classes, expansion=True)
    elif model_name == 'densenet_169':
        model = densenet_169(num_classes)
    elif model_name == 'densenet_169_expansion':
        model = densenet_169(num_classes, expansion=True)
    elif model_name == 'densenet_201':
        model = densenet_201(num_classes)
    elif model_name == 'densenet_201_expansion':
        model = densenet_201(num_classes, expansion=True)
    elif model_name == 'densenet_161':
        model = densenet_161(num_classes)
    elif model_name == 'densenet_161_expansion':
        model = densenet_161(num_classes, expansion=True)
    elif model_name == 'squeezenet':
        model = squeezenet(num_classes)
    elif model_name == 'resnet3d':
        model = resnet3d(num_classes)
    elif model_name == 'resnet3d_expansion':
        model = resnet3d(num_classes, expansion=True)
    elif model_name == 'resnet3d_maxpool':
        model = resnet3d(num_classes, expansion=False, maxpool=True)
    elif model_name == 'resnet3d_maxpool_expansion':
        model = resnet3d(num_classes, expansion=True, maxpool=True)
    elif model_name == 'lrcn':
        model = lrcn(num_classes, image_num_limit)
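        # Persist the sequence-length limit so the test/retrain scripts can
        # rebuild the LRCN model with the same setting.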
        image_num_limit_path = str(output_path / 'image_num_limit.joblib')
        with open(image_num_limit_path, mode="wb") as f:
            joblib.dump(image_num_limit, f, compress=3)
    else:
        exit(1)

    print("cuda settings")
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    model = model.to(device)

    print('optimizer and criterion setting')
    # optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
    optimizer = optim.Adam(model.parameters())
    # criterion = nn.MultiLabelSoftMarginLoss()
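    # Multi-label setup: BCE-with-logits with uniform (all-ones) class weights.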
    weight = torch.ones([num_classes]).to(device)
    criterion = nn.BCEWithLogitsLoss(weight=weight)

    print('description setting')
    train_description_path = str(output_path / 'train_description.txt')
    with open(train_description_path, mode="w") as f:
        pprint(str(args), stream=f)
        pprint(model_name, stream=f)
        pprint(dataset_name, stream=f)
        pprint('batch_size: ' + str(batch_size), stream=f)
        pprint('epoch: ' + str(epoch), stream=f)

        pprint('transform', stream=f)
        pprint(transform, stream=f)

        pprint('criterion', stream=f)
        pprint(criterion, stream=f)

        pprint('optimizer', stream=f)
        pprint(optimizer, stream=f)

    print('start training')
    train_result_txt_path = str(output_path / 'train_result.txt')
    train_result_dict_path = str(output_path / 'train_result.joblib')
    model, hist = train_model(model=model,
                              dataloaders=dataloaders,
                              criterion=criterion,
                              optimizer=optimizer,
                              num_classes=num_classes,
                              num_epochs=epoch,
                              device=device,
                              output_path=output_path,
                              train_result_txt_path=train_result_txt_path,
                              train_result_dict_path=train_result_dict_path)
    print(hist)