Code example #1
File: getModulation.py  Project: FPAR-NET/FPAR
import os
import torch
from torchvision import transforms

# Load the pretrained weights, then freeze the network for inference-only use.
model.load_state_dict(torch.load(model_state_dict), strict=True)

model.eval()  # equivalent to model.train(False)
for params in model.parameters():
    params.requires_grad = False

model = model.to(DEVICE)

normalize = transforms.Normalize(
    mean=[0.485, 0.456, 0.406],
    std=[0.229, 0.224, 0.225]
)

preprocess = transforms.Compose([
    transforms.Resize(256),  # Scale is deprecated in torchvision; Resize is its replacement
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    normalize])

in_path = "/content/drive/My Drive/Lorenzo/ego-rnn-two-in-one/get_modulation/frames"
out_path = "/content/drive/My Drive/Lorenzo/ego-rnn-two-in-one/get_modulation/out2"

inputFlow = []
inputFrame = []

frames = []
for i in range(1, len(os.listdir(in_path+"/rgb"))+1):
    print(i)

    # FLOW X
    fl_name = in_path + '/X/flow_x_' + str(i).zfill(5) + '.png'  # zfill pads the frame index with leading zeros
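
The excerpt cuts off inside the frame loop. As a minimal sketch of how the preprocess pipeline above would consume one RGB frame (the rgb filename pattern is not shown in the excerpt, so the one below is hypothetical, mirroring the flow files):

from PIL import Image

rgb_name = in_path + '/rgb/rgb_' + str(i).zfill(5) + '.png'  # hypothetical naming
frame = Image.open(rgb_name).convert('RGB')
inputFrame.append(preprocess(frame))  # 3 x 224 x 224 normalized tensor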
Code example #2
        if args.view == 'front_depth' or args.view == 'front_IR':

            spatial_transform = spatial_transforms.Compose([
                crop_method,
                spatial_transforms.RandomRotate(),
                spatial_transforms.SaltImage(),  # project-specific salt-noise augmentation
                spatial_transforms.Dropout(),    # project-specific pixel-dropout augmentation
                spatial_transforms.ToTensor(args.norm_value),
                spatial_transforms.Normalize([0], [1])
            ])
        elif args.view == 'top_depth' or args.view == 'top_IR':
            spatial_transform = spatial_transforms.Compose([
                spatial_transforms.RandomHorizontalFlip(),
                spatial_transforms.Scale(args.sample_size),
                spatial_transforms.CenterCrop(args.sample_size),
                spatial_transforms.RandomRotate(),
                spatial_transforms.SaltImage(),
                spatial_transforms.Dropout(),
                spatial_transforms.ToTensor(args.norm_value),
                spatial_transforms.Normalize([0], [1])
            ])

        print(
            "=================================Loading Anormal-Driving Training Data!================================="
        )
        training_anormal_data = DAD(root_path=args.root_path,
                                    subset='train',
                                    view=args.view,
                                    sample_duration=before_crop_duration,
                                    type='anormal',
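
The DAD constructor call is truncated above. Once built, a dataset like this would typically be wrapped in a standard PyTorch DataLoader; a minimal sketch, assuming DAD subclasses torch.utils.data.Dataset (batch size and worker count below are hypothetical):

from torch.utils.data import DataLoader

train_anormal_loader = DataLoader(training_anormal_data,
                                  batch_size=8,   # hypothetical value
                                  shuffle=True,
                                  num_workers=4)  # hypothetical value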
Code example #3
File: train.py  Project: zlw9161/cpnet
normalize = spatial_transforms.ToNormalizedTensor(mean=get_mean(),
                                                  std=get_std())
# training transform
train_transform = spatial_transforms.Compose([
    spatial_transforms.RandomResizedCrop(size=(WIDTH, WIDTH),
                                         scale=(0.5, 1.0),
                                         ratio=(1. - 0.1, 1. + 0.1)),
    # spatial_transforms.RandomHorizontalFlip(),
    spatial_transforms.ColorJitter(brightness=0.25,
                                   contrast=0.25,
                                   saturation=0.25,
                                   hue=0.1),
    normalize
])
# validation transform
val_transform = spatial_transforms.Compose([
    # spatial_transforms.Resize(256),
    spatial_transforms.CenterCrop(WIDTH),
    normalize
])
target_transform = target_transforms.ClassLabel()

train_loader, val_loader = dataloader.get_loader(
    root=DATA,
    train_transform=train_transform,
    val_transform=val_transform,
    target_transform=target_transform,
    batch_size=BATCH_SIZE,
    num_frames=NUM_FRAMES,
    step_size=FRAME_STEP,
    val_samples=1,
    n_threads=NUM_THREADS)
DECAY_STEP = EPOCH_DECAY_STEP * len(train_loader)
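
DECAY_STEP converts the epoch-based decay interval into iterations, since len(train_loader) is the number of batches per epoch. A minimal sketch of how it could drive a step-wise learning-rate schedule, assuming a standard PyTorch optimizer on a model defined elsewhere (hyperparameters below are hypothetical):

import torch

optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=DECAY_STEP, gamma=0.1)

for clips, targets in train_loader:
    ...                # forward / backward / optimizer.step()
    scheduler.step()   # stepped per iteration, so decay fires every EPOCH_DECAY_STEP epochs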
Code example #4
File: evaluate.py  Project: zlw9161/cpnet
os.system('cp %s %s' % (MODEL_FILE, DUMP_DIR))  # bkp of model def
os.system('cp utils/net_utils.py %s ' % (DUMP_DIR))  # bkp of net_utils file
LOG_FOUT = open(os.path.join(DUMP_DIR, 'log_evaluate.txt'), 'w')
LOG_FOUT.write(str(FLAGS) + '\n')

NUM_CLASSES = FLAGS.num_classes

HOSTNAME = socket.gethostname()

# validation transform
normalize = spatial_transforms.ToNormalizedTensor(mean=get_mean(),
                                                  std=get_std())
if FCN in (0, 1):
    val_transform = spatial_transforms.Compose([
        spatial_transforms.Resize(FULL_SIZE),
        spatial_transforms.CenterCrop(WIDTH), normalize
    ])
elif FCN in (3, 5, 6):
    # FCN modes 3/5/6 skip the center crop and keep the full resized frame
    val_transform = spatial_transforms.Compose(
        [spatial_transforms.Resize(FULL_SIZE), normalize])
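
The FCN branches differ only in whether the frame is center-cropped: modes 0 and 1 evaluate fixed WIDTH x WIDTH crops, while modes 3, 5, and 6 feed the full resized frame to the network, which a fully convolutional model can accept at variable spatial sizes. A minimal sketch of the resulting shape difference, assuming these transforms mirror their torchvision counterparts:

from PIL import Image

img = Image.open('frame.png').convert('RGB')  # hypothetical input frame
x = val_transform(img)
# FCN in (0, 1):    x.shape == (3, WIDTH, WIDTH)
# FCN in (3, 5, 6): shorter side scaled to FULL_SIZE, aspect ratio preserved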
Code example #5
import numpy as np
import torch

# Per-view "normal driving" template vectors; the first three load calls are
# inferred from the variable names below and the visible filename pattern.
normal_vec_front_d = np.load('./normvec/normal_vec_front_d.npy')
normal_vec_front_ir = np.load('./normvec/normal_vec_front_ir.npy')
normal_vec_top_d = np.load('./normvec/normal_vec_top_d.npy')
normal_vec_top_ir = np.load('./normvec/normal_vec_top_ir.npy')

normal_vec_front_d = torch.from_numpy(normal_vec_front_d)
normal_vec_front_ir = torch.from_numpy(normal_vec_front_ir)
normal_vec_top_d = torch.from_numpy(normal_vec_top_d)
normal_vec_top_ir = torch.from_numpy(normal_vec_top_ir)

if use_cuda:
    normal_vec_front_d = normal_vec_front_d.cuda()
    normal_vec_front_ir = normal_vec_front_ir.cuda()
    normal_vec_top_d = normal_vec_top_d.cuda()
    normal_vec_top_ir = normal_vec_top_ir.cuda()
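
The scoring code is not part of this excerpt, but template vectors like these are typically compared to test-clip embeddings by cosine similarity; a minimal sketch under that assumption:

import torch.nn.functional as F

def anomaly_score(embeddings, normal_vec):
    # embeddings: (N, D) test-clip features; normal_vec: (1, D) template.
    # Higher similarity to the "normal driving" template means more normal.
    return F.cosine_similarity(embeddings, normal_vec, dim=1)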

val_spatial_transform = spatial_transforms.Compose([
    spatial_transforms.Scale(sample_size),
    spatial_transforms.CenterCrop(sample_size),
    spatial_transforms.ToTensor(255),
    spatial_transforms.Normalize([0], [1]),
])

print(
    "===========================================Loading Test Data=========================================="
)

test_data_front_d = DAD_Test(
    root_path=root_path,
    subset='validation',
    view='front_depth',
    sample_duration=sample_duration,
    type=None,
    spatial_transform=val_spatial_transform,