Example #1
                                            adjacent_range=adjacent_range,
                                            transform=None,
                                            downsampling=input_downsampling,
                                            network_downsampling=network_downsampling,
                                            inlier_percentage=inlier_percentage,
                                            use_store_data=True,
                                            store_data_root=training_data_root,
                                            phase="validation", is_hsv=is_hsv,
                                            num_pre_workers=num_workers, visible_interval=30, rgb_mode="rgb")

    train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True,
                                               num_workers=num_workers)
    validation_loader = torch.utils.data.DataLoader(dataset=validation_dataset, batch_size=batch_size, shuffle=False,
                                                    num_workers=num_workers)

    depth_estimation_model_student = models.FCDenseNet57(n_classes=1)
    # Initialize the depth estimation network with Kaiming He initialization
    depth_estimation_model_student = utils.init_net(depth_estimation_model_student, type="kaiming", mode="fan_in",
                                                    activation_mode="relu",
                                                    distribution="normal")
    # Run on multiple GPUs with DataParallel
    depth_estimation_model_student = torch.nn.DataParallel(depth_estimation_model_student)
    # Optionally print a summary of the network architecture
    if display_architecture:
        torchsummary.summary(depth_estimation_model_student, input_size=(3, height, width))
    # Optimizer and cyclic learning-rate scheduler
    optimizer = torch.optim.SGD(depth_estimation_model_student.parameters(), lr=max_lr, momentum=0.9)
    lr_scheduler = scheduler.CyclicLR(optimizer, base_lr=min_lr, max_lr=max_lr, step_size=num_iter)

    # Custom layers
    depth_scaling_layer = models.DepthScalingLayer(epsilon=depth_scaling_epsilon)
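
Example #1 constructs an SGD optimizer together with a cyclic learning-rate schedule, but the training loop itself is not part of the excerpt. The following minimal, self-contained sketch shows the usual per-iteration pattern; the toy model, random batches, and loss are assumptions for illustration only, and PyTorch's built-in torch.optim.lr_scheduler.CyclicLR stands in for the custom scheduler.CyclicLR used above.

import torch
import torch.nn.functional as F

# Toy stand-ins (assumptions): a one-layer model and random batches.
model = torch.nn.Linear(3, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=1.0e-4, momentum=0.9)
# Built-in equivalent of the custom cyclic scheduler used in the example.
lr_scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=1.0e-4, max_lr=1.0e-3,
                                                 step_size_up=1000)

for step in range(10):
    colors = torch.randn(8, 3)          # stand-in for a training batch
    target_depths = torch.randn(8, 1)   # stand-in for the supervision signal

    optimizer.zero_grad()
    loss = F.mse_loss(model(colors), target_depths)
    loss.backward()
    optimizer.step()
    lr_scheduler.step()                 # advance the cyclic schedule once per iteration

Stepping the scheduler once per batch rather than once per epoch is what makes the learning rate sweep between base_lr and max_lr over the configured number of iterations.
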
Example #2
            downsampling=input_downsampling,
            network_downsampling=network_downsampling,
            inlier_percentage=inlier_percentage,
            use_store_data=load_intermediate_data,
            store_data_root=evaluation_data_root,
            phase="validation",
            is_hsv=is_hsv,
            num_pre_workers=num_pre_workers,
            visible_interval=visibility_overlap,
            rgb_mode="rgb")

        test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                                  batch_size=batch_size,
                                                  shuffle=False,
                                                  num_workers=0)
        depth_estimation_model = models.FCDenseNet57(n_classes=1)
        # Initialize the depth estimation network with Kaiming He initialization
        utils.init_net(depth_estimation_model,
                       type="kaiming",
                       mode="fan_in",
                       activation_mode="relu",
                       distribution="normal")
        # Run on multiple GPUs with DataParallel
        depth_estimation_model = torch.nn.DataParallel(depth_estimation_model)
        # Optionally print a summary of the network architecture
        if display_architecture:
            torchsummary.summary(depth_estimation_model,
                                 input_size=(3, height, width))

        # Load trained model
        if trained_model_path.exists():
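
Example #2 stops just as the trained weights are about to be restored. A minimal, self-contained sketch of one common way to load a checkpoint into a DataParallel-wrapped network follows; the checkpoint path, the toy model, and the "model" dictionary key are illustrative assumptions rather than details taken from this repository.

import pathlib
import torch

model = torch.nn.DataParallel(torch.nn.Linear(3, 1))      # stand-in for depth_estimation_model
trained_model_path = pathlib.Path("checkpoints/model.pt")  # hypothetical checkpoint location

if trained_model_path.exists():
    checkpoint = torch.load(str(trained_model_path), map_location="cpu")
    # Some scripts save {"model": state_dict, ...}; others save the state_dict directly.
    state_dict = checkpoint.get("model", checkpoint) if isinstance(checkpoint, dict) else checkpoint
    model.load_state_dict(state_dict)
    model.eval()                                           # switch to evaluation mode for testing
else:
    print("No trained model found at", trained_model_path)

Loading onto the CPU first via map_location and moving the model to the target device afterwards avoids device-mismatch errors when the checkpoint was saved on a different GPU configuration.
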