# Example 1 (scraped snippet; score: 0)
    # Create a phong renderer by composing a rasterizer and a shader. The textured phong shader will
    # interpolate the texture uv coordinates for each vertex, sample from a texture image and
    # apply the Phong lighting model
    # NOTE(review): this is the tail of a function whose header is not visible
    # here; `cameras`, `raster_settings`, `device`, `lights` and `mesh` are
    # defined upstream — confirm against the full file.
    renderer = MeshRenderer(rasterizer=MeshRasterizer(
        cameras=cameras, raster_settings=raster_settings),
                            shader=SoftPhongShader(device=device,
                                                   cameras=cameras,
                                                   lights=lights))

    # Render and display the first image of the batch.
    # NOTE(review): presumably the renderer returns an (N, H, W, C) tensor on
    # `device`, hence the `[0].cpu().numpy()` before imshow — confirm with the
    # PyTorch3D MeshRenderer docs.
    img = renderer(mesh)
    plt.figure(figsize=(10, 10))
    plt.imshow(img[0].cpu().numpy())
    plt.show()


if __name__ == '__main__':
    # Import hoisted to the top of the block (it was previously buried
    # between the path assignments). DataLoaderMesh is project-local.
    from Pipeline.data_loader_mesh import DataLoaderMesh

    # Hard-coded demo inputs: one RTM 3D result file and the matching
    # pre-computed (subsampled, 272-sensor) sensor-vertex mapping.
    file = Path(
        "/home/lukas/rtm/rtm_files_3d/2020-08-24_11-20-27_111_RESULT.erfh5")
    sensor_verts_path = Path(
        "/home/lukas/rtm/sensor_verts_3d_272_subsampled.dump")

    dl = DataLoaderMesh(sensor_verts_path=sensor_verts_path)
    data = dl.get_sensor_flowfront_mesh(file)
    # Pick a single frame: index 150, second element of the (input, label)
    # pair. NOTE(review): confirm the pair ordering against DataLoaderMesh.
    sample = data[150][1]

    mc = MeshCreator(file)
    verts, faces, _ = mc.get_mesh_components()

    # Visualize the sampled flow front on the mesh (vedo backend).
    show_vedo_mesh_old(verts, faces, sample)
    # Alternative visualization: save_p3d_mesh(verts, faces, sample)
# Example 2 (scraped snippet; score: 0)
        # Machine-specific training configuration. NOTE(review): this is the
        # tail of an if/elif hostname chain whose start is not visible here.
        num_test_samples = 5000
        data_root = Path(base_path / "rtm_files")
        load_datasets_path = None
        cache_path = base_path / "cache"
    else:
        # Unknown host: refuse to run with an unconfigured setup.
        print("No valid configuration for this machine. Aborting...")
        exit()

    # Debug override on the dev machine: shrink dataset and batch size so a
    # full pipeline run finishes quickly.
    if "pop-os" in socket.gethostname() and debug:
        filepaths = [base_path / "debug"]
        batch_size = 4
        num_validation_samples = 4
        num_test_samples = 4
        data_root = Path(base_path / "debug")

    # Build data loader, batched mesh and model.
    # NOTE(review): MeshCreator is called with batch_size here, while sibling
    # snippets in this file pass a sample file path — confirm which is intended.
    dlm = DataLoaderMesh(sensor_indices=((1, 2), (1, 2)))
    mc = MeshCreator(batch_size)
    mesh = mc.batched_mesh_torch(batch_size)
    model = SensorMeshToFlowFrontModel(mesh, batch_size=batch_size)

    # Trainer wiring (call is truncated in this snippet — it continues past
    # the visible lines).
    m = ModelTrainer(
        lambda: model,
        data_source_paths=filepaths,
        save_path=save_path,
        cache_path=cache_path,
        batch_size=batch_size,
        train_print_frequency=train_print_frequency,
        epochs=epochs,
        dummy_epoch=True,
        num_workers=num_workers,
        num_validation_samples=num_validation_samples,
# Example 3 (scraped snippet; score: 0)
        # Machine-specific training configuration for the 3D dataset.
        # NOTE(review): tail of an if/elif hostname chain; start not visible.
        data_root = Path(base_path / "rtm_files_3d")
        load_datasets_path = None
        # cache_path = base_path / "cache"
        cache_path = None
    else:
        # Unknown host: refuse to run with an unconfigured setup.
        print("No valid configuration for this machine. Aborting...")
        exit()

    # Debug override on the dev machine: tiny dataset and batch size.
    if "pop-os" in socket.gethostname() and debug:
        filepaths = [base_path / "debug"]
        batch_size = 4
        num_validation_samples = 4
        num_test_samples = 4
        data_root = Path(base_path / "debug")

    # Build data loader, DGL batched mesh and model.
    dlm = DataLoaderMesh(sensor_verts_path=sensor_verts_path)
    mc = MeshCreator(sample_file)
    mesh = mc.batched_mesh_dgl(batch_size)
    model = SensorMeshToFlowFrontModel(mesh, batch_size=batch_size)

    # Trainer wiring (call is truncated in this snippet).
    m = ModelTrainer(
        lambda: model,
        data_source_paths=filepaths,
        save_path=save_path,
        cache_path=cache_path,
        batch_size=batch_size,
        train_print_frequency=train_print_frequency,
        epochs=epochs,
        dummy_epoch=True,
        num_workers=num_workers,
        num_validation_samples=num_validation_samples,
        # NOTE(review): scraping artifact — a new snippet begins here, fused
        # onto the previous (truncated) ModelTrainer call. Hyperparameters for
        # the dryspot fine-tuning run:
        train_print_frequency = 50
        epochs = 20
        num_workers = 8
        num_validation_samples = 5000
        num_test_samples = 5000
        data_root = Path(base_path / "rtm_files")
        load_datasets_path = None
        cache_path = None
        # Pre-trained flow-front weights and a fixed dataset split to resume from.
        weights_path = Path("/home/lukas/rtm/results/sensor2flow_2020-07-01_16-50-49/checkpoint.pth")
        dataset_split_path = Path("/home/lukas/rtm/results/sensor2dryspot/76%_2020-07-09_16-28-35")
        checkpoint_path = dataset_split_path / "checkpoint.pth"
    else:
        # Unknown host: refuse to run with an unconfigured setup.
        print("No valid configuration for this machine. Aborting...")
        exit()

    # Batched torch mesh + ResNet-based dryspot model initialized from the
    # flow-front checkpoint above.
    dlm = DataLoaderMesh(sensor_verts_path=sensor_verts_path)
    mesh = dlm.get_batched_mesh_torch(batch_size, sample_file)
    model = SensorMeshToDryspotResnet(mesh, batch_size=batch_size, weights_path=weights_path)

    # Trainer wiring (call is truncated in this snippet).
    m = ModelTrainer(
        lambda: model,
        data_source_paths=filepaths,
        dataset_split_path=dataset_split_path,
        save_path=save_path,
        cache_path=cache_path,
        batch_size=batch_size,
        train_print_frequency=train_print_frequency,
        epochs=epochs,
        dummy_epoch=True,
        num_workers=num_workers,
        num_validation_samples=num_validation_samples,
        # NOTE(review): scraping artifact — another snippet fused onto the
        # previous (truncated) ModelTrainer call. Tail of a hostname config chain.
        data_root = Path(base_path / "rtm_files")
        load_datasets_path = None
        cache_path = base_path / "cache"
    else:
        # Unknown host: refuse to run with an unconfigured setup.
        print("No valid configuration for this machine. Aborting...")
        exit()

    # Debug override on the dev machine: tiny dataset and batch size.
    if "pop-os" in socket.gethostname() and debug:
        filepaths = [base_path / "debug"]
        batch_size = 4
        num_validation_samples = 4
        num_test_samples = 4
        data_root = Path(base_path / "debug")

    # Data loader mapping sensors onto an intermediate (38, 30, 2) grid,
    # DGL batched mesh, and the DGL flow-front model.
    dlm = DataLoaderMesh(sensor_indices=((1, 4), (1, 4)),
                         third_dim=False,
                         intermediate_target_size=(38, 30, 2))
    mc = MeshCreator(sample_file)
    mesh = mc.batched_mesh_dgl(batch_size)
    model = SensorMeshToFlowFrontModelDGL(mesh, batch_size=batch_size)

    # Trainer wiring (call is truncated in this snippet). Note dummy_epoch is
    # False here, unlike the sibling snippets.
    m = ModelTrainer(
        lambda: model,
        data_source_paths=filepaths,
        save_path=save_path,
        cache_path=cache_path,
        batch_size=batch_size,
        train_print_frequency=train_print_frequency,
        epochs=epochs,
        dummy_epoch=False,
        num_workers=num_workers,
        # NOTE(review): tail of a forward() method whose header is not visible;
        # earlier layers producing `x` are defined upstream.
        # Flatten per sample, then add a singleton channel dim: (B, 1, features).
        x = torch.unsqueeze(x.view(self.batch_size, -1), dim=1)
        # x = self.avg_pool(x)
        x = self.adaptive_maxpool(x)
        # Reshape the pooled features into a (B, bottleneck_dim, 168, 168) map.
        x = x.view(size=(self.batch_size, self.bottleneck_dim, 168, 168))
        # Repeat along the channel axis (x3) — presumably to feed a classifier
        # that expects 3-channel (RGB-like) input; confirm against the model.
        x = x.repeat(1, 3, 1, 1)
        x = self.upsample(x)

        # Sigmoid keeps the classifier output in [0, 1].
        x = torch.sigmoid(self.classifier(x))

        return x


if __name__ == '__main__':
    from pathlib import Path

    from Pipeline.data_loader_mesh import DataLoaderMesh

    # Demo inputs: pre-computed sensor-vertex mapping and one RTM result file.
    sensor_dump = Path("/home/lukas/rtm/sensor_verts.dump")
    result_file = Path(
        "/home/lukas/rtm/rtm_files/2019-07-24_16-32-40_308_RESULT.erfh5")

    dl = DataLoaderMesh(sensor_verts_path=sensor_dump)

    bs = 4

    # Batched mesh + GPU model (previously tried alternatives kept for reference).
    mesh = dl.get_batched_mesh_torch(bs, result_file)
    # model = SensorMeshToFlowFrontModel(mesh)
    # model = SensorMeshToDryspotModel(mesh, bs).cuda()
    model = SensorMeshToDryspotResnet(mesh, bs).cuda()

    # Take the first bs (input, label) pairs and split them into two
    # parallel lists.
    instances = dl.get_sensor_flowfront_mesh(result_file)
    batch = instances[0:bs]
    data, labels = [], []
    for sample, target in batch:
        data.append(sample)
        labels.append(target)
# Example 7 (scraped snippet; score: 0)
        # Machine-specific training configuration for the 3D dataset.
        # NOTE(review): tail of an if/elif hostname chain; start not visible.
        data_root = Path(base_path / "rtm_files_3d")
        load_datasets_path = None
        cache_path = base_path / "cache"
    else:
        # Unknown host: refuse to run with an unconfigured setup.
        print("No valid configuration for this machine. Aborting...")
        exit()

    # Debug override on the dev machine: tiny dataset and batch size.
    if "pop-os" in socket.gethostname() and debug:
        filepaths = [base_path / "debug"]
        batch_size = 4
        num_validation_samples = 4
        num_test_samples = 4
        data_root = Path(base_path / "debug")

    # Sensorgrid: 17*16 = 272
    dlm = DataLoaderMesh(sensor_indices=((1, 4), (1, 4)), sensor_verts_path=sensor_verts_path)

    # DGL mesh subsampled to 80% of the faces for the sparse model.
    mc = MeshCreator(sample_file)
    # mesh = mc.batched_mesh_dgl(batch_size)
    mesh = mc.subsampled_batched_mesh_dgl(batch_size, faces_percentage=0.8)
    subsampled_nodes = None

    model = SparseSensorMeshToFlowFrontModelDGL(mesh, batch_size=batch_size)
    # model = SensorMeshToFlowFrontModelDGL(mesh, batch_size=batch_size)

    # Trainer wiring (call is truncated in this snippet).
    m = ModelTrainer(
        lambda: model,
        data_source_paths=filepaths,
        save_path=save_path,
        cache_path=cache_path,
        batch_size=batch_size,