def get_base_dataset(num_examples=100, **kwargs):
    # return tf.data.Dataset.from_tensor_slices(
    #     get_base_data(num_examples=num_examples, **kwargs))
    import gin
    from deep_cloud.augment import augment_cloud
    from deep_cloud.problems.modelnet import ModelnetProblem
    from deep_cloud.problems.builders import pointnet_builder
    # num_examples / kwargs are retained from the commented-out implementation
    # above and are unused in the current version.
    problem = ModelnetProblem(builder=pointnet_builder(2), positions_only=False)
    with gin.config_scope('train'):
        dataset = problem.get_base_dataset('validation').map(augment_cloud)
    return dataset

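# A minimal usage sketch (assumption: the mapped dataset still yields
# (features, label) pairs with positions under features['positions'],
# as in the profiling script further below):
# import tensorflow_datasets as tfds
# for features, label in tfds.as_numpy(get_base_dataset().take(2)):
#     print(features['positions'].shape, label)
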
def plot_ks():
    """Plot mean and max neighborhood sizes against ball radius (log-log)."""
    builder = pointnet_builder(pointnet_version=2)
    means = []
    maxs = []
    radii = np.linspace(2, 10, 51)
    total = 100
    for features, _ in tqdm(tfds.as_numpy(
            builder.as_dataset(split='train', as_supervised=True).take(total)),
                            total=total):
        ks = get_ks(features['positions'], radii)
        means.append(np.mean(ks, axis=1))
        maxs.append(np.max(ks, axis=-1))
    ax = plt.gca()
    ax.plot(radii, np.mean(means, axis=0))
    ax.plot(radii, np.mean(maxs, axis=0))
    ax.legend(['mean', 'max'])
    ax.set_yscale('log')
    ax.set_xscale('log')
    plt.show()

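# `get_ks` is defined elsewhere in the original script. A sketch of one
# plausible implementation, assuming it returns the per-point neighbor count
# within each radius (shape (len(radii), num_points)), consistent with how
# the result is reduced above; the real helper may differ:
# from scipy.spatial import cKDTree
#
# def get_ks(positions, radii):
#     tree = cKDTree(positions)
#     return np.array([
#         [len(neighbors) for neighbors in tree.query_ball_point(positions, r)]
#         for r in radii
#     ])
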
# print('done')
# exit()
# coords = np.stack([base_coords] * 2, axis=0)
# coords[1, :, 2] = 1
# normals = np.stack([base_normals] * 2, axis=0)
# dataset = tf.data.Dataset.from_tensor_slices(
#     (dict(positions=coords, normals=normals), [0, 0]))

import numpy as np
import tensorflow_datasets as tfds
from time import time
from tqdm import tqdm

from deep_cloud.problems.builders import pointnet_builder
from deep_cloud.problems.modelnet import ModelnetProblem

problem = ModelnetProblem(builder=pointnet_builder(2), positions_only=False)
dataset = problem.get_base_dataset(split='validation')
num_examples = 100
batch_size = 2


def profile(edge_fn, depth=4, num_examples=10):
    """Time `edge_fn` on the first `num_examples` clouds of the dataset."""
    times = []
    for example, _ in tqdm(tfds.as_numpy(dataset.take(num_examples)),
                           total=num_examples):
        start = time()
        edge_fn(example['positions'], example['normals'], depth=depth)
        times.append(time() - start)
    return np.array(times)


# t_base = profile(compute_edges_eager, num_examples=num_examples)

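# Usage sketch for the commented-out call above (assumes an edge function
# such as `compute_edges_eager` is importable; summary statistics are one
# way to report the timings):
# times = profile(compute_edges_eager, num_examples=num_examples)
# print('mean {:.4f}s, std {:.4f}s, worst {:.4f}s'.format(
#     times.mean(), times.std(), times.max()))
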
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow_datasets as tfds
import tqdm
import trimesh
from scipy.spatial import cKDTree  # pylint: disable=no-name-in-module

from deep_cloud.problems.builders import pointnet_builder
from deep_cloud.problems.modelnet import ModelnetProblem

builder = pointnet_builder(2, uniform_density=False)
split = 'train'
problem = ModelnetProblem(builder, positions_only=False)
# dataset = builder.as_dataset(split=split, as_supervised=True)
dataset = problem.get_base_dataset(split=split)

# all_labels = []
# for data, label in tfds.as_numpy(dataset):
#     all_labels.append(label)
# labels = np.array(all_labels)
# minval = np.min(labels)
# maxval = np.max(labels)
# print(minval, maxval)

# import matplotlib.pyplot as plt
# plt.hist(labels, normed=True, bins=40)
# plt.show()

k = 10

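# The snippet ends at `k = 10`; the rest of the original script is not shown.
# A hypothetical continuation using the already-imported cKDTree to query the
# k nearest neighbors of every point in one cloud (the real script may do
# something else, e.g. visualize with trimesh):
# for features, _ in tfds.as_numpy(dataset.take(1)):
#     positions = features['positions']
#     tree = cKDTree(positions)
#     dists, indices = tree.query(positions, k + 1)  # first hit is the point itself
#     print('mean k-NN distance:', np.mean(dists[:, 1:]))
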
def profile_multi(radius=4, leafsize=4):
    builder = pointnet_builder(pointnet_version=2)
    for features, _ in tfds.as_numpy(
            builder.as_dataset(split='train', as_supervised=True)):
        profile(features['positions'], radius=radius, leafsize=leafsize)

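# The single-cloud `profile(positions, radius, leafsize)` helper called above
# is defined elsewhere. A sketch of one plausible implementation, assuming it
# times a fixed-radius ball query on a cKDTree built with the given leafsize
# (the real helper may measure something different):
# from time import time
# from scipy.spatial import cKDTree
#
# def profile(positions, radius=4, leafsize=4):
#     start = time()
#     tree = cKDTree(positions, leafsize=leafsize)
#     tree.query_ball_point(positions, radius)
#     print('radius={}, leafsize={}: {:.4f}s'.format(
#         radius, leafsize, time() - start))
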
def multi_full_run(num_runs=10):
    builder = pointnet_builder(pointnet_version=2)
    for features, _ in tfds.as_numpy(
            builder.as_dataset(split='train',
                               as_supervised=True).take(num_runs)):
        full_run(features['positions'])

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import matplotlib.pyplot as plt
import numpy as np
import tensorflow_datasets as tfds
from scipy.spatial import cKDTree as KDTree  # pylint: disable=no-name-in-module

from deep_cloud.problems.builders import pointnet_builder

builder = pointnet_builder(pointnet_version=2)

# Densest-packing ratios, used to bound how many points can fit in a ball.
PACKING_RATIOS = {
    # https://en.wikipedia.org/wiki/Circle_packing
    2: np.pi * np.sqrt(3) / 6,
    # https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres
    3: np.pi / (3 * np.sqrt(2)),
}


def approx_max_neighbors(radius, num_dims):
    """Packing-based estimate of the maximum number of points within `radius`
    of a query point."""
    return PACKING_RATIOS[num_dims] * (1 + radius)**num_dims


def closest_neighbors(tree):
    """Minimum distance between distinct points.

    Queries with k=2 because the closest hit is always the point itself, so
    the second column holds the true nearest-neighbor distances.
    """
    if isinstance(tree, np.ndarray):
        tree = KDTree(tree)
    distances, indices = tree.query(tree.data, 2)
    del indices
    return np.min(distances[:, 1])

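# A small self-contained check of the bound (assumption: points are first
# rescaled so the closest pair sits at distance 2, i.e. unit packing spheres,
# which is the normalization under which (1 + radius)**num_dims makes sense):
if __name__ == '__main__':
    positions = np.random.uniform(size=(1024, 3))
    positions /= closest_neighbors(positions) / 2  # closest pair -> distance 2
    tree = KDTree(positions)
    for radius in (2.0, 4.0, 8.0):
        counts = [len(n) for n in tree.query_ball_point(positions, radius)]
        print('radius {}: max observed {}, packing bound {:.1f}'.format(
            radius, max(counts), approx_max_neighbors(radius, 3)))
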
import functools
from time import time

import tensorflow as tf
from tqdm import tqdm

from deep_cloud.augment import augment_cloud
from deep_cloud.models.pointnet3 import multi_scale_group
from deep_cloud.models.pointnet3 import pre_batch_map, post_batch_map
from deep_cloud.ops.np_utils import tree_utils
from deep_cloud.problems.builders import pointnet_builder
from deep_cloud.problems.modelnet import ModelnetProblem

tf.compat.v1.enable_eager_execution()

N = 500
batch_size = 16
problem = ModelnetProblem(builder=pointnet_builder(2), positions_only=False)
map_fn = functools.partial(augment_cloud,
                           angle_stddev=0.06,
                           angle_clip=0.18,
                           uniform_scale_range=(0.8, 1.25),
                           rotate_scheme='random',
                           jitter_stddev=1e-2,
                           jitter_clip=5e-2)
# map2 = pre_batch_map
map2 = functools.partial(pre_batch_map,
                         radii_lists=((0.1,), (0.2,)),
                         max_neighbors_lists=((16,), (32,)))

with tf.device('/cpu:0'):
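    # The original script is truncated at the `with` block above. A
    # hypothetical body, added only so the snippet stays syntactically valid:
    # build the augmented, pre-batch-mapped pipeline from the names defined
    # above and time iteration over the first N examples (the real body,
    # which presumably also uses batch_size / post_batch_map, may differ).
    dataset = problem.get_base_dataset(split='train').map(map_fn).map(map2)
    start = time()
    for example in tqdm(dataset.take(N), total=N):
        pass
    print('processed {} examples in {:.3f}s'.format(N, time() - start))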