Пример #1
0
def train(model_id, max_steps):
    """Train the model identified by `model_id` for up to `max_steps` steps."""
    import tensorflow as tf
    from template_ffd.model import get_builder

    # Surface INFO-level logs so training progress is visible on stdout.
    tf.logging.set_verbosity(tf.logging.INFO)

    model_builder = get_builder(model_id)
    model_builder.initialize_variables()
    model_builder.train(max_steps=max_steps)
Пример #2
0
def print_template_scores(model_id, by_weight=False):
    """Print templates ranked by how often/strongly they are selected.

    Accumulates, over all stored predictions for `model_id`, both the
    number of times each template has the highest probability (`counts`)
    and the summed probability mass per template (`totals`).  Ranks by
    `totals` when `by_weight` is True, otherwise by `counts`, then prints
    one `rank index score template_id` line per template followed by the
    ranked index list.
    """
    builder = get_builder(model_id)
    template_ids = builder.template_ids
    n = len(template_ids)
    counts = np.zeros((n,), dtype=np.int32)
    totals = np.zeros((n,), dtype=np.float32)
    dataset = get_predictions_dataset(model_id)

    with dataset:
        for example_id in dataset:
            probs = np.array(dataset[example_id]['probs'])
            counts[np.argmax(probs)] += 1
            totals += probs

    # The original branches were identical except for which score array was
    # ranked; select the array once and keep a single code path.
    scores = totals if by_weight else counts
    zipped = list(zip(template_ids, range(n), scores))
    zipped.sort(key=lambda x: x[2], reverse=True)
    for rank, (k, i, p) in enumerate(zipped):
        print(rank, i, p, k)
    print([z[1] for z in zipped])
Пример #3
0
def get_data(model_id, example_ids=None):
    """Yield inference data for each example of `model_id`.

    Yields tuples `(example_id, b, p, dp, faces, mesh, image)` where
    `b`/`p` are the FFD data of the selected template, `dp` the predicted
    deformation, `faces` the template faces, and `mesh`/`image` the ground
    truth mesh and rendered input image.

    Args:
        model_id: id of the model whose predictions are read.
        example_ids: optional iterable of example ids; defaults to all
            predicted examples in random order.
    """
    edge_length_threshold = 0.02
    builder = get_builder(model_id)
    cat_id = builder.cat_id

    # Fix: use the named constant instead of a duplicated literal 0.02 so
    # both template dataset lookups always share the same threshold.
    with get_ffd_dataset(
            cat_id, edge_length_threshold=edge_length_threshold) as ffd_ds:
        template_ids, bs, ps = zip(*builder.get_ffd_data(ffd_ds))

    with get_template_mesh_dataset(cat_id, edge_length_threshold) as mesh_ds:
        faces = [np.array(mesh_ds[e]['faces']) for e in template_ids]

    predictions_ds = get_predictions_dataset(model_id)
    mesh_ds = get_mesh_dataset(cat_id)
    image_ds = RenderConfig().get_dataset(cat_id, builder.view_index)
    zipped = Dataset.zip(predictions_ds, mesh_ds, image_ds)
    with zipped:
        if example_ids is None:
            example_ids = list(predictions_ds.keys())
            random.shuffle(example_ids)
        for example_id in example_ids:
            print(example_id)
            pred, mesh, image = zipped[example_id]
            # Select the highest-probability template for this example.
            i = np.argmax(pred['probs'])
            dp = np.array(pred['dp'][i])
            b = bs[i]
            p = ps[i]
            yield example_id, b, p, dp, faces[i], mesh, image
Пример #4
0
def print_template_scores(model_id, by_weight=False):
    """Print templates ranked by how often/strongly they are selected.

    Accumulates, over all stored predictions for `model_id`, both the
    number of times each template has the highest probability (`counts`)
    and the summed probability mass per template (`totals`).  Ranks by
    `totals` when `by_weight` is True, otherwise by `counts`, then prints
    one `rank index score template_id` line per template followed by the
    ranked index list.
    """
    builder = get_builder(model_id)
    template_ids = builder.template_ids
    n = len(template_ids)
    counts = np.zeros((n, ), dtype=np.int32)
    totals = np.zeros((n, ), dtype=np.float32)
    dataset = get_predictions_dataset(model_id)

    with dataset:
        for example_id in dataset:
            probs = np.array(dataset[example_id]['probs'])
            counts[np.argmax(probs)] += 1
            totals += probs

    # The original branches were identical except for which score array was
    # ranked; select the array once and keep a single code path.
    scores = totals if by_weight else counts
    zipped = list(zip(template_ids, range(n), scores))
    zipped.sort(key=lambda x: x[2], reverse=True)
    for rank, (k, i, p) in enumerate(zipped):
        print(rank, i, p, k)
    print([z[1] for z in zipped])
Пример #5
0
def get_template_counts(model_id):
    """Count how often each template gets the highest predicted probability.

    Runs the model's eval-spec over the (non-repeating) training inputs and
    tallies, per template, the number of examples for which that template's
    probability is the maximum.

    Returns:
        int32 numpy array of shape (builder.n_templates,) with the tallies.
    """
    import tensorflow as tf
    import numpy as np
    print('Getting template counts for %s' % model_id)
    graph = tf.Graph()
    with graph.as_default():
        builder = get_builder(model_id)
        # repeat=False so each training example is counted exactly once.
        features, labels = builder.get_inputs(mode='train', repeat=False)
        spec = builder.get_estimator_spec(features, labels, mode='eval')
        predictions = spec.predictions
        probs = predictions['probs']
        # Per-example index of the highest-probability template.
        counts = tf.argmax(probs, axis=-1)
        totals = np.zeros((builder.n_templates, ), dtype=np.int32)
        saver = tf.train.Saver()

        with tf.train.MonitoredSession() as sess:
            # NOTE(review): restoring into an already-initialized
            # MonitoredSession — presumably overwrites the fresh variables
            # with the latest checkpoint; confirm against the hooks in use.
            saver.restore(sess, tf.train.latest_checkpoint(builder.model_dir))
            spinner = Spinner()
            # Drain the one-pass input pipeline; the session signals stop
            # when the dataset is exhausted.
            while not sess.should_stop():
                c = sess.run(counts)
                for ci in c:
                    totals[ci] += 1
                spinner.next()
                # break
            spinner.finish()
    return totals
Пример #6
0
def get_data(model_id, example_ids=None):
    """Yield inference data for each example of `model_id`.

    Yields tuples `(example_id, b, p, dp, faces, mesh, image)` where
    `b`/`p` are the FFD data of the selected template, `dp` the predicted
    deformation, `faces` the template faces, and `mesh`/`image` the ground
    truth mesh and rendered input image.

    Args:
        model_id: id of the model whose predictions are read.
        example_ids: optional iterable of example ids; defaults to all
            predicted examples in random order.
    """
    edge_length_threshold = 0.02
    builder = get_builder(model_id)
    cat_id = builder.cat_id

    # Fix: use the named constant instead of a duplicated literal 0.02 so
    # both template dataset lookups always share the same threshold.
    with get_ffd_dataset(
            cat_id, edge_length_threshold=edge_length_threshold) as ffd_ds:
        template_ids, bs, ps = zip(*builder.get_ffd_data(ffd_ds))

    with get_template_mesh_dataset(cat_id, edge_length_threshold) as mesh_ds:
        faces = [np.array(mesh_ds[e]['faces']) for e in template_ids]

    predictions_ds = get_predictions_dataset(model_id)
    mesh_ds = get_mesh_dataset(cat_id)
    image_ds = RenderConfig().get_dataset(cat_id, builder.view_index)
    zipped = Dataset.zip(predictions_ds, mesh_ds, image_ds)
    with zipped:
        if example_ids is None:
            example_ids = list(predictions_ds.keys())
            random.shuffle(example_ids)
        for example_id in example_ids:
            print(example_id)
            pred, mesh, image = zipped[example_id]
            # Select the highest-probability template for this example.
            i = np.argmax(pred['probs'])
            dp = np.array(pred['dp'][i])
            b = bs[i]
            p = ps[i]
            yield example_id, b, p, dp, faces[i], mesh, image
Пример #7
0
def eval_model(model_id):
    """Evaluate the trained model identified by `model_id` and print metrics."""
    import tensorflow as tf
    from template_ffd.model import get_builder

    # Surface INFO-level logs so evaluation progress is visible.
    tf.logging.set_verbosity(tf.logging.INFO)

    model_builder = get_builder(model_id)
    model_builder.initialize_variables()
    print(model_builder.eval())
Пример #8
0
    def get_lazy_dataset(self):
        """Lazily map stored predictions to sampled point clouds."""
        from predictions import get_predictions_dataset

        builder = get_builder(self._model_id)
        to_cloud = builder.get_prediction_to_cloud_fn(self._n_samples)

        def extract_cloud(prediction):
            # Keep only the sampled cloud from the conversion output.
            return to_cloud(**prediction)['cloud']

        predictions = get_predictions_dataset(self._model_id)
        return predictions.map(extract_cloud)
Пример #9
0
    def get_lazy_dataset(self):
        """Return the prediction dataset mapped lazily onto point clouds."""
        from predictions import get_predictions_dataset

        # Convert each stored prediction dict to a cloud of n_samples points.
        cloud_fn = get_builder(
            self._model_id).get_prediction_to_cloud_fn(self._n_samples)
        return get_predictions_dataset(self._model_id).map(
            lambda prediction: cloud_fn(**prediction)['cloud'])
Пример #10
0
def get_predictions_data(model_id):
    """Return a length-aware generator over the model's 'infer' predictions."""
    builder = get_builder(model_id)
    # One prediction is expected per 'infer'-mode example.
    example_ids = get_example_ids(builder.cat_id, 'infer')
    generator = builder.get_estimator().predict(builder.get_predict_inputs)
    return LengthedGenerator(generator, len(example_ids))
Пример #11
0
    def get_lazy_dataset(self):
        """Lazily convert stored predictions into deformed meshes."""
        from predictions import get_predictions_dataset

        builder = get_builder(self._model_id)
        to_mesh = builder.get_prediction_to_mesh_fn(
            self._edge_length_threshold)

        def extract_mesh(prediction):
            mesh = to_mesh(**prediction)
            # Drop everything except the renderable mesh components.
            return dict(
                vertices=mesh['vertices'],
                faces=mesh['faces'],
                attrs=mesh['attrs'])

        return get_predictions_dataset(self._model_id).map(extract_mesh)
Пример #12
0
    def get_lazy_dataset(self):
        """Map the prediction dataset to mesh dicts (vertices/faces/attrs)."""
        from predictions import get_predictions_dataset

        mesh_fn = get_builder(self._model_id).get_prediction_to_mesh_fn(
            self._edge_length_threshold)
        keys = ('vertices', 'faces', 'attrs')

        def as_mesh(prediction):
            full_mesh = mesh_fn(**prediction)
            return {key: full_mesh[key] for key in keys}

        return get_predictions_dataset(self._model_id).map(as_mesh)
Пример #13
0
def get_predictions_data(model_id, mode='infer'):
    """Return a length-aware generator over the model's predictions.

    The reported length is the example count, multiplied by the number of
    views when the builder is configured with multiple view indices.
    """
    builder = get_builder(model_id)
    example_ids = get_example_ids(builder.cat_id, mode)
    total = len(example_ids)
    view_index = builder.view_index
    # With multiple views, each example yields one prediction per view.
    if isinstance(view_index, (list, tuple)):
        total *= len(view_index)

    predictions = builder.get_estimator().predict(builder.get_predict_inputs)
    return LengthedGenerator(predictions, total)
Пример #14
0
def vis_clouds(model_id,
               pre_sampled=True,
               n_samples=1024,
               edge_length_threshold=0.1,
               shuffle=False):
    """Interactively visualize inferred point clouds against ground truth.

    For each eval example of the model's category, shows the rendered input
    image (matplotlib) and the inferred point cloud overlaid on the ground
    truth mesh (mayavi).  Blocks per example until the mayavi window closes.

    Args:
        model_id: id of the model whose inferred clouds are shown.
        pre_sampled: if True, use clouds pre-sampled from predictions;
            otherwise clouds are derived with `edge_length_threshold`.
        n_samples: number of points per inferred cloud.
        edge_length_threshold: only forwarded when `pre_sampled` is False.
        shuffle: if True, visit examples in random order.
    """
    import random
    import numpy as np
    from mayavi import mlab
    import matplotlib.pyplot as plt
    from dids import Dataset
    from shapenet.core.blender_renderings.config import RenderConfig
    from shapenet.core.meshes import get_mesh_dataset
    from util3d.mayavi_vis import vis_point_cloud
    from util3d.mayavi_vis import vis_mesh
    from template_ffd.data.ids import get_example_ids
    from template_ffd.inference.clouds import get_inferred_cloud_dataset
    from template_ffd.model import get_builder
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    kwargs = dict(model_id=model_id, n_samples=n_samples)
    if not pre_sampled:
        # The threshold only applies to the mesh-derived cloud pipeline.
        kwargs['edge_length_threshold'] = edge_length_threshold
    cloud_dataset = get_inferred_cloud_dataset(pre_sampled=pre_sampled,
                                               **kwargs)
    image_dataset = RenderConfig().get_dataset(cat_id, builder.view_index)

    example_ids = get_example_ids(cat_id, 'eval')
    if shuffle:
        example_ids = list(example_ids)
        random.shuffle(example_ids)
    mesh_dataset = get_mesh_dataset(cat_id)
    zipped_dataset = Dataset.zip(image_dataset, cloud_dataset, mesh_dataset)
    # zipped_dataset = Dataset.zip(image_dataset, cloud_dataset)
    with zipped_dataset:
        for example_id in example_ids:
            image, cloud, mesh = zipped_dataset[example_id]
            # image, cloud = zipped_dataset[example_id]
            plt.imshow(image)
            # Inferred cloud in green over translucent blue ground truth.
            vis_point_cloud(np.array(cloud),
                            color=(0, 1, 0),
                            scale_factor=0.01)
            v, f = (np.array(mesh[k]) for k in ('vertices', 'faces'))
            vis_mesh(v,
                     f,
                     color=(0, 0, 1),
                     opacity=0.1,
                     include_wireframe=False)
            plt.show(block=False)
            # mlab.show() blocks until the mayavi window is closed.
            mlab.show()
            plt.close()
Пример #15
0
def get_inference(model_id, example_id, ext='png', edge_length_threshold=0.02):
    """Run inference on a single rendered image and visualize the mesh.

    Loads the image for `example_id`, feeds it through the model's
    estimator and displays the predicted mesh with `vis_mesh`.

    Args:
        model_id: id of the trained model.
        example_id: id of the example whose image is loaded from disk.
        ext: image file extension ('png' or 'jpg').
        edge_length_threshold: threshold used when converting predictions
            to meshes.

    Raises:
        Exception: if the image file does not exist.
    """
    import tensorflow as tf
    from template_ffd.model import get_builder
    import PIL
    import numpy as np
    from shapenet.image import with_background
    builder = get_builder(model_id)
    cat_id = builder.cat_id

    example_ids = [example_id]
    paths = [get_path(cat_id, e, ext) for e in example_ids]
    for path in paths:
        if not os.path.isfile(path):
            raise Exception('No file at path %s' % path)

    def gen():
        # Load each image and composite it onto a white background.
        for example_id, path in zip(example_ids, paths):
            image = np.array(PIL.Image.open(path))
            image = with_background(image, 255)
            yield example_id, image

    render_params = builder.params.get('render_params', {})
    shape = tuple(render_params.get('shape', (192, 256)))
    shape = shape + (3,)

    def input_fn():
        ds = tf.data.Dataset.from_generator(
            gen, (tf.string, tf.uint8), ((), shape))
        example_id, image = ds.make_one_shot_iterator().get_next()
        # Fix: use the shape derived from render_params rather than the
        # hard-coded (192, 256, 3), so non-default render shapes work.
        image.set_shape(shape)
        image = tf.image.per_image_standardization(image)
        # Add a leading batch dimension of 1.
        example_id = tf.expand_dims(example_id, axis=0)
        image = tf.expand_dims(image, axis=0)
        return dict(example_id=example_id, image=image)

    estimator = builder.get_estimator()
    mesh_fn = builder.get_prediction_to_mesh_fn(edge_length_threshold)
    for pred in estimator.predict(input_fn):
        example_id = pred.pop('example_id')
        mesh = mesh_fn(**pred)
        vis_mesh(**mesh)
Пример #16
0
    def get_lazy_dataset(self):
        """Lazily compute IoU between selected-template voxels and ground
        truth voxels for each example."""
        from template_ffd.inference.predictions import \
            get_selected_template_idx_dataset

        builder = get_builder(self._model_id)
        template_ids = builder.template_ids

        gt_ds = get_gt_voxel_dataset(
            builder.cat_id,
            filled=self._filled,
            auto_save=True,
            example_ids=template_ids)
        gt_ds = gt_ds.map(lambda voxels: voxels.data)

        # Materialize every template's voxel grid up front, indexed by
        # position so the selected-index dataset can look them up.
        with gt_ds:
            voxels_by_idx = tuple(gt_ds[tid] for tid in template_ids)

        idx_ds = get_selected_template_idx_dataset(self._model_id)
        selected_voxel_ds = idx_ds.map(lambda idx: voxels_by_idx[idx])

        def pairwise_iou(pair):
            return intersection_over_union(*pair)

        return Dataset.zip(selected_voxel_ds, gt_ds).map(pairwise_iou)
Пример #17
0
def vis_clouds(
        model_id, pre_sampled=True, n_samples=1024, edge_length_threshold=0.1,
        shuffle=False):
    """Interactively visualize inferred point clouds against ground truth.

    For each eval example of the model's category, shows the rendered input
    image (matplotlib) and the inferred point cloud overlaid on the ground
    truth mesh (mayavi).  Blocks per example until the mayavi window closes.

    Args:
        model_id: id of the model whose inferred clouds are shown.
        pre_sampled: if True, use clouds pre-sampled from predictions;
            otherwise clouds are derived with `edge_length_threshold`.
        n_samples: number of points per inferred cloud.
        edge_length_threshold: only forwarded when `pre_sampled` is False.
        shuffle: if True, visit examples in random order.
    """
    import random
    import numpy as np
    from mayavi import mlab
    import matplotlib.pyplot as plt
    from dids import Dataset
    from shapenet.core.blender_renderings.config import RenderConfig
    from shapenet.core.meshes import get_mesh_dataset
    from util3d.mayavi_vis import vis_point_cloud
    from util3d.mayavi_vis import vis_mesh
    from template_ffd.data.ids import get_example_ids
    from template_ffd.inference.clouds import get_inferred_cloud_dataset
    from template_ffd.model import get_builder
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    kwargs = dict(model_id=model_id, n_samples=n_samples)
    if not pre_sampled:
        # The threshold only applies to the mesh-derived cloud pipeline.
        kwargs['edge_length_threshold'] = edge_length_threshold
    cloud_dataset = get_inferred_cloud_dataset(
        pre_sampled=pre_sampled, **kwargs)
    image_dataset = RenderConfig().get_dataset(cat_id, builder.view_index)

    example_ids = get_example_ids(cat_id, 'eval')
    if shuffle:
        example_ids = list(example_ids)
        random.shuffle(example_ids)
    mesh_dataset = get_mesh_dataset(cat_id)
    zipped_dataset = Dataset.zip(image_dataset, cloud_dataset, mesh_dataset)
    # zipped_dataset = Dataset.zip(image_dataset, cloud_dataset)
    with zipped_dataset:
        for example_id in example_ids:
            image, cloud, mesh = zipped_dataset[example_id]
            # image, cloud = zipped_dataset[example_id]
            plt.imshow(image)
            # Inferred cloud in green over translucent blue ground truth.
            vis_point_cloud(
                np.array(cloud), color=(0, 1, 0), scale_factor=0.01)
            v, f = (np.array(mesh[k]) for k in ('vertices', 'faces'))
            vis_mesh(
                v, f, color=(0, 0, 1), opacity=0.1, include_wireframe=False)
            plt.show(block=False)
            # mlab.show() blocks until the mayavi window is closed.
            mlab.show()
            plt.close()
Пример #18
0
def save():
    """Run inference on the images listed in `fns` and write OBJ/NPY outputs.

    For each input image this writes the predicted deformed mesh
    (`<base>.obj`), the undeformed template mesh (`<base>_template.obj`)
    and the predicted point cloud (`<base>_cloud.npy`) next to the image.

    NOTE(review): relies on free variables from the enclosing scope:
    `model_id`, `folder`, `fns`, `imread`, `np`, `os` — confirm they are
    defined where this is called.
    """
    import tensorflow as tf
    from util3d.mesh.obj_io import write_obj
    from shapenet.image import with_background
    from template_ffd.model import get_builder
    builder = get_builder(model_id)

    mesh_fn = builder.get_prediction_to_mesh_fn(0.02)
    cloud_fn = builder.get_prediction_to_cloud_fn()

    graph = tf.Graph()
    with graph.as_default():
        # Single-image placeholder; standardized and given batch dim of 1.
        image = tf.placeholder(shape=(192, 256, 3), dtype=tf.uint8)
        std_image = tf.image.per_image_standardization(image)
        std_image = tf.expand_dims(std_image, axis=0)
        # Dummy example id — the estimator spec's feature dict requires one.
        example_id = tf.constant(['blah'], dtype=tf.string)
        spec = builder.get_estimator_spec(
            dict(example_id=example_id, image=std_image),
            None, tf.estimator.ModeKeys.PREDICT)
        predictions = spec.predictions
        probs_tf = predictions['probs']
        dp_tf = predictions['dp']
        saver = tf.train.Saver()

    with tf.Session(graph=graph) as sess:
        saver.restore(sess, tf.train.latest_checkpoint(builder.model_dir))
        for fn in fns:
            path = os.path.join(folder, fn)
            image_data = np.array(imread(path))
            # Composite RGBA inputs onto a white background.
            if image_data.shape[-1] == 4:
                image_data = with_background(image_data, (255, 255, 255))
            probs, dp = sess.run(
                [probs_tf, dp_tf], feed_dict={image: image_data})
            # Strip the batch dimension.
            probs = probs[0]
            dp = dp[0]
            mesh = mesh_fn(probs, dp)
            cloud = cloud_fn(probs, dp)['cloud']
            v, ov, f = (
                mesh[k] for k in('vertices', 'original_vertices', 'faces'))
            # `path` now ends in '.obj'; the path[:-4] slices below strip
            # that suffix again, recovering the original basename.
            path = '%s.obj' % path[:-4]
            write_obj(path, v, f)
            p2 = '%s_template.obj' % path[:-4]
            np.save('%s_cloud.npy' % path[:-4], cloud)
            write_obj(p2, ov, f)
Пример #19
0
def main(model_id):
    """Verify that update ops run during training and report train/val changes."""
    import tensorflow as tf
    import tf_toolbox.testing
    from template_ffd.model import get_builder

    builder = get_builder(model_id)

    def get_train_op():
        # Build a fresh training op from the builder's training inputs.
        features, labels = builder.get_train_inputs()
        spec = builder.get_estimator_spec(
            features, labels, tf.estimator.ModeKeys.TRAIN)
        return spec.train_op

    update_ops_run = tf_toolbox.testing.do_update_ops_run(get_train_op)
    tf_toolbox.testing.report_train_val_changes(get_train_op)

    print('Update ops run :)' if update_ops_run else 'Update ops not run :(')
Пример #20
0
    def get_lazy_dataset(self):
        """Lazily compute IoU between each example's selected-template
        voxels and its ground-truth voxels."""
        from template_ffd.inference.predictions import \
            get_selected_template_idx_dataset

        builder = get_builder(self._model_id)
        template_ids = builder.template_ids

        gt_ds = get_gt_voxel_dataset(
            builder.cat_id, filled=self._filled, auto_save=True,
            example_ids=template_ids)
        gt_ds = gt_ds.map(lambda voxels: voxels.data)

        # Cache each template's voxel grid, positionally indexed.
        with gt_ds:
            template_voxels = tuple(gt_ds[tid] for tid in template_ids)

        selected_ds = get_selected_template_idx_dataset(
            self._model_id).map(lambda idx: template_voxels[idx])

        def compute_iou(pair):
            return intersection_over_union(*pair)

        return Dataset.zip(selected_ds, gt_ds).map(compute_iou)
Пример #21
0
def main(model_id):
    """Check update-op execution and train/val variable changes for a model."""
    import tensorflow as tf
    import tf_toolbox.testing
    from template_ffd.model import get_builder

    builder = get_builder(model_id)

    def get_train_op():
        # Construct the training op from scratch for each check.
        features, labels = builder.get_train_inputs()
        return builder.get_estimator_spec(
            features, labels, tf.estimator.ModeKeys.TRAIN).train_op

    update_ops_run = tf_toolbox.testing.do_update_ops_run(get_train_op)
    tf_toolbox.testing.report_train_val_changes(get_train_op)

    if update_ops_run:
        print('Update ops run :)')
    else:
        print('Update ops not run :(')
Пример #22
0
def main(model_id, skip_runs=10):
    """Profile the model's training graph, writing a JSON trace to _profiles/."""
    import os
    import tensorflow as tf
    from template_ffd.model import get_builder
    from tf_toolbox.profile import create_profile

    builder = get_builder(model_id)

    def graph_fn():
        # Build the training graph and hand back its train op.
        train_mode = tf.estimator.ModeKeys.TRAIN
        features, labels = builder.get_inputs(train_mode)
        return builder.get_estimator_spec(
            features, labels, train_mode).train_op

    profile_dir = os.path.join(
        os.path.realpath(os.path.dirname(__file__)), '_profiles')
    if not os.path.isdir(profile_dir):
        os.makedirs(profile_dir)
    profile_path = os.path.join(profile_dir, '%s.json' % model_id)

    create_profile(graph_fn, profile_path, skip_runs)
Пример #23
0
def main(model_id, skip_runs=10):
    """Write a training-step profile for `model_id` under _profiles/."""
    import os
    import tensorflow as tf
    from template_ffd.model import get_builder
    from tf_toolbox.profile import create_profile

    builder = get_builder(model_id)

    def graph_fn():
        # The profiler re-runs this to (re)build the training graph.
        mode = tf.estimator.ModeKeys.TRAIN
        features, labels = builder.get_inputs(mode)
        spec = builder.get_estimator_spec(features, labels, mode)
        return spec.train_op

    out_dir = os.path.join(
        os.path.realpath(os.path.dirname(__file__)), '_profiles')
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    create_profile(
        graph_fn, os.path.join(out_dir, '%s.json' % model_id), skip_runs)
Пример #24
0
 def f(model_id, *args, **kwargs):
     """Resolve `view_index` and dispatch to `original_fn`, averaging over
     multiple views.

     NOTE(review): `original_fn` and `np` are free variables from the
     enclosing scope — this reads like the inner function of a decorator;
     confirm against the surrounding definition.
     """
     if 'view_index' in kwargs:
         view_index = kwargs['view_index']
         if isinstance(view_index, int):
             # Caller supplied a single view explicitly — pass through.
             return original_fn(model_id, *args, **kwargs)
         else:
             # Non-int view spec: remove it and re-resolve below.
             del kwargs['view_index']
     else:
         view_index = None
     if view_index is None:
         # Fall back to the model's configured view index.
         view_index = get_builder(model_id).view_index
     if isinstance(view_index, int):
         return original_fn(model_id,
                            *args,
                            view_index=view_index,
                            **kwargs)
     assert (isinstance(view_index, (list, tuple)))
     # Multiple views: evaluate each view and return the mean result.
     values = [
         original_fn(model_id, *args, view_index=vi, **kwargs)
         for vi in view_index
     ]
     return np.mean(values)
Пример #25
0
    def get_lazy_dataset(self):
        """Lazily build the EMD dataset between selected-template clouds
        and ground truth clouds."""
        from shapenet.core.point_clouds import get_point_cloud_dataset
        from util3d.point_cloud import sample_points
        from template_ffd.model import get_builder
        from template_ffd.inference.predictions import get_predictions_dataset
        builder = get_builder(self._model_id)
        cat_id = builder.cat_id
        template_ids = builder.template_ids
        clouds = []

        def sample_fn(cloud):
            # Down-sample each ground-truth cloud to a fixed point count.
            return sample_points(np.array(cloud), self._n_samples)

        gt_clouds = get_point_cloud_dataset(
            cat_id, builder.n_samples).map(sample_fn)
        with gt_clouds:
            # Cache one sampled cloud per template, positionally indexed.
            for example_id in template_ids:
                clouds.append(np.array(gt_clouds[example_id]))

        predictions = get_predictions_dataset(self._model_id)
        # NOTE(review): this map treats each prediction value as an integer
        # index into `clouds` — confirm this dataset yields template
        # indices rather than prediction dicts.
        inf_cloud_ds = predictions.map(lambda i: clouds[i].copy())
        return _get_lazy_emd_dataset(inf_cloud_ds, cat_id, self._n_samples)
Пример #26
0
    def get_lazy_dataset(self):
        """Lazily build the EMD dataset between selected-template clouds
        and ground truth clouds."""
        from shapenet.core.point_clouds import get_point_cloud_dataset
        from util3d.point_cloud import sample_points
        from template_ffd.model import get_builder
        from template_ffd.inference.predictions import get_predictions_dataset
        builder = get_builder(self._model_id)
        cat_id = builder.cat_id
        template_ids = builder.template_ids
        clouds = []

        def sample_fn(cloud):
            # Down-sample each ground-truth cloud to a fixed point count.
            return sample_points(np.array(cloud), self._n_samples)

        gt_clouds = get_point_cloud_dataset(
            cat_id, builder.n_samples).map(sample_fn)
        with gt_clouds:
            # Cache one sampled cloud per template, positionally indexed.
            for example_id in template_ids:
                clouds.append(np.array(gt_clouds[example_id]))

        predictions = get_predictions_dataset(self._model_id)
        # NOTE(review): this map treats each prediction value as an integer
        # index into `clouds` — confirm this dataset yields template
        # indices rather than prediction dicts.
        inf_cloud_ds = predictions.map(lambda i: clouds[i].copy())
        return _get_lazy_emd_dataset(inf_cloud_ds, cat_id, self._n_samples)
Пример #27
0
def check_predictions(model_id):
    """Report which eval examples are missing (complete) predictions.

    An example counts as missing when it is absent from the predictions
    dataset or its record lacks either of the 'probs'/'dp' keys.
    """
    from template_ffd.inference.predictions import get_predictions_dataset
    from template_ffd.model import get_builder
    from template_ffd.data.ids import get_example_ids
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    example_ids = get_example_ids(cat_id, 'eval')

    missing = []
    with get_predictions_dataset(model_id, 'r') as dataset:
        for example_id in example_ids:
            if example_id not in dataset:
                missing.append(example_id)
            else:
                example = dataset[example_id]
                # A record without both 'probs' and 'dp' is incomplete.
                if not all(k in example for k in ('probs', 'dp')):
                    missing.append(example_id)

    if len(missing) == 0:
        print('No predictions missing!')
    else:
        print('%d / %d predictions missing' % (len(missing), len(example_ids)))
        # Fix: previously this listed every example id; list only the
        # missing ones, which is what the summary line promises.
        for example_id in missing:
            print(example_id)
0
def main(model_id):
    """Visualize predictions for the model identified by `model_id`."""
    import tensorflow as tf
    from template_ffd.model import get_builder

    # Surface INFO-level logs while the visualization pipeline runs.
    tf.logging.set_verbosity(tf.logging.INFO)
    get_builder(model_id).vis_predictions()
Пример #29
0
def build_graph(resourceid,mode):
    with tf.device('/gpu:%d'%resourceid):
        tflearn.init_graph(seed=1029,num_cores=2,gpu_memory_fraction=0.9,soft_placement=True)
#        img_inp=tf.placeholder(tf.float32,shape=(BATCH_SIZE,HEIGHT,WIDTH,3),name='img_inp')
#        pt_gt=tf.placeholder(tf.float32,shape=(BATCH_SIZE,POINTCLOUDSIZE,3),name='pt_gt')
        
        builder = get_builder('plane')
        if(mode=="train"):
            feature,pointcloud = builder.get_train_inputs()
        elif(mode=="test"):
            feature,pointcloud = builder.get_predict_inputs()
        img_inp = feature['image']
        pt_gt = pointcloud
        training_flag = tf.placeholder(tf.bool,name='training_flag')
        x=img_inp
		
        #origin hourglass
        
#192 256
        x=tflearn.layers.conv.conv_2d(x,16,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,16,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x0=x
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#96 128
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x1=x
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#48 64
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x2=x
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#24 32
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x3=x
        x=tflearn.layers.conv.conv_2d(x,256,(5,5),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#12 16
        x=tflearn.layers.conv.conv_2d(x,256,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,256,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x4=x
        x=tflearn.layers.conv.conv_2d(x,512,(5,5),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#6 8
        x=tflearn.layers.conv.conv_2d(x,512,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,512,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,512,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x5=x
        x=tflearn.layers.conv.conv_2d(x,512,(5,5),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#3 4
        x_additional=tflearn.layers.core.fully_connected(x,2048,activation='relu',weight_decay=1e-3,regularizer='L2')
        x=tflearn.layers.conv.conv_2d_transpose(x,256,[5,5],[6,8],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#6 8
        x5=tflearn.layers.conv.conv_2d(x5,256,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x5))
        x=tflearn.layers.conv.conv_2d(x,256,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x5=x  
        x=tflearn.layers.conv.conv_2d_transpose(x,128,[5,5],[12,16],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#12 16
        x4=tflearn.layers.conv.conv_2d(x4,128,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x4))
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x4=x
        x=tflearn.layers.conv.conv_2d_transpose(x,64,[5,5],[24,32],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#24 32
        x3=tflearn.layers.conv.conv_2d(x3,64,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x3))
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x3=x
        x=tflearn.layers.conv.conv_2d_transpose(x,32,[5,5],[48,64],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#48 64
        x2=tflearn.layers.conv.conv_2d(x2,32,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x2))
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x2=x
        x=tflearn.layers.conv.conv_2d_transpose(x,16,[5,5],[96,128],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#96 128
        x1=tflearn.layers.conv.conv_2d(x1,16,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x1))
        x=tflearn.layers.conv.conv_2d(x,16,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#48 64
        x2=tflearn.layers.conv.conv_2d(x2,32,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x2))
        x=tflearn.layers.conv.conv_2d(x,32,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x2=x
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#24 32
        x3=tflearn.layers.conv.conv_2d(x3,64,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x3))
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x3=x
        x=tflearn.layers.conv.conv_2d(x,128,(5,5),strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#12 16
        x4=tflearn.layers.conv.conv_2d(x4,128,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x4))
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x4=x
        x=tflearn.layers.conv.conv_2d(x,256,(5,5),strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#6 8
        x5=tflearn.layers.conv.conv_2d(x5,256,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x5))
        x=tflearn.layers.conv.conv_2d(x,256,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x5=x
        x=tflearn.layers.conv.conv_2d(x,512,(5,5),strides=2,activation='relu',weight_decay=1e-5,regularizer='L2')
#3 4
        x_additional=tflearn.layers.core.fully_connected(x_additional,2048,activation='linear',weight_decay=1e-4,regularizer='L2')
        x_additional=tf.nn.relu(tf.add(x_additional,tflearn.layers.core.fully_connected(x,2048,activation='linear',weight_decay=1e-3,regularizer='L2')))
        x=tflearn.layers.conv.conv_2d_transpose(x,256,[5,5],[6,8],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#6 8
        x5=tflearn.layers.conv.conv_2d(x5,256,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x5))
        x=tflearn.layers.conv.conv_2d(x,256,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x5=x  
        x=tflearn.layers.conv.conv_2d_transpose(x,128,[5,5],[12,16],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#12 16
        x4=tflearn.layers.conv.conv_2d(x4,128,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x4))
        x=tflearn.layers.conv.conv_2d(x,128,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x4=x
        x=tflearn.layers.conv.conv_2d_transpose(x,64,[5,5],[24,32],strides=2,activation='linear',weight_decay=1e-5,regularizer='L2')
#24 32
        x3=tflearn.layers.conv.conv_2d(x3,64,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.nn.relu(tf.add(x,x3))
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')
        x=tflearn.layers.conv.conv_2d(x,64,(3,3),strides=1,activation='relu',weight_decay=1e-5,regularizer='L2')

#        if(mode=="test"):
#            print "test"
#            BATCH_SIZE=1
        
        x_additional=tflearn.layers.core.fully_connected(x_additional,1024,activation='relu',weight_decay=1e-3,regularizer='L2')
        x_additional=tflearn.layers.core.fully_connected(x_additional,256*3,activation='linear',weight_decay=1e-3,regularizer='L2')
        x_additional=tf.reshape(x_additional,(-1,256,3))
        x=tflearn.layers.conv.conv_2d(x,3,(3,3),strides=1,activation='linear',weight_decay=1e-5,regularizer='L2')
        x=tf.reshape(x,(-1,32*24,3))
        x=tf.concat([x_additional,x],1)
        x=tf.reshape(x,(-1,OUTPUTPOINTS,3))
        
        
        
        
        
        dists_forward,_,dists_backward,_=tf_nndistance.nn_distance(pt_gt,x)#forward-16384*32,backward-1024*32
        mindist=dists_forward
        dist0=mindist[0,:]
        dists_forward=tf.reduce_mean(dists_forward)
        dists_backward=tf.reduce_mean(dists_backward)
        if(mode=="train"):
            loss_nodecay=(dists_forward+dists_backward/2.0)
            loss=loss_nodecay+tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))*0.1
        if(mode=="test"):
#            loss_nodecay=np_metrics.chamfer(pt_gt, x)
            loss_nodecay=dists_forward+dists_backward
            loss=loss_nodecay

        batchno = tf.Variable(0, dtype=tf.int32)
        optimizer = tf.train.AdamOptimizer(3e-5).minimize(loss,global_step=batchno)
        batchnoinc=batchno.assign(batchno+1)
    return img_inp,x,pt_gt,training_flag,loss,optimizer,batchno,batchnoinc,mindist,loss_nodecay,dists_forward,dists_backward,dist0
Пример #30
0
 def get_lazy_dataset(self):
     """Build the lazy EMD dataset over this model's inferred clouds."""
     clouds = self.get_inferred_cloud_dataset(**self._kwargs)
     category = get_builder(self._model_id).cat_id
     return _get_lazy_emd_dataset(clouds, category, self._n_samples)
Пример #31
0
def main(model_id, mode):
    """Visualize the training inputs of the model identified by model_id.

    Note: `mode` is accepted for CLI compatibility but is not used here.
    """
    from template_ffd.model import get_builder
    get_builder(model_id).vis_inputs()
Пример #32
0
from template_ffd.inference.predictions import get_predictions_dataset
from template_ffd.data.ids import get_example_ids
from template_ffd.model import get_builder

# Configuration: evaluate the 'e'-regime model trained on the chair category.
regime = 'e'
cat_desc = 'chair'
view_index = 5              # which rendered view of each example to load
edge_length_threshold = 0.02  # mesh subdivision threshold for template meshes

shuffle = True
k = 3                       # number of top-scoring templates to deform

cat_id = cat_desc_to_id(cat_desc)
model_id = '%s_%s' % (regime, cat_desc)
builder = get_builder(model_id)

# Datasets keyed by example id: rendered input images, ground-truth meshes
# and this model's saved predictions.
image_ds = RenderConfig().get_dataset(cat_id, view_index)
gt_mesh_ds = get_mesh_dataset(cat_id)
predictions_ds = get_predictions_dataset(model_id)

# Function mapping a prediction to meshes deformed from its top-k templates.
top_k_mesh_fn = builder.get_prediction_to_top_k_mesh_fn(
    edge_length_threshold, k)

all_ds = Dataset.zip(image_ds, gt_mesh_ds, predictions_ds)


def vis():
    def vis_mesh(mesh, include_wireframe=False, **kwargs):
        from util3d.mayavi_vis import vis_mesh as vm
        v, f = (np.array(mesh[k]) for k in ('vertices', 'faces'))
Пример #33
0
def main(model_id, mode):
    """Show the model's training inputs.

    `mode` is part of the CLI signature but is unused by this entry point.
    """
    from template_ffd.model import get_builder
    get_builder(model_id).vis_inputs()
Пример #34
0
 def get_lazy_dataset(self):
     """Return the lazily-evaluated EMD dataset for this model's clouds."""
     inferred = self.get_inferred_cloud_dataset(**self._kwargs)
     cid = get_builder(self._model_id).cat_id
     return _get_lazy_emd_dataset(inferred, cid, self._n_samples)
Пример #35
0
def vis_segmentations(model_id,
                      example_ids=None,
                      vis_mesh=False,
                      edge_length_threshold=0.02,
                      include_wireframe=False,
                      save=False):
    """Visualize or save inferred segmentations for a trained model.

    For each example, loads the saved prediction (template probabilities and
    deformation parameters), runs the builder's segmentation function, and
    either displays the result interactively (mayavi + matplotlib) or writes
    the figures plus the query image to disk.

    Args:
        model_id: id of the trained model whose predictions are loaded.
        example_ids: iterable of example ids to process; defaults to the
            category's 'eval' split. Must be given explicitly when `save`.
        vis_mesh: if True, visualize the segmented deformed mesh; otherwise
            visualize the segmented point cloud.
        edge_length_threshold: subdivision threshold used when building the
            segmented-mesh function (only relevant when `vis_mesh`).
        include_wireframe: forwarded to the segmented-mesh visualizer.
        save: if True, save figures under the paper segmentations directory
            instead of showing them.

    Raises:
        ValueError: if `save` is True and `example_ids` is None.
    """
    # NOTE(review): scipy.misc.imsave was removed in scipy >= 1.2; this file
    # presumably pins an older scipy — confirm before upgrading.
    from scipy.misc import imsave
    if save and example_ids is None:
        raise ValueError('Cannot save without specifying example_ids')
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    if example_ids is None:
        # Fixed: original had a duplicated assignment
        # (`example_ids = example_ids = ...`).
        example_ids = get_example_ids(cat_id, 'eval')
    if vis_mesh:
        segmented_fn = builder.get_segmented_mesh_fn(edge_length_threshold)
    else:
        segmented_fn = builder.get_segmented_cloud_fn()
    config = RenderConfig()

    with get_predictions_dataset(model_id) as predictions:
        with config.get_dataset(cat_id, builder.view_index) as image_ds:
            for example_id in example_ids:
                example = predictions[example_id]
                probs, dp = (np.array(example[k]) for k in ('probs', 'dp'))
                result = segmented_fn(probs, dp)
                # segmented_fn may return None; skip such examples silently.
                if result is not None:
                    image = image_ds[example_id]
                    print(example_id)
                    segmentation = result['segmentation']
                    if vis_mesh:
                        vertices = result['vertices']
                        faces = result['faces']
                        original_points = result['original_points']
                        original_seg = result['original_segmentation']
                        # f0: inferred segmented mesh; f1: annotated cloud.
                        f0 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_segmented_mesh(vertices,
                                           segmented_cloud(
                                               faces, segmentation),
                                           include_wireframe=include_wireframe,
                                           opacity=0.2)
                        f1 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(
                            segmented_cloud(original_points, original_seg))
                    else:
                        points = result['points']
                        original_points = result['original_points']
                        # f0: inferred segmented cloud; f1: original cloud
                        # colored with the inferred segmentation.
                        f0 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(segmented_cloud(points, segmentation))
                        f1 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(
                            segmented_cloud(original_points, segmentation))

                    if save:
                        folder = os.path.join(_paper_dir, 'segmentations',
                                              model_id, example_id)
                        if not os.path.isdir(folder):
                            os.makedirs(folder)
                        fn = 'inferred_%s.png' % ('mesh'
                                                  if vis_mesh else 'cloud')
                        p0 = os.path.join(folder, fn)
                        mlab.savefig(p0, figure=f0)
                        p1 = os.path.join(folder, 'annotated_cloud.png')
                        mlab.savefig(p1, figure=f1)
                        pi = os.path.join(folder, 'query_image.png')
                        imsave(pi, image)
                        mlab.close()
                    else:
                        plt.imshow(image)
                        plt.show(block=False)
                        mlab.show()
                        plt.close()
Пример #36
0
from template_ffd.inference.predictions import get_predictions_dataset
from template_ffd.data.ids import get_example_ids
from template_ffd.model import get_builder


# Configuration: evaluate the 'e'-regime model trained on the chair category.
regime = 'e'
cat_desc = 'chair'
view_index = 5              # which rendered view of each example to load
edge_length_threshold = 0.02  # mesh subdivision threshold for template meshes

shuffle = True
k = 3                       # number of top-scoring templates to deform

cat_id = cat_desc_to_id(cat_desc)
model_id = '%s_%s' % (regime, cat_desc)
builder = get_builder(model_id)

# Datasets keyed by example id: rendered input images, ground-truth meshes
# and this model's saved predictions.
image_ds = RenderConfig().get_dataset(cat_id, view_index)
gt_mesh_ds = get_mesh_dataset(cat_id)
predictions_ds = get_predictions_dataset(model_id)

# Function mapping a prediction to meshes deformed from its top-k templates.
top_k_mesh_fn = builder.get_prediction_to_top_k_mesh_fn(
    edge_length_threshold, k)

all_ds = Dataset.zip(image_ds, gt_mesh_ds, predictions_ds)


def vis():

    def vis_mesh(mesh, include_wireframe=False, **kwargs):
        from util3d.mayavi_vis import vis_mesh as vm
Пример #37
0
def vis_segmentations(
        model_id, example_ids=None, vis_mesh=False,
        edge_length_threshold=0.02, include_wireframe=False,
        save=False):
    """Visualize or save inferred segmentations for a trained model.

    Loads each example's saved prediction (template probabilities and
    deformation parameters), applies the builder's segmentation function,
    and either shows the result interactively or writes the figures and the
    query image under the paper segmentations directory.

    Args:
        model_id: id of the trained model whose predictions are loaded.
        example_ids: ids of examples to process; defaults to the category's
            'eval' split. Required when `save` is True.
        vis_mesh: if True, show the segmented deformed mesh, otherwise the
            segmented point cloud.
        edge_length_threshold: subdivision threshold for the segmented-mesh
            function (only used when `vis_mesh`).
        include_wireframe: forwarded to the segmented-mesh visualizer.
        save: if True, save figures to disk instead of displaying them.

    Raises:
        ValueError: if `save` is True and `example_ids` is None.
    """
    # NOTE(review): scipy.misc.imsave was removed in scipy >= 1.2; confirm
    # the pinned scipy version before upgrading.
    from scipy.misc import imsave
    if save and example_ids is None:
        raise ValueError('Cannot save without specifying example_ids')
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    if example_ids is None:
        # Fixed: original had a duplicated assignment
        # (`example_ids = example_ids = ...`).
        example_ids = get_example_ids(cat_id, 'eval')
    if vis_mesh:
        segmented_fn = builder.get_segmented_mesh_fn(edge_length_threshold)
    else:
        segmented_fn = builder.get_segmented_cloud_fn()
    config = RenderConfig()

    with get_predictions_dataset(model_id) as predictions:
        with config.get_dataset(cat_id, builder.view_index) as image_ds:
            for example_id in example_ids:
                example = predictions[example_id]
                probs, dp = (np.array(example[k]) for k in ('probs', 'dp'))
                result = segmented_fn(probs, dp)
                # segmented_fn may return None; skip such examples silently.
                if result is not None:
                    image = image_ds[example_id]
                    print(example_id)
                    segmentation = result['segmentation']
                    if vis_mesh:
                        vertices = result['vertices']
                        faces = result['faces']
                        original_points = result['original_points']
                        original_seg = result['original_segmentation']
                        # f0: inferred segmented mesh; f1: annotated cloud.
                        f0 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_segmented_mesh(
                            vertices, segmented_cloud(faces, segmentation),
                            include_wireframe=include_wireframe,
                            opacity=0.2)
                        f1 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(
                            segmented_cloud(original_points, original_seg))
                    else:
                        points = result['points']
                        original_points = result['original_points']
                        # f0: inferred segmented cloud; f1: original cloud
                        # colored with the inferred segmentation.
                        f0 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(segmented_cloud(points, segmentation))
                        f1 = mlab.figure(bgcolor=(1, 1, 1))
                        vis_clouds(
                            segmented_cloud(original_points, segmentation))

                    if save:
                        folder = os.path.join(
                            _paper_dir, 'segmentations', model_id, example_id)
                        if not os.path.isdir(folder):
                            os.makedirs(folder)
                        fn = 'inferred_%s.png' % (
                            'mesh' if vis_mesh else 'cloud')
                        p0 = os.path.join(folder, fn)
                        mlab.savefig(p0, figure=f0)
                        p1 = os.path.join(folder, 'annotated_cloud.png')
                        mlab.savefig(p1, figure=f1)
                        pi = os.path.join(folder, 'query_image.png')
                        imsave(pi, image)
                        mlab.close()
                    else:
                        plt.imshow(image)
                        plt.show(block=False)
                        mlab.show()
                        plt.close()