Example #1
File: builder.py  Project: yymath/menpo
    def _build_shape_model(cls, shapes, max_components):
        r"""
        Builds a shape model given a set of shapes.

        Parameters
        ----------
        shapes: list of :map:`PointCloud`
            The set of shapes from which to build the model.
        max_components: None or int or float
            Specifies the number of components of the trained shape model.
            If int, it specifies the exact number of components to be retained.
            If float, it specifies the percentage of variance to be retained.
            If None, all the available components are kept (100% of variance).

        Returns
        -------
        shape_model: :class:`menpo.model.pca`
            The PCA shape model.
        """
        # centralize shapes
        centered_shapes = [Translation(-s.centre).apply(s) for s in shapes]
        # align centralized shape using Procrustes Analysis
        gpa = GeneralizedProcrustesAnalysis(centered_shapes)
        aligned_shapes = [s.aligned_source for s in gpa.transforms]

        # build shape model
        shape_model = PCAModel(aligned_shapes)
        if max_components is not None:
            # trim shape model if required
            shape_model.trim_components(max_components)

        return shape_model
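
The docstring above spells out how `max_components` is interpreted: an int keeps exactly that many components, a float keeps that fraction of the variance, and None skips trimming entirely. The following minimal sketch, which is not part of the menpo project, illustrates that behaviour on toy data; the import paths `menpo.shape.PointCloud` and `menpo.model.PCAModel` and the `n_active_components` attribute are assumptions about the menpo API and may differ between versions.

import numpy as np
from menpo.shape import PointCloud   # assumed import path
from menpo.model import PCAModel     # assumed import path

# Three toy 4-point shapes: a unit square plus small random jitter.
base = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
shapes = [PointCloud(base + 0.01 * np.random.randn(4, 2)) for _ in range(3)]

model = PCAModel(shapes)
model.trim_components(2)        # int: keep exactly 2 components
# model.trim_components(0.95)   # float: keep 95% of the total variance
# if max_components is None, the functions above simply skip the trim step
print(model.n_active_components)  # assumed attribute name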
Example #2
def build_shape_model(shapes, max_components=None, prefix='', verbose=False):
    r"""
    Builds a shape model given a set of shapes.

    Parameters
    ----------
    shapes: list of :map:`PointCloud`
        The set of shapes from which to build the model.
    max_components: None or int or float
        Specifies the number of components of the trained shape model.
        If int, it specifies the exact number of components to be retained.
        If float, it specifies the percentage of variance to be retained.
        If None, all the available components are kept (100% of variance).

    Returns
    -------
    shape_model: :class:`menpo.model.pca`
        The PCA shape model.
    """
    if verbose:
        print_dynamic('{}Building shape model'.format(prefix))
    # compute aligned shapes
    aligned_shapes = align_shapes(shapes)
    # build shape model
    shape_model = PCAModel(aligned_shapes)
    if max_components is not None:
        # trim shape model if required
        shape_model.trim_components(max_components)
    return shape_model
Example #3
def build_shape_model(shapes, max_components):
    r"""
    Builds a shape model given a set of shapes.

    Parameters
    ----------
    shapes: list of :map:`PointCloud`
        The set of shapes from which to build the model.
    max_components: None or int or float
        Specifies the number of components of the trained shape model.
        If int, it specifies the exact number of components to be retained.
        If float, it specifies the percentage of variance to be retained.
        If None, all the available components are kept (100% of variance).

    Returns
    -------
    shape_model: :class:`menpo.model.pca`
        The PCA shape model.
    """
    # centralize shapes
    centered_shapes = [Translation(-s.centre()).apply(s) for s in shapes]
    # align centralized shape using Procrustes Analysis
    gpa = GeneralizedProcrustesAnalysis(centered_shapes)
    aligned_shapes = [s.aligned_source() for s in gpa.transforms]

    # build shape model
    shape_model = PCAModel(aligned_shapes)
    if max_components is not None:
        # trim shape model if required
        shape_model.trim_components(max_components)

    return shape_model
Example #4
    def _build_shape_model(cls, shapes, max_components):
        r"""
        """
        # centralize shapes
        centered_shapes = [Translation(-s.centre).apply(s) for s in shapes]
        # align centralized shape using Procrustes Analysis
        gpa = GeneralizedProcrustesAnalysis(centered_shapes)
        aligned_shapes = [s.aligned_source for s in gpa.transforms]

        # build shape model
        shape_model = PCAModel(aligned_shapes)
        if max_components is not None:
            # trim shape model if required
            shape_model.trim_components(max_components)

        return shape_model
Example #5
def build_all_models_frgc(images,
                          ref_frame_path,
                          subject_id,
                          out_path='/vol/atlas/homes/pts08/',
                          transform_class=ThinPlateSplines,
                          square_mask=False):
    print "Beginning model creation for {0}".format(subject_id)
    # Build reference frame
    ref_frame = mio.import_image(ref_frame_path)
    labeller([ref_frame], 'PTS', ibug_68_closed_mouth)
    ref_frame.crop_to_landmarks(boundary=2,
                                group='ibug_68_closed_mouth',
                                label='all')
    if not square_mask:
        ref_frame.constrain_mask_to_landmarks(group='ibug_68_closed_mouth',
                                              label='all')

    reference_shape = ref_frame.landmarks['ibug_68_closed_mouth'].lms

    # Extract all shapes
    labeller(images, 'PTS', ibug_68_closed_mouth)
    shapes = [img.landmarks['ibug_68_closed_mouth'].lms for img in images]

    # Warp each of the images to the reference image
    print "Warping all frgc shapes to reference frame of {0}".format(
        subject_id)
    tps_transforms = [
        transform_class(reference_shape, shape) for shape in shapes
    ]
    warped_images = [
        img.warp_to(ref_frame.mask, t)
        for img, t in zip(images, tps_transforms)
    ]

    # Calculate the normal matrix
    print 'Extracting all normals'
    normal_matrix = extract_normals(warped_images)

    # Save memory by deleting all the images since we don't need them any more.
    # Keep one around that we can query for its size etc.
    example_image = deepcopy(warped_images[0])
    del warped_images[:]

    # Normals
    print 'Computing normal feature space'
    normal_images = create_feature_space(normal_matrix,
                                         example_image,
                                         'normals',
                                         subject_id,
                                         out_path=out_path)

    # Spherical
    print 'Computing spherical feature space'
    spherical_matrix = Spherical().logmap(normal_matrix)
    spherical_images = create_feature_space(spherical_matrix,
                                            example_image,
                                            'spherical',
                                            subject_id,
                                            out_path=out_path)

    # AEP
    print 'Computing AEP feature space'
    mean_normals = normalise_vector(np.mean(normal_matrix, 0))
    aep_matrix = AEP(mean_normals).logmap(normal_matrix)
    aep_images = create_feature_space(aep_matrix,
                                      example_image,
                                      'aep',
                                      subject_id,
                                      out_path=out_path)

    # PGA
    print 'Computing PGA feature space'
    mu = intrinsic_mean(normal_matrix, PGA, max_iters=50)
    pga_matrix = PGA(mu).logmap(normal_matrix)
    pga_images = create_feature_space(pga_matrix,
                                      example_image,
                                      'pga',
                                      subject_id,
                                      out_path=out_path)

    # PCA models
    n_components = 200
    print 'Computing PCA models ({} components)'.format(n_components)
    template = ref_frame

    normal_model = PCAModel(normal_images, center=True)
    normal_model.trim_components(200)
    cosine_model = PCAModel(normal_images, center=False)
    cosine_model.trim_components(200)
    spherical_model = PCAModel(spherical_images, center=False)
    spherical_model.trim_components(200)
    aep_model = PCAModel(aep_images, center=False)
    aep_model.trim_components(200)
    pga_model = PCAModel(pga_images, center=False)
    pga_model.trim_components(200)

    mean_normals_image = normal_model.mean
    mu_image = mean_normals_image.from_vector(mu)

    # Save out models
    pickle_model(out_path, subject_id, 'normal', normal_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'cosine', cosine_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'spherical', spherical_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'aep', aep_model, template,
                 mean_normals)
    pickle_model(out_path,
                 subject_id,
                 'pga',
                 pga_model,
                 template,
                 mean_normals,
                 intrinsic_means=mu_image)
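
The pipeline above builds one ThinPlateSplines transform per training shape (the `tps_transforms` list) and uses each one to pull the corresponding image back into the reference frame. The snippet below is a small, self-contained sketch of that transform step only; it is not code from the original project, and it assumes `menpo.transform.ThinPlateSplines` and `menpo.shape.PointCloud` are importable under these names.

import numpy as np
from menpo.shape import PointCloud            # assumed import path
from menpo.transform import ThinPlateSplines  # assumed import path

# A reference shape and a slightly deformed version of it.
reference_shape = PointCloud(
    np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]))
shape = PointCloud(reference_shape.points + 0.05 * np.random.randn(4, 2))

# As in the pipeline above, the transform is built from the reference
# shape (source) to the image's shape (target), so applying it maps
# reference-frame points towards the image's frame for the warp.
tps = ThinPlateSplines(reference_shape, shape)
warped_points = tps.apply(reference_shape)
print(warped_points.points)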
Example #6
def build_all_models_frgc(images, ref_frame_path, subject_id,
                          out_path='/vol/atlas/homes/pts08/',
                          transform_class=ThinPlateSplines,
                          square_mask=False):
    print "Beginning model creation for {0}".format(subject_id)
    # Build reference frame
    ref_frame = mio.import_image(ref_frame_path)
    labeller([ref_frame], 'PTS', ibug_68_closed_mouth)
    ref_frame.crop_to_landmarks(boundary=2, group='ibug_68_closed_mouth',
                                label='all')
    if not square_mask:
        ref_frame.constrain_mask_to_landmarks(group='ibug_68_closed_mouth',
                                              label='all')

    reference_shape = ref_frame.landmarks['ibug_68_closed_mouth'].lms

    # Extract all shapes
    labeller(images, 'PTS', ibug_68_closed_mouth)
    shapes = [img.landmarks['ibug_68_closed_mouth'].lms for img in images]

    # Warp each of the images to the reference image
    print "Warping all frgc shapes to reference frame of {0}".format(subject_id)
    tps_transforms = [transform_class(reference_shape, shape) for shape in shapes]
    warped_images = [img.warp_to(ref_frame.mask, t)
                     for img, t in zip(images, tps_transforms)]

    # Calculate the normal matrix
    print 'Extracting all normals'
    normal_matrix = extract_normals(warped_images)

    # Save memory by deleting all the images since we don't need them any more.
    # Keep one around that we can query for its size etc.
    example_image = deepcopy(warped_images[0])
    del warped_images[:]

    # Normals
    print 'Computing normal feature space'
    normal_images = create_feature_space(normal_matrix, example_image,
                                         'normals', subject_id,
                                         out_path=out_path)

    # Spherical
    print 'Computing spherical feature space'
    spherical_matrix = Spherical().logmap(normal_matrix)
    spherical_images = create_feature_space(spherical_matrix, example_image,
                                            'spherical', subject_id,
                                            out_path=out_path)

    # AEP
    print 'Computing AEP feature space'
    mean_normals = normalise_vector(np.mean(normal_matrix, 0))
    aep_matrix = AEP(mean_normals).logmap(normal_matrix)
    aep_images = create_feature_space(aep_matrix, example_image, 'aep',
                                      subject_id,
                                      out_path=out_path)

    # PGA
    print 'Computing PGA feature space'
    mu = intrinsic_mean(normal_matrix, PGA, max_iters=50)
    pga_matrix = PGA(mu).logmap(normal_matrix)
    pga_images = create_feature_space(pga_matrix, example_image, 'pga',
                                      subject_id,
                                      out_path=out_path)

    # PCA models
    n_components = 200
    print 'Computing PCA models ({} components)'.format(n_components)
    template = ref_frame

    normal_model = PCAModel(normal_images, center=True)
    normal_model.trim_components(200)
    cosine_model = PCAModel(normal_images, center=False)
    cosine_model.trim_components(200)
    spherical_model = PCAModel(spherical_images, center=False)
    spherical_model.trim_components(200)
    aep_model = PCAModel(aep_images, center=False)
    aep_model.trim_components(200)
    pga_model = PCAModel(pga_images, center=False)
    pga_model.trim_components(200)

    mean_normals_image = normal_model.mean
    mu_image = mean_normals_image.from_vector(mu)

    # Save out models
    pickle_model(out_path, subject_id, 'normal', normal_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'cosine', cosine_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'spherical', spherical_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'aep', aep_model, template,
                 mean_normals)
    pickle_model(out_path, subject_id, 'pga', pga_model, template,
                 mean_normals, intrinsic_means=mu_image)