Example #1
    def from_sbf_file(filename,
                      surface_property='electric_potential',
                      offset=(0, 0, 0),
                      orient=True):
        surface_data = sbf.read_file(filename)
        # set center to origin
        positions = shift_to_origin(surface_data['vertices'].data.transpose())
        radius = mean_radius(positions)
        # scale so the mean vertex radius becomes 1/20
        positions /= radius * 20
        if orient:
            from sklearn.decomposition import PCA
            pca = PCA(n_components=3)
            positions = pca.fit_transform(positions)
        offset = np.array(offset)
        positions += offset
        # convert the file's 1-based face indices to 0-based
        faces = surface_data['faces'].data.transpose() - 1
        vertex_normals = surface_data['vertex normals'].data.transpose()
        surface_properties = {}

        # keep every dataset with one value per vertex as a surface property
        for dset in surface_data.datasets():
            if dset.data.shape == (positions.shape[0], ):
                surface_properties[dset.name] = dset.data
        return Isosurface(positions,
                          faces,
                          vertex_normals,
                          surface_properties,
                          surface_property=surface_property)
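The loop above keeps any dataset whose length matches the number of vertices, so every per-vertex scalar stored in the file becomes a selectable surface property. A minimal sketch of listing which properties a given file offers, assuming a hypothetical 'surface.sbf' and using only the sbf calls already seen above:

import sbf

surface_data = sbf.read_file('surface.sbf')
n_vertices = surface_data['vertices'].data.transpose().shape[0]
# per-vertex scalars are exactly the datasets with one value per vertex
for dset in surface_data.datasets():
    if dset.data.shape == (n_vertices, ):
        print(dset.name)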
Example #2
def sht_isosurface(filename, l_max=20, prop='electric_potential', test=None):
    """Given an SBF, describe the set of vertices and their esp using sht.
    Will scale the mesh to be of unit mean radius.

    Arguments:
    filename -- name of the SBF file containing a surface

    Keyword arguments:
    prop -- the name of the vertex property to describe in combination
    with the shape (or radius)
    l_max -- maximum angular momenta
    test -- use to keep the actual shape and property values for
    examination of accuracy of descriptor

    """
    name = Path(filename).stem
    LOG.debug('Describing %s surface with spherical harmonics', name)
    datafile = sbf.read_file(filename)
    pts = datafile['vertices'].data.transpose()
    LOG.debug('Loaded vertex data')
    # shift to be centered about the origin
    pts -= np.mean(pts, axis=0)

    # this is faster for some reason than np.apply_along_axis
    norms = np.sqrt(pts[:, 0]**2 + pts[:, 1]**2 + pts[:, 2]**2)
    mean_norm = np.mean(norms)
    pts /= mean_norm
    norms /= mean_norm
    # project the centered vertices onto the unit sphere (directions only)
    pts_normalized = pts / np.reshape(norms, (pts.shape[0], 1))
    LOG.debug('Normalized points')
    sht = SHT(l_max)
    grid_cartesian = spherical_to_cartesian(
        np.c_[np.ones(sht.grid.shape[0]), sht.grid[:, 1], sht.grid[:, 0]])
    LOG.debug('Constructing tree')
    tree = KDTree(pts_normalized)
    LOG.debug('Done')
    LOG.debug('Interpolating values')
    # query returns (distances, indices); use the nearest-vertex indices
    # to sample the radius (shape) and the property on the SHT grid
    nearest = tree.query(grid_cartesian, 1)
    LOG.debug('Done')
    shape = values_from_grid(norms, nearest[1])
    property_values = values_from_grid(datafile[prop].data, nearest[1])

    if test is not None:
        test['actual'] = shape

    # normalize property to be in [0,1], keep track of min and range
    prop_min = np.min(property_values)
    prop_scale = np.abs(np.max(property_values) - np.min(property_values))
    property_values -= prop_min
    if prop_scale != 0:
        property_values /= prop_scale
    others = [mean_norm, prop_min, prop_scale]
    combined = np.zeros(property_values.shape, dtype=np.complex128)
    combined.real = shape
    combined.imag = property_values

    return name, others, sht.analyse(combined)
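The function returns the surface name, the three scale factors needed to undo the normalization, and the SHT coefficients of the combined shape + i * property signal. A minimal usage sketch, assuming a hypothetical 'surface.sbf':

name, (mean_norm, prop_min, prop_scale), coefficients = sht_isosurface(
    'surface.sbf', l_max=20, prop='electric_potential')
# mean_norm restores the original length scale; an original property value
# can be recovered as normalized_value * prop_scale + prop_min
print(name, mean_norm, prop_min, prop_scale)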
Example #4
def load_data(filename):
    """Load the data included with this module.

    Keyword arguments:
    directory -- the folder containing data to load
    (default is the location of this file)
    """
    contents = sbf.read_file(filename)
    names = contents['names'].data
    dims = names.shape
    # view the (n, width) character array as n fixed-width byte strings
    names = names.view('S{}'.format(dims[1])).reshape((dims[0],))
    invariants = contents['invariants'].data
    return names, invariants
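A minimal usage sketch, assuming a hypothetical 'invariants.sbf' containing the 'names' and 'invariants' datasets; the names come back as fixed-width byte strings:

names, invariants = load_data('invariants.sbf')
print(invariants.shape)
print([name.decode().strip() for name in names])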
Example #6
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('surface_files',
                        nargs='+',
                        help='CrystalExplorer surface files in .sbf format')
    parser.add_argument('--property',
                        default='d_norm',
                        choices=('d_norm', 'd_e', 'd_i', 'curvedness',
                                 'shape_index'),
                        help='Property to color surfaces by')
    parser.add_argument('--property-min',
                        default=None,
                        type=float,
                        help='Minimum property value for coloring')
    parser.add_argument('--property-max',
                        default=None,
                        type=float,
                        help='Maximum property value for coloring')
    parser.add_argument('--output-format',
                        default='obj',
                        choices=trimesh.io.export._mesh_exporters.keys(),
                        help='Output file format')

    args = parser.parse_args()
    for filename in args.surface_files:
        f = sbf.read_file(filename)
        prop = f[args.property].data
        name = '.'.join(filename.split('.')[:-1])
        output = '{}.{}'.format(name, args.output_format)
        print("Exporting {} using surface property '{}'".format(
            output, args.property))
        colors = colormap(prop,
                          scheme=args.property,
                          minval=args.property_min,
                          maxval=args.property_max)
        vertices = f['vertices'].data.transpose()
        faces = f['faces'].data.transpose() - 1
        normals = f['vertex normals'].data.transpose()
        mesh = get_mesh(vertices, faces, normals, colors)
        mesh.export(output)
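A hypothetical command-line invocation, assuming the module above is saved as export_surface.py and wired to call main(), and that 'ply' is among the trimesh exporters offered by --output-format:

python export_surface.py surface.sbf --property d_norm --output-format ply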
Example #7
def read_de_di(surface_file):
    f = sbf.read_file(surface_file)
    return f['d_e'].data, f['d_i'].data
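The d_e and d_i datasets hold, for each surface point, the distance to the nearest atom outside and inside the surface respectively; binning the pairs into a 2D histogram (the basis of a fingerprint plot) is a common way to summarise them. A minimal sketch using only numpy, with a hypothetical file name and bin count:

import numpy as np

d_e, d_i = read_de_di('surface.sbf')
hist, d_e_edges, d_i_edges = np.histogram2d(d_e, d_i, bins=100)
print(hist.shape)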
Example #8
"""
lebedev.py

Contains wrapper methods to extract saved grids
from the h5file

"""
from os.path import dirname, abspath, join
import sbf
import numpy as np

DIR = dirname(abspath(__file__))
LEBEDEV_GRID_FILE = join(DIR, 'lebedev.sbf')
_GRIDS = sbf.read_file(LEBEDEV_GRID_FILE)

AVAILABLE_GRIDS = list(range(3, 32, 2)) + list(range(35, 132, 6))
MAX_DEGREE = max(AVAILABLE_GRIDS)


def lebedev_grid(degree=21):
    """
    Returns the *angular* lebedev grid capable of exactly
    integrating a polynomial with given degree on the sphere.

    Grids are of shape(num_points, 3), with column 0
    being between (0, 2 Pi), column 1 between (0, Pi)
    and column 2 representing the weight for this grid point.

    >>> lebedev_grid(3)
    array([[3.14159265, 1.57079633, 0.16666667],
           [6.28318531, 1.57079633, 0.16666667],