Example #1
def xyzr_to_vertices(xyzr_filename, gts_filename, probe_radius=1.4):

    master_vlist, master_tlist = get_spherical_mesh(2)

    spheres = open(xyzr_filename,'r').readlines()
    vlist = []
    tlist = []
    nlist = []
    for sph in spheres:
        offset = len(vlist)
        x,y,z,r = [float(xx) for xx in sph.split()]
        r += probe_radius
        for vv in master_vlist:
            n = Vector(vv)
            nlist.append(n)
            xx = (vv.x * r) + x
            yy = (vv.y * r) + y
            zz = (vv.z * r) + z
            vlist.append(Vector(xx,yy,zz))

        for v1_idx, v2_idx, v3_idx in master_tlist:
            tlist.append( (v1_idx+offset, v2_idx+offset, v3_idx+offset) )

    write_gts(gts_filename, vlist, tlist)

    return
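A minimal way to drive this converter, assuming the module above is importable; the filenames below are placeholders rather than values from the original code:

# hypothetical input: an .xyzr file holds one "x y z r" sphere per line
xyzr_to_vertices("molecule.xyzr", "molecule.gts", probe_radius=1.4)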
Example #2
def clean_gts(filename):
    
    vertices, triangles = get_vertices_triangles_from_gts(filename)
    
    # get centre of set of points
    centre = Vector(0,0,0)
    for v in vertices:
        centre += v
    centre = centre / len(vertices)

    # get maximum xyz extent of set of points
    maxdim = max([ (v - centre).length() for v in vertices ])

    # insert vertices into an Octree
    max_neighbours = 27
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)

    duplicates = []
    ctr = 0
    mappingA = {}
    mappingB = {}
    for i,v in enumerate(vertices):

        # check for duplicate already in tree
        pre_existing = adaptive_tree.check_for_duplicate(v)
        if (pre_existing == -1):
            mappingA[i] = ctr
            mappingB[ctr] = i
            adaptive_tree.insert(v)
            ctr += 1
        else:
            pre_existing_idx = mappingB[pre_existing]
            print("Clash between %d %s and %d %s" %(i, v, pre_existing_idx, vertices[pre_existing_idx]))
            duplicates.append(i)
            mappingA[i] = pre_existing

    vertices = [v for i,v in enumerate(vertices) if i not in duplicates]

    print("Number of vertices: ", len(vertices))
    new_tri = []
    for i,t in enumerate(triangles):
        #print t[0]
        new_v1 = mappingA[t[0]]
        new_v2 = mappingA[t[1]]
        new_v3 = mappingA[t[2]]
        if (new_v1 in (new_v2, new_v3) or
            new_v2 in (new_v1, new_v3) or
            new_v3 in (new_v1, new_v2)
            ):
            print("skipped a dud triangle (%d) " %(i), t)
            continue
        new_tri.append([new_v1, new_v2, new_v3])

    # write the gts file
    write_gts(filename, vertices, new_tri)
Example #3
def gts2off(gts_filename, off_filename):

    vertices, triangles = get_vertices_triangles_from_gts(gts_filename)

    vnormals = [Vector(0,0,0) for ii in range(len(vertices))]

    for tri in triangles:
        v1,v2,v3 = tri
        v1v = vertices[v1]
        v2v = vertices[v2]
        v3v = vertices[v3]
        vnormals[v1] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v2] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v3] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
    vnormals = [v.normalised() for v in vnormals]

    fout = open(off_filename,'w')
    print("nOFF", file=fout)
    print("3", file=fout)
    # last number is number of edges; unused by OFF format, but must be present
    print("%9d%9d%9d" %(len(vertices),len(triangles),0), file=fout)
    for v,vn in zip(vertices, vnormals):
        print("%9.3f%9.3f%9.3f%9.3f%9.3f%9.3f" %(v.x, v.y, v.z, vn.x, vn.y, vn.z), file=fout)
    for t in triangles:
        print("    3 %8d%8d%8d" %(t[0],t[1],t[2]), file=fout)
    fout.close()

    return
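The loop above accumulates, at each vertex, the normals of every incident triangle weighted by triangle area, then normalises the sum. The helpers tnormal and tarea are not shown in these examples; a plausible cross-product sketch, consistent with the Vector methods used elsewhere (cross, length, normalised), might look like:

def tnormal(v1, v2, v3):
    # unit normal of the triangle (v1, v2, v3)
    return (v2 - v1).cross(v3 - v1).normalised()

def tarea(v1, v2, v3):
    # triangle area is half the magnitude of the edge cross product
    return (v2 - v1).cross(v3 - v1).length() / 2.0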
Example #4
def gts2msms(gts_filename, msms_filename):

    vertices, triangles = get_vertices_triangles_from_gts(gts_filename)

    vnormals = [Vector(0,0,0) for ii in range(len(vertices))]

    for tri in triangles:
        v1,v2,v3 = tri
        v1v = vertices[v1]
        v2v = vertices[v2]
        v3v = vertices[v3]
        vnormals[v1] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v2] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v3] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)

    vnormals = [v.normalised() for v in vnormals]

    fout = open(msms_filename,'w')
    print("%d    %d" %(len(vertices),len(triangles)), file=fout)
    for ii,(v,vn) in enumerate(zip(vertices, vnormals)):
        print("%9.3f %9.3f %9.3f %9.3f %9.3f %9.3f %7d %7d  2" %(v.x, v.y, v.z, vn.x, vn.y, vn.z, 0, 0), file=fout)
    for t in triangles:
        print("%8d%8d%8d" %(t[0]+1,t[1]+1,t[2]+1), file=fout)
    fout.close()

    return
Example #5
def off2gts(off_filename, gts_filename):

    off_file = open(off_filename,'r')
    first_line = off_file.readline()
    num_verts, num_tris = [int(xx) for xx in first_line.split()]
    vertex_lines = [off_file.readline() for ii in range(num_verts)]
    tri_lines = [off_file.readline() for ii in range(num_tris)]

    # take the first three fields as x,y,z (nOFF vertex lines may also carry a normal)
    vertices = [Vector(*[float(xx) for xx in line.split()[:3]])
                for line in vertex_lines]
    triangles = [[int(xx) for xx in line.split()[1:4]]
                 for line in tri_lines]
    vnormals = [Vector(0,0,0) for ii in range(num_verts)]
    for tri in triangles:
        v1,v2,v3 = tri
        v1v = vertices[v1]
        v2v = vertices[v2]
        v3v = vertices[v3]
        vnormals[v1] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v2] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
        vnormals[v3] +=  tnormal(v1v, v2v, v3v) * tarea(v1v,v2v,v3v)
    vnormals = [v.normalised() for v in vnormals]

    write_gts(gts_filename, vertices, triangles)

    return
Example #6
def get_clean_msms_vertices(msms_vertex_filename):

    vfloat = [line.split() for line in open(msms_vertex_filename,'r') if line[0] != '#']

    # delete the first lines of each list, which are column totals
    vfloat.remove(vfloat[0])

    vertices = []
    normals = []
    for vertex_descriptor in vfloat:
        x,y,z,xn,yn,zn = [float(v) for v in vertex_descriptor[:6]]
        vertices.append( Vector(x,y,z) )
        normals.append(  Vector(xn,yn,zn) )

    # get centre of set of points
    centre = Vector(0,0,0)
    for v in vertices:
        centre += v
    centre = centre / len(vertices)

    # get maximum xyz extent of set of points
    maxdim = max([ (v - centre).length() for v in vertices ])

    # insert vertices into an Octree
    max_neighbours = 27
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)


    remap = {}
    new_vertex_map = []  # Octree insertion order -> original vertex index
    for i,(v,n) in enumerate(zip(vertices,normals)):

        # check for duplicate already in tree
        pre_existing = adaptive_tree.check_for_duplicate(v)
        if (pre_existing == -1):
            new_vertex_map.append(i)
            adaptive_tree.insert(v)
        else:
            pre_existing = new_vertex_map[pre_existing]
            print("Clash between %d %s %s and %d %s %s" %(i, v, n, pre_existing, vertices[pre_existing], normals[pre_existing]))
            remap[i] = pre_existing
            normals[pre_existing] += n

    vertices = [v for i,v in enumerate(vertices) if i not in remap.keys()]
    normals = [v.normalised() for i,v in enumerate(normals) if i not in remap.keys()]

    print("done cleaning duplicate vertices")

    return vertices, normals
Example #7
    def __init__(self, a: float, size: float, centre: Vector):
        """ Constructor with packed sphere radius a, arena radius size """
        # Set the arena size
        if size <= a:
            size = a
        self.radius = size
        self.centre = centre
        # Packed sphere radius, half row height and half layer height
        (self._rx, self._ry, self._rz) = (a, sqrt(3) * a / 2,
                                          sqrt(2 / 3.0) * a)
        log.debug(f"Packed sphere radius {self._rx}, centre {self.centre}")
        # Arena dimensions in packed spheres, rows and layers
        self._psx = int((self._rx + size - 1e-3) / (2 * self._rx))
        self._psy = int((self._ry + size - 1e-3) / (2 * self._ry))
        self._psz = int((self._rz + size - 1e-3) / (2 * self._rz))
        # Build the arena: x,y,z at least 1, and 1 means only 1 sphere/row/layer
        self._psa = dict()
        for layer in range(-self._psz + 1, self._psz):
            for row in range(-self._psy + 1, self._psy):
                start = centre + Vector(
                    ((layer + row) % 2) * a,
                    sqrt(3) *
                    (row +
                     (layer % 2) / 3.0) * a, 2 * sqrt(2 / 3.0) * layer * a)
                for sphere in range(self._psx):  # Reflection captured below
                    # Intention is that arena is spherical (roughly)
                    pv = start + Vector(2 * a, 0, 0) * sphere
                    if (pv - self.centre).length() > self.radius:
                        continue
                    # As plus is in sphere, so is minus...
                    self._psa[(sphere, row, layer)] = pv
                    log.debug(f"init ({sphere},{row},{layer}) = %s" % \
                       (_asString(self._psa[(sphere,row,layer)])))
                    self._psa[(-sphere,row,layer)] = \
                     start-Vector(2*a,0,0)*sphere
                    log.debug(f"init (-{sphere},{row},{layer}) = %s" % \
                       (_asString(self._psa[(-sphere,row,layer)])))

        # Initialise sphere occupancy
        self._pso = dict()
        self.clear()

        # Calculate the volume of the arena
        self._psvoleq = 4 * (a**3) * sqrt(2)  # Volume equivalent of packed sphere
        self.volume = len(self._psa) * self._psvoleq
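The row and layer spacings above are the usual hexagonal-close-packing values, and 4*sqrt(2)*a**3 is the arena volume "owned" by each packed sphere. A standalone sanity check (pure math, independent of the classes here) that this figure reproduces the HCP packing fraction pi/(3*sqrt(2)) ~ 0.74:

from math import pi, sqrt

a = 1.0
sphere_volume = (4.0 / 3.0) * pi * a**3
cell_volume = 4 * sqrt(2) * a**3            # volume equivalent of a packed sphere
packing_fraction = sphere_volume / cell_volume
assert abs(packing_fraction - pi / (3 * sqrt(2))) < 1e-12   # ~0.7405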
Example #8
def normalised(vector):
    """Return normalised (length=1.0) Vector version of the input.

    This function is here to remove some ambiguity in the term 'normal', where
    I sometimes mean perpendicular to a plane, and sometimes I mean
    normalised to length=1.0 (i.e. a unit vector)."""

    return Vector(vector).normal()
Example #9
 def _makeVectorRange(m: re.Match, sx: int) -> List[Vector]:
     vr = [
         Scenario._makeRange(m, n, n + 3, n + 6) for n in range(sx, sx + 3)
     ]
     vl = max([len(r) for r in vr])  # Length of longest coordinate list
     # Extend each coordinate list to the length of the longest one
     vr = [vr[n] + [vr[n][-1]] * (vl - len(vr[n])) for n in range(3)]
     return [Vector(vr[0][n], vr[1][n], vr[2][n]) for n in range(vl)]
Example #10
 def getLocation(self, ref: PSARef) -> Vector:
     if ref in self._psa:
         return self._psa[ref]
     a = self._rx
     (sphere, row, layer) = ref
     return self.centre + Vector(
         (2 * sphere + (layer + row) % 2) * a,
         sqrt(3) * (row +
                    (layer % 2) / 3.0) * a, 2 * sqrt(2 / 3.0) * layer * a)
Example #11
def calculate_mass(pdb: str) -> Tuple[float, Vector]:
    mass = 0.0
    com = Vector(0, 0, 0)
    atom = "none"
    try:
        with open(pdb) as f:
            for line in f:
                if line[0:6] != "ATOM  ":
                    continue
                pos = Vector(float(line[31:39]), float(line[39:47]),
                             float(line[47:55]))
                atom = line[77] if line[76] == " " else line[76:78]
                matom = MolMass[atom]
                mass += matom
                com -= (com - pos) * matom / mass
    except Exception as e:
        log.warning(f"Zero mass assumed for {pdb} atom {atom} because {e}")
    return (mass, com)
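The update "com -= (com - pos) * matom / mass" is a running weighted mean: after adding an atom of mass m at position p, the centre of mass moves by (p - com) * m / M, where M already includes m. A small check of that identity against the direct weighted average, using plain tuples so it runs without the Vector class:

atoms = [(1.0, (0.0, 0.0, 0.0)), (16.0, (1.0, 0.0, 0.0)), (12.0, (0.0, 2.0, 0.0))]

mass, com = 0.0, (0.0, 0.0, 0.0)
for m, p in atoms:
    mass += m
    com = tuple(c + (x - c) * m / mass for c, x in zip(com, p))

total = sum(m for m, _ in atoms)
direct = tuple(sum(m * p[i] for m, p in atoms) / total for i in range(3))
assert all(abs(ci - di) < 1e-12 for ci, di in zip(com, direct))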
Example #12
def getResidueCharges(filename):
    """Returns a list of residues and total charge located at Calpha from PQR file."""

    pqr_file = open(filename, 'r')
    pqr_data = []
    pqr_atoms, err_lines = readPQR(pqr_file)

    atom_set = []
    residue = []
    last_seq = -1
    for pqr in pqr_atoms:
        if (pqr.resSeq == 0):
            raise Exception  # can't identify residues by sequence number
        if (pqr.resSeq != last_seq):
            if (last_seq != -1):
                atom_set.append(residue)
            residue = [pqr]
            last_seq = pqr.resSeq
        else:
            residue.append(pqr)
    if (last_seq != -1 and len(residue) > 0):
        atom_set.append(residue)

    for residue in atom_set:

        backbone = [xx for xx in residue if xx.name in ["O", "N", "C"]]
        assert (len(backbone) == 3)
        sidechain = [
            xx for xx in residue if xx.name not in ["CA", "O", "N", "C"]
        ]
        alpha = [pqr for pqr in residue if pqr.name == "CA"]
        assert (len(alpha) == 1)

        for pqr in backbone:
            pqr_data.append(Charge(Vector(pqr.x, pqr.y, pqr.z), pqr.q, pqr.r))

        calpha_xyz = Vector(alpha[0].x, alpha[0].y, alpha[0].z)
        total_sidechain_charge = sum([pqr.q for pqr in sidechain])
        pqr_data.append(
            Charge(calpha_xyz, total_sidechain_charge + alpha[0].q,
                   alpha[0].r))

    return pqr_data
Example #13
def reflect(gts_filename, output_filename):

    vertices, triangles = get_vertices_triangles_from_gts(gts_filename)
    vertices = [Vector(-v.x, v.y, v.z) for v in vertices]

    tnormals = [tnormal(vertices[t[0]],vertices[t[1]],vertices[t[2]])
                for t in triangles]

    write_gts(output_filename, vertices, triangles)
    return
Example #14
def getCharges(filename):
    """Returns a list of charges and corresponding lines from PQR file."""

    pqr_file = open(filename, 'r')
    pqr_data = []
    pqr_atoms, err_lines = readPQR(pqr_file)

    for pqr in pqr_atoms:
        xyz = Vector(pqr.x, pqr.y, pqr.z)
        pqr_data.append(Charge(xyz, pqr.q, pqr.r))
    return pqr_data
Example #15
def msms2xyzn(msms_vertex_filename, xyzn_filename):
    """Convert msms files to xyz-with-normal format"""

    vfloat = [line.split() for line in open(msms_vertex_filename,'r') if line[0] != '#']

    # delete the first lines of each list, which are column totals
    vfloat.remove(vfloat[0])

    vertices = []
    normals = []
    for vertex_descriptor in vfloat:
        x,y,z,xn,yn,zn = [float(v) for v in vertex_descriptor[:6]]
        vertices.append( Vector(x,y,z) )
        normals.append(  Vector(xn,yn,zn) )


    fout = open(xyzn_filename,'w')
    for v,n in zip(vertices, normals):
        print(v.x, v.y, v.z, n.x, n.y, n.z, file=fout)
    fout.close()

    return
Example #16
def test_rand_rot():

    xx = Vector(1.0, 0.0, 0.0)
    from geometry import apply_quaternion_to_vector

    f = open("random_rotation_test.kin", "w")
    print("@kinemage", file=f)
    print("@dotlist", file=f)

    for ii in range(10000):
        rand_pt_on_surface_of_sphere = apply_quaternion_to_vector(
            _rand_rot(), xx)
        print("%f %f %f" %
              (rand_pt_on_surface_of_sphere.x, rand_pt_on_surface_of_sphere.y,
               rand_pt_on_surface_of_sphere.z),
              file=f)

    f.close()
Example #17
def get_vertices_triangles_from_gts(filename):

    # first get the number of vertices, edges, faces
    (nv, ne, nf), f = get_gts_info(filename, return_handle=True)

    vertices, triangles = [], []

    try:

        while (len(vertices) < nv):

            # convert to Vertex object
            vx, vy, vz = [float(xx) for xx in get_next_line_not_comment(f).split()]
            vertices.append(Vector(vx, vy, vz))

        # edges are defined as an ordered pair of vertices
        edges = []
        while (len(edges) < ne):
            edge_verts = get_next_line_not_comment(f).split()
            edges.append( set([int(edge_verts[0])-1, int(edge_verts[1])-1]) )

        # triangle defs come after the edge defs
        while (len(triangles) < nf):
            e1, e2, e3 = [edges[int(xx)-1] for xx in get_next_line_not_comment(f).split()]

            # get the 3 vertices, in order
            v1_idx = e1.intersection(e2).pop()
            v2_idx = e2.intersection(e3).pop()
            v3_idx = e3.intersection(e1).pop()

            new_t = (v1_idx,v2_idx,v3_idx)
            triangles.append(new_t)

    except IOError:
        print("Bad GTS file.")
        vertices, triangles = [], []

    finally:
        f.close()

    return vertices, triangles
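As the reader above implies, a GTS file is a header line "n_vertices n_edges n_faces", followed by that many "x y z" vertex lines, then edge lines giving two 1-based vertex indices, then face lines giving three 1-based edge indices. A tiny illustrative writer for a single triangle in that layout (only meant to exercise this parser; a real closed GTS surface needs every edge shared by two faces):

def write_minimal_gts(filename):
    # one triangle: 3 vertices, 3 edges, 1 face
    with open(filename, "w") as f:
        print("3 3 1", file=f)
        print("0.0 0.0 0.0", file=f)   # vertex 1
        print("1.0 0.0 0.0", file=f)   # vertex 2
        print("0.0 1.0 0.0", file=f)   # vertex 3
        print("1 2", file=f)           # edge 1 = (v1, v2)
        print("2 3", file=f)           # edge 2 = (v2, v3)
        print("3 1", file=f)           # edge 3 = (v3, v1)
        print("1 2 3", file=f)         # face 1 = (e1, e2, e3)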
Example #18
def get_vertices_from_gts(filename):
    """returns a list of vertices and the number of faces from a gts file."""

    # first get the number of vertices, edges, faces
    (nv, ne, nf), f = get_gts_info(filename, return_handle=True)

    vertices = []
    try:

        while (len(vertices) < nv):

            # convert to Vertex object
            vx, vy, vz = [float(xx) for xx in get_next_line_not_comment(f).split()]
            vertices.append(Vector(vx, vy, vz))

    except IOError:
        print("Bad GTS file.")
        vertices = []

    finally:
        f.close()

    return vertices
Example #19
from pybeep import BEEP, Mesh, Vector, Quaternion
from geometry import _rand_rot
from constants import calculate_kappa
import sys

#running_output = open("ache-fas-pair.txt",'w')
#running_output.close()

Dsolvent = 80.0
Dprotein = 2.0
kappa = float(sys.argv[1])
GMRES_tolerance = 1e-6
GMRES_max_iterations = 100

no_rotation = Quaternion(1, 0, 0, 0)
centre_crystal_ache = Vector(35.3081, 17.9041, 169.832)
centre_crystal_fas = Vector(6.91911, 25.184, 168.836)
axis = (centre_crystal_fas - centre_crystal_ache)
crystal_separation = axis.length()
print(crystal_separation)
axis.normalise()
minimum_separation = 10.165
ache_location = Vector(0, 0, 0)

ache = "ache-fas-png1/1MAH-ache.7.mtz"
fas = "ache-fas-png1/1MAH-fas.10.mtz"

qual_pts = 0
quad_pts = 0
nbsize = 2200
for dummy in [0]:
Example #20
def gts_overmesh(filename_in, filename_out):
    """Overmesh the mesh by subividing all triangles into 6 new triangles."""

    # first get the number of vertices, edges, faces
    (nv, ne, nf), f = get_gts_info(filename_in, return_handle=True)

    vertices, triangles = [], []

    mid_edge_vertices = {}
    tri_norms = []

    try:

        while (len(vertices) < nv):

            # convert to Vertex object
            vx, vy, vz = [float(xx) for xx in get_next_line_not_comment(f).split()]
            vertices.append(Vector(vx, vy, vz))

        edges = []
        # edges are defined as an ordered pair of vertices
        while (len(edges) < ne):
            edge_verts = get_next_line_not_comment(f).split()
            edges.append( set([int(edge_verts[0])-1, int(edge_verts[1])-1]) )

        # triangle defs come after the edge defs
        while (len(triangles) < nf*6):
            next_line = get_next_line_not_comment(f).split()
            e1_idx, e2_idx, e3_idx = [int(xx)-1 for xx in next_line]
            e1, e2, e3 = [edges[int(xx)-1] for xx in next_line]

            # get the 3 vertices, in order
            v1_idx = e1.intersection(e2).pop()
            v2_idx = e2.intersection(e3).pop()
            v3_idx = e3.intersection(e1).pop()

            #new_t = (v1_idx,v2_idx,v3_idx)
            t_centre = (vertices[v1_idx] + vertices[v2_idx] + vertices[v3_idx]) / 3.0
            t_centre_idx = len(vertices)
            vertices.append(t_centre)

            # add new vertices to vertex list if necessary
            def get_mid_edge_vertex(edge_idx):
                try:
                    e_mid = mid_edge_vertices[edge_idx]
                except KeyError:
                    e_mid = len(vertices)
                    v1_idx, v2_idx = edges[edge_idx]
                    vertices.append( (vertices[v1_idx] + vertices[v2_idx]) / 2.0)
                    mid_edge_vertices[edge_idx] = e_mid
                return e_mid

            def add_triangle(v1,v2,v3):
                triangles.append( (v1,v2,v3) )
                tri_norms.append( tnormal(vertices[v1],vertices[v2],vertices[v3]) )

            add_triangle(v1_idx, t_centre_idx, get_mid_edge_vertex(e1_idx))
            add_triangle(get_mid_edge_vertex(e1_idx), t_centre_idx, v3_idx)
            add_triangle(v3_idx, t_centre_idx, get_mid_edge_vertex(e3_idx))
            add_triangle(get_mid_edge_vertex(e3_idx), t_centre_idx, v2_idx)
            add_triangle(v2_idx, t_centre_idx, get_mid_edge_vertex(e2_idx))
            add_triangle(get_mid_edge_vertex(e2_idx), t_centre_idx, v1_idx)

    except IOError:
        print("Bad GTS file.")

    finally:
        f.close()

    write_gts(filename_out,vertices, triangles)

    return
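For a closed triangulated surface (every edge shared by two faces, so E = 3F/2), this 1-to-6 subdivision takes (V, E, F) to (V + E + F, 2E + 6F, 6F): one centroid per face, one shared midpoint per edge, two halves per original edge plus six spokes per face. A quick arithmetic check, with hypothetical counts, that the Euler characteristic is preserved:

nv, nf = 100, 196            # hypothetical closed-surface counts (V - E + F = 2)
ne = 3 * nf // 2             # every edge shared by exactly two triangles
new_nv = nv + ne + nf        # original vertices + edge midpoints + centroids
new_ne = 2 * ne + 6 * nf     # split original edges + six spokes per face
new_nf = 6 * nf
assert (nv - ne + nf) == (new_nv - new_ne + new_nf)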
Example #21
#for n in range(cbase, cbase+len(scenario.crwdlist)):
#	m = beep.get_library_mesh(n);
#	r = m.get_radius()
#	a = max(a, r)
a = maxcr  # packed sphere radius
if a == 0.0:
    a = scenario.parameters['ArenaGrainSize']
if a == 0.0 or a > scenario.radius:  # Having no crowders is ok, use arena size
    a = scenario.radius

# Build the arena
psa = PackedSphereArena(a, scenario.radius, scenario.centre)
log.info(f"Arena initialised with {psa.capacity()} packed spheres")

# Load BEEP with subject mesh instances as these persist
origin = Vector(0, 0, 0)
no_rotation = Quaternion(1, 0, 0, 0)
for s in range(cbase):
    log.debug(f"Insert mesh instance {s}")
    m = beep.insert_mesh_instance(s, scenario.locnlist[s][0], no_rotation,
                                  scenario.parameters['Dprotein'])
log.info("Subject instances created, "
         f"Dprotein={scenario.parameters['Dprotein']}")

# Outline of the rest of the program:
# For each subject-location
# Clear crowder instances
# Move the subjects to new locations
# For each crowder count
# Load the crowder instances
# MCMC loop: BEEP, propose crowder instance moves
Example #22
    def __init__(self, spec: TextIOWrapper):
        # Public object attributes
        self.centre = Vector(0.0, 0.0, 0.0)
        self.radius = 0.0
        self.subjlist = list()
        self.crwdlist = list()
        self.proplist = list()
        self.locnlist = list()
        self.rotnlist = list()
        self.parameters = {
            'ArenaRadius': -1,
            'ArenaGrainSize': 0.0,
            'ArenaCentre': None,
            'CrowderRotate': True,
            'RhoProtein': 1.35,
            'RhoSolvent': 1.02,
            'MCwarmup': -1,
            'MCiter': -1,
            'Dsolvent': 80.0,
            'Dprotein': 2.0,
            'Kappa': 0.102,
            'GMREStol': 1e-6,
            'GMRESmaxit': 100,
            'QualPts': 4,
            'QuadPts': 0,
            'NbSize': 2200,
            'Planar': False
        }
        self._paramTypes = {
            'ArenaRadius': float,
            'ArenaGrainSize': float,
            'ArenaCentre': Vector,
            'CrowderRotate': strtobool,
            'RhoProtein': float,
            'RhoSolvent': float,
            'MCwarmup': int,
            'MCiter': int,
            'Dsolvent': float,
            'Dprotein': float,
            'Kappa': float,
            'GMREStol': float,
            'GMRESmaxit': int,
            'QualPts': int,
            'QuadPts': int,
            'NbSize': int,
            'Planar': bool
        }

        # Read scenario specification file
        lastmatch = 0  # Type of active configuration line last matched
        n = 0
        for line in spec:
            n += 1
            if len(line.split()) == 0 or line.split()[0][0] == '#':
                continue

            ml = self._subjre.match(line)
            if ml:
                lastmatch = 1  # Subject code
                self.subjlist += [ml.group(1)]
                self.locnlist += [Scenario._makeVectorRange(ml, 2)]
                self.rotnlist += [Scenario._makeQuaternionRange(ml, 11)]
                continue

            ml = self._contre.match(line)
            if ml:
                if lastmatch != 1:
                    log.warning(f"Continuation line out of order, "
                                f"line {n} -- ignored")
                    continue
                self.locnlist[-1] += Scenario._makeVectorRange(ml, 1)
                self.rotnlist[-1] += Scenario._makeQuaternionRange(ml, 10)
                continue

            mc = self._crwdre.match(line)
            if mc:
                lastmatch = 2  # Crowder code
                self.crwdlist += [mc.group(1)]
                self.proplist += [Scenario._makeRange(mc, 2, 3, 4)]
                continue

            mp = self._parmre.match(line)
            if mp:
                lastmatch = 3  # Parameter code
                if mp.group(1) in self.parameters:
                    self.parameters[mp.group(1)] = \
                     self._paramTypes[mp.group(1)](mp.group(2))
                else:
                    log.warning(f"Unrecognised parameter {mp.group(1)}, "
                                f"line {n} -- ignored")
                log.debug(f"parameter {mp.group(1)}="
                          f"{self.parameters[mp.group(1)]}, line {n}")
            else:
                log.warning(f"Invalid line [{n}] in scenario specification: "
                            f"{line} -- ignored")

        # Extend the shorter lists so all are the same length
        llen = Scenario._makeSameLength(self.locnlist + self.rotnlist)
        plen = Scenario._makeSameLength(self.proplist)

        # Scenario specification checks
        # There has to be at least one subject protein
        if len(self.subjlist) == 0:
            log.error("No subject proteins in specification")
            exit(1)

        # Ensure crowder proportions are in [0,1]
        # Divide by the largest sum of proportions if outside this
        maxsum = max([0] + \
            [sum([x[n] for x in self.proplist]) for n in range(plen)])
        if maxsum > 1:
            log.warning(f"Maximum proportions {maxsum} greater than one "
                        "- corrected")
            self.proplist = [[y / maxsum for y in x] for x in self.proplist]

        # Calculate the arena radius and the centre if not provided
        n = sum([len(locn) for locn in self.locnlist])
        maxsep = 0.0
        for i in range(len(self.locnlist)):
            for u in self.locnlist[i]:
                self.centre += u
                for j in range(i + 1, len(self.locnlist)):
                    for v in self.locnlist[j]:
                        maxsep = max(maxsep, (u - v).length())
        if maxsep == 0.0:
            maxsep = 2 / 3  # to get a default of 1.0 below
        self.centre /= n
        if self.parameters['ArenaCentre'] is not None:
            self.centre = self.parameters['ArenaCentre']
        if self.parameters['ArenaRadius'] <= 0:
            self.radius = 3 * maxsep / 2  # Default value
        else:
            self.radius = self.parameters['ArenaRadius']
        log.debug(f"Arena radius {self.radius}, centre {self.centre}")
Example #23
def pack(crowdant_radius, volume_fraction, distance_limit, forbidden, minimum_separation):
    """pack a load of spherical crowders around a set of existing spherical objects, to a 
       specified distance_limit with specified total volume_fraction occluded (approx.)"""
    
    # centroid of the forbidden set
    centroid = Vector(0,0,0)
    for xx,rad in forbidden:
        centroid += xx
    centroid /= len(forbidden)
    
    # semi-axis limits -- centroid to furthest point in forbidden set
    dists = [(xx-centroid).length()+radx for (xx,radx) in forbidden]
    edge_length = max(dists)*2 + distance_limit*2 + crowdant_radius*4
    volume = edge_length ** 3
    
    # calculate number required to reach specified volume fraction
    individual_vol = (4. / 3.) * pi * crowdant_radius ** 3
    target_vol = volume * volume_fraction
    n = int(ceil(target_vol / individual_vol))
    
    # now loop 
    centres = forbidden[:]
    num_added = 0
    iterations_since_successful_addition = 0
    while (num_added < n and iterations_since_successful_addition < 10000):
        
        new_pt = _rand_xyz(edge_length) + centroid
        ok = True
        for xx,r in centres:
            if ((new_pt - xx).length() < (minimum_separation+crowdant_radius+r)):
                ok = False
                break
        if ok:
            iterations_since_successful_addition = 0
            num_added += 1
            centres.append((new_pt,crowdant_radius))
        else:
            iterations_since_successful_addition += 1

    ##rot = Quaternion(0,0,0,0)
    #for ctr,(pt,d) in enumerate(centres):
        ##rot = _rand_rot()
        #rot = Quaternion(0,0,0,0)
        #print "<instance instance_id=%d mesh_id=0 conformation=0 x=%f y=%f z=%f a=%f b=%f c=%f d=%f/>" %(ctr, pt.x, pt.y, pt.z, rot.x, rot.y, rot.z, rot.w)
                ##print "%f %f %f" %(pt.x, pt.y, pt.z)
    #print

    final_vol_fraction = num_added * individual_vol / volume
    print("Added %d crowders, reached volume fraction of %f" %(num_added, final_vol_fraction))
    
    centres = centres[len(forbidden):]
    
    print("Culling anything further than %f from the forbidden range" %(distance_limit))
    
    # utility function - returns True if the sphere (point, radius) lies within
    # distance_limit of the nearest object in forbidden (NB: could use a lambda
    # function here)
    def cull_function(x,r):
        dists = [(x - fx).length()-fr-r for fx,fr in forbidden]
        return (min(dists) < distance_limit)

    # cull using list comprehension
    centres = [x for x,r in centres if cull_function(x,r)]
    
    print("%d crowders remain" %(len(centres)))
    
    return centres
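A minimal, hypothetical call: forbidden holds (Vector, radius) pairs for the fixed objects, and the numbers below are placeholders rather than values from the original code:

forbidden_spheres = [(Vector(0, 0, 0), 20.0), (Vector(40, 0, 0), 15.0)]
crowder_centres = pack(crowdant_radius=10.0, volume_fraction=0.2,
                       distance_limit=50.0, forbidden=forbidden_spheres,
                       minimum_separation=2.0)
print(len(crowder_centres), "crowder positions generated")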
Example #24
             log.error(f"Integer not found for face count, line {n}")
             exit(1)
         if fn % 2 != 0:
             log.error(f"Weird face count {fn}, line {n}")
             exit(1)
     elif line[0:10] == "end_header":
         en = int(3 * fn / 2)
         print(f"{vn} {en} {fn} GtsSurface GtsFace GtsEdge GtsVertex",
               file=out)
         state = 1
 # Vertices
 elif state == 1:
     ll = line.split()
     print(f"{ll[0]} {ll[1]} {ll[2]}", file=out)
     try:
         vertex.append(Vector(*[float(ll[i]) for i in range(3)]))
     except:
         log.error(f"Bad vertex {ll[0:3]}, line {n}")
         exit(1)
     vn -= 1
     if vn == 0:
         state = 2
 # Faces, er, edges
 elif state == 2:
     ll = line.split()
     if ll[0] != "3":
         log.error(f"Weird face vertex count {ll[0]}, line {n}")
         exit(1)
     # define and store unique edges and define faces in terms of these
     try:
         vix = [int(ll[i]) for i in range(1, 4)]
Example #25
    loglevel = args['loglevel']
    logstream = args['log']
    ifs = args['in']
    ofs = args['out']
    if args['fmt'] == 'pdb':
        fmt = 0
        ofmt = str().join(["{:8.3f}" for i in range(3)])  # [-999.999,9999.999]
    elif args['fmt'] == 'xyz':
        fmt = 1
        ofmt = " ".join(["{:.6f}" for i in range(3)])
    else:
        print(f"This should not have happened - bad format {args['fmt']}")
        exit(1)
    beg = args['beg']
    end = args['end']
    ax = [Vector(*args['axis'][i]) for i in range(2)]
    qa = ax[0].cross(ax[1])  # a for axis
    qd = ax[0].dot(ax[1])  # d for dot
    # To get half-way, add (1,0,0,0) but ql not normalised yet, so adjust unit
    unit = sqrt(qd * qd + qa.length2())
    ql = Quaternion(unit + qd, qa.x, qa.y, qa.z)
    ql.normalise()

    # Set up logging - if to stdout, assume caller handles time and module name
    if logstream == stdout:
        lfmt = "%(levelname)s:%(message)s"
    else:
        lfmt = "%(asctime)s %(module)s %(levelname)s:%(message)s"
    log.basicConfig(stream=logstream,
                    format=lfmt,
                    level=getattr(log, loglevel.upper()))
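The qa/qd/ql construction above is the standard shortest-arc quaternion between two directions: q is proportional to (|u||v| + u.v, u x v), then normalised. The added scalar term is what the "half-way" comment refers to; without it, the quaternion (u.v, u x v) would rotate by twice the angle between the axes. A self-contained check with plain tuples, independent of the pybeep Quaternion class:

from math import sqrt

def cross(a, b):
    return (a[1]*b[2] - a[2]*b[1], a[2]*b[0] - a[0]*b[2], a[0]*b[1] - a[1]*b[0])

def dot(a, b):
    return sum(x * y for x, y in zip(a, b))

def qmul(p, q):
    # Hamilton product of two quaternions (w, x, y, z)
    pw, px, py, pz = p
    qw, qx, qy, qz = q
    return (pw*qw - px*qx - py*qy - pz*qz,
            pw*qx + px*qw + py*qz - pz*qy,
            pw*qy - px*qz + py*qw + pz*qx,
            pw*qz + px*qy - py*qx + pz*qw)

def rotate(q, vec):
    # rotate a 3-vector by unit quaternion q: q * (0, vec) * conj(q)
    qc = (q[0], -q[1], -q[2], -q[3])
    return qmul(qmul(q, (0.0,) + vec), qc)[1:]

u, v = (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)     # two unit axes
qa, qd = cross(u, v), dot(u, v)
w = sqrt(dot(u, u) * dot(v, v)) + qd        # the 'unit + qd' term from the snippet above
q = (w,) + qa
norm = sqrt(sum(x * x for x in q))
q = tuple(x / norm for x in q)
assert all(abs(ri - vi) < 1e-12 for ri, vi in zip(rotate(q, u), v))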
Example #26
def msms_remesh(msms_vertex_filename, gts_filename):
    """Triangulate an MSMS vertex list to GTS format."""

    vertices, normals = get_clean_msms_vertices(msms_vertex_filename)

    # get centre of set of points
    centre = Vector(0,0,0)
    for v in vertices:
        centre += v
    centre = centre / len(vertices)

    # get maximum xyz extent of set of points
    maxdim = max([ (v - centre).length() for v in vertices ])

    patch_vertex_list = [ [] for v in vertices ]

    # insert vertices into an Octree
    max_neighbours = 100
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)

    for i,(v,n) in enumerate(zip(vertices,normals)):
        adaptive_tree.insert(Vector(v.x, v.y, v.z))


    def get_neighbourlist(idx):
        near_pts = [0]*(max_neighbours)
        try:
            num = adaptive_tree.get(idx,near_pts)
        except IndexError:
            print("IndexError: %d (%d vertices)" %(idx, len(vertices)))
            raise IndexError
        #print near_pts
        near_pts = [pt for pt in near_pts[:num] if pt != idx]
        return near_pts

    for working_idx,(v,vn) in enumerate(zip(vertices, normals)):

        near_pts = get_neighbourlist(working_idx)

        # select set of working points which define this patch
        proximity_scores = []
        for pt_idx in near_pts:
            pt = vertices[pt_idx]
            r = (pt - v)
            score = (r.length()**2) / (1.0 - r.normalised().dot(vn))
            proximity_scores.append( (score, pt_idx) )
        proximity_scores.sort()
        closest = proximity_scores[0][1]

        points = [closest]
        near_pts.remove(closest)

        # grow a fan of patch points around the working vertex; note that this
        # example stops before assembling triangles from the fan
        while near_pts:

            if len(points) == 3:
                # allow the fan to close back onto its starting point
                near_pts.append(points[0])

            # now find the next point which best forms a triangle
            proximity_scores = []
            for pt_idx in near_pts:
                pt = vertices[pt_idx]
                r = (pt - v)
                score = (r.length()**2) / (1.0 - r.normalised().dot(vn))
                proximity_scores.append( (score, pt_idx) )
            proximity_scores.sort()
            closest = proximity_scores[0][1]
            points.append(closest)
            near_pts.remove(closest)

    return
Example #27
def upsample_fh_vals(low_res_mesh, high_res_mesh, low_res_results, high_res_results):
    """Forces all vertices in the mesh_to_realign to lie on the surface defined by reference_mesh."""

    low_res_results = [(float(x.split()[0]), float(x.split()[1])) for x in open(low_res_results,'r').readlines()]
    high_res_out = open(high_res_results, 'w')

    low_res_vertices, low_res_triangles = get_vertices_triangles_from_gts(low_res_mesh)
    high_res_vertices, high_res_triangles = get_vertices_triangles_from_gts(high_res_mesh)

    # get centre of set of points
    centre = Vector(0,0,0)
    for v in low_res_vertices:
        centre += v
    centre = centre / len(low_res_vertices)

    # get maximum xyz extent of set of points
    maxdim = max(max([ (v - centre).length() for v in high_res_vertices ]),
                 max([ (v - centre).length() for v in low_res_vertices ])
                 )

    # insert reference vertices into an Octree
    max_neighbours = 27
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)

    for i,v in enumerate(low_res_vertices):
        v.uid = i # Vector is python object so can add an attribute
        adaptive_tree.insert(v)

    new_vertices = []
    for i,v in enumerate(high_res_vertices):
        
        closest_vertex = adaptive_tree.nearest(v)
        closest_original_vertex = -1
        for ii,vv in enumerate(low_res_vertices):
            if ((closest_vertex - vv).length() < 1e-6): 
                closest_original_vertex = ii
                break
        if (closest_original_vertex == -1):
            print("Failed to find closest vertex.")
            raise Exception

        
        
        #print "dist to closest vertex: ", (low_res_vertices[closest_original_vertex] - v).length()
        possible_triangles = [t for t in low_res_triangles if closest_original_vertex in t]

        if len(possible_triangles) == 0:
            print(v, centre, maxdim*2)
            print(v, closest_original_vertex, low_res_vertices[closest_original_vertex], (low_res_vertices[closest_original_vertex] - v).length())
            raise Exception

        vert_checklist = []
        for t in possible_triangles:
            for v_idx in t:
                if v_idx == closest_original_vertex: continue
                if v_idx not in vert_checklist:
                    vert_checklist.append(v_idx)

        def get_closest_vertex_from_list(v, vlist):
            vert_dists = [ ( (v - low_res_vertices[vert_chk]).length(), vert_chk)
                           for vert_chk in vlist]
            vert_dists.sort()
            #print "sorted list: ",  vert_dists
            return vert_dists[0][1]

        second_closest_vertex = get_closest_vertex_from_list(v, vert_checklist)

        possible_triangles = [t for t in possible_triangles
                              if second_closest_vertex in t]
        vert_checklist = []
        for t in possible_triangles:
            for v_idx in t:
                if v_idx == closest_original_vertex: continue
                if v_idx == second_closest_vertex: continue
                if v_idx not in vert_checklist:
                    vert_checklist.append(v_idx)

        third_closest_vertex = get_closest_vertex_from_list(v, vert_checklist)

        closest_triangles = [t for t in possible_triangles
                             if third_closest_vertex in t]
        ct = closest_triangles[0]
        #print ct


        # at this point we have a vertex v which needs projecting onto a point
        # within the triangle defined by "ct"
        v1 = low_res_vertices[ct[0]]
        v2 = low_res_vertices[ct[1]]
        v3 = low_res_vertices[ct[2]]
        v1f = low_res_results[ct[0]][0]
        v1h = low_res_results[ct[0]][1]
        v2f = low_res_results[ct[1]][0]
        v2h = low_res_results[ct[1]][1]
        v3f = low_res_results[ct[2]][0]
        v3h = low_res_results[ct[2]][1]

        new_x_axis = (v2-v1).normalised()
        new_y_axis = (v3-v1).normalised()

        pt_from_v1 = v - v1
        new_f = v1f + pt_from_v1.dot(new_x_axis)*(v2f-v1f) + pt_from_v1.dot(new_y_axis)*(v3f-v1f)
        new_h = v1h + pt_from_v1.dot(new_x_axis)*(v2h-v1h) + pt_from_v1.dot(new_y_axis)*(v3h-v1h)

        print(new_f, new_h, file=high_res_out)

    high_res_out.close()


    return
Example #28
def msms2gts(msms_vertex_filename, msms_face_filename, gts_filename):
    """Convert msms files to gts format (Gnu Triangulated Surface)"""

    vfloat = [line.split() for line in open(msms_vertex_filename,'r') if line[0] != '#']

    # delete the first lines of each list, which are column totals
    vfloat.remove(vfloat[0])

    vertices = []
    normals = []
    for vertex_descriptor in vfloat:
        x,y,z,xn,yn,zn = [float(v) for v in vertex_descriptor[:6]]
        vertices.append( Vector(x,y,z) )
        normals.append(  Vector(xn,yn,zn) )

    # get centre of set of points
    centre = Vector(0,0,0)
    for v in vertices:
        centre += v
    centre /= len(vertices)

    # get maximum xyz extent of set of points
    maxdim = max([ (v - centre).length() for v in vertices ])

    # insert vertices into an Octree
    max_neighbours = 27
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)

    duplicates = []
    ctr = 0
    mappingA = {}
    mappingB = {}
    for i,(v,n) in enumerate(zip(vertices,normals)):

        # check for duplicate already in tree
        pre_existing = adaptive_tree.check_for_duplicate(v)
        if (pre_existing == -1):
            mappingA[i] = ctr
            mappingB[ctr] = i
            adaptive_tree.insert(v)
            ctr += 1
        else:
            pre_existing_idx = mappingB[pre_existing]
            print("Clash between %d %s %s and %d %s %s" %(i, v, n, pre_existing_idx, vertices[pre_existing_idx], normals[pre_existing_idx]))
            duplicates.append(i)
            mappingA[i] = pre_existing

    vertices = [v for i,v in enumerate(vertices) if i not in duplicates]
    normals = [n for i,n in enumerate(normals) if i not in duplicates]

    tri = [line.split() for line in open(msms_face_filename,'r') if line[0] != '#']
    tri = [[int(float(t))-1 for t in tri_def] for tri_def in tri]
    tri.remove(tri[0])
    print("Number of vertices: ", len(vertices))
    print("Number of normals: ", len(normals))
    #new_tri = tri[:]
    new_tri = []
    for i,t in enumerate(tri):
        #print t[0]
        new_v1 = mappingA[t[0]]
        new_v2 = mappingA[t[1]]
        new_v3 = mappingA[t[2]]
        if (new_v1 in (new_v2, new_v3) or
            new_v2 in (new_v1, new_v3) or
            new_v3 in (new_v1, new_v2)
            ):
            print("skipped a dud triangle (%d) " %(i), t)
            continue
        new_tri.append([new_v1, new_v2, new_v3])

    # write the gts file
    write_gts(gts_filename, vertices, new_tri)

    return
Example #29
# Module utilities
def _asString(v: Vector) -> str:
    return "(%f, %f, %f)" % (v.x, v.y, v.z)


def intRef(coord: float, radius: float):
    return int((coord + radius) // (2 * radius))


# Declare the subclass status
#PackedSphereArena.register(Arena)

# No main program, so used for testing
if __name__ == "__main__":
    log.basicConfig(level=getattr(log, "DEBUG"))
    psa = PackedSphereArena(1, 10, Vector(0, 0, 0))
    print("Capacity starting at ", psa.capacity())
    (v, ref) = psa._occupy(0, 1, 0)
    print("Occupied at ", v, ref)
    (v, ref2) = psa.move(ref, vacant=2)
    print("Moved to ", v, ref2)
    (v, ref3) = psa.move(ref2, to=ref)
    print("Moved back to ", v, ref3)
    print("isOccupied=", psa.isOccupied(v))
    print("Capacity is now ", psa.capacity())
    print(f"Ref for {v} is ", psa._getRef(v))
    try:
        psa.occupy(ref, 1, 0)
        print("No collision")
    except:
        print("Error: collision detected")
Example #30
def align_mesh_to_reference(reference_mesh, mesh_to_realign):
    """Forces all vertices in the mesh_to_realign to lie on the surface defined by reference_mesh."""

    ref_vertices, ref_triangles = get_vertices_triangles_from_gts(reference_mesh)
    vertices, triangles = get_vertices_triangles_from_gts(mesh_to_realign)

    # get centre of set of points
    centre = Vector(0,0,0)
    for v in ref_vertices:
        centre += v
    centre = centre / len(ref_vertices)

    # get maximum xyz extent of set of points
    maxdim = max(max([ (v - centre).length() for v in vertices ]),
                 max([ (v - centre).length() for v in ref_vertices ])
                 )

    # insert reference vertices into an Octree
    max_neighbours = 27
    adaptive_tree = Octree(max_neighbours, Vector(centre.x, centre.y, centre.z), maxdim*2)

    for i,v in enumerate(ref_vertices):
        adaptive_tree.insert(Vector(v.x, v.y, v.z))

    new_vertices = []
    for i,v in enumerate(vertices):
        closest_original_vertex = adaptive_tree.get_uid_closest_to(v)
        #print "dist to closest vertex: ", (ref_vertices[closest_original_vertex] - v).length()
        possible_triangles = [t for t in ref_triangles if closest_original_vertex in t]

        if len(possible_triangles) == 0:
            print(v, centre, maxdim*2)
            print(v, closest_original_vertex, ref_vertices[closest_original_vertex], (ref_vertices[closest_original_vertex] - v).length())
            raise Exception

        vert_checklist = []
        for t in possible_triangles:
            for v_idx in t:
                if v_idx == closest_original_vertex: continue
                if v_idx not in vert_checklist:
                    vert_checklist.append(v_idx)

        def get_closest_vertex_from_list(v, vlist):
            vert_dists = [ ( (v - ref_vertices[vert_chk]).length(), vert_chk)
                           for vert_chk in vlist]
            vert_dists.sort()
            #print "sorted list: ",  vert_dists
            return vert_dists[0][1]

        second_closest_vertex = get_closest_vertex_from_list(v, vert_checklist)

        possible_triangles = [t for t in possible_triangles
                              if second_closest_vertex in t]
        vert_checklist = []
        for t in possible_triangles:
            for v_idx in t:
                if v_idx == closest_original_vertex: continue
                if v_idx == second_closest_vertex: continue
                if v_idx not in vert_checklist:
                    vert_checklist.append(v_idx)

        third_closest_vertex = get_closest_vertex_from_list(v, vert_checklist)

        closest_triangles = [t for t in possible_triangles
                             if third_closest_vertex in t]
        ct = closest_triangles[0]
        #print ct


        # at this point we have a vertex v which needs projecting onto a point
        # within the triangle defined by "ct"
        v1 = ref_vertices[ct[0]]
        v2 = ref_vertices[ct[1]]
        v3 = ref_vertices[ct[2]]
        norm = tnormal(v1,v2,v3)
        new_z_axis = norm
        new_x_axis = (v2-v1).normalised()
        new_y_axis = new_z_axis.cross(new_x_axis).normalised()
        pt_from_v1 = v - v1
        projected_pt = v1 + (new_x_axis * pt_from_v1.dot(new_x_axis)) + \
                     (new_y_axis * pt_from_v1.dot(new_y_axis))

        assert( abs((projected_pt - v1).dot(norm)) < 1e-3 )
        assert( abs((projected_pt - v2).dot(norm)) < 1e-3 )
        assert( abs((projected_pt - v3).dot(norm)) < 1e-3 )

        new_vertices.append(projected_pt)
        #vertices[i] = projected_pt
#        if ((projected_pt - v).length() > 1e-3):
#            print "moved vertex by: ", (projected_pt - v).length()

    tnormals = [tnormal(new_vertices[t[0]],new_vertices[t[1]], new_vertices[t[2]])
                for t in triangles]

    write_gts(mesh_to_realign, new_vertices, triangles)

    return