def do(self):
    cache = context.application.cache
    parent = cache.node
    unit_cell = None
    if isinstance(parent, UnitCell):
        unit_cell = parent

    Atom = context.application.plugins.get_node("Atom")
    atoms = []
    coordinates = []
    for child in parent.children:
        if isinstance(child, Atom):
            atoms.append(child)
            coordinates.append(child.transformation.t)
    coordinates = numpy.array(coordinates)

    # cluster atoms of the same element that lie closer together than a
    # fraction (0.4) of their van der Waals radius
    cf = ClusterFactory()
    for i0, i1, delta, distance in PairSearchIntra(coordinates, periodic.max_radius * 0.4, unit_cell):
        atom0 = atoms[i0]
        atom1 = atoms[i1]
        if atom0.number == atom1.number:
            if distance < periodic[atom0.number].vdw_radius * 0.4:
                cf.add_related(atom0, atom1)
    clusters = cf.get_clusters()
    del cf

    # define the new singles
    singles = []
    for cluster in clusters:
        number = iter(cluster.items).next().number
        single = Atom(name="Single " + periodic[number].symbol)
        single.set_number(number)
        singles.append((single, list(cluster.items)))

    # calculate their positions
    for single, overlappers in singles:
        # in the following algorithm, we suppose that the cluster of
        # atoms is small compared to the parent's periodic sizes
        # (if the parent is a periodic system)
        first_pos = overlappers[0].transformation.t
        delta_to_mean = numpy.zeros(3, float)
        for atom in overlappers[1:]:
            delta_to_mean += parent.shortest_vector(atom.transformation.t - first_pos)
        delta_to_mean /= float(len(overlappers))
        single.set_transformation(Translation(first_pos + delta_to_mean))

    # modify the model
    for single, overlappers in singles:
        lowest_index = min([atom.get_index() for atom in overlappers])
        primitive.Add(single, parent, index=lowest_index)
        for atom in overlappers:
            while len(atom.references) > 0:
                primitive.SetTarget(atom.references[0], single)
            primitive.Delete(atom)
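# A minimal, self-contained sketch of the periodic-aware averaging used above:
# positions are averaged as minimum-image offsets relative to the first atom,
# so a cluster that straddles a periodic boundary still gets a sensible centre.
# The helper name periodic_mean and the orthorhombic cell_lengths argument are
# assumptions for illustration; the real code delegates to parent.shortest_vector,
# which also handles general (triclinic) cells.
import numpy

def periodic_mean(positions, cell_lengths):
    """Average positions under the minimum-image convention.

    positions    -- (N, 3) array of cartesian coordinates
    cell_lengths -- (3,) array with the orthorhombic cell edges
    """
    positions = numpy.asarray(positions, float)
    cell_lengths = numpy.asarray(cell_lengths, float)
    first = positions[0]
    # express every atom as the shortest periodic offset from the first one
    deltas = positions - first
    deltas -= cell_lengths * numpy.round(deltas / cell_lengths)
    # the mean offset, added to the first position, is the cluster centre
    return first + deltas.mean(axis=0)

# example: two atoms overlapping across the boundary of a 10x10x10 cell
# periodic_mean([[9.9, 5.0, 5.0], [0.1, 5.0, 5.0]], [10.0, 10.0, 10.0])
# -> [10.0, 5.0, 5.0] (equivalent to 0.0 modulo the cell length)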
def do(self):
    cache = context.application.cache
    new_parent = cache.parent
    old_parents = list(cache.nodes)
    for old_parent in old_parents:
        old_parent_index = old_parent.get_index()
        while len(old_parent.children) > 0:
            primitive.Move(old_parent.children[-1], new_parent, old_parent_index + 1)
        primitive.Delete(old_parent)
def do(self):
    Atom = context.application.plugins.get_node("Atom")
    cache = context.application.cache
    for spring in list(cache.nodes):
        atom1, atom2 = spring.get_targets()
        if isinstance(atom1, Atom) and isinstance(atom2, Atom):
            t = 0.5 * (
                atom1.get_frame_relative_to(spring.parent).t +
                atom2.get_frame_relative_to(spring.parent).t
            )
            replacement = Atom(
                name="Merge of %s and %s" % (atom1.name, atom2.name),
                number=max([atom1.number, atom2.number]),
                transformation=Translation(t)
            )
            primitive.Add(replacement, spring.parent, spring.get_index())
            atoms = set([atom1, atom2])
            for atom in atoms:
                while len(atom.references) > 0:
                    primitive.SetTarget(atom.references[0], replacement)
            primitive.Delete(spring)
            for atom in atoms:
                primitive.Delete(atom)
def erase_at(self, p, parent):
    for node in context.application.main.drawing_area.iter_hits((p[0] - 2, p[1] - 2, p[0] + 2, p[1] + 2)):
        try:
            match = (
                node is not None and node != parent and
                node.is_indirect_child_of(parent) and
                node.model == context.application.model and
                (not self.cb_erase_filter.get_active() or self.erase_filter(node))
            )
        except Exception:
            raise UserError("An exception occurred while evaluating the erase filter expression.")
        if match:
            primitive.Delete(node)
def delete(nodes):
    for dupe in nodes:
        if dupe.model is not None:
            # this check must be made because a dupe may already have been
            # deleted as a consequence of the deletion of one of the former dupes.
            primitive.Delete(dupe)
def clean_springs(self, springs):
    for spring in springs:
        primitive.Delete(spring)
def replace(self, gl_object):
    if not gl_object.get_fixed():
        new = self.get_new(gl_object.transformation.t)

        # select a target object, i.e. the one that will be connected with
        # the bonds/vectors/... of the replaced object
        if self.current_object == "Fragment":
            # fragments are inserted as frames;
            # take the atom with index 1 as target
            target_object = new.children[1]
        else:
            target_object = new

        # check if all connections to the replaced object are applicable
        # to the new object. if not, then do not perform the replacement
        # and return early.
        for reference in gl_object.references[::-1]:
            if not reference.check_target(target_object):
                return

        # add the new object
        parent = gl_object.parent
        primitive.Add(new, parent)

        if self.current_object == "Fragment":
            # Fix the rotation and translation of the molecular fragment.
            # Rotation
            Bond = context.application.plugins.get_node("Bond")
            if len(gl_object.references) == 1 and isinstance(gl_object.references[0].parent, Bond):
                bond1 = gl_object.references[0].parent
                direction1 = bond1.shortest_vector_relative_to(parent)
                if bond1.children[0].target != gl_object:
                    direction1 *= -1
                bond2 = new.children[0].references[0].parent
                direction2 = bond2.shortest_vector_relative_to(parent)
                if bond2.children[0].target != target_object:
                    direction2 *= -1
                axis = numpy.cross(direction2, direction1)
                if numpy.linalg.norm(axis) < 1e-8:
                    axis = random_orthonormal(direction1)
                angle = compute_angle(direction1, direction2)
                rotation = Rotation.from_properties(angle, axis, False)
                primitive.Transform(new, rotation)
            else:
                bond1 = None

            # Translation
            pos_old = new.children[1].get_frame_relative_to(parent).t
            pos_new = gl_object.transformation.t
            translation = Translation(pos_new - pos_old)
            primitive.Transform(new, translation)

            if bond1 is not None:
                # bond length
                old_length = numpy.linalg.norm(direction1)
                new_length = bonds.get_length(new.children[1].number, bond1.get_neighbor(gl_object).number)
                translation = Translation(-direction1 / old_length * (new_length - old_length))
                primitive.Transform(new, translation)

        # let the references to the replaced object point to the new object
        for reference in gl_object.references[::-1]:
            try:
                primitive.SetTarget(reference, target_object)
            except primitive.PrimitiveError:
                primitive.Delete(reference.parent)

        # delete the replaced object
        primitive.Delete(gl_object)

        if self.current_object == "Fragment":
            # Delete the first atom in the fragment
            primitive.Delete(new.children[0])
            # Unframe the fragment
            UnframeAbsolute = context.application.plugins.get_action("UnframeAbsolute")
            UnframeAbsolute([new])
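# A self-contained sketch of the bond-alignment step above: build the rotation
# that maps direction2 onto direction1 from their cross product (rotation axis)
# and the angle between them. Rodrigues' formula stands in for
# Rotation.from_properties, purely for illustration; the fallback axis for
# (anti)parallel vectors mirrors the role of random_orthonormal.
import numpy

def alignment_matrix(direction1, direction2):
    "Return a 3x3 rotation matrix that rotates direction2 onto direction1."
    d1 = numpy.asarray(direction1, float)
    d2 = numpy.asarray(direction2, float)
    d1 /= numpy.linalg.norm(d1)
    d2 /= numpy.linalg.norm(d2)
    axis = numpy.cross(d2, d1)
    norm = numpy.linalg.norm(axis)
    if norm < 1e-8:
        # directions are (anti)parallel: pick any axis orthogonal to d1
        axis = numpy.cross(d1, [1.0, 0.0, 0.0])
        if numpy.linalg.norm(axis) < 1e-8:
            axis = numpy.cross(d1, [0.0, 1.0, 0.0])
        axis /= numpy.linalg.norm(axis)
    else:
        axis /= norm
    angle = numpy.arccos(numpy.clip(numpy.dot(d1, d2), -1.0, 1.0))
    # Rodrigues' rotation formula: R = I + sin(a)*K + (1 - cos(a))*K^2
    K = numpy.array([
        [0.0, -axis[2], axis[1]],
        [axis[2], 0.0, -axis[0]],
        [-axis[1], axis[0], 0.0],
    ])
    return numpy.identity(3) + numpy.sin(angle) * K + (1 - numpy.cos(angle)) * numpy.dot(K, K)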
def do(self):
    # the indices (n,m) that define the tube, see e.g. the wikipedia page
    # about nanotubes for the interpretation of these indices:
    # http://en.wikipedia.org/wiki/Carbon_nanotube
    n = self.parameters.n
    m = self.parameters.m

    periodic_tube = isinstance(self.parameters.tube_length, Undefined)

    universe = context.application.model.universe

    def define_flat():
        "Reads and converts the unit cell vectors from the current model."
        # Some parts of the algorithm have been arranged in sub functions
        # like these, to reduce the number of local variables in self.do.
        # This should also clarify the code.
        active, inactive = universe.cell.active_inactive
        lengths, angles = universe.cell.parameters
        a = lengths[active[0]]
        b = lengths[active[1]]
        theta = angles[inactive[0]]
        return (
            numpy.array([a, 0], float),
            numpy.array([b * numpy.cos(theta), b * numpy.sin(theta)], float)
        )
    flat_a, flat_b = define_flat()

    def create_pattern():
        "Read the atom positions and transform them to the flat coordinates"
        active, inactive = universe.cell.active_inactive
        a = universe.cell.matrix[:, active[0]]
        b = universe.cell.matrix[:, active[1]]
        c = numpy.cross(a, b)
        tmp_cell = UnitCell(numpy.array([a, b, c]).transpose())
        rotation = tmp_cell.alignment_a
        return [
            (atom.number, rotation * atom.get_absolute_frame().t)
            for atom in iter_atoms([universe])
        ]
    pattern = create_pattern()

    def define_big_periodic():
        "Based on (n,m), calculate the size of the periodic sheet (that will be folded into a tube)."
        big_a = n * flat_a - m * flat_b
        norm_a = numpy.linalg.norm(big_a)
        radius = norm_a / (2 * numpy.pi)
        big_x = big_a / norm_a
        big_y = numpy.array([-big_x[1], big_x[0]], float)

        big_b = None
        stack_vector = flat_b - flat_a * numpy.dot(big_x, flat_b) / numpy.dot(big_x, flat_a)
        stack_length = numpy.linalg.norm(stack_vector)
        nominator = numpy.linalg.norm(stack_vector - flat_b)
        denominator = numpy.linalg.norm(flat_a)
        fraction = nominator / denominator
        stack_size = 1
        while True:
            repeat = fraction * stack_size
            if stack_length * stack_size > self.parameters.max_length:
                break
            if abs(repeat - round(repeat)) * denominator < self.parameters.max_error:
                big_b = stack_vector * stack_size
                break
            stack_size += 1
        if big_b is None:
            raise UserError("Could not create a periodic tube shorter than the given maximum length.")
        rotation = numpy.array([big_x, big_y], float)
        return big_a, big_b, rotation, stack_vector, stack_size, radius

    def define_big_not_periodic():
        "Based on (n,m), calculate the size of the non-periodic sheet (that will be folded into a tube)."
        big_a = n * flat_a - m * flat_b
        norm_a = numpy.linalg.norm(big_a)
        radius = norm_a / (2 * numpy.pi)
        big_x = big_a / norm_a
        big_y = numpy.array([-big_x[1], big_x[0]], float)

        stack_vector = flat_b - flat_a * numpy.dot(big_x, flat_b) / numpy.dot(big_x, flat_a)
        stack_length = numpy.linalg.norm(stack_vector)
        stack_size = int(self.parameters.tube_length / stack_length)
        big_b = stack_vector * stack_size
        rotation = numpy.array([big_x, big_y], float)
        return big_a, big_b, rotation, stack_vector, stack_size, radius

    if periodic_tube:
        big_a, big_b, rotation, stack_vector, stack_size, radius = define_big_periodic()
    else:
        big_a, big_b, rotation, stack_vector, stack_size, radius = define_big_not_periodic()

    def iter_translations():
        "Yields the in-plane translations of the periodic images that are part of the tube."
        to_fractional = numpy.linalg.inv(numpy.array([big_a, big_b]).transpose())
        col_len = int(numpy.linalg.norm(big_a + m * stack_vector) / numpy.linalg.norm(flat_a)) + 4
        shift = numpy.dot(stack_vector - flat_b, flat_a) / numpy.linalg.norm(flat_a)**2
        for row in xrange(-m - 1, stack_size + 1):
            col_start = int(numpy.floor(row * shift)) - 1
            for col in xrange(col_start, col_start + col_len):
                p = col * flat_a + row * flat_b
                i = numpy.dot(to_fractional, p)
                if (i >= 0).all() and (i < 1 - 1e-15).all():
                    yield p
                #yield p, (i >= 0).all() and (i < 1).all()

    def iter_pattern():
        for number, coordinate in pattern:
            yield number, coordinate.copy()

    # first delete everything in the universe:
    while len(universe.children) > 0:
        primitive.Delete(universe.children[0])

    # add the new atoms
    Atom = context.application.plugins.get_node("Atom")
    if self.parameters.flat:
        rot_a = numpy.dot(rotation, big_a)
        rot_b = numpy.dot(rotation, big_b)
        big_matrix = numpy.array([
            [rot_a[0], rot_b[0], 0],
            [rot_a[1], rot_b[1], 0],
            [0, 0, 10 * angstrom],
        ], float)
        big_cell = UnitCell(big_matrix, numpy.array([True, periodic_tube, False], bool))
        primitive.SetProperty(universe, "cell", big_cell)
        for p in iter_translations():
            for number, coordinate in iter_pattern():
                coordinate[:2] += p
                coordinate[:2] = numpy.dot(rotation, coordinate[:2])
                translation = Translation(coordinate)
                primitive.Add(Atom(number=number, transformation=translation), universe)
    else:
        tube_length = numpy.linalg.norm(big_b)
        big_matrix = numpy.diag([radius * 2, radius * 2, tube_length])
        big_cell = UnitCell(big_matrix, numpy.array([False, False, periodic_tube], bool))
        primitive.SetProperty(universe, "cell", big_cell)
        for p in iter_translations():
            for number, coordinate in iter_pattern():
                coordinate[:2] += p
                coordinate[:2] = numpy.dot(rotation, coordinate[:2])
                translation = Translation(numpy.array([
                    (radius + coordinate[2]) * numpy.cos(coordinate[0] / radius),
                    (radius + coordinate[2]) * numpy.sin(coordinate[0] / radius),
                    coordinate[1],
                ]))
                primitive.Add(Atom(number=number, transformation=translation), universe)
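# A sketch of the roll-up geometry used above. The chiral vector is
# C = n*a - m*b (a, b are the flat 2D lattice vectors), the tube radius is
# |C| / (2*pi), and a sheet point whose first coordinate x runs along C is
# wrapped onto the cylinder as ((r + z)*cos(x/r), (r + z)*sin(x/r), y).
# The helper names and the graphene-like lattice vectors in the example are
# assumptions for illustration only.
import numpy

def tube_radius(n, m, flat_a, flat_b):
    "Radius of the tube obtained by rolling up the chiral vector n*a - m*b."
    chiral = n * numpy.asarray(flat_a, float) - m * numpy.asarray(flat_b, float)
    return numpy.linalg.norm(chiral) / (2 * numpy.pi)

def roll_up(point, radius):
    "Map a flat (x, y, z) sheet coordinate onto the tube surface."
    x, y, z = point
    return numpy.array([
        (radius + z) * numpy.cos(x / radius),
        (radius + z) * numpy.sin(x / radius),
        y,
    ])

# example with hexagonal lattice vectors of length 2.46 (graphene, in angstrom):
# flat_a = [2.46, 0.0]
# flat_b = [2.46 * 0.5, 2.46 * numpy.sqrt(3) / 2]
# tube_radius(10, 0, flat_a, flat_b) is roughly 3.9 angstrom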
def do(self):
    # create the repetitions vector
    repetitions = []

    if hasattr(self.parameters, "repetitions_a"):
        repetitions.append(self.parameters.repetitions_a)
    else:
        repetitions.append(1)

    if hasattr(self.parameters, "repetitions_b"):
        repetitions.append(self.parameters.repetitions_b)
    else:
        repetitions.append(1)

    if hasattr(self.parameters, "repetitions_c"):
        repetitions.append(self.parameters.repetitions_c)
    else:
        repetitions.append(1)

    repetitions = numpy.array(repetitions, int)

    # serialize the positioned children
    universe = context.application.model.universe
    positioned = [
        node for node in universe.children
        if (isinstance(node, GLTransformationMixin) and
            isinstance(node.transformation, Translation))
    ]
    if len(positioned) == 0:
        return
    serialized = StringIO.StringIO()
    dump_to_file(serialized, positioned)

    # create the replicas: replicate the positioned objects
    new_children = {}
    for cell_index in iter_all_positions(repetitions):
        cell_index = numpy.array(cell_index)
        cell_hash = tuple(cell_index)
        serialized.seek(0)
        nodes = load_from_file(serialized)
        new_children[cell_hash] = nodes
        for node in nodes:
            t = node.transformation.t + numpy.dot(universe.cell.matrix, cell_index)
            new_transformation = node.transformation.copy_with(t=t)
            node.set_transformation(new_transformation)

    # forget about serialized stuff
    serialized.close()
    del serialized

    new_connectors = []
    # replicate the objects that connect these positioned objects
    for cell_index in iter_all_positions(repetitions):
        cell_index = numpy.array(cell_index)
        cell_hash = tuple(cell_index)
        for connector in universe.children:
            # Only applicable to ReferentMixin with only SpatialReference
            # children
            if not isinstance(connector, ReferentMixin):
                continue
            skip = False
            for reference in connector.children:
                if not isinstance(reference, SpatialReference):
                    skip = True
                    break
            if skip:
                continue

            # first locate the new first target for this cell_index
            first_target_orig = connector.children[0].target
            first_target_index = positioned.index(first_target_orig)
            first_target = new_children[cell_hash][first_target_index]
            assert first_target is not None
            new_targets = [first_target]

            for reference in connector.children[1:]:
                # then find the other new targets, taking into account
                # periodicity
                other_target_orig = reference.target
                shortest_vector = universe.shortest_vector(
                    other_target_orig.transformation.t -
                    first_target_orig.transformation.t
                )
                other_target_pos = first_target.transformation.t + shortest_vector
                other_cell_index = numpy.floor(universe.cell.to_fractional(other_target_pos)).astype(int)
                other_cell_index %= repetitions
                other_cell_hash = tuple(other_cell_index)
                other_target_index = positioned.index(other_target_orig)
                other_cell_children = new_children.get(other_cell_hash)
                assert other_cell_children is not None
                other_target = other_cell_children[other_target_index]
                assert other_target is not None
                new_targets.append(other_target)

            state = connector.__getstate__()
            state["targets"] = new_targets
            new_connectors.append(connector.__class__(**state))

    # remove the existing nodes
    while len(universe.children) > 0:
        primitive.Delete(universe.children[0])
    del positioned

    # multiply the cell matrix and reset the number of repetitions
    new_matrix = universe.cell * repetitions
    primitive.SetProperty(universe, "cell", new_matrix)
    primitive.SetProperty(universe, "repetitions", numpy.array([1, 1, 1], int))

    # add the new nodes
    for nodes in new_children.itervalues():
        for node in nodes:
            primitive.Add(node, universe)
    for connector in new_connectors:
        primitive.Add(connector, universe)
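# A sketch of how the replication step above decides which copy of a target a
# connector should point to: convert the cartesian position to fractional
# coordinates, take the floor to get the integer cell index, and wrap it with
# the repetition counts. The columns of cell_matrix are the cell vectors; the
# helper name and arguments are illustrative only.
import numpy

def image_index(position, cell_matrix, repetitions):
    "Return the (wrapped) integer cell index that contains the given position."
    fractional = numpy.dot(numpy.linalg.inv(cell_matrix), position)
    cell_index = numpy.floor(fractional).astype(int)
    return tuple(cell_index % numpy.asarray(repetitions, int))

# example: a 10x10x10 cubic cell repeated 2x2x2; a point at x = 13.5 falls in
# image (1, 0, 0), while x = 23.5 wraps back to (0, 0, 0).
# image_index([13.5, 1.0, 1.0], numpy.identity(3) * 10.0, [2, 2, 2]) -> (1, 0, 0)
# image_index([23.5, 1.0, 1.0], numpy.identity(3) * 10.0, [2, 2, 2]) -> (0, 0, 0)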
def extend_to_cluster(axis, interval):
    if (interval is None) or isinstance(interval, Undefined):
        return
    assert universe.cell.active[axis]
    interval.sort()
    index_min = int(numpy.floor(interval[0]))
    index_max = int(numpy.ceil(interval[1]))

    old_points = [
        node for node in universe.children
        if (isinstance(node, GLTransformationMixin) and
            isinstance(node.transformation, Translation))
    ]
    if len(old_points) == 0:
        return

    old_connections = [
        node for node in universe.children
        if (isinstance(node, ReferentMixin) and reduce(
            (lambda x, y: x and y),
            (isinstance(child, SpatialReference) for child in node.children),
            True,
        ))
    ]

    # replication of the points
    new_points = {}
    for old_point in old_points:
        # determine the wrapped position
        old_pos = old_point.transformation.t.copy()
        old_frac = universe.cell.to_fractional(old_pos)
        old_index = numpy.floor(old_frac).astype(int)
        old_pos -= universe.cell.to_cartesian(old_index)
        old_frac -= old_index
        del old_index
        # make copies
        for cell_index in xrange(index_min, index_max):
            position = old_pos + universe.cell.matrix[:, axis] * cell_index
            if (old_frac[axis] + cell_index < interval[0]) or \
               (old_frac[axis] + cell_index > interval[1]):
                continue
            state = old_point.__getstate__()
            state["transformation"] = state["transformation"].copy_with(t=position)
            new_point = old_point.__class__(**state)
            new_points[(old_point, cell_index)] = new_point

    new_connections = []
    # replication of the connections
    for cell_index in xrange(index_min - 1, index_max + 1):
        for connection in old_connections:
            old_target0 = connection.children[0].target
            new_target0 = new_points.get((old_target0, cell_index))
            if new_target0 is None:
                continue

            new_targets = [new_target0]
            # guard against connections without secondary references
            abort = False
            for reference in connection.children[1:]:
                abort = True
                old_target1 = reference.target
                for offset in 0, 1, -1:
                    new_target1 = new_points.get((old_target1, cell_index + offset))
                    if new_target1 is not None:
                        delta = new_target0.transformation.t - new_target1.transformation.t
                        if vector_acceptable(delta, universe.cell.matrix[:, axis]):
                            new_targets.append(new_target1)
                            abort = False
                            break
                if abort:
                    break
            if abort:
                del new_targets
                continue

            state = connection.__getstate__()
            state["targets"] = new_targets
            new_connections.append(connection.__class__(**state))

    # remove the existing points and connections
    for node in old_connections:
        primitive.Delete(node)
    del old_connections
    for node in old_points:
        primitive.Delete(node)
    del old_points

    # remove the periodicity
    new_active = universe.cell.active.copy()
    new_active[axis] = False
    new_cell = universe.cell.copy_with(active=new_active)
    primitive.SetProperty(universe, "cell", new_cell)

    # add the new nodes
    for node in new_points.itervalues():
        primitive.Add(node, universe)
    for connection in new_connections:
        primitive.Add(connection, universe)
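# A sketch of the point replication above: wrap each position back into the
# unit cell, then emit one copy per integer cell index for which the wrapped
# fractional coordinate plus that index lies inside the requested interval
# along the chosen axis. Connections and the other axes are left out; the
# helper name and arguments are illustrative only.
import numpy

def iter_axis_images(position, cell_matrix, axis, interval):
    "Yield the cartesian positions of the periodic images inside the interval."
    inv = numpy.linalg.inv(cell_matrix)
    frac = numpy.dot(inv, position)
    index = numpy.floor(frac).astype(int)
    wrapped = position - numpy.dot(cell_matrix, index)
    wrapped_frac = frac - index
    index_min = int(numpy.floor(interval[0]))
    index_max = int(numpy.ceil(interval[1]))
    for cell_index in range(index_min, index_max):
        if interval[0] <= wrapped_frac[axis] + cell_index <= interval[1]:
            yield wrapped + cell_matrix[:, axis] * cell_index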
def delete_referents(self):
    while len(self.references) > 0:
        primitive.Delete(self.references[0].parent)