def best_match(sites1, sites2, crystal_symmetry=None,
    reject_if_too_far=None, distance_per_site=None):
  assert distance_per_site is not None
  # if reject_if_too_far and the centers of the two are further than can
  # be reached by the remainders, skip
  unit_cell = crystal_symmetry.unit_cell()
  sps = crystal_symmetry.special_position_settings(min_distance_sym_equiv=0.5)
  from scitbx.array_family import flex
  # Match coordinates
  from cctbx import sgtbx
  # check central atoms if n > 5 for each
  if sites1.size() > 5 and sites2.size() > 5:
    # what is distance?
    index1 = sites1.size()//2
    index2 = sites2.size()//2
    x1_ses = sps.sym_equiv_sites(site=sites1[index1])
    info = sgtbx.min_sym_equiv_distance_info(
      reference_sites=x1_ses, other=sites2[index2])
    dd = info.dist()
    # what is distance spannable by ends of each?
    max_dist = (index1 + index2) * distance_per_site
    if dd > max_dist:
      info.i = index1
      info.j = index2
      return info  # hopeless
  best_info = None
  best_dist = None
  i = 0
  for site in sites1:
    x1_ses = sps.sym_equiv_sites(site=site)
    j = 0
    for site2 in sites2:
      info = sgtbx.min_sym_equiv_distance_info(
        reference_sites=x1_ses, other=site2)
      dd = info.dist()
      if best_dist is None or dd < best_dist:
        best_dist = dd
        best_info = info
        best_info.i = i  # just tack them on
        best_info.j = j
      j += 1
    i += 1
  return best_info

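# Minimal, self-contained sketch of the pattern shared by the snippets in this
# collection: expand one fractional site to its symmetry equivalents with
# sym_equiv_sites, then ask min_sym_equiv_distance_info for the shortest
# symmetry-aware distance to a second site. Unit cell, space group and the
# two coordinates below are illustrative assumptions, not taken from the
# functions above or below.
from cctbx import crystal, sgtbx

def demo_min_sym_equiv_distance():
  symmetry = crystal.symmetry(
    unit_cell=(10, 10, 10, 90, 90, 90),
    space_group_symbol="P212121")
  sps = symmetry.special_position_settings(min_distance_sym_equiv=0.5)
  equiv = sps.sym_equiv_sites(site=(0.1, 0.2, 0.3))
  info = sgtbx.min_sym_equiv_distance_info(equiv, (0.85, 0.35, 0.75))
  print("shortest symmetry-equivalent distance (A):", info.dist())
  print("symmetry operation giving that distance:", info.sym_op())
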
def loop(self):
  for i_position in range(self.wyckoff_table.size()):
    site_symmetry_i = self.wyckoff_table.random_site_symmetry(
      special_position_settings=self.special_position_settings,
      i_position=i_position)
    equiv_sites_i = sgtbx.sym_equiv_sites(site_symmetry_i)
    for j_position in range(self.wyckoff_table.size()):
      for n_trial in range(self.max_trials_per_position):
        site_j = self.wyckoff_table.random_site_symmetry(
          special_position_settings=self.special_position_settings,
          i_position=j_position).exact_site()
        dist_info = sgtbx.min_sym_equiv_distance_info(equiv_sites_i, site_j)
        if (dist_info.dist() > self.min_cross_distance):
          structure = xray.structure(
            special_position_settings=self.special_position_settings,
            scatterers=flex.xray_scatterer(
              [xray.scatterer(scattering_type=self.scattering_type, site=site)
               for site in [site_symmetry_i.exact_site(), site_j]]))
          yield structure, dist_info.dist()
          break

def peak_cluster_reduction(crystal_symmetry, peak_list,
                           min_peak_distance, max_reduced_peaks):
  special_position_settings = crystal.special_position_settings(
    crystal_symmetry=crystal_symmetry,
    min_distance_sym_equiv=min_peak_distance)
  peaks = []
  for i, site in enumerate(peak_list.sites()):
    peaks.append(dicts.easy(
      site=special_position_settings.site_symmetry(site).exact_site(),
      height=peak_list.heights()[i]))
  reduced_peaks = []
  for peak in peaks:
    site_symmetry = special_position_settings.site_symmetry(peak.site)
    equiv_sites = sgtbx.sym_equiv_sites(site_symmetry)
    keep = True
    for reduced_peak in reduced_peaks:
      dist = sgtbx.min_sym_equiv_distance_info(
        equiv_sites, reduced_peak.site).dist()
      if (dist < min_peak_distance):
        keep = False
        break
    if (keep == True):
      reduced_peaks.append(peak)
      if (len(reduced_peaks) == max_reduced_peaks):
        break
  return reduced_peaks

def _accumulate_significant(self, site, height, site_symmetry, equiv_sites):
  unit_cell = self.special_position_settings().unit_cell()
  orth = unit_cell.orthogonalize
  frac = unit_cell.fractionalize
  sum_w_sites = matrix.col(orth(site)) * height
  sum_w = height
  height_cutoff = height * self._cluster_height_fraction
  for i in range(self._peak_list_index, self._peak_list.size()):
    if (self._is_processed[i]): continue
    other_height = self._peak_list.heights()[i]
    if (other_height < height_cutoff): break
    other_site = self._peak_list.sites()[i]
    other_site_symmetry = self._special_position_settings.site_symmetry(
      other_site)
    if (self._general_positions_only
        and not other_site_symmetry.is_point_group_1()):
      self._is_processed[i] = True
      continue
    other_site = other_site_symmetry.exact_site()
    dist_info = sgtbx.min_sym_equiv_distance_info(equiv_sites, other_site)
    dist = dist_info.dist()
    if (dist < self._min_cross_distance):
      self._is_processed[i] = True
      close_site = dist_info.apply(flex.vec3_double([other_site]))[0]
      close_site = site_symmetry.special_op() * close_site
      sum_w_sites += matrix.col(orth(close_site)) * other_height
      sum_w += other_height
  return frac(sum_w_sites / sum_w), height

def show_patterson_peaks(self, min_relative_peak_height=0.1, show_at_least=3):
  print("Patterson peaks for %s:" % str(self.input.info()))
  reciprocal_map = self.input
  if (reciprocal_map.anomalous_flag()):
    reciprocal_map = reciprocal_map.average_bijvoet_mates()
  patterson_map = reciprocal_map.patterson_map(
    symmetry_flags=maptbx.use_space_group_symmetry)
  patterson_map.apply_sigma_scaling()
  peak_list = patterson_map.tags().peak_search(
    map=patterson_map.real_map(),
    parameters=maptbx.peak_search_parameters())
  max_height = peak_list.heights()[0]
  sym_equiv_origin = sgtbx.sym_equiv_sites(
    unit_cell=patterson_map.unit_cell(),
    space_group=patterson_map.space_group(),
    original_site=(0, 0, 0))
  print(" Fractional coordinates  Height  Distance from origin")
  for i_peak in range(peak_list.size()):
    height = peak_list.heights()[i_peak]
    if (height < max_height * min_relative_peak_height
        and i_peak > show_at_least):
      break
    site = peak_list.sites()[i_peak]
    dist_info = sgtbx.min_sym_equiv_distance_info(sym_equiv_origin, site)
    print(" %8.4f %8.4f %8.4f" % (dist_info.sym_op() * site), end=' ')
    print(" %8.3f %8.3f" % (height, dist_info.dist()))
  print()

def verify_match(model1, model2, tolerance, match_rt, pairs):
  adj_tolerance = tolerance * (1 + 1.e-6)
  for pair in pairs:
    c1 = model1[pair[0]].site
    c2 = match_rt * model2[pair[1]].site
    equiv_c2 = sgtbx.sym_equiv_sites(model1.site_symmetry(c2.elems))
    dist_info = sgtbx.min_sym_equiv_distance_info(equiv_c2, c1)
    assert dist_info.dist() < adj_tolerance, str(model1.space_group_info())

def have_suitable_hetero_distance(existing_sites,
                                  sym_equiv_sites_of_other_site,
                                  min_hetero_distance):
  for existing_site in existing_sites:
    if (sgtbx.min_sym_equiv_distance_info(
          sym_equiv_sites_of_other_site,
          existing_site).dist() < min_hetero_distance):
      return False
  return True

def check_peaks(structure, peak_sites, max_min_dist):
  for scatterer in structure.scatterers():
    site_symmetry = structure.site_symmetry(scatterer.site)
    equiv_sites = sgtbx.sym_equiv_sites(site_symmetry)
    min_dist = None
    for peak_site in peak_sites:
      dist_info = sgtbx.min_sym_equiv_distance_info(equiv_sites, peak_site)
      if (min_dist is None):
        min_dist = dist_info.dist()
      else:
        min_dist = min(min_dist, dist_info.dist())
    assert min_dist <= max_min_dist, (min_dist, max_min_dist)

def next_with_effective_resolution(self):
  while 1:
    peak_list_index = self._peak_list_index
    if (peak_list_index >= self._peak_list.size()):
      return None
    self._peak_list_index += 1
    if (self._is_processed is not None):
      if (self._is_processed[peak_list_index]): continue
      self._is_processed[peak_list_index] = True
    grid_index = self._peak_list.grid_indices(peak_list_index)
    grid_height = self._peak_list.grid_heights()[peak_list_index]
    site = self._peak_list.sites()[peak_list_index]
    height = self._peak_list.heights()[peak_list_index]
    site_symmetry = self._special_position_settings.site_symmetry(site)
    if (self._general_positions_only
        and not site_symmetry.is_point_group_1()):
      continue
    site = site_symmetry.exact_site()
    equiv_sites = sgtbx.sym_equiv_sites(site_symmetry)
    keep = True
    if (self._sites.size() > 250):
      import warnings
      warnings.warn(
        message="This function should not be used for"
                " processing a large number of peaks.",
        category=RuntimeWarning)
    for s in self._sites:
      dist = sgtbx.min_sym_equiv_distance_info(equiv_sites, s).dist()
      if (dist < self._min_cross_distance):
        keep = False
        break
    if (keep == True):
      if (self._effective_resolution is not None
          and (self._heights.size() == 0
               or height
                  < self._heights[0] * self._significant_height_fraction)):
        site, height = self._accumulate_significant(
          site, height, site_symmetry, equiv_sites)
      self._peak_list_indices.append(peak_list_index)
      self._sites.append(site)
      self._heights.append(height)
      return cluster_site_info(
        peak_list_index=peak_list_index,
        grid_index=grid_index,
        grid_height=grid_height,
        site=site,
        height=height)

def compute_refined_matches(ref_model1, ref_model2, tolerance,
                            models_are_diffraction_index_equivalent,
                            shall_break):
  match_symmetry = euclidean_match_symmetry(
    ref_model1.space_group_info(),
    use_k2l=True,
    use_l2n=(not models_are_diffraction_index_equivalent))
  ref_model1_sites = flex.vec3_double([pos.site for pos in ref_model1])
  ref_model2_sites = flex.vec3_double([pos.site for pos in ref_model2])
  add_pair_ext = ext.add_pair(
    tolerance,
    ref_model1.unit_cell(),
    ref_model1.space_group(),
    ref_model1.min_distance_sym_equiv(),
    ref_model1_sites,
    ref_model2_sites)
  accumulated_match_refine_times = match_refine_times()
  refined_matches = []
  for i_pivot1 in range(ref_model1.size()):
    for i_pivot2 in range(ref_model2.size()):
      for eucl_symop in match_symmetry.rt_mx:
        c2 = eucl_symop * ref_model2[i_pivot2].site
        dist_info = sgtbx.min_sym_equiv_distance_info(
          add_pair_ext.equiv1(i_pivot1), c2,
          match_symmetry.continuous_shift_flags)
        if (dist_info.dist() < tolerance):
          allowed_shift = dist_info.continuous_shifts()
          match = match_refine(tolerance,
                               ref_model1, ref_model2,
                               match_symmetry,
                               add_pair_ext,
                               i_pivot1, i_pivot2,
                               eucl_symop, allowed_shift,
                               accumulated_match_refine_times)
          match.rt = match_rt_from_ref_eucl_rt(
            ref_model1.cb_op(), ref_model2.cb_op(), match.ref_eucl_rt)
          refined_matches.append(match)
          if shall_break(match):
            return refined_matches
  #print accumulated_match_refine_times
  return refined_matches

def match_sites_by_symmetry(ref_sites, query_sites, unit_cell, space_group,
                            cutoff=5):
  """Pair sites in query_sites to sites in ref_sites, allowing for symmetry"""
  pairings = numpy.zeros((len(ref_sites), len(query_sites)), dtype=int)
  for i_ref, ref in enumerate(ref_sites):
    ref_frac = unit_cell.fractionalize(ref)
    sym_sites_ref = sgtbx.sym_equiv_sites(
      space_group=space_group,
      unit_cell=unit_cell,
      original_site=ref_frac)
    for i_query, query in enumerate(query_sites):
      query_frac = unit_cell.fractionalize(query)
      min_dist = sgtbx.min_sym_equiv_distance_info(
        sym_sites_ref, query_frac).dist()
      if min_dist < cutoff:
        pairings[i_ref, i_query] = 1
  return pairings

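# Hypothetical usage sketch for match_sites_by_symmetry above (cell, space
# group and Cartesian coordinates are made up, and the numpy/cctbx imports
# used by that function are assumed to be available): with a 20 A cubic P1
# cell and the default 5 A cutoff, only the first query site should pair
# with the reference site.
from cctbx import sgtbx, uctbx

def demo_match_sites_by_symmetry():
  unit_cell = uctbx.unit_cell((20, 20, 20, 90, 90, 90))
  space_group = sgtbx.space_group_info("P 1").group()
  ref_sites = [(1.0, 2.0, 3.0)]
  query_sites = [(1.5, 2.0, 3.0), (15.0, 15.0, 15.0)]
  pairings = match_sites_by_symmetry(
    ref_sites, query_sites, unit_cell, space_group)
  print(pairings)  # expected: [[1 0]]
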
def calculate_shortest_diff(self, pair):
  c2 = self.apply_eucl_ops(pair[1])
  return sgtbx.min_sym_equiv_distance_info(
    self.add_pair_ext.equiv1(pair[0]), c2).diff()

def run_call_back(flags, space_group_info):
  verbose = flags.Verbose
  if (flags.StaticModels):
    model1 = (test_model()
      .add_random_positions(2, "A")
      .shuffle_positions()
      .random_symmetry_mates()
      .apply_random_eucl_op())
    model2 = (test_model()
      .add_random_positions(3, "B")
      .shuffle_positions()
      .random_symmetry_mates()
      .apply_random_eucl_op()
      .shake_positions()
      .random_hand())
    for i in (0, 1):
      m1 = model1
      if (i): m1 = model1.transform_to_reference_setting().reset_cb_op()
      for j in (0, 1):
        m2 = model2
        if (j): m2 = model2.transform_to_reference_setting().reset_cb_op()
        if (0 or verbose):
          m1.show("Model1(%d)" % (i,))
          m2.show("Model2(%d)" % (j,))
        model_matches = emma.model_matches(m1, m2, rms_penalty_per_site=0)
        analyze_refined_matches(m1, m2, model_matches.refined_matches, verbose)
    return False
  model_core = test_model(space_group_info)
  model1 = (model_core
    .add_random_positions(2, "A"))
  model2 = (model_core
    .add_random_positions(3, "B")
    .shuffle_positions()
    .random_symmetry_mates()
    .apply_random_eucl_op()
    .shake_positions()
    .random_hand())
  if (0 or verbose):
    model_core.show("Core")
    model1.show("Model1")
    model2.show("Model2")
  model_matches = emma.model_matches(model1, model2, rms_penalty_per_site=0)
  analyze_refined_matches(
    model1, model2, model_matches.refined_matches, verbose)
  assert model_matches.consensus_model().size() >= model1.size() - 2
  assert model_matches.consensus_model(i_model=2).size() >= model2.size() - 3
  model1.expand_to_p1()
  model2.as_xray_structure()
  for i1, i2, m1, m2 in [(1, 2, model1, model2), (2, 1, model2, model1)]:
    m2_t = model_matches.transform_model(i_model=i2)
    assert m1.unit_cell().is_similar_to(m2_t.unit_cell())
    assert m1.space_group() == m2_t.space_group()
    for pair in model_matches.refined_matches[0].pairs:
      site1 = m1.positions()[pair[i1 - 1]].site
      site2 = m2_t.positions()[pair[i2 - 1]].site
      equiv_sites1 = sgtbx.sym_equiv_sites(m1.site_symmetry(site1))
      dist_info = sgtbx.min_sym_equiv_distance_info(equiv_sites1, site2)
      assert dist_info.dist() < model_matches.tolerance + 1.e-6
      site2_closest = dist_info.sym_op() * site2
      assert approx_equal(
        m1.unit_cell().distance(site1, site2_closest), dist_info.dist())
    if (i1 == 1):
      singles = model_matches.refined_matches[0].singles2
    else:
      singles = model_matches.refined_matches[0].singles1
    for i_site2 in singles:
      site2 = m2_t.positions()[i_site2].site
      for i_site1 in range(len(model1.positions())):
        site1 = m1.positions()[i_site1].site
        equiv_sites1 = sgtbx.sym_equiv_sites(m1.site_symmetry(site1))
        dist_info = sgtbx.min_sym_equiv_distance_info(equiv_sites1, site2)
        if (dist_info.dist() < model_matches.tolerance - 1.e-6):
          ok = False
          for pair in model_matches.refined_matches[0].pairs:
            if (pair[i1 - 1] == i_site1):
              ok = True
          assert ok

def check_with_grid_tags(inp_symmetry, symmetry_flags, sites_cart,
                         point_distance, strictly_inside, flag_write_pdb,
                         verbose):
  cb_op_inp_ref = inp_symmetry.change_of_basis_op_to_reference_setting()
  if (verbose):
    print("cb_op_inp_ref.c():", cb_op_inp_ref.c())
  ref_symmetry = inp_symmetry.change_basis(cb_op_inp_ref)
  search_symmetry = sgtbx.search_symmetry(
    flags=symmetry_flags,
    space_group_type=ref_symmetry.space_group_info().type(),
    seminvariant=ref_symmetry.space_group_info().structure_seminvariants())
  assert search_symmetry.continuous_shifts_are_principal()
  continuous_shift_flags = search_symmetry.continuous_shift_flags()
  if (flag_write_pdb):
    tag_sites_frac = flex.vec3_double()
  else:
    tag_sites_frac = None
  if (strictly_inside):
    inp_tags = inp_symmetry.gridding(
      step=point_distance*.7,
      symmetry_flags=symmetry_flags).tags()
    if (tag_sites_frac is not None):
      for point in flex.nested_loop(inp_tags.n_real()):
        if (inp_tags.tags().tag_array()[point] < 0):
          point_frac_inp = [
            float(n)/d for n, d in zip(point, inp_tags.n_real())]
          tag_sites_frac.append(point_frac_inp)
    if (inp_tags.tags().n_independent() < sites_cart.size()):
      print("FAIL:", inp_symmetry.space_group_info(),
            inp_tags.tags().n_independent(), sites_cart.size())
      raise AssertionError
  else:
    inp_tags = inp_symmetry.gridding(
      step=point_distance/2.,
      symmetry_flags=symmetry_flags).tags()
    sites_frac_inp = inp_symmetry.unit_cell().fractionalize(
      sites_cart=sites_cart)
    rt = cb_op_inp_ref.c().as_double_array()
    sites_frac_ref = rt[:9] * sites_frac_inp
    sites_frac_ref += rt[9:]
    max_distance = 2 * ((.5 * math.sqrt(3) * point_distance) * 2/3.)
    if (verbose):
      print("max_distance:", max_distance)
    for point in flex.nested_loop(inp_tags.n_real()):
      if (inp_tags.tags().tag_array()[point] < 0):
        point_frac_inp = [
          float(n)/d for n, d in zip(point, inp_tags.n_real())]
        if (tag_sites_frac is not None):
          tag_sites_frac.append(point_frac_inp)
        point_frac_ref = cb_op_inp_ref.c() * point_frac_inp
        equiv_points = sgtbx.sym_equiv_sites(
          unit_cell=ref_symmetry.unit_cell(),
          space_group=search_symmetry.subgroup(),
          original_site=point_frac_ref,
          minimum_distance=2.e-6,
          tolerance=1.e-6)
        min_dist = sgtbx.min_sym_equiv_distance_info(
          reference_sites=equiv_points,
          others=sites_frac_ref,
          principal_continuous_allowed_origin_shift_flags=
            continuous_shift_flags).dist()
        if (min_dist > max_distance):
          print("FAIL:", inp_symmetry.space_group_info(),
                point_frac_ref, min_dist)
          raise AssertionError
    if (inp_tags.tags().n_independent() + 10 < sites_cart.size()):
      print("FAIL:", inp_symmetry.space_group_info(),
            inp_tags.tags().n_independent(), sites_cart.size())
      raise AssertionError
  if (tag_sites_frac is not None):
    dump_pdb(
      file_name="tag_sites.pdb",
      crystal_symmetry=inp_symmetry,
      sites_cart=inp_symmetry.unit_cell().orthogonalize(
        sites_frac=tag_sites_frac))

def run(args, command_name="iotbx.pdb.superpose_centers_of_mass"):
  if (len(args) == 0): args = ["--help"]
  command_line = (option_parser(
    usage="%s [options] [reference_file] [other_file] [parameter_file]"
      % command_name)
    .enable_show_defaults()
    .enable_symmetry_comprehensive()
  ).process(args=args)
  if (command_line.expert_level is not None):
    master_params.show(
      expert_level=command_line.expert_level,
      attributes_level=command_line.attributes_level)
    sys.exit(0)
  #
  # Loop over command-line arguments.
  #
  parameter_interpreter = master_params.command_line_argument_interpreter()
  parsed_params = []
  pdb_file_names = []
  command_line_params = []
  for arg in command_line.args:
    arg_is_processed = False
    if (os.path.isfile(arg)):
      params = None
      try:
        params = iotbx.phil.parse(file_name=arg)
      except KeyboardInterrupt:
        raise
      except RuntimeError:
        pass
      else:
        if (len(params.objects) == 0):
          params = None
      if (params is not None):
        parsed_params.append(params)
        arg_is_processed = True
      elif (pdb.is_pdb_file(file_name=arg)):
        pdb_file_names.append(arg)
        arg_is_processed = True
    if (not arg_is_processed):
      try:
        params = parameter_interpreter.process(arg=arg)
      except Sorry as e:
        if (not os.path.isfile(arg)): raise
        raise Sorry("Unknown file format: %s" % arg)
      else:
        command_line_params.append(params)
  #
  # Consolidation of inputs, resulting in effective phil_params.
  #
  phil_params = master_params.fetch(
    sources=parsed_params + command_line_params)
  params = phil_params.extract()
  for param_group in [params.reference, params.other, params.output]:
    if (param_group.file_name is None and len(pdb_file_names) > 0):
      param_group.file_name = pdb_file_names[0]
      pdb_file_names = pdb_file_names[1:]
  if (len(pdb_file_names) > 0):
    raise Sorry("Too many PDB file names: %s"
      % ", ".join([show_string(s) for s in pdb_file_names]))
  if (params.output.file_name is None
      and params.other.file_name is not None):
    name = os.path.basename(params.other.file_name)
    if (name.lower().endswith(".pdb")): name = name[:-4]
    name += "_superposed.pdb"
    params.output.file_name = name
  if (params.crystal_symmetry.unit_cell is None):
    params.crystal_symmetry.unit_cell = \
      command_line.symmetry.unit_cell()
  if (params.crystal_symmetry.space_group is None):
    params.crystal_symmetry.space_group = \
      command_line.symmetry.space_group_info()
  phil_params = master_params.format(python_object=params)
  phil_params.show()
  print("#phil __OFF__")
  #
  # Final checks.
  #
  if (params.reference.file_name is None):
    raise Sorry("Required file name is missing: reference.file_name")
  if (params.other.file_name is None):
    raise Sorry("Required file name is missing: other.file_name")
  if (params.output.file_name is None):
    raise Sorry("Required file name is missing: output.file_name")
  #
  # Processing of input PDB files.
  #
  pdb_objs = []
  sites_carts = []
  centers_of_mass = []
  for param_group in [params.reference, params.other]:
    pdb_obj = pdb.hierarchy.input(file_name=param_group.file_name)
    pdb_obj.atoms = pdb_obj.hierarchy.atoms()
    pdb_objs.append(pdb_obj)
    sites_carts.append(pdb_obj.atoms.extract_xyz())
    sites_sel = sites_carts[-1]
    if (param_group.atom_selection is not None):
      sel = pdb_obj.hierarchy.atom_selection_cache().selection(
        param_group.atom_selection)
      sites_sel = sites_sel.select(sel)
    print("Number of selected sites:", sites_sel.size())
    centers_of_mass.append(sites_sel.mean())
  #
  # Consolidation of crystal symmetries.
  #
  crystal_symmetry = command_line.symmetry
  for pdb_obj in pdb_objs:
    crystal_symmetry_from_pdb = pdb_obj.input.crystal_symmetry()
    if (crystal_symmetry_from_pdb is not None):
      crystal_symmetry = crystal_symmetry.join_symmetry(
        other_symmetry=crystal_symmetry_from_pdb, force=False)
  if (crystal_symmetry.unit_cell() is None):
    raise Sorry(
      "Unknown unit cell parameters."
      "\n  Use --unit_cell or --symmetry to supply unit cell parameters.")
  if (crystal_symmetry.space_group_info() is None):
    raise Sorry(
      "Unknown space group symmetry."
      "\n  Use --space_group or --symmetry to supply symmetry information.")
  crystal_symmetry.show_summary()
  #
  # Obtain transformation to reference setting.
  # To ensure all allowed origin shifts are parallel to the basis vectors.
  #
  cb_op_to_ref = crystal_symmetry.change_of_basis_op_to_reference_setting()
  sym_ref = crystal_symmetry.change_basis(cb_op=cb_op_to_ref)
  #
  # Obtain allowed origin shifts.
  # This is the most convenient interface. Essentially we just need
  # sgtbx.structure_seminvariants.
  #
  match_symmetry = euclidean_model_matching.euclidean_match_symmetry(
    space_group_info=sym_ref.space_group_info(),
    use_k2l=False,
    use_l2n=False)
  #
  # Compute the symmetry operation which maps the center of mass of
  # "other" closest to the center of mass of "reference."
  #
  centers_frac = [
    sym_ref.unit_cell().fractionalize(cb_op_to_ref.c() * center_cart)
    for center_cart in centers_of_mass]
  dist_info = sgtbx.min_sym_equiv_distance_info(
    sym_ref.special_position_settings().sym_equiv_sites(centers_frac[0]),
    centers_frac[1],
    match_symmetry.continuous_shift_flags)
  sym_op = cb_op_to_ref.inverse().apply(dist_info.sym_op())
  print("Rotation in fractional space:", sym_op.r().as_xyz())
  sym_op = sym_op.as_rational().as_float() \
         + matrix.col(dist_info.continuous_shifts())
  print("Translation in fractional space: (%s)"
    % (", ".join(["%.6g" % t for t in sym_op.t])))
  #
  centers_frac = [
    sym_ref.unit_cell().fractionalize(center_cart)
    for center_cart in centers_of_mass]
  sym_center_frac = sym_op * centers_frac[1]
  sym_center_cart = crystal_symmetry.unit_cell().orthogonalize(
    sym_center_frac)
  print("Centers of mass:")
  print("  Reference: (%s)"
    % ", ".join(["%8.2f" % v for v in centers_of_mass[0]]))
  print("  Original other: (%s)"
    % ", ".join(["%8.2f" % v for v in centers_of_mass[1]]))
  print("  Symmetry related other: (%s)"
    % ", ".join(["%8.2f" % v for v in sym_center_cart]))
  print("Cartesian distance between centers of mass: %.4f"
    % dist_info.dist())
  #
  # Internal consistency check (in input setting).
  #
  assert approx_equal(
    crystal_symmetry.unit_cell().distance(centers_frac[0], sym_center_frac),
    dist_info.dist())
  #
  # Transform atomic coordinates of "other."
  #
  sites_frac_other = crystal_symmetry.unit_cell().fractionalize(
    sites_cart=sites_carts[1])
  sites_frac_other_superposed = sym_op * sites_frac_other
  sites_cart_other_superposed = crystal_symmetry.unit_cell().orthogonalize(
    sites_frac=sites_frac_other_superposed)
  #
  # Replace original coordinates with transformed coordinates.
  #
  pdb_objs[1].atoms.set_xyz(new_xyz=sites_cart_other_superposed)
  #
  # Write (selected) transformed coordinates.
  #
  pdb_hierarchy = pdb_objs[1].hierarchy
  if (params.output.atom_selection is not None):
    sel = pdb_hierarchy.atom_selection_cache().selection(
      params.output.atom_selection)
    pdb_hierarchy = pdb_hierarchy.select(atom_selection=sel)
  pdb_hierarchy.write_pdb_file(
    file_name=params.output.file_name,
    crystal_symmetry=crystal_symmetry,
    append_end=True,
    atoms_reset_serial_first_value=1)
