Example #1
    def find_peaks(self):
        grid_real_binary = self.grid_real.deep_copy()
        rmsd = math.sqrt(
            flex.mean(
                flex.pow2(grid_real_binary.as_1d() -
                          flex.mean(grid_real_binary.as_1d()))))
        grid_real_binary.set_selected(
            grid_real_binary < (self.params.rmsd_cutoff) * rmsd, 0)
        grid_real_binary.as_1d().set_selected(grid_real_binary.as_1d() > 0, 1)
        grid_real_binary = grid_real_binary.iround()
        from cctbx import masks
        flood_fill = masks.flood_fill(grid_real_binary, self.fft_cell)
        if flood_fill.n_voids() < 4:
            # Require at least a peak at the origin and one peak for each basis vector
            raise Sorry(
                "Indexing failed: fft3d peak search failed to find sufficient number of peaks."
            )
        # the peak at the origin might have a significantly larger volume than the
        # rest so exclude this peak from determining maximum volume
        isel = (flood_fill.grid_points_per_void() > int(
            self.params.fft3d.peak_volume_cutoff *
            flex.max(flood_fill.grid_points_per_void()[1:]))).iselection()

        if self.params.optimise_initial_basis_vectors:
            self.volumes = flood_fill.grid_points_per_void().select(isel)
            sites_cart = flood_fill.centres_of_mass_cart().select(isel)
            sites_cart_optimised = optimise_basis_vectors(
                self.reflections['rlp'].select(
                    self.reflections_used_for_indexing), sites_cart)

            self.sites = self.fft_cell.fractionalize(sites_cart_optimised)

            diffs = (sites_cart_optimised - sites_cart)
            norms = diffs.norms()
            flex.min_max_mean_double(norms).show()
            perm = flex.sort_permutation(norms, reverse=True)
            for p in perm[:10]:
                logger.debug("%s %s %s", sites_cart[p], sites_cart_optimised[p], norms[p])

            # only use those vectors which haven't shifted too far from starting point
            sel = norms < (5 * self.fft_cell.parameters()[0] /
                           self.gridding[0])
            self.sites = self.sites.select(sel)
            self.volumes = self.volumes.select(sel)
            #diff = (self.sites - flood_fill.centres_of_mass_frac().select(isel))
            #flex.min_max_mean_double(diff.norms()).show()

        else:
            self.sites = flood_fill.centres_of_mass_frac().select(isel)
            self.volumes = flood_fill.grid_points_per_void().select(isel)
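
A minimal sketch (not DIALS code) of the same binarise-then-flood-fill peak search, written with numpy and scipy.ndimage as stand-ins for the cctbx flex arrays and masks.flood_fill. Note that scipy's label() is not periodic, whereas the cctbx flood fill operates on a periodic unit-cell grid, and the cutoff defaults below are illustrative assumptions.

import numpy as np
from scipy import ndimage

def find_peaks_sketch(grid, rmsd_cutoff=15.0, peak_volume_cutoff=0.15):
    """Binarise a 3D map at rmsd_cutoff * rmsd, label the connected regions
    and return fractional centres of mass of the sufficiently large ones."""
    rmsd = np.sqrt(np.mean((grid - grid.mean()) ** 2))
    binary = (grid >= rmsd_cutoff * rmsd).astype(int)
    labels, n_regions = ndimage.label(binary)
    if n_regions < 4:
        raise RuntimeError("need an origin peak plus three basis-vector peaks")
    sizes = np.bincount(labels.ravel())[1:]        # voxels per labelled region
    # the real code excludes the origin peak (the first void reported by
    # flood_fill) before taking the maximum; here we simply ignore the single
    # largest region when setting the volume cutoff
    reference = np.partition(sizes, -2)[-2]
    keep = np.nonzero(sizes > peak_volume_cutoff * reference)[0] + 1
    centres = ndimage.center_of_mass(binary, labels, keep)
    return np.asarray(centres) / np.asarray(grid.shape)

On the real FFT grid these fractional peak positions (origin peak aside) are the candidate direct-space basis vectors that the rest of the indexing pipeline goes on to refine.
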
Example #2
File: fft3d.py Project: dials/dials
  def find_peaks(self):
    grid_real_binary = self.grid_real.deep_copy()
    rmsd = math.sqrt(
      flex.mean(flex.pow2(grid_real_binary.as_1d()-flex.mean(grid_real_binary.as_1d()))))
    grid_real_binary.set_selected(grid_real_binary < (self.params.rmsd_cutoff)*rmsd, 0)
    grid_real_binary.as_1d().set_selected(grid_real_binary.as_1d() > 0, 1)
    grid_real_binary = grid_real_binary.iround()
    from cctbx import masks
    flood_fill = masks.flood_fill(grid_real_binary, self.fft_cell)
    if flood_fill.n_voids() < 4:
      # Require at least a peak at the origin and one peak for each basis vector
      raise Sorry("Indexing failed: fft3d peak search failed to find sufficient number of peaks.")
    # the peak at the origin might have a significantly larger volume than the
    # rest so exclude this peak from determining maximum volume
    isel = (flood_fill.grid_points_per_void() > int(
        self.params.fft3d.peak_volume_cutoff * flex.max(
          flood_fill.grid_points_per_void()[1:]))).iselection()

    if self.params.optimise_initial_basis_vectors:
      self.volumes = flood_fill.grid_points_per_void().select(isel)
      sites_cart = flood_fill.centres_of_mass_cart().select(isel)
      sites_cart_optimised = optimise_basis_vectors(
        self.reflections['rlp'].select(self.reflections_used_for_indexing),
        sites_cart)

      self.sites = self.fft_cell.fractionalize(sites_cart_optimised)

      diffs = (sites_cart_optimised - sites_cart)
      norms = diffs.norms()
      flex.min_max_mean_double(norms).show()
      perm = flex.sort_permutation(norms, reverse=True)
      for p in perm[:10]:
        logger.debug("%s %s %s", sites_cart[p], sites_cart_optimised[p], norms[p])

      # only use those vectors which haven't shifted too far from starting point
      sel = norms < (5 * self.fft_cell.parameters()[0]/self.gridding[0])
      self.sites = self.sites.select(sel)
      self.volumes = self.volumes.select(sel)
      #diff = (self.sites - flood_fill.centres_of_mass_frac().select(isel))
      #flex.min_max_mean_double(diff.norms()).show()

    else:
      self.sites = flood_fill.centres_of_mass_frac().select(isel)
      self.volumes = flood_fill.grid_points_per_void().select(isel)
  def two_color_grid_search(self):
    '''Creates candidate reciprocal lattice points based on two beams and performs
    a 2-D grid search based on maximizing the functional using N_UNIQUE_V candidate
    vectors (N_UNIQUE_V is usually 30, following the Gildea paper).'''

    assert len(self.imagesets) == 1
    detector = self.imagesets[0].get_detector()

    mm_spot_pos = self.map_spots_pixel_to_mm_rad(self.reflections,detector,scan=None)

    self.map_centroids_to_reciprocal_space(mm_spot_pos,detector,self.beams[0],
                                           goniometer=None)
    self.reciprocal_lattice_points1 = mm_spot_pos['rlp'].select(
          (self.reflections['id'] == -1))

    rlps1 = mm_spot_pos['rlp'].select(
          (self.reflections['id'] == -1))

    self.map_centroids_to_reciprocal_space(mm_spot_pos,detector,self.beams[1],
                                           goniometer=None)
    self.reciprocal_lattice_points2 = mm_spot_pos['rlp'].select(
          (self.reflections['id'] == -1))
    # assert len(self.beams) == 3
    rlps2 = mm_spot_pos['rlp'].select(
          (self.reflections['id'] == -1))

    self.reciprocal_lattice_points = rlps1.concatenate(rlps2)

    #self.map_centroids_to_reciprocal_space(mm_spot_pos,detector,self.beams[2],goniometer=None)

    #self.reciprocal_lattice_points = mm_spot_pos['rlp'].select(
     #          (self.reflections['id'] == -1)&(1/self.reflections['rlp'].norms() > d_min))

    print "Indexing from %i reflections" %len(self.reciprocal_lattice_points)

    def compute_functional(vector):
      '''computes functional for 2-D grid search'''
      two_pi_S_dot_v = 2 * math.pi * self.reciprocal_lattice_points.dot(vector)
      return flex.sum(flex.cos(two_pi_S_dot_v))

    from rstbx.array_family import flex
    from rstbx.dps_core import SimpleSamplerTool
    assert self.target_symmetry_primitive is not None
    assert self.target_symmetry_primitive.unit_cell() is not None
    SST = SimpleSamplerTool(
      self.params.real_space_grid_search.characteristic_grid)
    SST.construct_hemisphere_grid(SST.incr)
    cell_dimensions = self.target_symmetry_primitive.unit_cell().parameters()[:3]
    unique_cell_dimensions = set(cell_dimensions)

    print("Makring search vecs")
    spiral_method = True
    if spiral_method:
        basis_vec_noise =True
        noise_scale = 2.
        #_N = 200000   # massively oversample the hemisphere so we can apply noise to our search
        _N = 100000
        print "Number of search vectors: %i" %( _N * len(unique_cell_dimensions))
        J = _N*2
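        # spiral-style sampling of the sphere: z = cos(theta) is stepped
        # uniformly over [-1, 1] while the azimuth is swept along with it, so
        # the J points spread evenly over the sphere; keeping the last
        # nn ~ J/2 points below selects (roughly) the upper hemisphere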
        _thetas = [np.arccos( (2.*j - 1. - J)/J)
            for j in range(1,J+1)]
        _phis = [ np.sqrt( np.pi*J) *np.arcsin( (2.*j - 1. - J)/J )
            for j in range(1,J+1)]
        _x = np.sin(_thetas)*np.cos(_phis)
        _y = np.sin(_thetas)*np.sin(_phis)
        _z = np.cos(_thetas)
        nn = int(_N * 1.01)
        _u_vecs = np.array(list(zip(_x, _y, _z)))[-nn:]

        rec_pts = np.array([self.reciprocal_lattice_points[i] for i in range(len(self.reciprocal_lattice_points))])
        N_unique = len(unique_cell_dimensions)

        # much faster to use numpy for massively over-sampled hemisphere..
        func_vals = np.zeros( nn*N_unique)
        vecs = np.zeros( (nn*N_unique, 3) )
        for i, l in enumerate(unique_cell_dimensions):
          # create noise model on top of lattice lengths...
          if basis_vec_noise:
            vec_mag = np.random.normal( l, scale=noise_scale, size=_u_vecs.shape[0] )
            vec_mag = vec_mag[:,None]
          else:
            vec_mag = l

          ul = _u_vecs * vec_mag
          func_slc = slice( i*nn, (i+1)*nn)
          vecs[func_slc] = ul
          func_vals[func_slc] = np.sum( np.cos( 2*np.pi*np.dot(rec_pts, ul.T) ),
                                      axis=0)

        order = np.argsort(func_vals)[::-1]  # sort function values, largest values first
        function_values = func_vals[order]
        vectors = vecs[order]

    else:  # fall back on original flex method
        vectors = flex.vec3_double()
        function_values = flex.double()
        print "Number of search vectors: %i" % (   len(SST.angles)* len(unique_cell_dimensions))
        for i, direction in enumerate(SST.angles):
          for l in unique_cell_dimensions:
            v = matrix.col(direction.dvec) * l
            f = compute_functional(v.elems)
            vectors.append(v.elems)
            function_values.append(f)
        perm = flex.sort_permutation(function_values, reverse=True)
        vectors = vectors.select(perm)
        function_values = function_values.select(perm)

    print("made search vecs")

    unique_vectors = []
    i = 0
    while len(unique_vectors) < N_UNIQUE_V:
      v = matrix.col(vectors[i])
      is_unique = True
      if i > 0:
        for v_u in unique_vectors:
          if v.length() < v_u.length():
            if is_approximate_integer_multiple(v, v_u):
              is_unique = False
              break
          elif is_approximate_integer_multiple(v_u, v):
            is_unique = False
            break
      if is_unique:
        unique_vectors.append(v)
      i += 1

    print ("chose unique basis vecs")
    if self.params.debug:
      for i in range(N_UNIQUE_V):
        v = matrix.col(vectors[i])
        print(v.elems, v.length(), function_values[i])

    basis_vectors = [v.elems for v in unique_vectors]
    self.candidate_basis_vectors = basis_vectors

    if self.params.optimise_initial_basis_vectors:
      self.params.optimise_initial_basis_vectors = False
      # todo: verify this reference to self.reciprocal_lattice_points is correct
      optimised_basis_vectors = optimise_basis_vectors(
        self.reciprocal_lattice_points, basis_vectors)
      optimised_function_values = flex.double([
        compute_functional(v) for v in optimised_basis_vectors])

      perm = flex.sort_permutation(optimised_function_values, reverse=True)
      optimised_basis_vectors = optimised_basis_vectors.select(perm)
      optimised_function_values = optimised_function_values.select(perm)

      unique_vectors = [matrix.col(v) for v in optimised_basis_vectors]

    print "Number of unique vectors: %i" %len(unique_vectors)

    if self.params.debug:
      for i in range(len(unique_vectors)):
        print(compute_functional(unique_vectors[i].elems), unique_vectors[i].length(), unique_vectors[i].elems)
        print()

    crystal_models = []
    self.candidate_basis_vectors = unique_vectors

    if self.params.debug:
      self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
      self.debug_plot_candidate_basis_vectors()


    candidate_orientation_matrices \
      = self.find_candidate_orientation_matrices(
        unique_vectors)
        # max_combinations=self.params.basis_vector_combinations.max_try)

    FILTER_BY_MAG = True
    if FILTER_BY_MAG:
      print("\n\n FILTERING BY MAG\n\n")
      FILTER_TOL = 10,3   # within 5 percent of the cell lengths and 3 degrees of the cell angles
      target_uc = self.params.known_symmetry.unit_cell.parameters()
      good_mats = []
      for c in candidate_orientation_matrices:
        uc = c.get_unit_cell().parameters()
        comps = []
        for i in range(3):
          tol = 0.01* FILTER_TOL[0] * target_uc[i]
          low = target_uc[i] - tol/2.
          high = target_uc[i] + tol/2
          comps.append(  low < uc[i] < high )
        for i in range(3,6):
          low = target_uc[i] - FILTER_TOL[1]
          high = target_uc[i] + FILTER_TOL[1]
          comps.append( low < uc[i] < high )
        if all( comps):
          print("matrix is ok:", c)
          good_mats.append(c)
        print("\nFilter kept %d / %d mats" % \
              (len(good_mats), len(candidate_orientation_matrices)))
      candidate_orientation_matrices = good_mats

    crystal_model, n_indexed = self.choose_best_orientation_matrix(
      candidate_orientation_matrices)
    orange = 2
    if crystal_model is not None:
      crystal_models = [crystal_model]
    else:
      crystal_models = []

    #assert len(crystal_models) > 0

    candidate_orientation_matrices = crystal_models

    #for i in range(len(candidate_orientation_matrices)):
      #if self.target_symmetry_primitive is not None:
        ##print "symmetrizing model"
        ##self.target_symmetry_primitive.show_summary()
        #symmetrized_model = self.apply_symmetry(
          #candidate_orientation_matrices[i], self.target_symmetry_primitive)
        #candidate_orientation_matrices[i] = symmetrized_model
    self.candidate_crystal_models = candidate_orientation_matrices

    # memory leak somewhere... probably not here.. but just in case...
    del _x, _y, _z, _u_vecs, order, rec_pts, vecs, func_vals, vectors, function_values
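
The spiral branch above evaluates the grid-search functional f(v) = sum over S of cos(2*pi*S.v) for a whole batch of candidate vectors at once instead of looping with compute_functional(). A self-contained numpy sketch of that vectorised evaluation (synthetic data, not DIALS code):

import numpy as np

def batch_functional(rec_pts, candidate_vecs):
    """rec_pts: (N, 3) reciprocal lattice points; candidate_vecs: (M, 3).
    Returns an (M,) array of functional values; larger means a better
    candidate basis vector."""
    phases = 2.0 * np.pi * rec_pts @ candidate_vecs.T   # (N, M) dot products
    return np.cos(phases).sum(axis=0)

# pick the best of M random candidates for N synthetic points
rng = np.random.default_rng(0)
rec_pts = rng.normal(size=(500, 3))
candidates = rng.normal(size=(200, 3))
best = candidates[np.argmax(batch_functional(rec_pts, candidates))]

This mirrors the line func_vals[func_slc] = np.sum(np.cos(2*np.pi*np.dot(rec_pts, ul.T)), axis=0) in the snippet, which fills one slice of the score array per unique cell length.
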
Example #4
  def real_space_grid_search(self):
    d_min = self.params.refinement_protocol.d_min_start

    sel = (self.reflections['id'] == -1)
    if d_min is not None:
      sel &= (1/self.reflections['rlp'].norms() > d_min)
    reciprocal_lattice_points = self.reflections['rlp'].select(sel)

    logger.info("Indexing from %i reflections" %len(reciprocal_lattice_points))

    def compute_functional(vector):
      two_pi_S_dot_v = 2 * math.pi * reciprocal_lattice_points.dot(vector)
      return flex.sum(flex.cos(two_pi_S_dot_v))

    from rstbx.array_family import flex
    from rstbx.dps_core import SimpleSamplerTool
    assert self.target_symmetry_primitive is not None
    assert self.target_symmetry_primitive.unit_cell() is not None
    SST = SimpleSamplerTool(
      self.params.real_space_grid_search.characteristic_grid)
    SST.construct_hemisphere_grid(SST.incr)
    cell_dimensions = self.target_symmetry_primitive.unit_cell().parameters()[:3]
    unique_cell_dimensions = set(cell_dimensions)
    logger.info(
      "Number of search vectors: %i" %(len(SST.angles) * len(unique_cell_dimensions)))
    vectors = flex.vec3_double()
    function_values = flex.double()
    for i, direction in enumerate(SST.angles):
      for l in unique_cell_dimensions:
        v = matrix.col(direction.dvec) * l
        f = compute_functional(v.elems)
        vectors.append(v.elems)
        function_values.append(f)

    perm = flex.sort_permutation(function_values, reverse=True)
    vectors = vectors.select(perm)
    function_values = function_values.select(perm)

    unique_vectors = []
    i = 0
    while len(unique_vectors) < 30:
      v = matrix.col(vectors[i])
      is_unique = True
      if i > 0:
        for v_u in unique_vectors:
          if v.length() < v_u.length():
            if is_approximate_integer_multiple(v, v_u):
              is_unique = False
              break
          elif is_approximate_integer_multiple(v_u, v):
            is_unique = False
            break
      if is_unique:
        unique_vectors.append(v)
      i += 1

    for i in range(30):
      v = matrix.col(vectors[i])
      logger.debug("%s %s %s" %(str(v.elems), str(v.length()), str(function_values[i])))

    basis_vectors = [v.elems for v in unique_vectors]
    self.candidate_basis_vectors = basis_vectors

    if self.params.optimise_initial_basis_vectors:
      optimised_basis_vectors = optimise_basis_vectors(
        reciprocal_lattice_points, basis_vectors)
      optimised_function_values = flex.double([
        compute_functional(v) for v in optimised_basis_vectors])

      perm = flex.sort_permutation(optimised_function_values, reverse=True)
      optimised_basis_vectors = optimised_basis_vectors.select(perm)
      optimised_function_values = optimised_function_values.select(perm)

      unique_vectors = [matrix.col(v) for v in optimised_basis_vectors]

    logger.info("Number of unique vectors: %i" %len(unique_vectors))

    for i in range(len(unique_vectors)):
      logger.debug("%s %s %s" %(
        str(compute_functional(unique_vectors[i].elems)),
        str(unique_vectors[i].length()),
        str(unique_vectors[i].elems)))

    crystal_models = []
    self.candidate_basis_vectors = unique_vectors
    self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
      self.debug_plot_candidate_basis_vectors()
    candidate_orientation_matrices \
      = self.find_candidate_orientation_matrices(
        unique_vectors,
        max_combinations=self.params.basis_vector_combinations.max_try)
    crystal_model, n_indexed = self.choose_best_orientation_matrix(
      candidate_orientation_matrices)
    if crystal_model is not None:
      crystal_models = [crystal_model]
    else:
      crystal_models = []

    #assert len(crystal_models) > 0

    candidate_orientation_matrices = crystal_models

    #for i in range(len(candidate_orientation_matrices)):
      #if self.target_symmetry_primitive is not None:
        ##print "symmetrizing model"
        ##self.target_symmetry_primitive.show_summary()
        #symmetrized_model = self.apply_symmetry(
          #candidate_orientation_matrices[i], self.target_symmetry_primitive)
        #candidate_orientation_matrices[i] = symmetrized_model

    self.candidate_crystal_models = candidate_orientation_matrices
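
Both grid searches depend on is_approximate_integer_multiple, which is imported from elsewhere in the DIALS indexing code and is used above to stop near-collinear multiples of a shorter accepted vector from filling the candidate list. A plausible sketch of such a check, with illustrative tolerances rather than the DIALS defaults:

import numpy as np

def is_approximate_integer_multiple_sketch(v_short, v_long,
                                           rel_tol=0.2, angle_tol_deg=5.0):
    """Return True if v_long is roughly an integer multiple (> 1) of v_short,
    i.e. the two vectors are nearly (anti)parallel and their length ratio is
    close to an integer.  Tolerances are assumptions for illustration."""
    a = np.asarray(v_short, dtype=float)
    b = np.asarray(v_long, dtype=float)
    cos_angle = abs(np.dot(a, b)) / (np.linalg.norm(a) * np.linalg.norm(b))
    if np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0))) > angle_tol_deg:
        return False
    ratio = np.linalg.norm(b) / np.linalg.norm(a)
    nearest = round(ratio)
    return nearest > 1 and abs(ratio - nearest) < rel_tol

In the selection loops above the shorter vector is always passed first, so a high-scoring candidate is only rejected when it is (approximately) a multiple of a vector that has already been kept.
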
Example #5
    def real_space_grid_search(self):
        d_min = self.params.refinement_protocol.d_min_start

        sel = (self.reflections['id'] == -1)
        if d_min is not None:
            sel &= (1 / self.reflections['rlp'].norms() > d_min)
        reciprocal_lattice_points = self.reflections['rlp'].select(sel)

        logger.info("Indexing from %i reflections" %
                    len(reciprocal_lattice_points))

        def compute_functional(vector):
            two_pi_S_dot_v = 2 * math.pi * reciprocal_lattice_points.dot(
                vector)
            return flex.sum(flex.cos(two_pi_S_dot_v))

        from rstbx.array_family import flex
        from rstbx.dps_core import SimpleSamplerTool
        assert self.target_symmetry_primitive is not None
        assert self.target_symmetry_primitive.unit_cell() is not None
        SST = SimpleSamplerTool(
            self.params.real_space_grid_search.characteristic_grid)
        SST.construct_hemisphere_grid(SST.incr)
        cell_dimensions = self.target_symmetry_primitive.unit_cell(
        ).parameters()[:3]
        unique_cell_dimensions = set(cell_dimensions)
        logger.info("Number of search vectors: %i" %
                    (len(SST.angles) * len(unique_cell_dimensions)))
        vectors = flex.vec3_double()
        function_values = flex.double()
        for i, direction in enumerate(SST.angles):
            for l in unique_cell_dimensions:
                v = matrix.col(direction.dvec) * l
                f = compute_functional(v.elems)
                vectors.append(v.elems)
                function_values.append(f)

        perm = flex.sort_permutation(function_values, reverse=True)
        vectors = vectors.select(perm)
        function_values = function_values.select(perm)

        unique_vectors = []
        i = 0
        while len(unique_vectors) < 30:
            v = matrix.col(vectors[i])
            is_unique = True
            if i > 0:
                for v_u in unique_vectors:
                    if v.length() < v_u.length():
                        if is_approximate_integer_multiple(v, v_u):
                            is_unique = False
                            break
                    elif is_approximate_integer_multiple(v_u, v):
                        is_unique = False
                        break
            if is_unique:
                unique_vectors.append(v)
            i += 1

        for i in range(30):
            v = matrix.col(vectors[i])
            logger.debug(
                "%s %s %s" %
                (str(v.elems), str(v.length()), str(function_values[i])))

        basis_vectors = [v.elems for v in unique_vectors]
        self.candidate_basis_vectors = basis_vectors

        if self.params.optimise_initial_basis_vectors:
            optimised_basis_vectors = optimise_basis_vectors(
                reciprocal_lattice_points, basis_vectors)
            optimised_function_values = flex.double(
                [compute_functional(v) for v in optimised_basis_vectors])

            perm = flex.sort_permutation(optimised_function_values,
                                         reverse=True)
            optimised_basis_vectors = optimised_basis_vectors.select(perm)
            optimised_function_values = optimised_function_values.select(perm)

            unique_vectors = [matrix.col(v) for v in optimised_basis_vectors]

        logger.info("Number of unique vectors: %i" % len(unique_vectors))

        for i in range(len(unique_vectors)):
            logger.debug(
                "%s %s %s" % (str(compute_functional(
                    unique_vectors[i].elems)), str(unique_vectors[i].length()),
                              str(unique_vectors[i].elems)))

        crystal_models = []
        self.candidate_basis_vectors = unique_vectors
        self.debug_show_candidate_basis_vectors()
        if self.params.debug_plots:
            self.debug_plot_candidate_basis_vectors()
        candidate_orientation_matrices \
          = self.find_candidate_orientation_matrices(
            unique_vectors,
            max_combinations=self.params.basis_vector_combinations.max_try)
        crystal_model, n_indexed = self.choose_best_orientation_matrix(
            candidate_orientation_matrices)
        if crystal_model is not None:
            crystal_models = [crystal_model]
        else:
            crystal_models = []

        #assert len(crystal_models) > 0

        candidate_orientation_matrices = crystal_models

        #for i in range(len(candidate_orientation_matrices)):
        #if self.target_symmetry_primitive is not None:
        ##print "symmetrizing model"
        ##self.target_symmetry_primitive.show_summary()
        #symmetrized_model = self.apply_symmetry(
        #candidate_orientation_matrices[i], self.target_symmetry_primitive)
        #candidate_orientation_matrices[i] = symmetrized_model

        self.candidate_crystal_models = candidate_orientation_matrices
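
Example #5 is the same real_space_grid_search as Example #4, only auto-formatted. For completeness, a small numpy sketch (not rstbx's SimpleSamplerTool) of how the candidate vectors are generated: each direction on a coarse hemisphere grid is scaled by every unique target cell length, and the results are ranked by the same functional as compute_functional() above. The grid spacing and cell lengths are illustrative assumptions.

import numpy as np

def hemisphere_directions(n_theta=50):
    """Roughly even unit vectors over one hemisphere (a crude stand-in for
    SimpleSamplerTool.construct_hemisphere_grid)."""
    dirs = []
    for theta in np.linspace(0.0, 0.5 * np.pi, n_theta):
        n_phi = max(1, int(round(2 * n_theta * np.sin(theta))))
        for phi in np.linspace(0.0, 2.0 * np.pi, n_phi, endpoint=False):
            dirs.append((np.sin(theta) * np.cos(phi),
                         np.sin(theta) * np.sin(phi),
                         np.cos(theta)))
    return np.array(dirs)

unique_cell_dimensions = {78.0, 37.0}   # e.g. a = b = 78, c = 37 (illustrative)
directions = hemisphere_directions()
vectors = np.concatenate([directions * l for l in unique_cell_dimensions])
rec_pts = np.random.default_rng(1).normal(size=(500, 3))   # synthetic rlps
scores = np.cos(2 * np.pi * rec_pts @ vectors.T).sum(axis=0)
vectors = vectors[np.argsort(scores)[::-1]]   # best candidates first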