Example #1
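Neither method shows its imports; the bodies below rely on at least the following (the standard-library, numpy and cctbx/scitbx imports are standard, while the project-specific helpers are only named because their import paths are not shown in the example):

import gc
import sys

import numpy

import cctbx.maptbx
from scitbx.array_family import flex

# Project-specific helpers used below; their import paths are not part of the
# example, so they are listed rather than guessed:
#   ElectronDensityMap
#   scale_map_to_reference
#   get_interpolated_mapping_between_coordinates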
    def run(self):

        dataset, grid, reference_map, args, verbose = self.data

        log_file = dataset.file_manager.get_file('dataset_log')

        assert reference_map.is_sparse(), 'Reference map is not in sparse form'

        # ============================================================================>
        # Create map handler in the native coordinate frame
        # ============================================================================>
        # Extract the map data
        fft_map = dataset.data.fft_maps['truncated']
        # Scale the map
        if args.params.maps.density_scaling == 'none':
            pass
        elif args.params.maps.density_scaling == 'sigma':
            fft_map.apply_sigma_scaling()
        elif args.params.maps.density_scaling == 'volume':
            fft_map.apply_volume_scaling()

        # ============================================================================>
        # Morph the map to the reference frame
        # ============================================================================>
        # Extract the map sites from the grid partition
        point_mappings_grid = grid.partition.nn_groups[
            grid.global_mask().outer_mask_indices()]
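        # Every outer-mask grid point must already be assigned to a partition group; -1 marks an unassigned point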
        assert sum(point_mappings_grid == -1) == 0
        sites_cart_map = grid.grid2cart(grid.global_mask().outer_mask(),
                                        origin_shift=True)
        # Translate the grid partition mappings to the dataset alignment mappings
        mappings_grid2dataset = get_interpolated_mapping_between_coordinates(
            query_list=grid.partition.sites_cart,
            ref_list=dataset.model.alignment.reference_sites,
            tol=0.01)
        point_mappings_dataset = numpy.array(
            [mappings_grid2dataset[i] for i in point_mappings_grid])
        assert sum(point_mappings_dataset == -1) == 0
        sites_cart_map_d = dataset.model.alignment.ref2nat(
            coordinates=sites_cart_map, mappings=point_mappings_dataset)
        # Create and sample from map object
        native_map_true = ElectronDensityMap.from_fft_map(fft_map)
        morphed_map_data = native_map_true.get_cart_values(sites_cart_map_d)

        # Scale map to reference
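        # index_on_other gives the positions of the inner-mask points within the
        # outer-mask point list, so the scaling below is fitted over the inner region only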
        scale_mask = grid.index_on_other(
            query=grid.global_mask().inner_mask_indices(),
            other=grid.global_mask().outer_mask_indices())
        scaled_map_data = scale_map_to_reference(
            ref_vals=reference_map.data,
            vals=morphed_map_data,
            mask_idxs=flex.size_t(scale_mask))
        # Create map holder
        morphed_map = reference_map.new_from_template(
            map_data=scaled_map_data, sparse=reference_map.is_sparse())
        morphed_map.meta.num = dataset.num
        morphed_map.meta.tag = dataset.tag
        morphed_map.meta.type = 'observed map'
        morphed_map.meta.resolution = reference_map.meta.resolution
        morphed_map.meta.map_uncertainty = None
        morphed_map.meta.obs_map_mean = morphed_map_data.min_max_mean().mean
        morphed_map.meta.obs_map_rms = morphed_map_data.standard_deviation_of_the_sample()
        morphed_map.meta.scl_map_mean = scaled_map_data.min_max_mean().mean
        morphed_map.meta.scl_map_rms = scaled_map_data.standard_deviation_of_the_sample()

        # Print a running row of progress markers
        print('>', end='')
        sys.stdout.flush()

        return morphed_map.make_sparse()

    def __call__(self, dataset, grid, reference_map, map_resolution):

        assert reference_map.is_sparse(), 'Reference map is not in sparse form'

        # ============================================================================>
        # Create map handler in the native coordinate frame
        # ============================================================================>
        # Extract the map data
        # TODO: make sure new version works
        # fft_map = dataset.data.fft_maps['truncated']
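        # Recalculate the FFT map at the requested resolution rather than reusing the cached truncated map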
        dataset.data.fft_maps['truncated'] = dataset.data.miller_arrays['truncated'].fft_map(
            resolution_factor=float(self.resolution_factor),
            d_min=float(map_resolution),
            symmetry_flags=cctbx.maptbx.use_space_group_symmetry)
        fft_map = dataset.data.fft_maps['truncated']
        gc.collect()
        # Scale the map
        if self.density_scaling == 'none':
            pass
        elif self.density_scaling == 'sigma':
            fft_map.apply_sigma_scaling()
        elif self.density_scaling == 'volume':
            fft_map.apply_volume_scaling()
        # Create map object
        # native_map_true = ElectronDensityMap.from_fft_map(fft_map).as_map()
        native_map_true = ElectronDensityMap.from_fft_map(fft_map)

        # ============================================================================>
        # Morph the map to the reference frame
        # ============================================================================>
        # Extract the map sites from the grid partition
        point_mappings_grid = grid.partition.nn_groups[grid.global_mask().outer_mask_indices()]
        assert sum(point_mappings_grid == -1) == 0
        sites_cart_map = grid.grid2cart(grid.global_mask().outer_mask(),
                                        origin_shift=True,
                                        )
        # Translate the grid partition mappings to the dataset alignment mappings
        mappings_grid2dataset = get_interpolated_mapping_between_coordinates(query_list=grid.partition.sites_cart,
                                                                             ref_list=dataset.model.alignment.reference_sites,
                                                                             tol=0.01,
                                                                             )
        point_mappings_dataset = numpy.array([mappings_grid2dataset[i] for i in point_mappings_grid])
        assert sum(point_mappings_dataset == -1) == 0
        sites_cart_map_d = dataset.model.alignment.ref2nat(coordinates=sites_cart_map,
                                                           mappings=point_mappings_dataset,
                                                           )
        morphed_map_data = native_map_true.get_cart_values(sites_cart_map_d)

        # Scale map to reference
        scale_mask = grid.index_on_other(query=grid.global_mask().inner_mask_indices(),
                                         other=grid.global_mask().outer_mask_indices())
        scaled_map_data = scale_map_to_reference(ref_vals=reference_map.data,
                                                 vals=morphed_map_data,
                                                 mask_idxs=flex.size_t(scale_mask))
        # Create map holder
        morphed_map = reference_map.new_from_template(map_data=scaled_map_data, sparse=reference_map.is_sparse())
        morphed_map.meta.num = dataset.num
        morphed_map.meta.tag = dataset.tag
        morphed_map.meta.type = 'observed map'
        morphed_map.meta.resolution = reference_map.meta.resolution
        morphed_map.meta.map_uncertainty = None
        morphed_map.meta.obs_map_mean = morphed_map_data.min_max_mean().mean
        morphed_map.meta.obs_map_rms  = morphed_map_data.standard_deviation_of_the_sample()
        morphed_map.meta.scl_map_mean = scaled_map_data.min_max_mean().mean
        morphed_map.meta.scl_map_rms  = scaled_map_data.standard_deviation_of_the_sample()

        # Print a running row of progress markers
        print('>', end='')
        sys.stdout.flush()

        return morphed_map.make_sparse()
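
The call to get_interpolated_mapping_between_coordinates in both methods pairs each grid partition site with a reference site and returns -1 where no match is found, which is what the assertions check. A minimal, self-contained sketch of that kind of nearest-point matching within a tolerance, using scipy as a hypothetical stand-in for the project's own helper:

import numpy
from scipy.spatial import cKDTree

def nearest_site_mapping(query_list, ref_list, tol):
    """For each query site, return the index of the nearest reference site,
    or -1 if no reference site lies within tol (hypothetical stand-in for
    get_interpolated_mapping_between_coordinates)."""
    dists, idxs = cKDTree(numpy.asarray(ref_list)).query(numpy.asarray(query_list))
    idxs = numpy.asarray(idxs, dtype=int)
    idxs[dists > tol] = -1
    return idxs

# Toy usage: three query sites, two of which coincide with reference sites
ref   = numpy.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
query = numpy.array([[0.001, 0.0, 0.0], [1.0, 0.0, 0.0], [5.0, 5.0, 5.0]])
print(nearest_site_mapping(query, ref, tol=0.01))   # -> [ 0  1 -1]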
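scale_map_to_reference is only called here, not defined. One common way to scale one set of map values onto another over a mask is an unweighted least-squares linear fit over the masked points only; whether the project's helper does exactly this is an assumption. A minimal numpy sketch of that idea:

import numpy

def linear_scale_to_reference(ref_vals, vals, mask_idxs):
    """Fit scaled = a*vals + b over the masked points only, then apply (a, b)
    to the full array (hypothetical stand-in for scale_map_to_reference)."""
    x = numpy.asarray(vals)[mask_idxs]
    y = numpy.asarray(ref_vals)[mask_idxs]
    a, b = numpy.polyfit(x, y, deg=1)
    return a * numpy.asarray(vals) + b

# Toy usage: the observed values differ from the reference by a scale and an offset
rng = numpy.random.default_rng(0)
ref = rng.normal(size=1000)
obs = 2.5 * ref + 0.3 + rng.normal(scale=0.05, size=1000)
mask = numpy.arange(0, 1000, 4)            # fit on a quarter of the points only
scaled = linear_scale_to_reference(ref, obs, mask)
print(round(float(numpy.corrcoef(ref, scaled)[0, 1]), 3))   # close to 1.0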