def test_experimentlist_with_identifiers():
    """Exercise experiment identifiers: uniqueness enforcement, dictionary
    round-tripping, in-place reassignment, and selection/removal by id."""
    expts = ExperimentList()
    for name in ("bacon", "sausage"):
        expts.append(
            Experiment(beam=Beam(s0=(0, 0, -1)), detector=Detector(), identifier=name))
    # A duplicate identifier must be rejected on append
    with pytest.raises(Exception):
        expts.append(
            Experiment(beam=Beam(), detector=Detector(), identifier="bacon"))
    # Identifiers survive a to_dict()/decode() round trip
    round_tripped = ExperimentListDict(expts.to_dict()).decode()
    assert expts[0].identifier == round_tripped[0].identifier
    assert expts[1].identifier == round_tripped[1].identifier
    assert tuple(expts.identifiers()) == ("bacon", "sausage")
    # Identifiers can be reassigned in place
    expts[0].identifier = "spam"
    assert tuple(expts.identifiers()) == ("spam", "sausage")
    # Selection keeps only the named experiments, in the requested order
    expts.append(Experiment(identifier="bacon"))
    expts.select_on_experiment_identifiers(["spam", "bacon"])
    assert list(expts.identifiers()) == ["spam", "bacon"]
    # Removal drops exactly the named experiments
    expts.append(Experiment(identifier="ham"))
    expts.append(Experiment(identifier="jam"))
    expts.remove_on_experiment_identifiers(["spam", "jam"])
    assert list(expts.identifiers()) == ["bacon", "ham"]
def test_experimentlist_with_identifiers():
    """Check that experiment identifiers are enforced unique on append and
    survive serialization to and from dictionary form."""
    # Import only the models actually used below (the original import also
    # pulled in Goniometer and Scan, which were never referenced)
    from dxtbx.model import Beam, Detector

    # Initialise a list of experiments
    experiments = ExperimentList()
    experiments.append(
        Experiment(beam=Beam(s0=(0, 0, -1)), detector=Detector(), identifier="bacon"))
    experiments.append(
        Experiment(beam=Beam(s0=(0, 0, -1)), detector=Detector(), identifier="sausage"))
    # Appending an experiment whose identifier is already present must fail
    with pytest.raises(Exception):
        experiments.append(
            Experiment(beam=Beam(), detector=Detector(), identifier="bacon"))
    # Identifiers must survive a to_dict()/decode() round trip
    d = experiments.to_dict()
    e2 = ExperimentListDict(d).decode()
    assert experiments[0].identifier == e2[0].identifier
    assert experiments[1].identifier == e2[1].identifier
    assert tuple(experiments.identifiers()) == ("bacon", "sausage")
    # Identifiers are mutable in place
    experiments[0].identifier = "spam"
    assert tuple(experiments.identifiers()) == ("spam", "sausage")
def test_UnitCellAnalysisObserver():
    """The observer should pick up experiments and dendrogram from a script
    object and produce unit-cell analysis plots from them."""
    # Ten random P1 unit cells of roughly equal volume
    sgi = sgtbx.space_group_info("P1")
    cells = [
        sgi.any_compatible_unit_cell(volume=random.uniform(990, 1010))
        for _ in range(10)
    ]
    # Build one experiment per unit cell, all with identity orientation
    identity = matrix.identity(3)
    experiments = ExperimentList()
    for cell in cells:
        B = matrix.sqr(cell.fractionalization_matrix()).transpose()
        direct_matrix = (identity * B).inverse()
        xtal = Crystal(
            direct_matrix[:3],
            direct_matrix[3:6],
            direct_matrix[6:9],
            space_group=sgi.group(),
        )
        experiments.append(Experiment(crystal=xtal))
    # Cluster the cells to obtain a dendrogram
    symmetries = [expt.crystal.get_crystal_symmetry() for expt in experiments]
    clusterer = UnitCellCluster.from_crystal_symmetries(
        symmetries, lattice_ids=experiments.identifiers())
    _, dendrogram, _ = clusterer.ab_cluster(write_file_lists=False, doplot=False)
    # Fake the script object the observer expects to read from
    script = mock.Mock()
    script._experiments = experiments
    script.unit_cell_dendrogram = dendrogram
    # The observer should record both pieces of data and make the plots
    observer = observers.UnitCellAnalysisObserver()
    observer.update(script)
    assert set(observer.data) == {"experiments", "dendrogram"}
    plots = observer.make_plots()
    assert "unit_cell_graphs" in plots
class DataManager(object):
    """Hold deep copies of input experiments and reflections and provide
    selection, reindexing and export (pickle/experiment-list/MTZ) operations
    on them, leaving the caller's originals untouched."""

    def __init__(self, experiments, reflections):
        """Store the inputs and work on deep copies of them.

        :param experiments: an ExperimentList
        :param reflections: the corresponding reflection table
        """
        self._input_experiments = experiments
        self._input_reflections = reflections
        # Deep-copy so that nothing done here mutates the caller's objects
        self._experiments = copy.deepcopy(experiments)
        self._reflections = copy.deepcopy(reflections)
        self._set_batches()

    def _set_batches(self):
        """Give each experiment a non-overlapping batch-offset range."""
        max_batches = max(e.scan.get_image_range()[1] for e in self._experiments)
        max_batches += 10  # allow some head room
        # Space offsets by the next power of ten above max_batches so batch
        # numbers from different experiments can never collide
        n = int(math.ceil(math.log10(max_batches)))
        for i, expt in enumerate(self._experiments):
            expt.scan.set_batch_offset(i * 10**n)
            logger.debug(
                "%s %s" % (expt.scan.get_batch_offset(), expt.scan.get_batch_range()))

    @property
    def experiments(self):
        return self._experiments

    @experiments.setter
    def experiments(self, experiments):
        self._experiments = experiments

    @property
    def reflections(self):
        return self._reflections

    @reflections.setter
    def reflections(self, reflections):
        self._reflections = reflections

    def select(self, experiment_identifiers):
        """Reduce experiments and reflections to the given identifiers.

        Reflection 'id' values are renumbered to match the position of their
        experiment in the reduced list.
        """
        self._experiments = ExperimentList([
            expt for expt in self._experiments
            if expt.identifier in experiment_identifiers
        ])
        experiment_identifiers = self._experiments.identifiers()
        sel = flex.bool(len(self._reflections), False)
        for i_expt, identifier in enumerate(experiment_identifiers):
            sel_expt = self._reflections['identifier'] == identifier
            sel.set_selected(sel_expt, True)
            # Renumber ids to match the new experiment ordering
            self._reflections['id'].set_selected(sel_expt, i_expt)
        self._reflections = self._reflections.select(sel)
        assert self.reflections.are_experiment_identifiers_consistent(
            self._experiments)

    def reflections_as_miller_arrays(self, intensity_key='intensity.sum.value'):
        """Return one intensity miller.array per experiment.

        :param intensity_key: reflection-table column holding the intensity
            values; the matching '.variance' column must also be present.
        :returns: list of cctbx miller.array objects, one per experiment
        """
        from cctbx import crystal, miller
        variance_key = intensity_key.replace('.value', '.variance')
        assert intensity_key in self._reflections
        assert variance_key in self._reflections

        miller_arrays = []
        for expt in self._experiments:
            crystal_symmetry = crystal.symmetry(
                unit_cell=expt.crystal.get_unit_cell(),
                space_group=expt.crystal.get_space_group())
            # Only summation-integrated reflections belonging to this experiment
            sel = ((self._reflections.get_flags(
                self._reflections.flags.integrated_sum)
                    & (self._reflections['identifier'] == expt.identifier)))
            assert sel.count(True) > 0
            refl = self._reflections.select(sel)
            data = refl[intensity_key]
            variances = refl[variance_key]
            # FIXME probably need to do some filtering of intensities similar
            # to that done in export_mtz
            miller_indices = refl['miller_index']
            assert variances.all_gt(0)
            sigmas = flex.sqrt(variances)
            miller_set = miller.set(crystal_symmetry, miller_indices,
                                    anomalous_flag=False)
            intensities = miller.array(miller_set, data=data, sigmas=sigmas)
            intensities.set_observation_type_xray_intensity()
            intensities.set_info(
                miller.array_info(source='DIALS', source_type='pickle'))
            miller_arrays.append(intensities)
        return miller_arrays

    def _reindex_experiment(self, expt, cb_op, space_group):
        """Apply cb_op to one experiment's crystal model and to the Miller
        indices of the reflections belonging to that experiment."""
        logger.info('Reindexing experiment %s: %s' % (expt.identifier,
                                                      cb_op.as_xyz()))
        cryst_reindexed = expt.crystal.change_basis(cb_op)
        if space_group is not None:
            cryst_reindexed.set_space_group(space_group)
        expt.crystal.update(cryst_reindexed)
        sel = self._reflections['identifier'] == expt.identifier
        self._reflections['miller_index'].set_selected(
            sel, cb_op.apply(self._reflections['miller_index'].select(sel)))

    def reindex(self, cb_op=None, cb_ops=None, space_group=None):
        """Reindex experiments and reflections in place.

        Exactly one of cb_op and cb_ops must be supplied:
          - cb_op: a single change-of-basis operator applied to everything;
          - cb_ops: either a dict mapping operator (as xyz string) to a list
            of experiment indices, or a sequence of one operator per
            experiment.
        If space_group is given it replaces the space group of every
        reindexed crystal.
        """
        assert [cb_op, cb_ops].count(None) == 1
        if cb_op is not None:
            logger.info('Reindexing: %s' % cb_op)
            # One operator for all experiments: apply to the whole table at once
            self._reflections['miller_index'] = cb_op.apply(
                self._reflections['miller_index'])
            for expt in self._experiments:
                cryst_reindexed = expt.crystal.change_basis(cb_op)
                if space_group is not None:
                    cryst_reindexed.set_space_group(space_group)
                expt.crystal.update(cryst_reindexed)
        elif isinstance(cb_ops, dict):
            # .items() (not the Python 2-only .iteritems()) so this works on
            # both Python 2 and Python 3
            for cb_op, dataset_ids in cb_ops.items():
                cb_op = sgtbx.change_of_basis_op(cb_op)
                for dataset_id in dataset_ids:
                    self._reindex_experiment(
                        self._experiments[dataset_id], cb_op, space_group)
        else:
            # One operator per experiment, in order
            assert len(cb_ops) == len(self._experiments)
            for cb_op, expt in zip(cb_ops, self._experiments):
                self._reindex_experiment(expt, cb_op, space_group)

    def export_reflections(self, filename):
        """Write the current reflection table to a pickle file."""
        self._reflections.as_pickle(filename)

    def export_experiments(self, filename):
        """Write the current experiment list to file."""
        dump.experiment_list(self._experiments, filename)

    def export_mtz(self, filename=None, params=None):
        """Export reflections to an MTZ file.

        :param filename: output path; overrides params.mtz.hklout if given
        :param params: export phil parameters; defaults are used when None
        :returns: the path the MTZ file was written to
        """
        if params is None:
            params = export_phil_scope.extract()
        if filename is not None:
            params.mtz.hklout = filename
        # Module-level export_mtz function (the method name shadows it in the
        # class namespace only; this lookup resolves at module scope)
        m = export_mtz(
            self._reflections,
            self._experiments,
            params.mtz.hklout,
            include_partials=params.mtz.include_partials,
            keep_partials=params.mtz.keep_partials,
            scale_partials=params.mtz.scale_partials,
            min_isigi=params.mtz.min_isigi,
            force_static_model=params.mtz.force_static_model,
            filter_ice_rings=params.mtz.filter_ice_rings,
            ignore_profile_fitting=params.mtz.ignore_profile_fitting,
            apply_scales=params.mtz.apply_scales)
        m.show_summary()
        # Sanity check: every BATCH value in the data must belong to a
        # declared batch header
        b1 = set(b.num() for b in m.batches())
        b2 = set(m.get_column('BATCH').extract_values().as_double().iround())
        assert len(b2.difference(b1)) == 0
        return params.mtz.hklout