def test_update_imageset_ids(dials_data):
    """Check that update_imageset_ids assigns one imageset_id per experiment.

    Loads eight proteinase-K datasets, assigns unique identifiers, updates
    the imageset ids, and verifies that after combining the tables each
    reflection ``id`` maps to exactly one matching ``imageset_id``.
    """
    # Hoist the (potentially expensive) test-data lookup out of the loop.
    data_dir = dials_data("multi_crystal_proteinase_k", pathlib=True)
    expts = ExperimentList()
    refls = []
    for i in [1, 2, 3, 4, 5, 7, 8, 10]:
        refls.append(
            flex.reflection_table.from_file(data_dir / f"reflections_{i}.pickle")
        )
        expts.extend(
            load.experiment_list(
                data_dir / f"experiments_{i}.json",
                check_format=False,
            )
        )
    # first make sure ids are set up correctly.
    experiments, reflections = assign_unique_identifiers(expts, refls)
    reflections = update_imageset_ids(experiments, reflections)
    joint_reflections = flex.reflection_table()
    # Use a distinct loop variable: the original shadowed the `refls` list.
    for table in reflections:
        joint_reflections.extend(table)
    # check that there are 8 unique id and imageset_ids, and that these
    # correctly correspond to each experiment
    assert len(set(joint_reflections["id"])) == 8
    assert len(set(joint_reflections["imageset_id"])) == 8
    for id_ in range(8):
        sel = joint_reflections["id"] == id_
        assert set(joint_reflections["imageset_id"].select(sel)) == {id_}
def _reindex_experiments_reflections(experiments, reflections, space_group, cb_op):
    """Reindex the input data.

    Applies the change-of-basis operator ``cb_op`` (with ``space_group``) to
    the experiments, reindexes each reflection table's Miller indices, and
    merges the tables into a single combined table.
    """
    reindexed_experiments = reindex_experiments(
        experiments, cb_op, space_group=space_group
    )
    # Assign imageset ids before combining the per-dataset tables.
    reflections = update_imageset_ids(experiments, reflections)
    reindexed_reflections = flex.reflection_table()
    # One table per reindexed experiment; deep-copy so the inputs are untouched.
    for table in reflections[: len(reindexed_experiments)]:
        refl_copy = copy.deepcopy(table)
        refl_copy["miller_index"] = cb_op.apply(refl_copy["miller_index"])
        reindexed_reflections.extend(refl_copy)
    return reindexed_experiments, [reindexed_reflections]
def finish(self):
    """Save the experiments json and scaled pickle file.

    Combines the per-dataset reflection tables into one joint table,
    flags reflections with very small scale factors, strips working
    columns, and returns ``(experiments, joint_table)``.
    """
    # Drop the scaler first: only the experiments and a reflection table
    # are needed for mtz export, and freeing memory before the merge helps.
    del self.scaler
    gc.collect()
    # update imageset ids before combining reflection tables.
    self.reflections = update_imageset_ids(self.experiments, self.reflections)
    joint_table = flex.reflection_table()
    for idx, table in enumerate(self.reflections):
        joint_table.extend(table)
        # Release each source table as soon as it has been merged.
        self.reflections[idx] = 0
    gc.collect()
    # remove reflections with very low scale factors
    low_scale = joint_table["inverse_scale_factor"] <= 0.001
    not_already_bad = ~joint_table.get_flags(
        joint_table.flags.bad_for_scaling, all=False
    )
    n_low = (not_already_bad & low_scale).count(True)
    if n_low > 0:
        logger.warning(
            """%s non-excluded reflections were assigned scale factors < 0.001 during scaling. These will be excluded in the output reflection table. It may be best to rerun scaling from this point for an improved model.""",
            n_low,
        )
    joint_table.set_flags(low_scale, joint_table.flags.excluded_for_scaling)
    # Strip intermediate working columns; tolerate any that are absent.
    for col in (
        "variance",
        "intensity",
        "s0",
        "s0c",
        "s1c",
        "prescaling_correction",
        "batch",
    ):
        try:
            del joint_table[col]
        except KeyError:
            pass
    return self.experiments, joint_table
def export(self):
    """Write the reindexed output files for cosym.

    Produces the cosym.json, reflections and experiments files.
    """
    # Make imageset ids consistent before merging the tables.
    self._reflections = update_imageset_ids(self._experiments, self._reflections)
    combined = flex.reflection_table()
    for table in self._reflections:
        combined.extend(table)
    combined.reset_ids()
    logger.info("Saving reindexed experiments to %s", self.params.output.experiments)
    self._experiments.as_file(self.params.output.experiments)
    logger.info("Saving reindexed reflections to %s", self.params.output.reflections)
    combined.as_file(self.params.output.reflections)