def generate_reflections(self):
    """Predict reflections for a mock 3 degree sweep; store on self."""
    from dxtbx.model.scan import scan_factory

    # Mock scan: a single image covering a 3 degree oscillation
    factory = scan_factory()
    self.scan = factory.make_scan(
        image_range=(1, 1),
        exposure_times=0.1,
        oscillation=(0, 3.0),
        epochs=range(1),
        deg=True)
    sweep_range = self.scan.get_oscillation_range(deg=False)

    # A scans-type ExperimentList is needed only to drive ray prediction
    experiments = ExperimentList()
    experiments.append(Experiment(
        beam=self.beam,
        detector=self.detector,
        goniometer=self.gonio,
        scan=self.scan,
        crystal=self.crystal,
        imageset=None))

    # Predict rays to work out which hkls fall inside the sweep
    ray_predictor = ScansRayPredictor(experiments, sweep_range)
    resolution = 2.0
    index_generator = IndexGenerator(
        self.crystal.get_unit_cell(),
        space_group(space_group_symbols(1).hall()).type(),
        resolution)
    rays = ray_predictor.predict(index_generator.to_array())

    # Copy the ray data into a standard reflection table
    self.reflections = flex.reflection_table.empty_standard(len(rays))
    self.reflections.update(rays)
    return
def setup_models(args):
    """Setup the experimental models and return a one-experiment list."""
    # Extract geometry models (detector, goniometer, crystal, beam) from
    # the refinement test phil scope, honouring command-line overrides
    master_phil = parse("""
    include scope dials.test.algorithms.refinement.geometry_phil
    """, process_includes=True)
    models = setup_geometry.Extract(master_phil, cmdline_args=args)

    # Build a mock scan for a 180 degree sweep of 1 degree images
    scan = scan_factory().make_scan(
        image_range=(1, 180),
        exposure_times=0.1,
        oscillation=(0, 1.0),
        epochs=range(180),
        deg=True)

    # Sanity-check the scan geometry (in radians)
    sweep_range = scan.get_oscillation_range(deg=False)
    im_width = scan.get_oscillation(deg=False)[1]
    assert sweep_range == (0., pi)
    assert approx_equal(im_width, 1.0 * pi / 180.)

    # Wrap everything in an ExperimentList holding a single Experiment
    experiments = ExperimentList()
    experiments.append(Experiment(
        beam=models.beam,
        detector=models.detector,
        goniometer=models.goniometer,
        scan=scan,
        crystal=models.crystal,
        imageset=None))
    return experiments
def predict_reflections(self):
    """Predict reflections for the sweep and find overlapping shoeboxes.

    Returns a tuple of (predicted reflection table, overlap graph).
    """
    from dials.algorithms import shoebox
    from dials.array_family import flex
    from dxtbx.model.experiment.experiment_list import ExperimentList
    from dxtbx.model.experiment.experiment_list import Experiment
    from dials.algorithms.profile_model.gaussian_rs import Model

    # Cache the models held by the sweep
    self.beam = self.sweep.get_beam()
    self.detector = self.sweep.get_detector()
    self.gonio = self.sweep.get_goniometer()
    self.scan = self.sweep.get_scan()

    # Profile parameters from beam divergence and crystal mosaicity
    sigma_b = self.beam.get_sigma_divergence(deg=True)
    sigma_m = self.crystal.get_mosaicity(deg=True)
    profile = Model(None, 3, sigma_b, sigma_m, deg=True)

    exlist = ExperimentList()
    exlist.append(Experiment(
        imageset=self.sweep,
        beam=self.beam,
        detector=self.detector,
        goniometer=self.gonio,
        scan=self.scan,
        crystal=self.crystal,
        profile=profile))

    # Predict, label everything as experiment 0, then compute bboxes
    predicted = flex.reflection_table.from_predictions(exlist[0])
    predicted['id'] = flex.int(len(predicted), 0)
    predicted.compute_bbox(exlist)

    # Find overlapping reflections from the bounding boxes
    overlaps = shoebox.find_overlapping(predicted['bbox'])
    return predicted, overlaps
def experiment_list_for_crystal(self, crystal):
    """Return an ExperimentList pairing each imageset with *crystal*."""
    experiments = ExperimentList()
    for iset in self.imagesets:
        experiments.append(Experiment(
            imageset=iset,
            beam=iset.get_beam(),
            detector=iset.get_detector(),
            goniometer=iset.get_goniometer(),
            scan=iset.get_scan(),
            crystal=crystal))
    return experiments
def experiment_list_for_crystal(self, crystal):
    """Combine every imageset held by self with the given crystal model."""
    experiments = ExperimentList()
    for imageset in self.imagesets:
        # each experiment reuses the models already attached to the imageset
        expt = Experiment(
            imageset=imageset,
            beam=imageset.get_beam(),
            detector=imageset.get_detector(),
            goniometer=imageset.get_goniometer(),
            scan=imageset.get_scan(),
            crystal=crystal)
        experiments.append(expt)
    return experiments
def find_lattices(self):
    """Run the real-space grid search, then pair each candidate crystal
    model with each imageset to form an ExperimentList."""
    self.real_space_grid_search()
    experiments = ExperimentList()
    for model in self.candidate_crystal_models:
        for iset in self.imagesets:
            experiments.append(Experiment(
                imageset=iset,
                beam=iset.get_beam(),
                detector=iset.get_detector(),
                goniometer=iset.get_goniometer(),
                scan=iset.get_scan(),
                crystal=model))
    return experiments
def combine_crystals(experiments):
    '''Replace all crystals in the experiments list with the first crystal'''
    from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList
    # every output experiment shares the crystal of the first input one
    first_crystal = experiments[0].crystal
    combined = ExperimentList()
    for exp in experiments:
        combined.append(Experiment(
            beam=exp.beam,
            detector=exp.detector,
            scan=exp.scan,
            goniometer=exp.goniometer,
            crystal=first_crystal,
            imageset=exp.imageset))
    return combined
def find_lattices(self):
    """Index by real-space grid search; return an ExperimentList with one
    entry per (candidate crystal model, imageset) pair."""
    self.real_space_grid_search()
    models = self.candidate_crystal_models
    experiments = ExperimentList()
    for cm in models:
        for imageset in self.imagesets:
            expt = Experiment(
                imageset=imageset,
                beam=imageset.get_beam(),
                detector=imageset.get_detector(),
                goniometer=imageset.get_goniometer(),
                scan=imageset.get_scan(),
                crystal=cm)
            experiments.append(expt)
    return experiments
def combine_crystals(experiments):
    '''Replace all crystals in the experiments list with the first crystal'''
    from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList
    new_experiments = ExperimentList()
    # the first experiment's crystal becomes the shared reference model
    ref_crystal = experiments[0].crystal
    for exp in experiments:
        new_experiments.append(Experiment(
            beam=exp.beam, detector=exp.detector, scan=exp.scan,
            goniometer=exp.goniometer, crystal=ref_crystal,
            imageset=exp.imageset))
    return new_experiments
def find_lattices(self):
    """Build experiments from known orientations, converting each crystal
    model to its primitive setting first (the indexer expects primitive)."""
    experiments = ExperimentList()
    for cm in self.known_orientations:
        # indexer expects crystals to be in primitive setting
        cb_op = cm.get_space_group().info().change_of_basis_op_to_primitive_setting()
        primitive_cm = cm.change_basis(cb_op)
        for imageset in self.imagesets:
            experiments.append(Experiment(
                imageset=imageset,
                beam=imageset.get_beam(),
                detector=imageset.get_detector(),
                goniometer=imageset.get_goniometer(),
                scan=imageset.get_scan(),
                crystal=primitive_cm))
    return experiments
def find_lattices(self):
    """Return an ExperimentList built from self.known_orientations.

    Each known crystal model is changed to its primitive setting (the
    indexer requires primitive), then combined with every imageset in
    self.imagesets.
    """
    experiments = ExperimentList()
    for cm in self.known_orientations:
        # indexer expects crystals to be in primitive setting
        space_group = cm.get_space_group()
        cb_op_to_primitive \
            = space_group.info().change_of_basis_op_to_primitive_setting()
        cm = cm.change_basis(cb_op_to_primitive)
        for imageset in self.imagesets:
            experiments.append(
                Experiment(imageset=imageset,
                           beam=imageset.get_beam(),
                           detector=imageset.get_detector(),
                           goniometer=imageset.get_goniometer(),
                           scan=imageset.get_scan(),
                           crystal=cm))
    return experiments
def generate(self):
    """Create a five-experiment list with deliberate model sharing.

    Experiments 0 and 4 share one set of models, 1 and 3 share another,
    and 2 has its own — a mix of shared and unique beam / detector /
    goniometer / scan instances.
    """
    from dxtbx.model import Beam, Detector, Goniometer, Scan

    # Three distinct instances of each model type
    beams = [Beam() for _ in range(3)]
    detectors = [Detector() for _ in range(3)]
    goniometers = [Goniometer() for _ in range(3)]
    scans = [Scan() for _ in range(3)]

    # Symmetric reuse pattern: 0, 1, 2, 1, 0
    order = [0, 1, 2, 1, 0]

    experiments = ExperimentList()
    for j in order:
        experiments.append(Experiment(
            beam=beams[j],
            detector=detectors[j],
            goniometer=goniometers[j],
            scan=scans[j]))

    # Return the list of experiments
    return experiments
def prepare_dxtbx_models(self, setting_specific_ai, sg, isoform=None):
    """Build dxtbx beam/detector/crystal models from an indexing solution.

    setting_specific_ai -- autoindexing engine holding the refined setting
                           (distance, beam centre, orientation, mosaicity)
    sg -- space group symbol for the crystal model
    isoform -- optional isoform whose fractionalization matrix, if given,
               replaces the crystal's B matrix

    Returns a single-experiment ExperimentList.  Prints the models for
    inspection (Python 2 print statements).
    """
    from dxtbx.model.beam import beam_factory
    beam = beam_factory.simple(wavelength = self.inputai.wavelength)

    from dxtbx.model.detector import detector_factory
    detector = detector_factory.simple(
        sensor = detector_factory.sensor("PAD"),
        distance = setting_specific_ai.distance(),
        beam_centre = [setting_specific_ai.xbeam(), setting_specific_ai.ybeam()],
        fast_direction = "+x",
        slow_direction = "+y",
        pixel_size = [self.pixel_size, self.pixel_size],
        image_size = [self.inputpd['size1'], self.inputpd['size1']],
        )

    # real-space cell vectors come from the orientation's direct matrix
    direct = matrix.sqr(setting_specific_ai.getOrientation().direct_matrix())
    from dxtbx.model.crystal import crystal_model
    crystal = crystal_model(
        real_space_a = matrix.row(direct[0:3]),
        real_space_b = matrix.row(direct[3:6]),
        real_space_c = matrix.row(direct[6:9]),
        space_group_symbol = sg,
        mosaicity = setting_specific_ai.getMosaicity())

    if isoform is not None:
        # override B with the isoform's (transposed) fractionalization matrix
        newB = matrix.sqr(isoform.fractionalization_matrix()).transpose()
        crystal.set_B(newB)

    from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList
    experiments = ExperimentList()
    experiments.append(Experiment(beam=beam, detector=detector, crystal=crystal))

    print beam
    print detector
    print crystal

    return experiments
def find_lattices(self):
    """Find candidate lattices via a 1D FFT basis-vector search.

    Selects unindexed reflections (id == -1), optionally restricted by
    the starting d_min, finds candidate basis vectors, then keeps at
    most the single best orientation matrix.  Returns an ExperimentList
    with one Experiment per (crystal model, imageset) pair — possibly
    empty if no model was found.
    """
    self.d_min = self.params.refinement_protocol.d_min_start
    from rstbx.phil.phil_preferences import indexing_api_defs
    import iotbx.phil
    hardcoded_phil = iotbx.phil.parse(
        input_string=indexing_api_defs).extract()
    # only the not-yet-indexed reflections take part in the search
    sel = (self.reflections['id'] == -1)
    if self.d_min is not None:
        # 1/|rlp| is the d-spacing; keep reflections above the resolution cut
        sel &= (1 / self.reflections['rlp'].norms() > self.d_min)
    reflections = self.reflections.select(sel)
    solutions = candidate_basis_vectors_fft1d(
        reflections['rlp'], hardcoded_phil, max_cell=self.params.max_cell)
    self.candidate_basis_vectors = solutions[0]
    self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
        self.debug_plot_candidate_basis_vectors()
    self.candidate_crystal_models = self.find_candidate_orientation_matrices(
        self.candidate_basis_vectors,
        max_combinations=self.params.basis_vector_combinations.max_try)
    crystal_model, n_indexed = self.choose_best_orientation_matrix(
        self.candidate_crystal_models)
    # keep only the best model (or none at all)
    if crystal_model is not None:
        crystal_models = [crystal_model]
    else:
        crystal_models = []
    experiments = ExperimentList()
    for cm in crystal_models:
        for imageset in self.imagesets:
            experiments.append(
                Experiment(imageset=imageset,
                           beam=imageset.get_beam(),
                           detector=imageset.get_detector(),
                           goniometer=imageset.get_goniometer(),
                           scan=imageset.get_scan(),
                           crystal=cm))
    return experiments
def generate_reflections(self):
    """Predict reflections for a mock 3 degree single-image sweep and
    store them in self.reflections."""
    # Build a mock scan for a 3 degree sweep
    from dxtbx.model.scan import scan_factory
    sf = scan_factory()
    self.scan = sf.make_scan(image_range=(1, 1),
                             exposure_times=0.1,
                             oscillation=(0, 3.0),
                             epochs=range(1),
                             deg=True)
    sweep_range = self.scan.get_oscillation_range(deg=False)

    # Create a scans ExperimentList, only for generating reflections
    experiments = ExperimentList()
    experiments.append(
        Experiment(beam=self.beam,
                   detector=self.detector,
                   goniometer=self.gonio,
                   scan=self.scan,
                   crystal=self.crystal,
                   imageset=None))

    # Create a ScansRayPredictor
    ray_predictor = ScansRayPredictor(experiments, sweep_range)

    # Generate rays - only to work out which hkls are predicted.  Space
    # group 1 (P1) is used so that all indices are generated.
    resolution = 2.0
    index_generator = IndexGenerator(
        self.crystal.get_unit_cell(),
        space_group(space_group_symbols(1).hall()).type(),
        resolution)
    indices = index_generator.to_array()
    rays = ray_predictor(indices)

    # Make a standard reflection_table and copy in the ray data
    self.reflections = flex.reflection_table.empty_standard(len(rays))
    self.reflections.update(rays)
    return
def setup_models(args): """setup the experimental models""" # Setup experimental models master_phil = parse(""" include scope dials.test.algorithms.refinement.geometry_phil """, process_includes=True) models = setup_geometry.Extract(master_phil, cmdline_args=args) detector = models.detector goniometer = models.goniometer crystal = models.crystal beam = models.beam # Build a mock scan for a 180 degree sweep sf = scan_factory() scan = sf.make_scan(image_range=(1, 180), exposure_times=0.1, oscillation=(0, 1.0), epochs=range(180), deg=True) sweep_range = scan.get_oscillation_range(deg=False) im_width = scan.get_oscillation(deg=False)[1] assert sweep_range == (0., pi) assert approx_equal(im_width, 1.0 * pi / 180.) experiments = ExperimentList() experiments.append( Experiment(beam=beam, detector=detector, goniometer=goniometer, scan=scan, crystal=crystal, imageset=None)) return experiments
def __call__(self, experiments, reflections): self.working_phil.show() params = self.working_phil.extract() for iexp, exp in enumerate(experiments): print "Refining crystal", iexp # reflection subset for a single experiment refs = reflections.select(reflections['id'] == iexp) refs['id'] = flex.size_t(len(refs),0) # experiment list for a single experiment exps=ExperimentList() exps.append(exp) refiner = RefinerFactory.from_parameters_data_experiments( params, refs, exps, verbosity=1) # do refinement refiner.run() refined_exps = refiner.get_experiments() # replace this experiment with the refined one experiments[iexp] = refined_exps[0] return experiments
def find_lattices(self):
    """1D FFT lattice search over the unindexed reflections.

    Returns an ExperimentList containing the single best crystal model
    (if any) paired with every imageset in self.imagesets.
    """
    self.d_min = self.params.refinement_protocol.d_min_start
    from rstbx.phil.phil_preferences import indexing_api_defs
    import iotbx.phil
    hardcoded_phil = iotbx.phil.parse(
        input_string=indexing_api_defs).extract()
    # select reflections not yet assigned to any experiment
    sel = (self.reflections['id'] == -1)
    if self.d_min is not None:
        # apply the starting resolution cutoff (d = 1/|rlp|)
        sel &= (1/self.reflections['rlp'].norms() > self.d_min)
    reflections = self.reflections.select(sel)
    solutions = candidate_basis_vectors_fft1d(
        reflections['rlp'], hardcoded_phil, max_cell=self.params.max_cell)
    self.candidate_basis_vectors = solutions[0]
    self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
        self.debug_plot_candidate_basis_vectors()
    self.candidate_crystal_models = self.find_candidate_orientation_matrices(
        self.candidate_basis_vectors,
        max_combinations=self.params.basis_vector_combinations.max_try)
    crystal_model, n_indexed = self.choose_best_orientation_matrix(
        self.candidate_crystal_models)
    # at most one crystal model survives
    if crystal_model is not None:
        crystal_models = [crystal_model]
    else:
        crystal_models = []
    experiments = ExperimentList()
    for cm in crystal_models:
        for imageset in self.imagesets:
            experiments.append(Experiment(imageset=imageset,
                                          beam=imageset.get_beam(),
                                          detector=imageset.get_detector(),
                                          goniometer=imageset.get_goniometer(),
                                          scan=imageset.get_scan(),
                                          crystal=cm))
    return experiments
def __call__(self, experiments, reflections):
    """Refine every crystal separately and write the results back in place."""
    self.working_phil.show()
    params = self.working_phil.extract()
    for iexp, exp in enumerate(experiments):
        print("Refining crystal", iexp)

        # pick out this experiment's reflections and re-label them as id 0
        refs = reflections.select(reflections["id"] == iexp)
        refs["id"] = flex.size_t(len(refs), 0)

        # the refiner wants an experiment list, even for one experiment
        exps = ExperimentList()
        exps.append(exp)

        refiner = RefinerFactory.from_parameters_data_experiments(
            params, refs, exps, verbosity=1
        )

        # run refinement and replace the original experiment
        refiner.run()
        experiments[iexp] = refiner.get_experiments()[0]
    return experiments
def predict_reflections(self):
    """Predict reflections for self.sweep and compute shoebox overlaps.

    Returns (predicted, overlaps): the reflection table with bounding
    boxes computed, and the overlap graph between those boxes.
    """
    from dials.algorithms import shoebox
    from dials.array_family import flex
    from dxtbx.model.experiment.experiment_list import ExperimentList
    from dxtbx.model.experiment.experiment_list import Experiment
    from dials.algorithms.profile_model.gaussian_rs import Model

    # Get models from the sweep
    self.beam = self.sweep.get_beam()
    self.detector = self.sweep.get_detector()
    self.gonio = self.sweep.get_goniometer()
    self.scan = self.sweep.get_scan()

    # profile parameters: beam divergence and crystal mosaicity (degrees)
    sigma_b = self.beam.get_sigma_divergence(deg=True)
    sigma_m = self.crystal.get_mosaicity(deg=True)

    exlist = ExperimentList()
    exlist.append(Experiment(
        imageset=self.sweep,
        beam=self.beam,
        detector=self.detector,
        goniometer=self.gonio,
        scan=self.scan,
        crystal=self.crystal,
        profile=Model(None, 3, sigma_b, sigma_m, deg=True)))

    predicted = flex.reflection_table.from_predictions(exlist[0])
    # all predictions belong to the single experiment, id 0
    predicted['id'] = flex.int(len(predicted), 0)
    predicted.compute_bbox(exlist)

    # Find overlapping reflections
    overlaps = shoebox.find_overlapping(predicted['bbox'])

    # Return the reflections and overlaps
    return predicted, overlaps
def do_work(item):
    """Refine a single (index, experiment) pair against its reflections.

    *item* is an (iexp, experiment) tuple.  Reads ``reflections`` and
    ``params`` from the enclosing scope.  Errors during refinement are
    reported and swallowed so one bad experiment does not stop the rest.
    """
    iexp, exp = item

    print "Refining crystal", iexp

    # reflection subset for a single experiment, re-labelled as id 0
    refs = reflections.select(reflections['id'] == iexp)
    refs['id'] = flex.int(len(refs), 0)

    # DGW commented out as reflections.minimum_number_of_reflections no longer exists
    #if len(refs) < params.refinement.reflections.minimum_number_of_reflections:
    #  print "Not enough reflections to refine experiment"
    #  return

    # experiment list for a single experiment
    exps = ExperimentList()
    exps.append(exp)

    try:
        refiner = RefinerFactory.from_parameters_data_experiments(
            params, refs, exps)
        # do refinement
        refiner.run()
    except Exception, e:
        # best-effort: report the failure and move on
        print "Error,", str(e)
        return
def do_work(item):
    """Refine one experiment (given as an (iexp, experiment) tuple).

    Uses ``reflections`` and ``params`` from the enclosing scope; any
    refinement exception is printed and suppressed.
    """
    iexp, exp = item

    print "Refining crystal", iexp

    # reflection subset for a single experiment; ids reset to 0
    refs = reflections.select(reflections['id'] == iexp)
    refs['id'] = flex.int(len(refs),0)

    # DGW commented out as reflections.minimum_number_of_reflections no longer exists
    #if len(refs) < params.refinement.reflections.minimum_number_of_reflections:
    #  print "Not enough reflections to refine experiment"
    #  return

    # experiment list for a single experiment
    exps=ExperimentList()
    exps.append(exp)

    try:
        refiner = RefinerFactory.from_parameters_data_experiments(
            params, refs, exps)
        # do refinement
        refiner.run()
    except Exception, e:
        # deliberate best-effort handling: report and continue
        print "Error,", str(e)
        return
def run(self):
    '''Execute the script.

    Slices experiments, reflections and/or datablocks either into a
    requested scan-range subset or into equal-sized blocks
    (params.block_size), then writes each sliced model out with a
    filename derived from the input name and the scan range.
    '''
    from dials.util.options import flatten_reflections, flatten_experiments, \
        flatten_datablocks
    import cPickle as pickle

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=True)
    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)

    # Try to load the models and data
    slice_exps = len(experiments) > 0
    slice_refs = len(reflections) > 0
    slice_dbs = len(datablocks) > 0

    # Catch case of nothing to do
    if not any([slice_exps, slice_refs, slice_dbs]):
        print "No suitable input provided"
        self.parser.print_help()
        return

    if reflections:
        if len(reflections) > 1:
            raise Sorry("Only one reflections list can be imported at present")
        reflections = reflections[0]

        # calculate frame numbers if needed
        if experiments:
            reflections = calculate_frame_numbers(reflections, experiments)

        # if we still don't have the right column give up
        if 'xyzobs.px.value' not in reflections:
            raise Sorry("These reflections do not have frame numbers set, and "
                        "there are no experiments provided to calculate these.")

    # set trivial case where no scan range is provided at all
    if not params.scan_range:
        params.scan_range = [None]

    # check if slicing into blocks
    if params.block_size is not None:
        # in this case for simplicity, ensure that there is either an
        # an experiment list or datablocks, but not both. Ensure there is only
        # a single scan contained within.
        if [slice_exps, slice_dbs].count(True) != 1:
            raise Sorry("For slicing into blocks please provide either datablocks"
                        " or experiments, but not both.")
        if slice_exps:
            if len(experiments) > 1:
                raise Sorry("For slicing into blocks please provide a single "
                            "scan only")
            scan = experiments[0].scan
        if slice_dbs:
            scans = datablocks[0].unique_scans()
            if len(scans) > 1 or len(datablocks) > 1:
                raise Sorry("For slicing into blocks please provide a single "
                            "scan only")
            scan = scans[0]

        # Having extracted the scan, calculate the blocks
        params.scan_range = calculate_block_ranges(scan, params.block_size)

        # Do the slicing then recombine
        if slice_exps:
            sliced = [slice_experiments(experiments, [sr])[0] \
                      for sr in params.scan_range]
            sliced_experiments = ExperimentList()
            for exp in sliced:
                sliced_experiments.append(exp)
        if slice_dbs:
            sliced = [slice_datablocks(datablocks, [sr])[0] \
                      for sr in params.scan_range]
            imagesets = [db.extract_imagesets()[0] for db in sliced]
            sliced_datablocks = DataBlock(imagesets)

        # slice reflections if present
        if slice_refs:
            sliced = [slice_reflections(reflections, [sr]) \
                      for sr in params.scan_range]
            sliced_reflections = sliced[0]
            for i, rt in enumerate(sliced[1:]):
                rt['id'] += (i + 1)  # set id
                sliced_reflections.extend(rt)

    else:
        # slice each dataset into the requested subset
        if slice_exps:
            sliced_experiments = slice_experiments(experiments, params.scan_range)
        if slice_refs:
            sliced_reflections = slice_reflections(reflections, params.scan_range)
        if slice_dbs:
            sliced_datablocks = slice_datablocks(datablocks, params.scan_range)

    # Save sliced experiments
    if slice_exps:
        output_experiments_filename = params.output.experiments_filename
        if output_experiments_filename is None:
            # take first filename as template
            bname = basename(params.input.experiments[0].filename)
            bname = splitext(bname)[0]
            if not bname:
                bname = "experiments"
            if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                ext = "_{0}_{1}.json".format(*params.scan_range[0])
            else:
                ext = "_sliced.json"
            output_experiments_filename = bname + ext
        print 'Saving sliced experiments to {0}'.format(
            output_experiments_filename)

        from dxtbx.model.experiment.experiment_list import ExperimentListDumper
        dump = ExperimentListDumper(sliced_experiments)
        dump.as_json(output_experiments_filename)

    # Save sliced reflections
    if slice_refs:
        output_reflections_filename = params.output.reflections_filename
        if output_reflections_filename is None:
            # take first filename as template
            bname = basename(params.input.reflections[0].filename)
            bname = splitext(bname)[0]
            if not bname:
                bname = "reflections"
            if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                ext = "_{0}_{1}.pickle".format(*params.scan_range[0])
            else:
                ext = "_sliced.pickle"
            output_reflections_filename = bname + ext

        print 'Saving sliced reflections to {0}'.format(
            output_reflections_filename)
        sliced_reflections.as_pickle(output_reflections_filename)

    # Save sliced datablocks
    if slice_dbs:
        output_datablocks_filename = params.output.datablocks_filename
        if output_datablocks_filename is None:
            # take first filename as template
            bname = basename(params.input.datablock[0].filename)
            bname = splitext(bname)[0]
            if not bname:
                bname = "datablock"
            if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                ext = "_{0}_{1}.json".format(*params.scan_range[0])
            else:
                ext = "_sliced.json"
            output_datablocks_filename = bname + ext
        print 'Saving sliced datablocks to {0}'.format(
            output_datablocks_filename)

        from dxtbx.datablock import DataBlockDumper
        dump = DataBlockDumper(sliced_datablocks)
        dump.as_file(output_datablocks_filename)

    return
class Test(object):
    """Compare analytical vs finite-difference gradients for stills
    prediction parameterisations on a synthetic still experiment."""

    def __init__(self):
        self.create_models()
        self.generate_reflections()

    def create_models(self):
        """Build the experimental models and their parameterisations."""
        # build models, with a larger crystal than default in order to get plenty of
        # reflections on the 'still' image
        overrides = """
        geometry.parameters.crystal.a.length.range=40 50;
        geometry.parameters.crystal.b.length.range=40 50;
        geometry.parameters.crystal.c.length.range=40 50;
        geometry.parameters.random_seed = 42"""

        master_phil = parse("""
        include scope dials.test.algorithms.refinement.geometry_phil
        """, process_includes=True)

        models = Extract(master_phil, overrides)

        # keep track of the models
        self.detector = models.detector
        self.gonio = models.goniometer
        self.crystal = models.crystal
        self.beam = models.beam

        # Create a stills ExperimentList (no goniometer, scan or imageset)
        self.stills_experiments = ExperimentList()
        self.stills_experiments.append(
            Experiment(beam=self.beam,
                       detector=self.detector,
                       crystal=self.crystal,
                       imageset=None))

        # keep track of the parameterisation of the models
        self.det_param = DetectorParameterisationSinglePanel(self.detector)
        self.s0_param = BeamParameterisation(self.beam, self.gonio)
        self.xlo_param = CrystalOrientationParameterisation(self.crystal)
        self.xluc_param = CrystalUnitCellParameterisation(self.crystal)

    def generate_reflections(self):
        """Predict reflections for a mock 3 degree sweep; store on self."""
        # Build a mock scan for a 3 degree sweep
        from dxtbx.model.scan import scan_factory
        sf = scan_factory()
        self.scan = sf.make_scan(image_range=(1, 1),
                                 exposure_times=0.1,
                                 oscillation=(0, 3.0),
                                 epochs=range(1),
                                 deg=True)
        sweep_range = self.scan.get_oscillation_range(deg=False)

        # Create a scans ExperimentList, only for generating reflections
        experiments = ExperimentList()
        experiments.append(
            Experiment(beam=self.beam,
                       detector=self.detector,
                       goniometer=self.gonio,
                       scan=self.scan,
                       crystal=self.crystal,
                       imageset=None))

        # Create a ScansRayPredictor
        ray_predictor = ScansRayPredictor(experiments, sweep_range)

        # Generate rays - only to work out which hkls are predicted
        resolution = 2.0
        index_generator = IndexGenerator(
            self.crystal.get_unit_cell(),
            space_group(space_group_symbols(1).hall()).type(),
            resolution)
        indices = index_generator.to_array()
        rays = ray_predictor(indices)

        # Make a standard reflection_table and copy in the ray data
        self.reflections = flex.reflection_table.empty_standard(len(rays))
        self.reflections.update(rays)
        return

    def get_fd_gradients(self, pred_param, ref_predictor):
        """Central finite-difference gradients of (X, Y, DeltaPsi) with
        respect to each parameter of *pred_param*.

        Returns a list of dicts, one per parameter, keyed by 'name',
        'dX_dp', 'dY_dp' and 'dDeltaPsi_dp'.
        """
        # get finite difference gradients
        p_vals = pred_param.get_param_vals()
        deltas = [1.e-7] * len(p_vals)

        fd_grads = []
        p_names = pred_param.get_param_names()
        for i in range(len(deltas)):
            # save parameter value
            val = p_vals[i]

            # calc reverse state
            p_vals[i] -= deltas[i] / 2.
            pred_param.set_param_vals(p_vals)
            ref_predictor(self.reflections)
            x, y, _ = self.reflections['xyzcal.mm'].deep_copy().parts()
            delpsi = self.reflections['delpsical.rad'].deep_copy()
            rev_state = flex.vec3_double(x, y, delpsi)

            # calc forward state
            p_vals[i] += deltas[i]
            pred_param.set_param_vals(p_vals)
            ref_predictor(self.reflections)
            x, y, _ = self.reflections['xyzcal.mm'].deep_copy().parts()
            delpsi = self.reflections['delpsical.rad'].deep_copy()
            fwd_state = flex.vec3_double(x, y, delpsi)

            # reset parameter to saved value
            p_vals[i] = val

            # finite difference
            fd = (fwd_state - rev_state)
            x_grads, y_grads, delpsi_grads = fd.parts()
            x_grads /= deltas[i]
            y_grads /= deltas[i]
            delpsi_grads /= deltas[i]

            fd_grads.append({'name': p_names[i],
                             'dX_dp': x_grads,
                             'dY_dp': y_grads,
                             'dDeltaPsi_dp': delpsi_grads})

        # return to the initial state
        pred_param.set_param_vals(p_vals)

        return fd_grads

    def run_stills_pred_param(self, verbose=False):
        """Check analytical gradients of StillsPredictionParameterisation
        against finite differences, with outlier-tolerant thresholds."""
        if verbose:
            print 'Testing derivatives for StillsPredictionParameterisation'
            print '========================================================'

        # Build a prediction parameterisation for the stills experiment
        pred_param = StillsPredictionParameterisation(
            self.stills_experiments,
            detector_parameterisations=[self.det_param],
            beam_parameterisations=[self.s0_param],
            xl_orientation_parameterisations=[self.xlo_param],
            xl_unit_cell_parameterisations=[self.xluc_param])

        # Predict the reflections in place. Must do this ahead of calculating
        # the analytical gradients so quantities like s1 are correct
        from dials.algorithms.refinement.prediction import ExperimentsPredictor
        ref_predictor = ExperimentsPredictor(self.stills_experiments)
        ref_predictor(self.reflections)

        # get analytical gradients
        an_grads = pred_param.get_gradients(self.reflections)

        fd_grads = self.get_fd_gradients(pred_param, ref_predictor)

        for i, (an_grad, fd_grad) in enumerate(zip(an_grads, fd_grads)):
            # compare FD with analytical calculations
            if verbose:
                print "\nParameter {0}: {1}".format(i, fd_grad['name'])

            for idx, name in enumerate(["dX_dp", "dY_dp", "dDeltaPsi_dp"]):
                if verbose:
                    print name
                a = fd_grad[name]
                b = an_grad[name]

                abs_error = a - b
                denom = a + b

                fns = five_number_summary(abs_error)
                if verbose:
                    print (" summary of absolute errors: %9.6f %9.6f %9.6f " + \
                        "%9.6f %9.6f") % fns
                assert flex.max(flex.abs(abs_error)) < 0.0003
                # largest absolute error found to be about 0.00025 for dY/dp of
                # Crystal0g_param_3. Reject outlying absolute errors and test again.
                iqr = fns[3] - fns[1]

                # skip further stats on errors with an iqr of near zero, e.g. dDeltaPsi_dp
                # for detector parameters, which are all equal to zero
                if iqr < 1.e-10:
                    continue

                sel1 = abs_error < fns[3] + 1.5 * iqr
                sel2 = abs_error > fns[1] - 1.5 * iqr
                sel = sel1 & sel2
                tst = flex.max_index(flex.abs(abs_error.select(sel)))
                tst_val = abs_error.select(sel)[tst]
                n_outliers = sel.count(False)
                if verbose:
                    print (" {0} outliers rejected, leaving greatest " + \
                        "absolute error: {1:9.6f}").format(n_outliers, tst_val)
                # largest absolute error now 0.000086 for dX/dp of Beam0Mu2
                assert abs(tst_val) < 0.00009

                # Completely skip parameters with FD gradients all zero (e.g.
                # gradients of DeltaPsi for detector parameters)
                sel1 = flex.abs(a) < 1.e-10
                if sel1.all_eq(True):
                    continue

                # otherwise calculate normalised errors, by dividing absolute errors by
                # the IQR (more stable than relative error calculation)
                norm_error = abs_error / iqr
                fns = five_number_summary(norm_error)
                if verbose:
                    print (" summary of normalised errors: %9.6f %9.6f %9.6f " + \
                        "%9.6f %9.6f") % fns
                # largest normalised error found to be about 25.7 for dY/dp of
                # Crystal0g_param_3.
                try:
                    assert flex.max(flex.abs(norm_error)) < 30
                except AssertionError as e:
                    e.args += ("extreme normalised error value: {0}".format(
                        flex.max(flex.abs(norm_error))), )
                    raise e

                # Reject outlying normalised errors and test again
                iqr = fns[3] - fns[1]
                if iqr > 0.:
                    sel1 = norm_error < fns[3] + 1.5 * iqr
                    sel2 = norm_error > fns[1] - 1.5 * iqr
                    sel = sel1 & sel2
                    tst = flex.max_index(flex.abs(norm_error.select(sel)))
                    tst_val = norm_error.select(sel)[tst]
                    n_outliers = sel.count(False)

                    # most outliers found for dY/dp of Crystal0g_param_3 (which had
                    # largest errors, so no surprise there).
                    try:
                        assert n_outliers < 250
                    except AssertionError as e:
                        e.args += ("too many outliers rejected: {0}".format(
                            n_outliers), )
                        raise e

                    if verbose:
                        print (" {0} outliers rejected, leaving greatest " + \
                            "normalised error: {1:9.6f}").format(n_outliers, tst_val)
                    # largest normalised error now about -4. for dX/dp of Detector0Tau1
                    assert abs(tst_val) < 4.5, 'should be about 4 not %s' % tst_val

        if verbose:
            print

        return

    def run_spherical_relp_stills_pred_param(self, verbose=True):
        """Check analytical vs finite-difference gradients for the
        spherical-relp stills parameterisation, element by element."""
        if verbose:
            print 'Testing derivatives for SphericalRelpStillsPredictionParameterisation'
            print '====================================================================='

        # Build a prediction parameterisation for the stills experiment
        pred_param = SphericalRelpStillsPredictionParameterisation(
            self.stills_experiments,
            detector_parameterisations=[self.det_param],
            beam_parameterisations=[self.s0_param],
            xl_orientation_parameterisations=[self.xlo_param],
            xl_unit_cell_parameterisations=[self.xluc_param])

        # Predict the reflections in place. Must do this ahead of calculating
        # the analytical gradients so quantities like s1 are correct
        from dials.algorithms.refinement.prediction import ExperimentsPredictor
        ref_predictor = ExperimentsPredictor(self.stills_experiments,
                                             spherical_relp=True)
        ref_predictor(self.reflections)

        # get analytical gradients
        an_grads = pred_param.get_gradients(self.reflections)

        fd_grads = self.get_fd_gradients(pred_param, ref_predictor)

        # compare FD with analytical calculations
        for i, (an_grad, fd_grad) in enumerate(zip(an_grads, fd_grads)):
            if verbose:
                print "\nParameter {0}: {1}".format(i, fd_grad['name'])

            for idx, name in enumerate(["dX_dp", "dY_dp", "dDeltaPsi_dp"]):
                if verbose:
                    print name
                for a, b in zip(an_grad[name], fd_grad[name]):
                    if name == 'dDeltaPsi_dp':
                        # DeltaPsi errors are much worse than X, Y errors!
                        # FIXME, look into this further
                        assert approx_equal(a, b, eps=5e-3)
                    else:
                        assert approx_equal(a, b, eps=5e-6)
                if verbose:
                    print "OK"

        if verbose:
            print
class read_experiments(object):
  """Reconstruct an ExperimentList from a cxi.merge-style "_frame.pickle"
  text dump.

  Each line of the pickled text is split on single spaces and fields are
  located via the module-level `order_dict` (defined elsewhere in this
  file -- assumed to map field name to token index; TODO confirm).
  A simple beam, a dummy detector and a crystal model are built per line.
  """

  def __init__(self, params):
    # params -- a phil extract; reads params.output.prefix, params.pixel_size
    #           and params.target_space_group.
    import cPickle as pickle
    from dxtbx.model.beam import beam_factory
    from dxtbx.model.detector import detector_factory
    from dxtbx.model.crystal import crystal_model
    from cctbx.crystal_orientation import crystal_orientation, basis_type
    from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList
    from scitbx import matrix

    self.experiments = ExperimentList()   # accumulated Experiment models
    self.unique_file_names = []           # source file name per experiment
    self.params = params

    # NOTE(review): file handle from open() is never closed explicitly;
    # left as-is to keep the code byte-identical.
    data = pickle.load(
        open(self.params.output.prefix + "_frame.pickle", "rb"))
    frames_text = data.split("\n")

    for item in frames_text:
      tokens = item.split(' ')

      # beam model from the recorded wavelength
      wavelength = float(tokens[order_dict["wavelength"]])
      beam = beam_factory.simple(wavelength=wavelength)

      # dummy detector built from recorded distance / beam centre
      detector = detector_factory.simple(
          sensor=detector_factory.sensor(
              "PAD"),  # XXX shouldn't hard code for XFEL
          distance=float(tokens[order_dict["distance"]]),
          beam_centre=[
              float(tokens[order_dict["beam_x"]]),
              float(tokens[order_dict["beam_y"]])
          ],
          fast_direction="+x",
          slow_direction="+y",
          pixel_size=[self.params.pixel_size, self.params.pixel_size],
          image_size=[1795, 1795],  # XXX obviously need to figure this out
      )

      # crystal orientation: the dump stores the reciprocal-space matrix;
      # convert to real-space vectors for the crystal model
      reciprocal_matrix = matrix.sqr([
          float(tokens[order_dict[k]]) for k in [
              'res_ori_1', 'res_ori_2', 'res_ori_3', 'res_ori_4',
              'res_ori_5', 'res_ori_6', 'res_ori_7', 'res_ori_8',
              'res_ori_9'
          ]
      ])
      ORI = crystal_orientation(reciprocal_matrix, basis_type.reciprocal)
      direct = matrix.sqr(ORI.direct_matrix())
      crystal = crystal_model(
          real_space_a=matrix.row(direct[0:3]),
          real_space_b=matrix.row(direct[3:6]),
          real_space_c=matrix.row(direct[6:9]),
          space_group_symbol=self.params.target_space_group.type().
          lookup_symbol(),
          mosaicity=float(tokens[order_dict["half_mosaicity_deg"]]),
      )
      crystal.domain_size = float(tokens[order_dict["domain_size_ang"]])
      #if isoform is not None:
      #  newB = matrix.sqr(isoform.fractionalization_matrix()).transpose()
      #  crystal.set_B(newB)

      self.experiments.append(
          Experiment(
              beam=beam,
              detector=None,  #dummy for now
              crystal=crystal))
      self.unique_file_names.append(
          tokens[order_dict["unique_file_name"]])

    self.show_summary()

  def get_experiments(self):
    """Return the accumulated ExperimentList."""
    return self.experiments

  def get_files(self):
    """Return the list of unique source file names, parallel to
    get_experiments()."""
    return self.unique_file_names

  def show_summary(self):
    """Print mean and standard deviation of wavelength, unit-cell a/b/c
    and domain size over all experiments read."""
    w = flex.double([e.beam.get_wavelength() for e in self.experiments])
    stats = flex.mean_and_variance(w)
    print "Wavelength mean and standard deviation:", stats.mean(
    ), stats.unweighted_sample_standard_deviation()

    uc = [e.crystal.get_unit_cell().parameters() for e in self.experiments]
    a = flex.double([u[0] for u in uc])
    stats = flex.mean_and_variance(a)
    print "Unit cell a mean and standard deviation:", stats.mean(
    ), stats.unweighted_sample_standard_deviation()
    b = flex.double([u[1] for u in uc])
    stats = flex.mean_and_variance(b)
    print "Unit cell b mean and standard deviation:", stats.mean(
    ), stats.unweighted_sample_standard_deviation()
    c = flex.double([u[2] for u in uc])
    stats = flex.mean_and_variance(c)
    print "Unit cell c mean and standard deviation:", stats.mean(
    ), stats.unweighted_sample_standard_deviation()

    d = flex.double([e.crystal.domain_size for e in self.experiments])
    stats = flex.mean_and_variance(d)
    # NOTE XXX FIXME: cxi.index seems to record the half-domain size; report here the full domain size
    print "Domain size mean and standard deviation:", 2. * stats.mean(
    ), 2. * stats.unweighted_sample_standard_deviation()
def run(self):
  """Joint refinement driver: build a reference detector (first input or
  average of all inputs), combine every input crystal with the reference
  beam/detector, then alternate detector-phase and crystals-phase
  refinement for params.n_macrocycles cycles, saving the results.

  NOTE(review): this is a method of a class defined outside this chunk;
  it relies on self.parser plus module-level helpers (check_experiment,
  ExperimentFromCrystal, detector_refiner, detector_parallel_refiners,
  crystals_refiner, indexer_base, copy, log, Sorry).
  """
  print "Parsing input"
  params, options = self.parser.parse_args(show_diff_phil=True)

  #Configure the logging
  log.config(params.detector_phase.refinement.verbosity,
             info='dials.refine.log',
             debug='dials.refine.debug.log')

  # Try to obtain the models and data
  if not params.input.experiments:
    raise Sorry("No Experiments found in the input")
  if not params.input.reflections:
    raise Sorry("No reflection data found in the input")
  try:
    assert len(params.input.reflections) == len(
        params.input.experiments)
  except AssertionError:
    raise Sorry(
        "The number of input reflections files does not match the "
        "number of input experiments")

  # set up global experiments and reflections lists
  from dials.array_family import flex
  reflections = flex.reflection_table()
  global_id = 0
  from dxtbx.model.experiment.experiment_list import ExperimentList
  experiments = ExperimentList()

  if params.reference_detector == "first":
    # Use the first experiment of the first experiment list as the reference detector
    ref_exp = params.input.experiments[0].data[0]
  else:
    # Average all the detectors to generate a reference detector
    assert params.detector_phase.refinement.parameterisation.detector.hierarchy_level == 0
    from scitbx.matrix import col
    panel_fasts = []
    panel_slows = []
    panel_oris = []
    for exp_wrapper in params.input.experiments:
      exp = exp_wrapper.data[0]
      if panel_oris:
        # accumulate axis/origin sums panel by panel
        for i, panel in enumerate(exp.detector):
          panel_fasts[i] += col(panel.get_fast_axis())
          panel_slows[i] += col(panel.get_slow_axis())
          panel_oris[i] += col(panel.get_origin())
      else:
        # first detector seen: initialise the accumulators
        for i, panel in enumerate(exp.detector):
          panel_fasts.append(col(panel.get_fast_axis()))
          panel_slows.append(col(panel.get_slow_axis()))
          panel_oris.append(col(panel.get_origin()))

    ref_exp = copy.deepcopy(params.input.experiments[0].data[0])
    for i, panel in enumerate(ref_exp.detector):
      # Averaging the fast and slow axes can make them be non-orthogonal.
      # Fix by finding the vector that goes exactly between them and rotate
      # around their cross product 45 degrees from that vector in either direction
      vf = panel_fasts[i] / len(params.input.experiments)
      vs = panel_slows[i] / len(params.input.experiments)
      c = vf.cross(vs)
      angle = vf.angle(vs, deg=True)
      v45 = vf.rotate(c, angle / 2, deg=True)
      vf = v45.rotate(c, -45, deg=True)
      vs = v45.rotate(c, 45, deg=True)
      panel.set_frame(vf, vs,
                      panel_oris[i] / len(params.input.experiments))

    print "Reference detector (averaged):", str(ref_exp.detector)

  # set the experiment factory that combines a crystal with the reference beam
  # and the reference detector
  experiment_from_crystal = ExperimentFromCrystal(
      ref_exp.beam, ref_exp.detector)

  # keep track of the number of refl per accepted experiment for a table
  nrefs_per_exp = []

  # loop through the input, building up the global lists
  for ref_wrapper, exp_wrapper in zip(params.input.reflections,
                                      params.input.experiments):
    refs = ref_wrapper.data
    exps = exp_wrapper.data

    # there might be multiple experiments already here. Loop through them
    for i, exp in enumerate(exps):

      # select the relevant reflections
      sel = refs['id'] == i
      sub_ref = refs.select(sel)

      ## DGW commented out as reflections.minimum_number_of_reflections no longer exists
      #if len(sub_ref) < params.crystals_phase.refinement.reflections.minimum_number_of_reflections:
      #  print "skipping experiment", i, "in", exp_wrapper.filename, "due to insufficient strong reflections in", ref_wrapper.filename
      #  continue

      # build an experiment with this crystal plus the reference models
      combined_exp = experiment_from_crystal(exp.crystal)

      # next experiment ID in series
      exp_id = len(experiments)

      # check this experiment
      if not check_experiment(combined_exp, sub_ref):
        print "skipping experiment", i, "in", exp_wrapper.filename, "due to poor RMSDs"
        continue

      # set reflections ID
      sub_ref['id'] = flex.int(len(sub_ref), exp_id)

      # keep number of reflections for the table
      nrefs_per_exp.append(len(sub_ref))

      # obtain mm positions on the reference detector
      sub_ref = indexer_base.map_spots_pixel_to_mm_rad(
          sub_ref, combined_exp.detector, combined_exp.scan)

      # extend refl and experiments lists
      reflections.extend(sub_ref)
      experiments.append(combined_exp)

  # print number of reflections per accepted experiment
  from libtbx.table_utils import simple_table
  header = ["Experiment", "Nref"]
  rows = [(str(i), str(n)) for (i, n) in enumerate(nrefs_per_exp)]
  st = simple_table(rows, header)
  print "Number of reflections per experiment"
  print st.format()

  for cycle in range(params.n_macrocycles):

    print "MACROCYCLE %02d" % (cycle + 1)
    print "=============\n"
    # first run: multi experiment joint refinement of detector with fixed beam and
    # crystals
    print "PHASE 1"

    # SET THIS TEST TO FALSE TO REFINE WHOLE DETECTOR AS SINGLE JOB
    if params.detector_phase.refinement.parameterisation.detector.hierarchy_level > 0:
      experiments = detector_parallel_refiners(
          params.detector_phase, experiments, reflections)
    else:
      experiments = detector_refiner(params.detector_phase, experiments,
                                     reflections)

    # second run
    print "PHASE 2"
    experiments = crystals_refiner(params.crystals_phase, experiments,
                                   reflections)

  # Save the refined experiments to file
  output_experiments_filename = params.output.experiments_filename
  print 'Saving refined experiments to {0}'.format(
      output_experiments_filename)
  from dxtbx.model.experiment.experiment_list import ExperimentListDumper
  dump = ExperimentListDumper(experiments)
  dump.as_json(output_experiments_filename)

  # Write out refined reflections, if requested
  if params.output.reflections_filename:
    print 'Saving refined reflections to {0}'.format(
        params.output.reflections_filename)
    reflections.as_pickle(params.output.reflections_filename)

  return
# Test-fixture setup: build a mock 180 degree scan and two ExperimentLists
# (single- and multi-panel detector variants), then parameterise all models.
# NOTE(review): names such as mybeam, mygonio, mycrystal,
# single_panel_detector and multi_panel_detector are defined outside this
# chunk -- presumably earlier in the same test script; verify against caller.
sf = scan_factory()
myscan = sf.make_scan(image_range = (1,1800),
                      exposure_times = 0.1,
                      oscillation = (0, 0.1),
                      epochs = range(1800),
                      deg = True)
sweep_range = myscan.get_oscillation_range(deg=False)
im_width = myscan.get_oscillation(deg=False)[1]
# 1800 images * 0.1 deg = 180 deg sweep; check in radians
assert sweep_range == (0., pi)
assert approx_equal(im_width, 0.1 * pi / 180.)

# Build ExperimentLists
experiments_single_panel = ExperimentList()
experiments_multi_panel = ExperimentList()
experiments_single_panel.append(Experiment(
    beam=mybeam, detector=single_panel_detector, goniometer=mygonio,
    scan=myscan, crystal=mycrystal, imageset=None))
experiments_multi_panel.append(Experiment(
    beam=mybeam, detector=multi_panel_detector, goniometer=mygonio,
    scan=myscan, crystal=mycrystal, imageset=None))

###########################
# Parameterise the models #
###########################

det_param = DetectorParameterisationSinglePanel(single_panel_detector)
s0_param = BeamParameterisation(mybeam, mygonio)
xlo_param = CrystalOrientationParameterisation(mycrystal)
xluc_param = CrystalUnitCellParameterisation(mycrystal)
multi_det_param = DetectorParameterisationMultiPanel(multi_panel_detector,
                                                     mybeam)
def run(self): '''Execute the script.''' from dials.algorithms.refinement.two_theta_refiner import \ TwoThetaReflectionManager, TwoThetaTarget, \ TwoThetaPredictionParameterisation start_time = time() # Parse the command line params, options = self.parser.parse_args(show_diff_phil=False) # set up global experiments and reflections lists from dials.array_family import flex reflections = flex.reflection_table() global_id = 0 from dxtbx.model.experiment.experiment_list import ExperimentList experiments=ExperimentList() # loop through the input, building up the global lists nrefs_per_exp = [] for ref_wrapper, exp_wrapper in zip(params.input.reflections, params.input.experiments): refs = ref_wrapper.data exps = exp_wrapper.data for i, exp in enumerate(exps): sel = refs['id'] == i sub_ref = refs.select(sel) nrefs_per_exp.append(len(sub_ref)) sub_ref['id'] = flex.int(len(sub_ref), global_id) reflections.extend(sub_ref) experiments.append(exp) global_id += 1 # Try to load the models and data nexp = len(experiments) if nexp == 0: print "No Experiments found in the input" self.parser.print_help() return if len(reflections) == 0: print "No reflection data found in the input" self.parser.print_help() return self.check_input(reflections) # Configure the logging log.config(info=params.output.log, debug=params.output.debug_log) logger.info(dials_version()) # Log the diff phil diff_phil = self.parser.diff_phil.as_str() if diff_phil is not '': logger.info('The following parameters have been modified:\n') logger.info(diff_phil) # Convert to P 1? if params.refinement.triclinic: reflections, experiments = self.convert_to_P1(reflections, experiments) # Combine crystals? if params.refinement.combine_crystal_models and len(experiments) > 1: logger.info('Combining {0} crystal models'.format(len(experiments))) experiments = self.combine_crystals(experiments) # Filter integrated centroids? 
if params.refinement.filter_integrated_centroids: reflections = self.filter_integrated_centroids(reflections) # Get the refiner logger.info('Configuring refiner') refiner = self.create_refiner(params, reflections, experiments) # Refine the geometry if nexp == 1: logger.info('Performing refinement of a single Experiment...') else: logger.info('Performing refinement of {0} Experiments...'.format(nexp)) # Refine and get the refinement history history = refiner.run() # get the refined experiments experiments = refiner.get_experiments() crystals = experiments.crystals() if len(crystals) == 1: # output the refined model for information logger.info('') logger.info('Final refined crystal model:') logger.info(crystals[0]) logger.info(self.cell_param_table(crystals[0])) # Save the refined experiments to file output_experiments_filename = params.output.experiments logger.info('Saving refined experiments to {0}'.format(output_experiments_filename)) from dxtbx.model.experiment.experiment_list import ExperimentListDumper dump = ExperimentListDumper(experiments) dump.as_json(output_experiments_filename) # Correlation plot if params.output.correlation_plot.filename is not None: from os.path import splitext root, ext = splitext(params.output.correlation_plot.filename) if not ext: ext = ".pdf" steps = params.output.correlation_plot.steps if steps is None: steps = [history.get_nrows()-1] # extract individual column names or indices col_select = params.output.correlation_plot.col_select num_plots = 0 for step in steps: fname_base = root if len(steps) > 1: fname_base += "_step%02d" % step plot_fname = fname_base + ext corrmat, labels = refiner.get_parameter_correlation_matrix(step, col_select) if [corrmat, labels].count(None) == 0: from dials.algorithms.refinement.refinement_helpers import corrgram plt = corrgram(corrmat, labels) if plt is not None: logger.info('Saving parameter correlation plot to {}'.format(plot_fname)) plt.savefig(plot_fname) num_plots += 1 mat_fname = fname_base + 
".pickle" with open(mat_fname, 'wb') as handle: py_mat = corrmat.as_scitbx_matrix() #convert to pickle-friendly form logger.info('Saving parameter correlation matrix to {0}'.format(mat_fname)) pickle.dump({'corrmat':py_mat, 'labels':labels}, handle) if num_plots == 0: msg = "Sorry, no parameter correlation plots were produced. Please set " \ "track_parameter_correlation=True to ensure correlations are " \ "tracked, and make sure correlation_plot.col_select is valid." logger.info(msg) if params.output.cif is not None: self.generate_cif(crystals[0], refiner, file=params.output.cif) if params.output.p4p is not None: self.generate_p4p(crystals[0], experiments[0].beam, file=params.output.p4p) if params.output.mmcif is not None: self.generate_mmcif(crystals[0], refiner, file=params.output.mmcif) # Log the total time taken logger.info("\nTotal time taken: {0:.2f}s".format(time() - start_time))
def test_fd_derivatives():
  '''Test derivatives of the prediction equation.

  Builds a single scan experiment from randomised geometry, parameterises
  the crystal unit cell, and checks the analytical d2theta/dp gradients of
  a TwoThetaPredictionParameterisation against central finite differences.
  '''

  from libtbx.phil import parse

  # Import model builder
  from setup_geometry import Extract

  # Imports for reflection prediction
  from dials.algorithms.spot_prediction import IndexGenerator, ray_intersection
  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment
  from dials.algorithms.refinement.prediction import ScansRayPredictor, \
    ExperimentsPredictor

  # Create models
  # NOTE(review): newlines inside this phil string were destroyed by the
  # file mangling; restored here as phil requires one statement per line.
  overrides = """geometry.parameters.crystal.a.length.range = 10 50
geometry.parameters.crystal.b.length.range = 10 50
geometry.parameters.crystal.c.length.range = 10 50"""

  master_phil = parse("""
      include scope dials.test.algorithms.refinement.geometry_phil
      """, process_includes=True)

  models = Extract(master_phil, overrides)

  mydetector = models.detector
  mygonio = models.goniometer
  mycrystal = models.crystal
  mybeam = models.beam

  # Build a mock scan for a 72 degree sweep
  sweep_range = (0., pi / 5.)
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  myscan = sf.make_scan(image_range = (1,720),
                        exposure_times = 0.1,
                        oscillation = (0, 0.1),
                        epochs = range(720),
                        deg = True)

  # Create a parameterisation of the crystal unit cell
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
      CrystalUnitCellParameterisation
  xluc_param = CrystalUnitCellParameterisation(mycrystal)

  # Create an ExperimentList
  experiments = ExperimentList()
  experiments.append(Experiment(
      beam=mybeam, detector=mydetector, goniometer=mygonio,
      scan=myscan, crystal=mycrystal, imageset=None))

  # Build a prediction parameterisation for two theta prediction
  pred_param = TwoThetaPredictionParameterisation(experiments,
      detector_parameterisations = None,
      beam_parameterisations = None,
      xl_orientation_parameterisations = None,
      xl_unit_cell_parameterisations = [xluc_param])

  # Generate some reflections
  obs_refs, ref_predictor = generate_reflections(experiments)

  # Build a ReflectionManager with overloads for handling 2theta residuals
  refman = TwoThetaReflectionManager(obs_refs, experiments,
                                     outlier_detector=None)

  # Build a TwoThetaExperimentsPredictor
  ref_predictor = TwoThetaExperimentsPredictor(experiments)

  # Make a target for the least squares 2theta residual
  target = TwoThetaTarget(experiments, ref_predictor, refman, pred_param)

  # Keep only reflections that pass inclusion criteria and have predictions
  reflections = refman.get_matches()

  # Get analytical gradients
  an_grads = pred_param.get_gradients(reflections)

  # Get finite difference gradients
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)

  for i in range(len(deltas)):

    # perturb parameter i by -delta/2 then +delta/2 (central difference)
    val = p_vals[i]

    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)

    target.predict()
    reflections = refman.get_matches()

    rev_state = reflections['2theta_resid'].deep_copy()

    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)

    target.predict()
    reflections = refman.get_matches()

    fwd_state = reflections['2theta_resid'].deep_copy()
    p_vals[i] = val

    fd = (fwd_state - rev_state)
    fd /= deltas[i]

    # compare with analytical calculation
    assert approx_equal(fd, an_grads[i]["d2theta_dp"], eps=1.e-6)

  # return to the initial state
  pred_param.set_param_vals(p_vals)

  return
return Experiment(beam=self.reference_beam, detector=self.reference_detector, crystal=crystal) assert len(working_params.input) > 1 print len(working_params.input), "datasets specified as input" e = enumerate(working_params.input) i, line = e.next() reflections, exp = load_input(line.experiments, line.reflections) assert reflections['id'].all_eq(0) experiment_from_crystal=ExperimentFromCrystal(exp.beam, exp.detector) from dxtbx.model.experiment.experiment_list import ExperimentList experiments=ExperimentList() experiments.append(experiment_from_crystal(exp.crystal)) from scitbx.array_family import flex for i, line in e: refs, exp = load_input(line.experiments, line.reflections) refs['id'] = flex.int(len(refs),i) reflections.extend(refs) experiments.append(experiment_from_crystal(exp.crystal)) # analysis of panel sampling #TODO # refinement - limit Jacobian calculation to 100'000 reflections at a time from libtbx.phil import parse user_phil=parse(""" refinement{
def test1():
  '''Regression test: refine a level-1 hierarchical CS-PAD detector
  parameterisation back to its original geometry after perturbing the
  quadrants by 2 mm, with beam and crystal fixed.'''

  dials_regression = libtbx.env.find_in_repositories(
      relative_path="dials_regression", test=os.path.isdir)

  # use a datablock that contains a CS-PAD detector description
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "hierarchy_test")
  datablock_path = os.path.join(data_dir, "datablock.json")
  assert os.path.exists(datablock_path)

  # load models
  from dxtbx.datablock import DataBlockFactory
  datablock = DataBlockFactory.from_serialized_format(datablock_path,
                                                      check_format=False)
  im_set = datablock[0].extract_imagesets()[0]
  from copy import deepcopy
  detector = deepcopy(im_set.get_detector())
  beam = im_set.get_beam()

  # we'll invent a crystal, goniometer and scan for this test
  from dxtbx.model.crystal import crystal_model
  crystal = crystal_model((40.,0.,0.) ,(0.,40.,0.), (0.,0.,40.),
                          space_group_symbol = "P1")
  from dxtbx.model.experiment import goniometer_factory
  goniometer = goniometer_factory.known_axis((1., 0., 0.))

  # Build a mock scan for a 180 degree sweep
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  scan = sf.make_scan(image_range = (1,1800),
                      exposure_times = 0.1,
                      oscillation = (0, 0.1),
                      epochs = range(1800),
                      deg = True)
  sweep_range = scan.get_oscillation_range(deg=False)
  im_width = scan.get_oscillation(deg=False)[1]
  assert sweep_range == (0., pi)
  assert approx_equal(im_width, 0.1 * pi / 180.)

  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  # Build an experiment list
  experiments = ExperimentList()
  experiments.append(Experiment(
      beam=beam, detector=detector, goniometer=goniometer,
      scan=scan, crystal=crystal, imageset=None))

  # simulate some reflections
  refs, ref_predictor = generate_reflections(experiments)

  # move the detector quadrants apart by 2mm both horizontally and vertically
  from dials.algorithms.refinement.parameterisation \
    import DetectorParameterisationHierarchical
  det_param = DetectorParameterisationHierarchical(detector, level=1)
  det_p_vals = det_param.get_param_vals()
  p_vals = list(det_p_vals)
  # indices 1,2 / 7,8 / 13,14 / 19,20 are the in-plane translations of the
  # four quadrant groups -- TODO confirm against the parameter ordering
  p_vals[1] += 2
  p_vals[2] -= 2
  p_vals[7] += 2
  p_vals[8] += 2
  p_vals[13] -= 2
  p_vals[14] += 2
  p_vals[19] -= 2
  p_vals[20] -= 2
  det_param.set_param_vals(p_vals)

  # reparameterise the detector at the new perturbed geometry
  det_param = DetectorParameterisationHierarchical(detector, level=1)

  # parameterise other models
  from dials.algorithms.refinement.parameterisation.beam_parameters import \
      BeamParameterisation
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
      CrystalOrientationParameterisation, CrystalUnitCellParameterisation
  beam_param = BeamParameterisation(beam, goniometer)
  xlo_param = CrystalOrientationParameterisation(crystal)
  xluc_param = CrystalUnitCellParameterisation(crystal)

  # fix beam
  beam_param.set_fixed([True]*3)

  # fix crystal
  xluc_param.set_fixed([True]*6)
  xlo_param.set_fixed([True]*3)

  # parameterisation of the prediction equation
  from dials.algorithms.refinement.parameterisation.prediction_parameters import \
      XYPhiPredictionParameterisation
  from dials.algorithms.refinement.parameterisation.parameter_report import \
      ParameterReporter
  pred_param = XYPhiPredictionParameterisation(experiments,
      [det_param], [beam_param], [xlo_param], [xluc_param])
  param_reporter = ParameterReporter([det_param], [beam_param],
                                     [xlo_param], [xluc_param])

  # reflection manager and target function
  from dials.algorithms.refinement.target import \
      LeastSquaresPositionalResidualWithRmsdCutoff
  from dials.algorithms.refinement.reflection_manager import ReflectionManager
  refman = ReflectionManager(refs, experiments, nref_per_degree=20)

  # set a very tight rmsd target of 1/10000 of a pixel
  target = LeastSquaresPositionalResidualWithRmsdCutoff(experiments,
      ref_predictor, refman, pred_param, restraints_parameterisation=None,
      frac_binsize_cutoff=0.0001)

  # minimisation engine
  from dials.algorithms.refinement.engine \
    import LevenbergMarquardtIterations as Refinery
  refinery = Refinery(target = target,
                      prediction_parameterisation = pred_param,
                      log = None,
                      verbosity = 0,
                      track_step = False,
                      track_gradient = False,
                      track_parameter_correlation = False,
                      max_iterations = 20)

  # Refiner
  from dials.algorithms.refinement.refiner import Refiner
  refiner = Refiner(reflections=refs,
                    experiments=experiments,
                    pred_param=pred_param,
                    param_reporter=param_reporter,
                    refman=refman,
                    target=target,
                    refinery=refinery,
                    verbosity=0)

  history = refiner.run()
  assert history.reason_for_termination == "RMSD target achieved"

  #compare detector with original detector
  orig_det = im_set.get_detector()
  refined_det = refiner.get_experiments()[0].detector

  from scitbx import matrix
  import math
  for op, rp in zip(orig_det, refined_det):

    # compare the origin vectors by...
    o1 = matrix.col(op.get_origin())
    o2 = matrix.col(rp.get_origin())

    # ...their relative lengths
    assert approx_equal(
        math.fabs(o1.length() - o2.length()) / o1.length(), 0, eps=1e-5)

    # ...the angle between them
    # (accute_angle is the real -- historically misspelled -- scitbx API name)
    assert approx_equal(o1.accute_angle(o2), 0, eps=1e-5)

  print "OK"
  return
def run(self): ''' Parse the options. ''' from dials.util.options import flatten_experiments, flatten_reflections from dxtbx.model.experiment.experiment_list import ExperimentList from scitbx.math import five_number_summary # Parse the command line arguments params, options = self.parser.parse_args(show_diff_phil=True) self.params = params experiments = flatten_experiments(params.input.experiments) reflections = flatten_reflections(params.input.reflections) assert len(reflections) == 1 reflections = reflections[0] print "Found", len(reflections), "reflections", "and", len( experiments), "experiments" difference_vector_norms = (reflections['xyzcal.mm'] - reflections['xyzobs.mm.value']).norms() data = flex.double() counts = flex.double() for i in xrange(len(experiments)): dvns = difference_vector_norms.select(reflections['id'] == i) counts.append(len(dvns)) if len(dvns) == 0: data.append(0) continue rmsd = math.sqrt(flex.sum_sq(dvns) / len(dvns)) data.append(rmsd) data *= 1000 subset = data.select(counts > 0) print len(subset), "experiments with > 0 reflections" if params.show_plots: h = flex.histogram(subset, n_slots=40) fig = plt.figure() ax = fig.add_subplot('111') ax.plot(h.slot_centers().as_numpy_array(), h.slots().as_numpy_array(), '-') plt.title("Histogram of %d image RMSDs" % len(subset)) fig = plt.figure() plt.boxplot(subset, vert=False) plt.title("Boxplot of %d image RMSDs" % len(subset)) plt.show() outliers = counts == 0 min_x, q1_x, med_x, q3_x, max_x = five_number_summary(subset) print "Five number summary of RMSDs (microns): min %.1f, q1 %.1f, med %.1f, q3 %.1f, max %.1f" % ( min_x, q1_x, med_x, q3_x, max_x) iqr_x = q3_x - q1_x cut_x = params.iqr_multiplier * iqr_x outliers.set_selected(data > q3_x + cut_x, True) #outliers.set_selected(col < q1_x - cut_x, True) # Don't throw away the images that are outliers in the 'good' direction! 
filtered_reflections = flex.reflection_table() filtered_experiments = ExperimentList() for i in xrange(len(experiments)): if outliers[i]: continue refls = reflections.select(reflections['id'] == i) refls['id'] = flex.int(len(refls), len(filtered_experiments)) filtered_reflections.extend(refls) filtered_experiments.append(experiments[i]) zeroes = counts == 0 n_zero = len(counts.select(zeroes)) print "Removed %d bad experiments and %d experiments with zero reflections, out of %d (%%%.1f)" % ( len(experiments) - len(filtered_experiments) - n_zero, n_zero, len(experiments), 100 * ((len(experiments) - len(filtered_experiments)) / len(experiments))) from dxtbx.model.experiment.experiment_list import ExperimentListDumper dump = ExperimentListDumper(filtered_experiments) dump.as_json(params.output.filtered_experiments) filtered_reflections.as_pickle(params.output.filtered_reflections)
(-float(fs_x.rstrip('x')), float(fs_y.rstrip('y')), 0.0)) slow = matrix.col( (-float(ss_x.rstrip('x')), float(ss_y.rstrip('y')), 0.0)) origin = matrix.col( (-float(geom[key]['corner_x']) * params.pixel_size, float(geom[key]['corner_y']) * params.pixel_size, 0.0)) # OBS! you need to set the panel to a root before set local frame... p = root.add_panel() p.set_name('panel-%s' % key) p.set_image_size((512, 1024)) p.set_trusted_range((-1, 1000000)) p.set_pixel_size((params.pixel_size, params.pixel_size)) p.set_local_frame(fast.elems, slow.elems, origin.elems) from dxtbx.model.beam import beam_factory wavelength = params.wavelength beam = beam_factory.simple(wavelength) from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList, ExperimentListDumper experiments = ExperimentList() experiment = Experiment(detector=detector, beam=beam) experiments.append(experiment) dump = ExperimentListDumper(experiments) dump.as_json("geometry.json") if __name__ == "__main__": run(sys.argv[1:])
def test1():
  '''Regression test (duplicate of the earlier test1 in this file, in a
  different formatting style): refine a level-1 hierarchical CS-PAD
  detector parameterisation back to its original geometry after
  perturbing the quadrants by 2 mm, with beam and crystal fixed.'''

  dials_regression = libtbx.env.find_in_repositories(
      relative_path="dials_regression", test=os.path.isdir)

  # use a datablock that contains a CS-PAD detector description
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "hierarchy_test")
  datablock_path = os.path.join(data_dir, "datablock.json")
  assert os.path.exists(datablock_path)

  # load models
  from dxtbx.datablock import DataBlockFactory
  datablock = DataBlockFactory.from_serialized_format(datablock_path,
                                                      check_format=False)
  im_set = datablock[0].extract_imagesets()[0]
  from copy import deepcopy
  detector = deepcopy(im_set.get_detector())
  beam = im_set.get_beam()

  # we'll invent a crystal, goniometer and scan for this test
  from dxtbx.model.crystal import crystal_model
  crystal = crystal_model((40., 0., 0.), (0., 40., 0.), (0., 0., 40.),
                          space_group_symbol="P1")
  from dxtbx.model.experiment import goniometer_factory
  goniometer = goniometer_factory.known_axis((1., 0., 0.))

  # Build a mock scan for a 180 degree sweep
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  scan = sf.make_scan(image_range=(1, 1800),
                      exposure_times=0.1,
                      oscillation=(0, 0.1),
                      epochs=range(1800),
                      deg=True)
  sweep_range = scan.get_oscillation_range(deg=False)
  im_width = scan.get_oscillation(deg=False)[1]
  assert sweep_range == (0., pi)
  assert approx_equal(im_width, 0.1 * pi / 180.)

  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  # Build an experiment list
  experiments = ExperimentList()
  experiments.append(
      Experiment(beam=beam,
                 detector=detector,
                 goniometer=goniometer,
                 scan=scan,
                 crystal=crystal,
                 imageset=None))

  # simulate some reflections
  refs, ref_predictor = generate_reflections(experiments)

  # move the detector quadrants apart by 2mm both horizontally and vertically
  from dials.algorithms.refinement.parameterisation \
    import DetectorParameterisationHierarchical
  det_param = DetectorParameterisationHierarchical(detector, level=1)
  det_p_vals = det_param.get_param_vals()
  p_vals = list(det_p_vals)
  # indices 1,2 / 7,8 / 13,14 / 19,20 are the in-plane translations of the
  # four quadrant groups -- TODO confirm against the parameter ordering
  p_vals[1] += 2
  p_vals[2] -= 2
  p_vals[7] += 2
  p_vals[8] += 2
  p_vals[13] -= 2
  p_vals[14] += 2
  p_vals[19] -= 2
  p_vals[20] -= 2
  det_param.set_param_vals(p_vals)

  # reparameterise the detector at the new perturbed geometry
  det_param = DetectorParameterisationHierarchical(detector, level=1)

  # parameterise other models
  from dials.algorithms.refinement.parameterisation.beam_parameters import \
      BeamParameterisation
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
      CrystalOrientationParameterisation, CrystalUnitCellParameterisation
  beam_param = BeamParameterisation(beam, goniometer)
  xlo_param = CrystalOrientationParameterisation(crystal)
  xluc_param = CrystalUnitCellParameterisation(crystal)

  # fix beam
  beam_param.set_fixed([True] * 3)

  # fix crystal
  xluc_param.set_fixed([True] * 6)
  xlo_param.set_fixed([True] * 3)

  # parameterisation of the prediction equation
  from dials.algorithms.refinement.parameterisation.prediction_parameters import \
      XYPhiPredictionParameterisation
  from dials.algorithms.refinement.parameterisation.parameter_report import \
      ParameterReporter
  pred_param = XYPhiPredictionParameterisation(experiments,
      [det_param], [beam_param], [xlo_param], [xluc_param])
  param_reporter = ParameterReporter([det_param], [beam_param],
                                     [xlo_param], [xluc_param])

  # reflection manager and target function
  from dials.algorithms.refinement.target import \
      LeastSquaresPositionalResidualWithRmsdCutoff
  from dials.algorithms.refinement.reflection_manager import ReflectionManager
  refman = ReflectionManager(refs, experiments, nref_per_degree=20)

  # set a very tight rmsd target of 1/10000 of a pixel
  target = LeastSquaresPositionalResidualWithRmsdCutoff(
      experiments,
      ref_predictor,
      refman,
      pred_param,
      restraints_parameterisation=None,
      frac_binsize_cutoff=0.0001)

  # minimisation engine
  from dials.algorithms.refinement.engine \
    import LevenbergMarquardtIterations as Refinery
  refinery = Refinery(target=target,
                      prediction_parameterisation=pred_param,
                      log=None,
                      verbosity=0,
                      track_step=False,
                      track_gradient=False,
                      track_parameter_correlation=False,
                      max_iterations=20)

  # Refiner
  from dials.algorithms.refinement.refiner import Refiner
  refiner = Refiner(reflections=refs,
                    experiments=experiments,
                    pred_param=pred_param,
                    param_reporter=param_reporter,
                    refman=refman,
                    target=target,
                    refinery=refinery,
                    verbosity=0)

  history = refiner.run()
  assert history.reason_for_termination == "RMSD target achieved"

  #compare detector with original detector
  orig_det = im_set.get_detector()
  refined_det = refiner.get_experiments()[0].detector

  from scitbx import matrix
  import math
  for op, rp in zip(orig_det, refined_det):

    # compare the origin vectors by...
    o1 = matrix.col(op.get_origin())
    o2 = matrix.col(rp.get_origin())

    # ...their relative lengths
    assert approx_equal(math.fabs(o1.length() - o2.length()) / o1.length(),
                        0,
                        eps=1e-5)

    # ...the angle between them
    # (accute_angle is the real -- historically misspelled -- scitbx API name)
    assert approx_equal(o1.accute_angle(o2), 0, eps=1e-5)

  print "OK"
  return
class Test(object):
  """Compare analytical vs finite-difference gradients for a scan-varying
  prediction parameterisation (normal or 'Fast' variant)."""

  def __init__(self, fast_pred_param=False):
    # Choose which scan-varying prediction parameterisation class to test
    if fast_pred_param:
      self._pred_param_type = ScanVaryingPredictionParameterisationFast
    else:
      self._pred_param_type = ScanVaryingPredictionParameterisation

  def create_models(self, cmdline_overrides=None):
    """Build beam/detector/goniometer/crystal models, a 180 deg scan and
    scan-varying parameterisations (5 sample points each)."""
    if cmdline_overrides is None: cmdline_overrides = []
    overrides = """geometry.parameters.crystal.a.length.range = 10 50
geometry.parameters.crystal.b.length.range = 10 50
geometry.parameters.crystal.c.length.range = 10 50"""

    master_phil = parse("""
    include scope dials.test.algorithms.refinement.geometry_phil
    """, process_includes=True)

    # Extract models
    models = Extract(master_phil, overrides, cmdline_args = cmdline_overrides)
    self.detector = models.detector
    self.goniometer = models.goniometer
    self.crystal = models.crystal
    self.beam = models.beam

    # Make a scan of 1-360 * 0.5 deg images
    sf = scan_factory()
    self.scan = sf.make_scan((1,360), 0.5, (0, 0.5), range(360))

    # Generate an ExperimentList
    self.experiments = ExperimentList()
    self.experiments.append(Experiment(
          beam=self.beam, detector=self.detector, goniometer=self.goniometer,
          scan=self.scan, crystal=self.crystal, imageset=None))

    # Create a reflection predictor for the experiments
    self.ref_predictor = ExperimentsPredictor(self.experiments)

    # Create scan-varying parameterisations of these models, with 5 samples
    self.det_param = ScanVaryingDetectorParameterisationSinglePanel(
            self.detector, self.scan.get_array_range(), 5)
    self.s0_param = ScanVaryingBeamParameterisation(
            self.beam, self.scan.get_array_range(), 5, self.goniometer)
    self.xlo_param = ScanVaryingCrystalOrientationParameterisation(
            self.crystal, self.scan.get_array_range(), 5)
    self.xluc_param = ScanVaryingCrystalUnitCellParameterisation(
            self.crystal, self.scan.get_array_range(), 5)
    return

  def generate_reflections(self):
    """Predict reflections over the sweep, fake observed centroids and
    variances, and keep 5 random reflections (with per-image blocks)."""
    sweep_range = self.scan.get_oscillation_range(deg=False)
    resolution = 2.0
    index_generator = IndexGenerator(self.crystal.get_unit_cell(),
                          space_group(space_group_symbols(1).hall()).type(),
                          resolution)
    indices = index_generator.to_array()

    # Predict rays within the sweep range
    ray_predictor = ScansRayPredictor(self.experiments, sweep_range)
    obs_refs = ray_predictor(indices)

    # Take only those rays that intersect the detector
    intersects = ray_intersection(self.detector, obs_refs)
    obs_refs = obs_refs.select(intersects)

    # Re-predict using the Experiments predictor for all these reflections. The
    # result is the same, but we gain also the flags and xyzcal.px columns
    obs_refs['id'] = flex.int(len(obs_refs), 0)
    obs_refs = self.ref_predictor(obs_refs)

    # Set 'observed' centroids from the predicted ones
    obs_refs['xyzobs.mm.value'] = obs_refs['xyzcal.mm']

    # Invent some variances for the centroid positions of the simulated data
    # (half a pixel / half an image width standard deviations)
    im_width = 0.1 * pi / 180.
    px_size = self.detector[0].get_pixel_size()
    var_x = flex.double(len(obs_refs), (px_size[0] / 2.)**2)
    var_y = flex.double(len(obs_refs), (px_size[1] / 2.)**2)
    var_phi = flex.double(len(obs_refs), (im_width / 2.)**2)
    obs_refs['xyzobs.mm.variance'] = flex.vec3_double(var_x, var_y, var_phi)

    # set the flex random seed to an 'uninteresting' number
    flex.set_random_seed(12407)

    # take 5 random reflections for speed
    reflections = obs_refs.select(flex.random_selection(len(obs_refs), 5))

    # use a BlockCalculator to calculate the blocks per image
    from dials.algorithms.refinement.reflection_manager import BlockCalculator
    block_calculator = BlockCalculator(self.experiments, reflections)
    reflections = block_calculator.per_image()

    return reflections

  def __call__(self, cmdline_overrides):
    """Run the test: compare analytical gradients against central finite
    differences for every parameter; print a failure message per parameter
    that disagrees."""
    self.create_models(cmdline_overrides)
    reflections = self.generate_reflections()

    # use a ReflectionManager to exclude reflections too close to the spindle,
    # plus set the frame numbers
    from dials.algorithms.refinement.reflection_manager import ReflectionManager
    refman = ReflectionManager(reflections, self.experiments,
      outlier_detector=None)

    # create prediction parameterisation of the requested type
    pred_param = self._pred_param_type(self.experiments, [self.det_param],
        [self.s0_param], [self.xlo_param], [self.xluc_param])

    # make a target to ensure reflections are predicted and refman is finalised
    from dials.algorithms.refinement.target import \
      LeastSquaresPositionalResidualWithRmsdCutoff
    target = LeastSquaresPositionalResidualWithRmsdCutoff(self.experiments,
        self.ref_predictor, refman, pred_param,
        restraints_parameterisation=None)

    # keep only those reflections that pass inclusion criteria and have predictions
    reflections = refman.get_matches()

    # get analytical gradients
    pred_param.compose(reflections)
    an_grads = pred_param.get_gradients(reflections)

    # get finite difference gradients
    p_vals = pred_param.get_param_vals()
    p_names = pred_param.get_param_names()
    deltas = [1.e-7] * len(p_vals)

    for i in range(len(deltas)):

      # save the original parameter value, then evaluate predictions at
      # p - delta/2 and p + delta/2 (central differences)
      val = p_vals[i]

      p_vals[i] -= deltas[i] / 2.
      pred_param.set_param_vals(p_vals)
      pred_param.compose(reflections)
      self.ref_predictor(reflections)
      rev_state = reflections['xyzcal.mm'].deep_copy()

      p_vals[i] += deltas[i]
      pred_param.set_param_vals(p_vals)
      pred_param.compose(reflections)
      self.ref_predictor(reflections)
      fwd_state = reflections['xyzcal.mm'].deep_copy()

      p_vals[i] = val

      fd = (fwd_state - rev_state)
      x_grads, y_grads, phi_grads = fd.parts()
      x_grads /= deltas[i]
      y_grads /= deltas[i]
      phi_grads /= deltas[i]

      try:
        # compare FD with analytical gradients for X, Y and phi
        for n, (a,b) in enumerate(zip(x_grads, an_grads[i]["dX_dp"])):
          assert approx_equal(a, b, eps=1.e-6)
        for n, (a,b) in enumerate(zip(y_grads, an_grads[i]["dY_dp"])):
          assert approx_equal(a, b, eps=1.e-6)
        for n, (a,b) in enumerate(zip(phi_grads, an_grads[i]["dphi_dp"])):
          assert approx_equal(a, b, eps=1.e-6)
      except AssertionError:
        print "Failure for {0}".format(p_names[i])

    # return to the initial state
    pred_param.set_param_vals(p_vals)
    pred_param.compose(reflections)

    print "OK"
    return
class Test(object):
  """Check analytical gradients of stills prediction parameterisations
  (standard and spherical relp variants) against finite differences."""

  def __init__(self):
    self.create_models()
    self.generate_reflections()

  def create_models(self):
    """Build models and their (scan-static) parameterisations, plus a
    stills ExperimentList."""
    # build models, with a larger crystal than default in order to get plenty of
    # reflections on the 'still' image
    overrides = """
    geometry.parameters.crystal.a.length.range=40 50;
    geometry.parameters.crystal.b.length.range=40 50;
    geometry.parameters.crystal.c.length.range=40 50;
    geometry.parameters.random_seed = 42"""

    master_phil = parse("""
    include scope dials.test.algorithms.refinement.geometry_phil
    """, process_includes=True)

    models = Extract(master_phil, overrides)

    # keep track of the models
    self.detector = models.detector
    self.gonio = models.goniometer
    self.crystal = models.crystal
    self.beam = models.beam

    # Create a stills ExperimentList (no goniometer, no scan)
    self.stills_experiments = ExperimentList()
    self.stills_experiments.append(Experiment(beam=self.beam,
                                              detector=self.detector,
                                              crystal=self.crystal,
                                              imageset=None))

    # keep track of the parameterisation of the models
    self.det_param = DetectorParameterisationSinglePanel(self.detector)
    self.s0_param = BeamParameterisation(self.beam, self.gonio)
    self.xlo_param = CrystalOrientationParameterisation(self.crystal)
    self.xluc_param = CrystalUnitCellParameterisation(self.crystal)

  def generate_reflections(self):
    """Use a throwaway scans experiment only to work out which hkls are
    predicted, then build a standard reflection table from the rays."""
    # Build a mock scan for a 3 degree sweep
    from dxtbx.model.scan import scan_factory
    sf = scan_factory()
    self.scan = sf.make_scan(image_range = (1,1),
                          exposure_times = 0.1,
                          oscillation = (0, 3.0),
                          epochs = range(1),
                          deg = True)
    sweep_range = self.scan.get_oscillation_range(deg=False)

    # Create a scans ExperimentList, only for generating reflections
    experiments = ExperimentList()
    experiments.append(Experiment(
          beam=self.beam, detector=self.detector, goniometer=self.gonio,
          scan=self.scan, crystal=self.crystal, imageset=None))

    # Create a ScansRayPredictor
    ray_predictor = ScansRayPredictor(experiments, sweep_range)

    # Generate rays - only to work out which hkls are predicted
    resolution = 2.0
    index_generator = IndexGenerator(self.crystal.get_unit_cell(),
                          space_group(space_group_symbols(1).hall()).type(),
                          resolution)
    indices = index_generator.to_array()
    rays = ray_predictor.predict(indices)

    # Make a standard reflection_table and copy in the ray data
    self.reflections = flex.reflection_table.empty_standard(len(rays))
    self.reflections.update(rays)

    return

  def get_fd_gradients(self, pred_param, ref_predictor):
    """Return central finite-difference gradients of (X, Y, DeltaPsi) with
    respect to each parameter of pred_param, as a list of dicts."""
    # get finite difference gradients
    p_vals = pred_param.get_param_vals()
    deltas = [1.e-7] * len(p_vals)

    fd_grads = []
    p_names = pred_param.get_param_names()
    for i in range(len(deltas)):

      # save parameter value
      val = p_vals[i]

      # calc reverse state
      p_vals[i] -= deltas[i] / 2.
      pred_param.set_param_vals(p_vals)
      ref_predictor.update()
      ref_predictor.predict(self.reflections)
      x, y, _ = self.reflections['xyzcal.mm'].deep_copy().parts()
      delpsi = self.reflections['delpsical.rad'].deep_copy()
      rev_state = flex.vec3_double(x, y, delpsi)

      # calc forward state
      p_vals[i] += deltas[i]
      pred_param.set_param_vals(p_vals)
      ref_predictor.update()
      ref_predictor.predict(self.reflections)
      x, y, _ = self.reflections['xyzcal.mm'].deep_copy().parts()
      delpsi = self.reflections['delpsical.rad'].deep_copy()
      fwd_state = flex.vec3_double(x, y, delpsi)

      # reset parameter to saved value
      p_vals[i] = val

      # finite difference
      fd = (fwd_state - rev_state)
      x_grads, y_grads, delpsi_grads = fd.parts()
      x_grads /= deltas[i]
      y_grads /= deltas[i]
      delpsi_grads /= deltas[i]

      fd_grads.append({'name':p_names[i], 'dX_dp':x_grads, 'dY_dp':y_grads,
                       'dDeltaPsi_dp':delpsi_grads})

    # return to the initial state
    pred_param.set_param_vals(p_vals)

    return fd_grads

  def run_stills_pred_param(self, verbose = False):
    """Compare analytical and FD gradients for the standard stills
    parameterisation, using robust (IQR-based) statistics because a few
    reflections give large-but-expected discrepancies."""
    if verbose:
      print 'Testing derivatives for StillsPredictionParameterisation'
      print '========================================================'

    # Build a prediction parameterisation for the stills experiment
    pred_param = StillsPredictionParameterisation(self.stills_experiments,
                   detector_parameterisations = [self.det_param],
                   beam_parameterisations = [self.s0_param],
                   xl_orientation_parameterisations = [self.xlo_param],
                   xl_unit_cell_parameterisations = [self.xluc_param])

    # Predict the reflections in place. Must do this ahead of calculating
    # the analytical gradients so quantities like s1 are correct
    from dials.algorithms.refinement.prediction import ExperimentsPredictor
    ref_predictor = ExperimentsPredictor(self.stills_experiments)
    ref_predictor.update()
    ref_predictor.predict(self.reflections)

    # get analytical gradients
    an_grads = pred_param.get_gradients(self.reflections)

    fd_grads = self.get_fd_gradients(pred_param, ref_predictor)

    for i, (an_grad, fd_grad) in enumerate(zip(an_grads, fd_grads)):

      # compare FD with analytical calculations
      if verbose: print "\nParameter {0}: {1}". format(i,  fd_grad['name'])

      for idx, name in enumerate(["dX_dp", "dY_dp", "dDeltaPsi_dp"]):
        if verbose: print name
        a = fd_grad[name]
        b = an_grad[name]

        abs_error = a - b
        denom = a + b

        fns = five_number_summary(abs_error)
        if verbose: print ("  summary of absolute errors: %9.6f %9.6f %9.6f " + \
          "%9.6f %9.6f") % fns
        assert flex.max(flex.abs(abs_error)) < 0.0003
        # largest absolute error found to be about 0.00025 for dY/dp of
        # Crystal0g_param_3. Reject outlying absolute errors and test again.
        iqr = fns[3] - fns[1]

        # skip further stats on errors with an iqr of near zero, e.g. dDeltaPsi_dp
        # for detector parameters, which are all equal to zero
        if iqr < 1.e-10:
          continue

        sel1 = abs_error < fns[3] + 1.5 * iqr
        sel2 = abs_error > fns[1] - 1.5 * iqr
        sel = sel1 & sel2
        tst = flex.max_index(flex.abs(abs_error.select(sel)))
        tst_val = abs_error.select(sel)[tst]
        n_outliers = sel.count(False)
        if verbose: print ("  {0} outliers rejected, leaving greatest " + \
          "absolute error: {1:9.6f}").format(n_outliers, tst_val)
        # largest absolute error now 0.000086 for dX/dp of Beam0Mu2
        assert abs(tst_val) < 0.00009

        # Completely skip parameters with FD gradients all zero (e.g. gradients of
        # DeltaPsi for detector parameters)
        sel1 = flex.abs(a) < 1.e-10
        if sel1.all_eq(True):
          continue

        # otherwise calculate normalised errors, by dividing absolute errors by
        # the IQR (more stable than relative error calculation)
        norm_error = abs_error / iqr
        fns = five_number_summary(norm_error)
        if verbose: print ("  summary of normalised errors: %9.6f %9.6f %9.6f " + \
          "%9.6f %9.6f") % fns
        # largest normalised error found to be about 25.7 for dY/dp of
        # Crystal0g_param_3.
        try:
          assert flex.max(flex.abs(norm_error)) < 30
        except AssertionError as e:
          e.args += ("extreme normalised error value: {0}".format(
            flex.max(flex.abs(norm_error))),)
          raise e

        # Reject outlying normalised errors and test again
        iqr = fns[3] - fns[1]
        if iqr > 0.:
          sel1 = norm_error < fns[3] + 1.5 * iqr
          sel2 = norm_error > fns[1] - 1.5 * iqr
          sel = sel1 & sel2
          tst = flex.max_index(flex.abs(norm_error.select(sel)))
          tst_val = norm_error.select(sel)[tst]
          n_outliers = sel.count(False)

          # most outliers found for for dY/dp of Crystal0g_param_3 (which had
          # largest errors, so no surprise there).
          try:
            assert n_outliers < 250
          except AssertionError as e:
            e.args += ("too many outliers rejected: {0}".format(n_outliers),)
            raise e

          if verbose: print ("  {0} outliers rejected, leaving greatest " + \
            "normalised error: {1:9.6f}").format(n_outliers, tst_val)
          # largest normalied error now about -4. for dX/dp of Detector0Tau1
          assert abs(tst_val) < 4.5

    if verbose: print

    return

  def run_spherical_relp_stills_pred_param(self, verbose=True):
    """Compare analytical and FD gradients for the spherical relp stills
    parameterisation; here element-wise approx_equal comparison suffices."""
    if verbose:
      print 'Testing derivatives for SphericalRelpStillsPredictionParameterisation'
      print '====================================================================='

    # Build a prediction parameterisation for the stills experiment
    pred_param = SphericalRelpStillsPredictionParameterisation(
                   self.stills_experiments,
                   detector_parameterisations = [self.det_param],
                   beam_parameterisations = [self.s0_param],
                   xl_orientation_parameterisations = [self.xlo_param],
                   xl_unit_cell_parameterisations = [self.xluc_param])

    # Predict the reflections in place. Must do this ahead of calculating
    # the analytical gradients so quantities like s1 are correct
    from dials.algorithms.refinement.prediction import ExperimentsPredictor
    ref_predictor = ExperimentsPredictor(self.stills_experiments,
                                         spherical_relp=True)
    ref_predictor.update()
    ref_predictor.predict(self.reflections)

    # get analytical gradients
    an_grads = pred_param.get_gradients(self.reflections)

    fd_grads = self.get_fd_gradients(pred_param, ref_predictor)

    # compare FD with analytical calculations
    for i, (an_grad, fd_grad) in enumerate(zip(an_grads, fd_grads)):
      if verbose: print "\nParameter {0}: {1}". format(i,  fd_grad['name'])

      for idx, name in enumerate(["dX_dp", "dY_dp", "dDeltaPsi_dp"]):
        if verbose: print name
        for a, b in zip(an_grad[name], fd_grad[name]):
          if name == 'dDeltaPsi_dp':
            # DeltaPsi errors are much worse than X, Y errors!
            # FIXME, look into this further
            assert approx_equal(a,b, eps=5e-3)
          else:
            assert approx_equal(a,b, eps=5e-6)
        if verbose: print "OK"

    if verbose: print
######################################################################## # Build a mock scan for a 180 degree sweep sf = scan_factory() myscan = sf.make_scan(image_range=(1, 1800), exposure_times=0.1, oscillation=(0, 0.1), epochs=range(1800), deg=True) # Build an ExperimentList experiments = ExperimentList() experiments.append( Experiment(beam=mybeam, detector=mydetector, goniometer=mygonio, scan=myscan, crystal=mycrystal, imageset=None)) # Create the PredictionParameterisation pred_param = XYPhiPredictionParameterisation(experiments, [det_param], [s0_param], [xlo_param], [xluc_param]) ################################ # Apply known parameter shifts # ################################ # shift detector by 1.0 mm each translation and 4 mrad each rotation det_p_vals = det_param.get_param_vals()
def run(self):
  '''Execute the script.

  Combines multiple experiment lists / reflection tables into one global
  pair, optionally substituting reference models taken (or averaged) from
  chosen experiments, optionally keeping only a random subset, then saves
  the combined output to JSON and pickle files.
  '''
  from dials.util.options import flatten_experiments
  from libtbx.utils import Sorry

  # Parse the command line
  params, options = self.parser.parse_args(show_diff_phil=True)

  # Try to load the models and data
  if len(params.input.experiments) == 0:
    print "No Experiments found in the input"
    self.parser.print_help()
    return
  if len(params.input.reflections) == 0:
    print "No reflection data found in the input"
    self.parser.print_help()
    return
  try:
    assert len(params.input.reflections) == len(params.input.experiments)
  except AssertionError:
    raise Sorry("The number of input reflections files does not match the "
      "number of input experiments")

  flat_exps = flatten_experiments(params.input.experiments)

  # Each reference_from_experiment entry is an experiment index (or None);
  # replace the index with the actual model object from that experiment
  ref_beam = params.reference_from_experiment.beam
  ref_goniometer = params.reference_from_experiment.goniometer
  ref_scan = params.reference_from_experiment.scan
  ref_crystal = params.reference_from_experiment.crystal
  ref_detector = params.reference_from_experiment.detector

  if ref_beam is not None:
    try:
      ref_beam = flat_exps[ref_beam].beam
    except IndexError:
      raise Sorry("{0} is not a valid experiment ID".format(ref_beam))

  if ref_goniometer is not None:
    try:
      ref_goniometer = flat_exps[ref_goniometer].goniometer
    except IndexError:
      raise Sorry("{0} is not a valid experiment ID".format(ref_goniometer))

  if ref_scan is not None:
    try:
      ref_scan = flat_exps[ref_scan].scan
    except IndexError:
      raise Sorry("{0} is not a valid experiment ID".format(ref_scan))

  if ref_crystal is not None:
    try:
      ref_crystal = flat_exps[ref_crystal].crystal
    except IndexError:
      raise Sorry("{0} is not a valid experiment ID".format(ref_crystal))

  if ref_detector is not None:
    # an explicit reference detector is mutually exclusive with averaging
    assert not params.reference_from_experiment.average_detector
    try:
      ref_detector = flat_exps[ref_detector].detector
    except IndexError:
      raise Sorry("{0} is not a valid experiment ID".format(ref_detector))
  elif params.reference_from_experiment.average_detector:
    # Average all of the detectors together
    from scitbx.matrix import col

    def average_detectors(target, panelgroups, depth):
      # Recursive function to do the averaging

      if params.reference_from_experiment.average_hierarchy_level is None or \
          depth == params.reference_from_experiment.average_hierarchy_level:
        n = len(panelgroups)
        sum_fast = col((0.0,0.0,0.0))
        sum_slow = col((0.0,0.0,0.0))
        sum_ori  = col((0.0,0.0,0.0))

        # Average the d matrix vectors
        for pg in panelgroups:
          sum_fast += col(pg.get_local_fast_axis())
          sum_slow += col(pg.get_local_slow_axis())
          sum_ori  += col(pg.get_local_origin())
        sum_fast /= n
        sum_slow /= n
        sum_ori /= n

        # Re-orthagonalize the slow and the fast vectors by rotating around the cross product
        c = sum_fast.cross(sum_slow)
        a = sum_fast.angle(sum_slow, deg=True)/2
        sum_fast = sum_fast.rotate(c, a-45, deg=True)
        sum_slow = sum_slow.rotate(c, -(a-45), deg=True)

        target.set_local_frame(sum_fast,sum_slow,sum_ori)

      if target.is_group():
        # Recurse
        for i, target_pg in enumerate(target):
          average_detectors(target_pg, [pg[i] for pg in panelgroups], depth+1)

    # average into a copy of the first experiment's detector
    ref_detector = flat_exps[0].detector
    average_detectors(ref_detector.hierarchy(),
                      [e.detector.hierarchy() for e in flat_exps], 0)

  combine = CombineWithReference(beam=ref_beam, goniometer=ref_goniometer,
                scan=ref_scan, crystal=ref_crystal, detector=ref_detector,
                params=params)

  # set up global experiments and reflections lists
  from dials.array_family import flex
  reflections = flex.reflection_table()
  global_id = 0
  from dxtbx.model.experiment.experiment_list import ExperimentList
  experiments=ExperimentList()

  # loop through the input, building up the global lists. Each experiment
  # gets a fresh sequential id in the combined reflection table
  nrefs_per_exp = []
  for ref_wrapper, exp_wrapper in zip(params.input.reflections,
                                      params.input.experiments):
    refs = ref_wrapper.data
    exps = exp_wrapper.data
    for i, exp in enumerate(exps):
      sel = refs['id'] == i
      sub_ref = refs.select(sel)
      nrefs_per_exp.append(len(sub_ref))
      sub_ref['id'] = flex.int(len(sub_ref), global_id)
      reflections.extend(sub_ref)
      experiments.append(combine(exp))
      global_id += 1

  # print number of reflections per experiment
  from libtbx.table_utils import simple_table
  header = ["Experiment", "Nref"]
  rows = [(str(i), str(n)) for (i, n) in enumerate(nrefs_per_exp)]
  st = simple_table(rows, header)
  print st.format()

  # save a random subset if requested; selected experiments are re-numbered
  # 0..n_subset-1 along with their reflections
  if params.output.n_subset is not None and len(experiments) > params.output.n_subset:
    import random
    subset_exp = ExperimentList()
    subset_refls = flex.reflection_table()
    n_picked = 0
    indices = range(len(experiments))
    while n_picked < params.output.n_subset:
      idx = indices.pop(random.randint(0, len(indices)-1))
      subset_exp.append(experiments[idx])
      refls = reflections.select(reflections['id'] == idx)
      refls['id'] = flex.int(len(refls), n_picked)
      subset_refls.extend(refls)
      n_picked += 1
    experiments = subset_exp
    reflections = subset_refls

  # save output
  from dxtbx.model.experiment.experiment_list import ExperimentListDumper
  print 'Saving combined experiments to {0}'.format(
    params.output.experiments_filename)
  dump = ExperimentListDumper(experiments)
  dump.as_json(params.output.experiments_filename)
  print 'Saving combined reflections to {0}'.format(
    params.output.reflections_filename)
  reflections.as_pickle(params.output.reflections_filename)

  return
def run(self):
  '''Execute the script.

  Variant of the combine-experiments script without the random-subset
  option: merges all input experiments/reflections into single global
  lists (optionally substituting reference or averaged models) and saves
  the combined output.
  '''
  from dials.util.options import flatten_experiments
  from libtbx.utils import Sorry

  # Parse the command line
  params, options = self.parser.parse_args(show_diff_phil=True)

  # Try to load the models and data
  if len(params.input.experiments) == 0:
    print "No Experiments found in the input"
    self.parser.print_help()
    return
  if len(params.input.reflections) == 0:
    print "No reflection data found in the input"
    self.parser.print_help()
    return
  try:
    assert len(params.input.reflections) == len(
      params.input.experiments)
  except AssertionError:
    raise Sorry(
      "The number of input reflections files does not match the "
      "number of input experiments")

  flat_exps = flatten_experiments(params.input.experiments)

  # Resolve each requested reference model index into the actual model
  ref_beam = params.reference_from_experiment.beam
  ref_goniometer = params.reference_from_experiment.goniometer
  ref_scan = params.reference_from_experiment.scan
  ref_crystal = params.reference_from_experiment.crystal
  ref_detector = params.reference_from_experiment.detector

  if ref_beam is not None:
    try:
      ref_beam = flat_exps[ref_beam].beam
    except IndexError:
      raise Sorry(
        "{0} is not a valid experiment ID".format(ref_beam))

  if ref_goniometer is not None:
    try:
      ref_goniometer = flat_exps[ref_goniometer].goniometer
    except IndexError:
      raise Sorry(
        "{0} is not a valid experiment ID".format(ref_goniometer))

  if ref_scan is not None:
    try:
      ref_scan = flat_exps[ref_scan].scan
    except IndexError:
      raise Sorry(
        "{0} is not a valid experiment ID".format(ref_scan))

  if ref_crystal is not None:
    try:
      ref_crystal = flat_exps[ref_crystal].crystal
    except IndexError:
      raise Sorry(
        "{0} is not a valid experiment ID".format(ref_crystal))

  if ref_detector is not None:
    # an explicit reference detector is mutually exclusive with averaging
    assert not params.reference_from_experiment.average_detector
    try:
      ref_detector = flat_exps[ref_detector].detector
    except IndexError:
      raise Sorry(
        "{0} is not a valid experiment ID".format(ref_detector))
  elif params.reference_from_experiment.average_detector:
    # Average all of the detectors together
    from scitbx.matrix import col

    def average_detectors(target, panelgroups, depth):
      # Recursive function to do the averaging

      if params.reference_from_experiment.average_hierarchy_level is None or \
          depth == params.reference_from_experiment.average_hierarchy_level:
        n = len(panelgroups)
        sum_fast = col((0.0, 0.0, 0.0))
        sum_slow = col((0.0, 0.0, 0.0))
        sum_ori = col((0.0, 0.0, 0.0))

        # Average the d matrix vectors
        for pg in panelgroups:
          sum_fast += col(pg.get_local_fast_axis())
          sum_slow += col(pg.get_local_slow_axis())
          sum_ori += col(pg.get_local_origin())
        sum_fast /= n
        sum_slow /= n
        sum_ori /= n

        # Re-orthagonalize the slow and the fast vectors by rotating around the cross product
        c = sum_fast.cross(sum_slow)
        a = sum_fast.angle(sum_slow, deg=True) / 2
        sum_fast = sum_fast.rotate(c, a - 45, deg=True)
        sum_slow = sum_slow.rotate(c, -(a - 45), deg=True)

        target.set_local_frame(sum_fast, sum_slow, sum_ori)

      if target.is_group():
        # Recurse
        for i, target_pg in enumerate(target):
          average_detectors(target_pg,
                            [pg[i] for pg in panelgroups], depth + 1)

    # average into a copy of the first experiment's detector
    ref_detector = flat_exps[0].detector
    average_detectors(ref_detector.hierarchy(),
                      [e.detector.hierarchy() for e in flat_exps], 0)

  combine = CombineWithReference(beam=ref_beam,
                                 goniometer=ref_goniometer,
                                 scan=ref_scan, crystal=ref_crystal,
                                 detector=ref_detector, params=params)

  # set up global experiments and reflections lists
  from dials.array_family import flex
  reflections = flex.reflection_table()
  global_id = 0
  from dxtbx.model.experiment.experiment_list import ExperimentList
  experiments = ExperimentList()

  # loop through the input, building up the global lists. Each experiment
  # gets a fresh sequential id in the combined reflection table
  nrefs_per_exp = []
  for ref_wrapper, exp_wrapper in zip(params.input.reflections,
                                      params.input.experiments):
    refs = ref_wrapper.data
    exps = exp_wrapper.data
    for i, exp in enumerate(exps):
      sel = refs['id'] == i
      sub_ref = refs.select(sel)
      nrefs_per_exp.append(len(sub_ref))
      sub_ref['id'] = flex.int(len(sub_ref), global_id)
      reflections.extend(sub_ref)
      experiments.append(combine(exp))
      global_id += 1

  # print number of reflections per experiment
  from libtbx.table_utils import simple_table
  header = ["Experiment", "Nref"]
  rows = [(str(i), str(n)) for (i, n) in enumerate(nrefs_per_exp)]
  st = simple_table(rows, header)
  print st.format()

  # save output
  from dxtbx.model.experiment.experiment_list import ExperimentListDumper
  print 'Saving combined experiments to {0}'.format(
    params.output.experiments_filename)
  dump = ExperimentListDumper(experiments)
  dump.as_json(params.output.experiments_filename)
  print 'Saving combined reflections to {0}'.format(
    params.output.reflections_filename)
  reflections.as_pickle(params.output.reflections_filename)

  return
def __init__(self,Ibase,Gbase,I_visited,G_visited,FSIM,**kwargs): g_counter=0; forward_map_G=flex.size_t(len(G_visited)); backward_map_G=flex.size_t() for s in xrange(len(G_visited)): #print s, G_visited[s], c[len_I + s], c[len_I + len(Gbase) + s] if G_visited[s]: forward_map_G[s] = g_counter backward_map_G.append(s) g_counter+=1 subsetGbase = Gbase.select(backward_map_G) remapped_frame = forward_map_G.select(FSIM.frame) i_counter=0; forward_map_I=flex.size_t(len(I_visited)); backward_map_I=flex.size_t() for s in xrange(len(I_visited)): #print s,I_visited[s], c[s] if I_visited[s]: forward_map_I[s] = i_counter backward_map_I.append(s) i_counter+=1 subsetIbase = Ibase.select(backward_map_I) remapped_miller = forward_map_I.select(FSIM.miller) from cctbx.examples.merging import intensity_data remapped_FSIM = intensity_data() remapped_FSIM.raw_obs = FSIM.raw_obs remapped_FSIM.exp_var = FSIM.exp_var remapped_FSIM.stol_sq = FSIM.stol_sq remapped_FSIM.origHKL = FSIM.origHKL remapped_FSIM.frame = remapped_frame remapped_FSIM.miller = remapped_miller if kwargs.has_key('experiments'): # XXX seems like we need to implement a proper select statement for ExperimentList # kwargs["experiments"] = kwargs["experiments"].select(G_visited==1) from dxtbx.model.experiment.experiment_list import ExperimentList new_experiments = ExperimentList() for idx in xrange(len(G_visited)): if G_visited[idx]==1: new_experiments.append(kwargs["experiments"][idx]) kwargs["experiments"] = new_experiments base_class.__init__(self,subsetIbase,subsetGbase,remapped_FSIM,**kwargs) fitted = self.unpack() fitted_stddev = self.unpack_stddev() def help_expand_data(data): result = {} for key in data.keys(): if key=="I": ex = flex.double(len(Ibase)) for s in xrange(len(I_visited)): if I_visited[s]: ex[s] = data[key][forward_map_I[s]] result[key]=ex elif key in ["G", "B", "D", "Ax", "Ay"]: ex = flex.double(len(Gbase)) for s in xrange(len(G_visited)): if G_visited[s]: ex[s] = data[key][forward_map_G[s]] 
result[key]=ex return result self.expanded = help_expand_data(fitted) self.expanded_stddev = help_expand_data(fitted_stddev) print "DONE UNMAPPING HERE"
def run(verbose = False): # Build models, with a larger crystal than default in order to get plenty of # reflections on the 'still' image overrides = """ geometry.parameters.crystal.a.length.range=40 50; geometry.parameters.crystal.b.length.range=40 50; geometry.parameters.crystal.c.length.range=40 50; geometry.parameters.random_seed = 42""" master_phil = parse(""" include scope dials.test.algorithms.refinement.geometry_phil """, process_includes=True) models = Extract(master_phil, overrides) mydetector = models.detector mygonio = models.goniometer mycrystal = models.crystal mybeam = models.beam # Build a mock scan for a 3 degree sweep from dxtbx.model.scan import scan_factory sf = scan_factory() myscan = sf.make_scan(image_range = (1,1), exposure_times = 0.1, oscillation = (0, 3.0), epochs = range(1), deg = True) sweep_range = myscan.get_oscillation_range(deg=False) # Create parameterisations of these models det_param = DetectorParameterisationSinglePanel(mydetector) s0_param = BeamParameterisation(mybeam, mygonio) xlo_param = CrystalOrientationParameterisation(mycrystal) xluc_param = CrystalUnitCellParameterisation(mycrystal) # Create a scans ExperimentList, only for generating reflections experiments = ExperimentList() experiments.append(Experiment( beam=mybeam, detector=mydetector, goniometer=mygonio, scan=myscan, crystal=mycrystal, imageset=None)) # Create a stills ExperimentList stills_experiments = ExperimentList() stills_experiments.append(Experiment( beam=mybeam, detector=mydetector, crystal=mycrystal, imageset=None)) # Generate rays - only to work out which hkls are predicted ray_predictor = ScansRayPredictor(experiments, sweep_range) resolution = 2.0 index_generator = IndexGenerator(mycrystal.get_unit_cell(), space_group(space_group_symbols(1).hall()).type(), resolution) indices = index_generator.to_array() rays = ray_predictor.predict(indices) # Make a standard reflection_table and copy in the ray data reflections = 
flex.reflection_table.empty_standard(len(rays)) reflections.update(rays) # Build a standard prediction parameterisation for the stills experiment to do # FD calculation (not used for its analytical gradients) pred_param = StillsPredictionParameterisation(stills_experiments, detector_parameterisations = [det_param], beam_parameterisations = [s0_param], xl_orientation_parameterisations = [xlo_param], xl_unit_cell_parameterisations = [xluc_param]) # Make a managed SphericalRelpStillsReflectionPredictor reflection predictor # for the first (only) experiment ref_predictor = Predictor(stills_experiments) # Predict these reflections in place. Must do this ahead of calculating # the analytical gradients so quantities like s1 are correct ref_predictor.update() ref_predictor.predict(reflections) # calculate analytical gradients ag = AnalyticalGradients(stills_experiments, detector_parameterisation=det_param, beam_parameterisation=s0_param, xl_orientation_parameterisation=xlo_param, xl_unit_cell_parameterisation=xluc_param) an_grads = ag.get_beam_gradients(reflections) an_grads.update(ag.get_crystal_orientation_gradients(reflections)) an_grads.update(ag.get_crystal_unit_cell_gradients(reflections)) # get finite difference gradients p_vals = pred_param.get_param_vals() deltas = [1.e-7] * len(p_vals) fd_grads = [] p_names = pred_param.get_param_names() for i in range(len(deltas)): # save parameter value val = p_vals[i] # calc reverse state p_vals[i] -= deltas[i] / 2. 
pred_param.set_param_vals(p_vals) ref_predictor.update() ref_predictor.predict(reflections) x, y, _ = reflections['xyzcal.mm'].deep_copy().parts() delpsi = reflections['delpsical.rad'].deep_copy() s1 = reflections['s1'].deep_copy() rev_state = s1 # calc forward state p_vals[i] += deltas[i] pred_param.set_param_vals(p_vals) ref_predictor.update() ref_predictor.predict(reflections) x, y, _ = reflections['xyzcal.mm'].deep_copy().parts() delpsi = reflections['delpsical.rad'].deep_copy() s1 = reflections['s1'].deep_copy() fwd_state = s1 # reset parameter to saved value p_vals[i] = val # finite difference - currently for s1 only fd = (fwd_state - rev_state) inv_delta = 1. / deltas[i] s1_grads = fd * inv_delta # store gradients fd_grads.append({'name':p_names[i], 'ds1':s1_grads}) # return to the initial state pred_param.set_param_vals(p_vals) for i, fd_grad in enumerate(fd_grads): ## compare FD with analytical calculations if verbose: print "\n\nParameter {0}: {1}". format(i, fd_grad['name']) print "d[s1]/dp for the first reflection" print 'finite diff', fd_grad['ds1'][0] try: an_grad = an_grads[fd_grad['name']] except KeyError: continue print 'checking analytical vs finite difference gradients for s1' for a, b in zip(fd_grad['ds1'], an_grad['ds1']): assert approx_equal(a, b) print 'OK'
sf = scan_factory() myscan = sf.make_scan(image_range = (1,1800), exposure_times = 0.1, oscillation = (0, 0.1), epochs = range(1800), deg = True) sweep_range = myscan.get_oscillation_range(deg=False) temp = myscan.get_oscillation(deg=False) im_width = temp[1] - temp[0] assert sweep_range == (0., pi) assert approx_equal(im_width, 0.1 * pi / 180.) # Build an experiment list experiments = ExperimentList() experiments.append(Experiment( beam=mybeam, detector=mydetector, goniometer=mygonio, scan=myscan, crystal=mycrystal, imageset=None)) ########################### # Parameterise the models # ########################### det_param = DetectorParameterisationSinglePanel(mydetector) s0_param = BeamParameterisation(mybeam, mygonio) xlo_param = CrystalOrientationParameterisation(mycrystal) xluc_param = CrystalUnitCellParameterisation(mycrystal) # TEMPORARY TESTING HERE from dials.algorithms.refinement.restraints.restraints import SingleUnitCellTie uct = SingleUnitCellTie(xluc_param, [None]*6, [None]*6)
def test_refinement():
  '''Test a refinement run.

  Loads a CS-PAD beam/detector from dials_regression, invents a P1 crystal,
  goniometer and 180 degree scan, simulates reflections, perturbs the unit
  cell, then refines with the two-theta target and checks the refined cell
  returns close to the original.
  '''
  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # Get a beam and detector from a datablock. This one has a CS-PAD, but that
  # is irrelevant
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "hierarchy_test")
  datablock_path = os.path.join(data_dir, "datablock.json")
  assert os.path.exists(datablock_path)

  # load models
  from dxtbx.datablock import DataBlockFactory
  datablock = DataBlockFactory.from_serialized_format(datablock_path,
                                                      check_format=False)
  im_set = datablock[0].extract_imagesets()[0]
  from copy import deepcopy
  # deep copy so the reference detector is independent of the imageset's
  detector = deepcopy(im_set.get_detector())
  beam = im_set.get_beam()

  # Invent a crystal, goniometer and scan for this test
  from dxtbx.model.crystal import crystal_model
  crystal = crystal_model((40.,0.,0.) ,(0.,40.,0.), (0.,0.,40.),
                          space_group_symbol = "P1")
  # keep a pristine copy of the crystal for the final comparison
  orig_xl = deepcopy(crystal)

  from dxtbx.model.experiment import goniometer_factory
  goniometer = goniometer_factory.known_axis((1., 0., 0.))

  # Build a mock scan for a 180 degree sweep
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  scan = sf.make_scan(image_range = (1,1800),
                      exposure_times = 0.1,
                      oscillation = (0, 0.1),
                      epochs = range(1800),
                      deg = True)
  sweep_range = scan.get_oscillation_range(deg=False)
  im_width = scan.get_oscillation(deg=False)[1]
  assert sweep_range == (0., pi)
  assert approx_equal(im_width, 0.1 * pi / 180.)

  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  # Build an experiment list
  experiments = ExperimentList()
  experiments.append(Experiment(
        beam=beam, detector=detector, goniometer=goniometer,
        scan=scan, crystal=crystal, imageset=None))

  # simulate some reflections
  # NOTE(review): generate_reflections is a sibling function in this module
  refs, _ = generate_reflections(experiments)

  # change unit cell a bit (=0.1 Angstrom length upsets, 0.1 degree of
  # alpha and beta angles)
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
    CrystalUnitCellParameterisation
  xluc_param = CrystalUnitCellParameterisation(crystal)
  xluc_p_vals = xluc_param.get_param_vals()
  cell_params = crystal.get_unit_cell().parameters()
  cell_params = [a + b for a, b in zip(cell_params, [0.1, -0.1, 0.1, 0.1,
                                                     -0.1, 0.0])]
  from cctbx.uctbx import unit_cell
  from rstbx.symmetry.constraints.parameter_reduction import \
      symmetrize_reduce_enlarge
  from scitbx import matrix
  new_uc = unit_cell(cell_params)
  newB = matrix.sqr(new_uc.fractionalization_matrix()).transpose()
  S = symmetrize_reduce_enlarge(crystal.get_space_group())
  S.set_orientation(orientation=newB)
  # parameter values are scaled by 1e5 by convention in this parameterisation
  X = tuple([e * 1.e5 for e in S.forward_independent_parameters()])
  xluc_param.set_param_vals(X)

  # reparameterise the crystal at the perturbed geometry
  xluc_param = CrystalUnitCellParameterisation(crystal)

  # Dummy parameterisations for other models
  beam_param = None
  xlo_param = None
  det_param = None

  # parameterisation of the prediction equation
  from dials.algorithms.refinement.parameterisation.parameter_report import \
      ParameterReporter
  pred_param = TwoThetaPredictionParameterisation(experiments,
    det_param, beam_param, xlo_param, [xluc_param])
  param_reporter = ParameterReporter(det_param, beam_param,
                                     xlo_param, [xluc_param])

  # reflection manager
  refman = TwoThetaReflectionManager(refs, experiments, nref_per_degree=20,
    verbosity=2)

  # reflection predictor
  ref_predictor = TwoThetaExperimentsPredictor(experiments)

  # target function
  target = TwoThetaTarget(experiments, ref_predictor, refman, pred_param)

  # minimisation engine
  from dials.algorithms.refinement.engine \
    import LevenbergMarquardtIterations as Refinery
  refinery = Refinery(target = target,
                      prediction_parameterisation = pred_param,
                      log = None,
                      verbosity = 0,
                      track_step = False,
                      track_gradient = False,
                      track_parameter_correlation = False,
                      max_iterations = 20)

  # Refiner
  from dials.algorithms.refinement.refiner import Refiner
  refiner = Refiner(reflections=refs,
                    experiments=experiments,
                    pred_param=pred_param,
                    param_reporter=param_reporter,
                    refman=refman,
                    target=target,
                    refinery=refinery,
                    verbosity=1)

  history = refiner.run()

  # compare crystal with original crystal
  refined_xl = refiner.get_experiments()[0].crystal

  #print refined_xl
  assert refined_xl.is_similar_to(orig_xl, uc_rel_length_tolerance=0.001,
    uc_abs_angle_tolerance=0.01)

  #print "Unit cell esds:"
  #print refined_xl.get_cell_parameter_sd()

  return
def __call__(self, experiments, reflections): comm = MPI.COMM_WORLD rank = comm.Get_rank() params = self.working_phil.extract() if rank == 0: data = [] size =comm.Get_size() chunk_size = len(experiments) // size remainder = len(experiments) % size pointer = 0 self.working_phil.show() for i in xrange(size): if i < remainder: sel_range = xrange(pointer,pointer+chunk_size+1) else: sel_range = xrange(pointer,pointer+chunk_size) sel = flex.bool(len(reflections)) for exp_id in sel_range: sel |= reflections['id'] == exp_id if i < remainder: data.append((range(pointer,pointer+chunk_size+1),experiments[pointer:pointer+chunk_size+1],reflections.select(sel))) pointer += 1 else: data.append((range(pointer,pointer+chunk_size),experiments[pointer:pointer+chunk_size],reflections.select(sel))) pointer += chunk_size else: data = None data = comm.scatter(data, root=0) for i, (iexp, exp) in enumerate(zip(data[0],data[1])): print "Refining crystal", iexp # reflection subset for a single experiment refs = data[2].select(data[2]['id'] == iexp) refs['id'] = flex.size_t(len(refs),0) # experiment list for a single experiment exps=ExperimentList() exps.append(exp) refiner = RefinerFactory.from_parameters_data_experiments( params, refs, exps, verbosity=1) # do refinement refiner.run() refined_exps = refiner.get_experiments() # replace this experiment with the refined one data[1][i] = refined_exps[0] data = comm.gather(data, root=0) if rank == 0: for chunk in data: for iexp, experiment in zip(chunk[0], chunk[1]): experiments[iexp] = experiment return experiments else: assert data == None
def load(entry, exp_index):
  '''Load an ExperimentList from an NXmx-format HDF5 entry.

  entry     -- an open NeXus entry group containing one or more NXmx entries
  exp_index -- list of entry names, in the order experiments were written

  Reads per-experiment models (beam, detector, goniometer, scan, crystal),
  undoes the NeXus beam-direction convention, then re-shares models that
  were shared at write time (identified via the dials 'index' attributes).
  '''
  from dxtbx.model.experiment.experiment_list import ExperimentList
  from dxtbx.model.experiment.experiment_list import Experiment
  print "Loading NXmx"

  # Check file contains the feature
  assert("features" in entry)
  assert(6 in entry['features'].value)

  experiment_list = ExperimentList()

  # Find all the experiments
  entries = find_nx_mx_entries(entry, ".")
  if len(entries) > 1:
    entries = sorted(entries, key=lambda x: x['dials/index'].value)

  # entries must be in one-to-one, same-order correspondence with exp_index
  assert(len(entries) == len(exp_index))
  for nxmx, name in zip(entries, exp_index):
    assert(nxmx.name == name)

  index = []
  rotations = []
  for name in exp_index:
    # Get the entry
    nxmx = entry.file[name]

    # Get the definition
    definition = nxmx['definition']
    assert(definition.value == 'NXmx')
    assert(definition.attrs['version'] == 1)

    # Get dials specific stuff
    nx_dials = get_nx_dials(nxmx, "dials")

    # Set index: (beam, detector, goniometer, scan, sample) identifiers
    # used below to restore model sharing; goniometer/scan are optional
    b = nx_dials['index'].attrs['source']
    d = nx_dials['index'].attrs['detector']
    if "goniometer" in nx_dials['index'].attrs:
      g = nx_dials['index'].attrs['goniometer']
    else:
      g = None
    if "scan" in nx_dials['index'].attrs:
      s = nx_dials['index'].attrs['scan']
    else:
      s = None
    c = nx_dials['index'].attrs['sample']
    index.append((b, d, g, s, c))

    # Get the original orientation (dials specific)
    transformations = get_nx_transformations(nx_dials, "transformations")
    angle = transformations['angle'].value
    assert(transformations['angle'].attrs['transformation_type'] == 'rotation')
    axis = transformations['angle'].attrs['vector']
    assert(tuple(transformations['angle'].attrs['offset']) == (0, 0, 0))
    assert(transformations['angle'].attrs['offset_units'] == 'mm')
    assert(transformations['angle'].attrs['depends_on'] == '.')
    rotations.append((axis, angle))

    # Get the template and imageset
    # NOTE(review): 'template' may be stored either as a list or a scalar
    # string; the broad except distinguishes the two storage layouts
    try:
      template = list(nx_dials['template'])
      image_range = None
    except Exception:
      template = nx_dials['template'].value
      if template == "":
        template = None
      if "range" in nx_dials['template'].attrs:
        image_range = nx_dials['template'].attrs['range']
      else:
        image_range = None

    # Create the experiment
    experiment = Experiment()

    # Read the models
    experiment.beam = load_beam(nxmx)
    experiment.detector = load_detector(nxmx)
    experiment.goniometer = load_goniometer(nxmx)
    experiment.scan = load_scan(nxmx)
    experiment.crystal = load_crystal(nxmx)

    # Set the image range
    if image_range is not None and experiment.scan is not None:
      num = image_range[1] - image_range[0] + 1
      assert(num == len(experiment.scan))
      experiment.scan.set_image_range(image_range)

    # Append to the experiment list
    experiment_list.append(experiment)

  # Convert from nexus beam direction
  experiment_list = convert_from_nexus_beam_direction(experiment_list,
                                                      rotations)

  # Group experiments that recorded the same model identifier
  from collections import defaultdict
  beam = defaultdict(list)
  detector = defaultdict(list)
  goniometer = defaultdict(list)
  scan = defaultdict(list)
  crystal = defaultdict(list)
  for i, ind in enumerate(index):
    beam[ind[0]].append(i)
    detector[ind[1]].append(i)
    goniometer[ind[2]].append(i)
    scan[ind[3]].append(i)
    crystal[ind[4]].append(i)

  # Set all the shared beams
  for key, value in beam.iteritems():
    b1 = experiment_list[value[0]].beam
    assert(all(experiment_list[v].beam == b1 for v in value[1:]))
    for v in value[1:]:
      experiment_list[v].beam = b1

  # Set all the shared detectors
  for key, value in detector.iteritems():
    d1 = experiment_list[value[0]].detector
    assert(all(experiment_list[v].detector == d1 for v in value[1:]))
    for v in value[1:]:
      experiment_list[v].detector = d1

  # Set all the shared goniometer
  for key, value in goniometer.iteritems():
    g1 = experiment_list[value[0]].goniometer
    assert(all(experiment_list[v].goniometer == g1 for v in value[1:]))
    for v in value[1:]:
      experiment_list[v].goniometer = g1

  # Set all the shared scans
  for key, value in scan.iteritems():
    s1 = experiment_list[value[0]].scan
    assert(all(experiment_list[v].scan == s1 for v in value[1:]))
    for v in value[1:]:
      experiment_list[v].scan = s1

  # Set all the shared crystals
  for key, value in crystal.iteritems():
    c1 = experiment_list[value[0]].crystal
    assert(all(experiment_list[v].crystal == c1 for v in value[1:]))
    for v in value[1:]:
      experiment_list[v].crystal = c1

  return experiment_list
def find_lattices(self):
  '''Search for candidate lattices via a 3D FFT of mapped spot centroids.

  Performs the FFT, peak search and basis vector search according to
  self.params, then returns an ExperimentList with one Experiment per
  (crystal model, imageset) pair found.
  '''
  if self.params.fft3d.reciprocal_space_grid.d_min is libtbx.Auto:
    # rough calculation of suitable d_min based on max cell
    # see also Campbell, J. (1998). J. Appl. Cryst., 31(3), 407-413.
    # fft_cell should be greater than twice max_cell, so say:
    #   fft_cell = 2.5 * max_cell
    # then:
    #   fft_cell = n_points * d_min/2
    #   2.5 * max_cell = n_points * d_min/2
    # a little bit of rearrangement:
    #   d_min = 5 * max_cell/n_points
    max_cell = self.params.max_cell
    d_min = (
      5 * max_cell / self.params.fft3d.reciprocal_space_grid.n_points)
    d_spacings = 1/self.reflections['rlp'].norms()
    # never set d_min finer than the data actually extend
    self.params.fft3d.reciprocal_space_grid.d_min = max(
      d_min, min(d_spacings))
    logger.info("Setting d_min: %.2f"
                %self.params.fft3d.reciprocal_space_grid.d_min)
  n_points = self.params.fft3d.reciprocal_space_grid.n_points
  # adjust gridding to FFT-friendly sizes (prime factors <= 5)
  self.gridding = fftpack.adjust_gridding_triple(
    (n_points,n_points,n_points), max_prime=5)
  n_points = self.gridding[0]
  self.map_centroids_to_reciprocal_space_grid()
  self.d_min = self.params.fft3d.reciprocal_space_grid.d_min

  logger.info("Number of centroids used: %i" %(
    (self.reciprocal_space_grid>0).count(True)))
  self.fft()
  if self.params.debug:
    self.debug_write_ccp4_map(map_data=self.grid_real, file_name="fft3d.map")
  if self.params.fft3d.peak_search == 'flood_fill':
    self.find_peaks()
  elif self.params.fft3d.peak_search == 'clean':
    self.find_peaks_clean()
  if self.params.multiple_lattice_search.cluster_analysis_search:
    # multi-lattice path: cluster analysis may yield several crystal models
    self.find_basis_vector_combinations_cluster_analysis()
    self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
      self.debug_plot_candidate_basis_vectors()
    crystal_models = self.candidate_crystal_models
    if self.params.multiple_lattice_search.max_lattices is not None:
      crystal_models = \
        crystal_models[:self.params.multiple_lattice_search.max_lattices]
  else:
    # single-lattice path: pick the best orientation matrix, if any
    self.find_candidate_basis_vectors()
    self.debug_show_candidate_basis_vectors()
    if self.params.debug_plots:
      self.debug_plot_candidate_basis_vectors()
    self.candidate_crystal_models = self.find_candidate_orientation_matrices(
      self.candidate_basis_vectors,
      max_combinations=self.params.basis_vector_combinations.max_try)
    crystal_model, n_indexed = self.choose_best_orientation_matrix(
      self.candidate_crystal_models)
    if crystal_model is not None:
      crystal_models = [crystal_model]
    else:
      crystal_models = []
  # one Experiment per (crystal, imageset) combination
  experiments = ExperimentList()
  for cm in crystal_models:
    for imageset in self.imagesets:
      experiments.append(Experiment(imageset=imageset,
                                    beam=imageset.get_beam(),
                                    detector=imageset.get_detector(),
                                    goniometer=imageset.get_goniometer(),
                                    scan=imageset.get_scan(),
                                    crystal=cm))
  return experiments
print(input.experiments, input.reflections) print(len(working_params.input), "datasets specified as input") e = enumerate(working_params.input) i, line = next(e) reflections, exp = load_input(line.experiments, line.reflections) assert reflections["id"].all_eq(0) from dials.algorithms.indexing.indexer import Indexer reflections = Indexer.map_spots_pixel_to_mm_rad( reflections, exp.detector, exp.scan) experiment_from_crystal = ExperimentFromCrystal(exp.beam, exp.detector) experiments = ExperimentList() experiments.append(experiment_from_crystal(exp.crystal)) for i, line in e: refs, exp = load_input(line.experiments, line.reflections) print(i, line.reflections, len(refs)) refs["id"] = flex.size_t(len(refs), i) refs = Indexer.map_spots_pixel_to_mm_rad(refs, exp.detector, exp.scan) reflections.extend(refs) experiments.append(experiment_from_crystal(exp.crystal)) dr = DetectorRefiner() else: experiments = None reflections = None dr = None
class read_experiments(object):
  '''Build an ExperimentList from a cxi.merge-style "_frame" pickle.

  Each whitespace-separated record is decoded into a beam, a placeholder
  detector and a crystal model; per-frame file names are also collected.
  NOTE(review): token positions come from a module-level `order_dict`
  defined elsewhere in this file — verify its keys match the pickle format.
  '''
  def __init__(self,params):
    import cPickle as pickle
    from dxtbx.model.beam import beam_factory
    from dxtbx.model.detector import detector_factory
    from dxtbx.model.crystal import crystal_model
    from cctbx.crystal_orientation import crystal_orientation,basis_type
    from dxtbx.model.experiment.experiment_list import Experiment, ExperimentList
    from scitbx import matrix
    self.experiments = ExperimentList()     # one Experiment per frame record
    self.unique_file_names = []             # per-frame source image names
    self.params = params

    # the pickle holds one newline-separated text record per frame
    data = pickle.load(open(self.params.output.prefix+"_frame.pickle","rb"))
    frames_text = data.split("\n")

    for item in frames_text:
      tokens = item.split(' ')
      wavelength = float(tokens[order_dict["wavelength"]])

      beam = beam_factory.simple(wavelength = wavelength)

      detector = detector_factory.simple(
        sensor = detector_factory.sensor("PAD"), # XXX shouldn't hard code for XFEL
        distance = float(tokens[order_dict["distance"]]),
        beam_centre = [float(tokens[order_dict["beam_x"]]),
                       float(tokens[order_dict["beam_y"]])],
        fast_direction = "+x",
        slow_direction = "+y",
        pixel_size = [self.params.pixel_size,self.params.pixel_size],
        image_size = [1795,1795], # XXX obviously need to figure this out
        )

      # reciprocal-space orientation matrix stored row-wise in the record
      reciprocal_matrix = matrix.sqr([float(tokens[order_dict[k]]) for k in [
        'res_ori_1','res_ori_2','res_ori_3','res_ori_4','res_ori_5',
        'res_ori_6','res_ori_7','res_ori_8','res_ori_9']])
      ORI = crystal_orientation(reciprocal_matrix, basis_type.reciprocal)
      direct = matrix.sqr(ORI.direct_matrix())
      crystal = crystal_model(
        real_space_a = matrix.row(direct[0:3]),
        real_space_b = matrix.row(direct[3:6]),
        real_space_c = matrix.row(direct[6:9]),
        space_group_symbol = "P63", # XXX obviously another gap in the database paradigm
        mosaicity = float(tokens[order_dict["half_mosaicity_deg"]]),
        )
      crystal.domain_size = float(tokens[order_dict["domain_size_ang"]])
      #if isoform is not None:
      #  newB = matrix.sqr(isoform.fractionalization_matrix()).transpose()
      #  crystal.set_B(newB)

      self.experiments.append(Experiment(beam=beam,
                                         detector=None, #dummy for now
                                         crystal=crystal))
      self.unique_file_names.append(tokens[order_dict["unique_file_name"]])

    self.show_summary()

  def get_experiments(self):
    '''Return the accumulated ExperimentList.'''
    return self.experiments

  def get_files(self):
    '''Return the per-frame unique file names, parallel to experiments.'''
    return self.unique_file_names

  def show_summary(self):
    '''Print mean/sd of wavelength, unit cell lengths and domain size.'''
    w = flex.double([e.beam.get_wavelength() for e in self.experiments])
    stats=flex.mean_and_variance(w)
    print "Wavelength mean and standard deviation:",stats.mean(),stats.unweighted_sample_standard_deviation()
    uc = [e.crystal.get_unit_cell().parameters() for e in self.experiments]
    a = flex.double([u[0] for u in uc])
    stats=flex.mean_and_variance(a)
    print "Unit cell a mean and standard deviation:",stats.mean(),stats.unweighted_sample_standard_deviation()
    b = flex.double([u[1] for u in uc])
    stats=flex.mean_and_variance(b)
    print "Unit cell b mean and standard deviation:",stats.mean(),stats.unweighted_sample_standard_deviation()
    c = flex.double([u[2] for u in uc])
    stats=flex.mean_and_variance(c)
    print "Unit cell c mean and standard deviation:",stats.mean(),stats.unweighted_sample_standard_deviation()
    d = flex.double([e.crystal.domain_size for e in self.experiments])
    stats=flex.mean_and_variance(d)
    # NOTE XXX FIXME: cxi.index seems to record the half-domain size; report here the full domain size
    print "Domain size mean and standard deviation:",2.*stats.mean(),2.*stats.unweighted_sample_standard_deviation()
def __init__(self, Ibase, Gbase, I_visited, G_visited, FSIM, **kwargs): g_counter = 0 forward_map_G = flex.size_t(len(G_visited)) backward_map_G = flex.size_t() for s in xrange(len(G_visited)): #print s, G_visited[s], c[len_I + s], c[len_I + len(Gbase) + s] if G_visited[s]: forward_map_G[s] = g_counter backward_map_G.append(s) g_counter += 1 subsetGbase = Gbase.select(backward_map_G) remapped_frame = forward_map_G.select(FSIM.frame) i_counter = 0 forward_map_I = flex.size_t(len(I_visited)) backward_map_I = flex.size_t() for s in xrange(len(I_visited)): #print s,I_visited[s], c[s] if I_visited[s]: forward_map_I[s] = i_counter backward_map_I.append(s) i_counter += 1 subsetIbase = Ibase.select(backward_map_I) remapped_miller = forward_map_I.select(FSIM.miller) from cctbx.examples.merging import intensity_data remapped_FSIM = intensity_data() remapped_FSIM.raw_obs = FSIM.raw_obs remapped_FSIM.exp_var = FSIM.exp_var remapped_FSIM.stol_sq = FSIM.stol_sq remapped_FSIM.origHKL = FSIM.origHKL remapped_FSIM.frame = remapped_frame remapped_FSIM.miller = remapped_miller if kwargs.has_key('experiments'): # XXX seems like we need to implement a proper select statement for ExperimentList # kwargs["experiments"] = kwargs["experiments"].select(G_visited==1) from dxtbx.model.experiment.experiment_list import ExperimentList new_experiments = ExperimentList() for idx in xrange(len(G_visited)): if G_visited[idx] == 1: new_experiments.append(kwargs["experiments"][idx]) kwargs["experiments"] = new_experiments base_class.__init__(self, subsetIbase, subsetGbase, remapped_FSIM, **kwargs) fitted = self.unpack() fitted_stddev = self.unpack_stddev() def help_expand_data(data): result = {} for key in data.keys(): if key == "I": ex = flex.double(len(Ibase)) for s in xrange(len(I_visited)): if I_visited[s]: ex[s] = data[key][forward_map_I[s]] result[key] = ex elif key in ["G", "B", "D", "Ax", "Ay"]: ex = flex.double(len(Gbase)) for s in xrange(len(G_visited)): if G_visited[s]: ex[s] = 
data[key][forward_map_G[s]] result[key] = ex return result self.expanded = help_expand_data(fitted) self.expanded_stddev = help_expand_data(fitted_stddev) print "DONE UNMAPPING HERE"
def run(self): '''Execute the script.''' from dials.algorithms.refinement.two_theta_refiner import \ TwoThetaReflectionManager, TwoThetaTarget, \ TwoThetaPredictionParameterisation start_time = time() # Parse the command line params, options = self.parser.parse_args(show_diff_phil=False) # set up global experiments and reflections lists from dials.array_family import flex reflections = flex.reflection_table() global_id = 0 from dxtbx.model.experiment.experiment_list import ExperimentList experiments = ExperimentList() # loop through the input, building up the global lists nrefs_per_exp = [] for ref_wrapper, exp_wrapper in zip(params.input.reflections, params.input.experiments): refs = ref_wrapper.data exps = exp_wrapper.data for i, exp in enumerate(exps): sel = refs['id'] == i sub_ref = refs.select(sel) nrefs_per_exp.append(len(sub_ref)) sub_ref['id'] = flex.int(len(sub_ref), global_id) reflections.extend(sub_ref) experiments.append(exp) global_id += 1 # Try to load the models and data nexp = len(experiments) if nexp == 0: print "No Experiments found in the input" self.parser.print_help() return if len(reflections) == 0: print "No reflection data found in the input" self.parser.print_help() return self.check_input(reflections) # Configure the logging log.config(info=params.output.log, debug=params.output.debug_log) logger.info(dials_version()) # Log the diff phil diff_phil = self.parser.diff_phil.as_str() if diff_phil is not '': logger.info('The following parameters have been modified:\n') logger.info(diff_phil) # Convert to P 1? if params.refinement.triclinic: reflections, experiments = self.convert_to_P1( reflections, experiments) # Combine crystals? if params.refinement.combine_crystal_models and len(experiments) > 1: logger.info('Combining {0} crystal models'.format( len(experiments))) experiments = self.combine_crystals(experiments) # Filter integrated centroids? 
if params.refinement.filter_integrated_centroids: reflections = self.filter_integrated_centroids(reflections) # Get the refiner logger.info('Configuring refiner') refiner = self.create_refiner(params, reflections, experiments) # Refine the geometry if nexp == 1: logger.info('Performing refinement of a single Experiment...') else: logger.info( 'Performing refinement of {0} Experiments...'.format(nexp)) # Refine and get the refinement history history = refiner.run() # get the refined experiments experiments = refiner.get_experiments() crystals = experiments.crystals() if len(crystals) == 1: # output the refined model for information logger.info('') logger.info('Final refined crystal model:') logger.info(crystals[0]) logger.info(self.cell_param_table(crystals[0])) # Save the refined experiments to file output_experiments_filename = params.output.experiments logger.info('Saving refined experiments to {0}'.format( output_experiments_filename)) from dxtbx.model.experiment.experiment_list import ExperimentListDumper dump = ExperimentListDumper(experiments) dump.as_json(output_experiments_filename) # Correlation plot if params.output.correlation_plot.filename is not None: from os.path import splitext root, ext = splitext(params.output.correlation_plot.filename) if not ext: ext = ".pdf" steps = params.output.correlation_plot.steps if steps is None: steps = [history.get_nrows() - 1] # extract individual column names or indices col_select = params.output.correlation_plot.col_select num_plots = 0 for step in steps: fname_base = root if len(steps) > 1: fname_base += "_step%02d" % step plot_fname = fname_base + ext corrmat, labels = refiner.get_parameter_correlation_matrix( step, col_select) if [corrmat, labels].count(None) == 0: from dials.algorithms.refinement.refinement_helpers import corrgram plt = corrgram(corrmat, labels) if plt is not None: logger.info( 'Saving parameter correlation plot to {}'.format( plot_fname)) plt.savefig(plot_fname) num_plots += 1 mat_fname = 
fname_base + ".pickle" with open(mat_fname, 'wb') as handle: py_mat = corrmat.as_scitbx_matrix( ) #convert to pickle-friendly form logger.info( 'Saving parameter correlation matrix to {0}'. format(mat_fname)) pickle.dump({ 'corrmat': py_mat, 'labels': labels }, handle) if num_plots == 0: msg = "Sorry, no parameter correlation plots were produced. Please set " \ "track_parameter_correlation=True to ensure correlations are " \ "tracked, and make sure correlation_plot.col_select is valid." logger.info(msg) if params.output.cif is not None: self.generate_cif(crystals[0], refiner, file=params.output.cif) if params.output.p4p is not None: self.generate_p4p(crystals[0], experiments[0].beam, file=params.output.p4p) if params.output.mmcif is not None: self.generate_mmcif(crystals[0], refiner, file=params.output.mmcif) # Log the total time taken logger.info("\nTotal time taken: {0:.2f}s".format(time() - start_time))
def test1():
  '''Simple test with a single triclinic crystal restrained to a target
  unit cell.

  Checks the analytical restraint gradients from RestraintsParameterisation
  against central finite differences over all prediction parameters.
  '''
  from math import pi
  from random import gauss
  from dials.test.algorithms.refinement.setup_geometry import Extract
  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  #### Import model parameterisations
  from dials.algorithms.refinement.parameterisation.prediction_parameters import \
    XYPhiPredictionParameterisation
  from dials.algorithms.refinement.parameterisation.detector_parameters import \
    DetectorParameterisationSinglePanel
  from dials.algorithms.refinement.parameterisation.beam_parameters import \
    BeamParameterisation
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
    CrystalOrientationParameterisation, \
    CrystalUnitCellParameterisation

  overrides = """geometry.parameters.crystal.a.length.range = 10 50
geometry.parameters.crystal.b.length.range = 10 50
geometry.parameters.crystal.c.length.range = 10 50"""

  master_phil = parse("""
include scope dials.test.algorithms.refinement.geometry_phil
""", process_includes=True)

  models = Extract(master_phil, overrides)

  mydetector = models.detector
  mygonio = models.goniometer
  mycrystal = models.crystal
  mybeam = models.beam

  # Build a mock scan for a 72 degree sweep
  sweep_range = (0., pi/5.)
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  myscan = sf.make_scan(image_range = (1,720),
                        exposure_times = 0.1,
                        oscillation = (0, 0.1),
                        epochs = range(720),
                        deg = True)

  # Create parameterisations of these models
  det_param = DetectorParameterisationSinglePanel(mydetector)
  s0_param = BeamParameterisation(mybeam, mygonio)
  xlo_param = CrystalOrientationParameterisation(mycrystal)
  xluc_param = CrystalUnitCellParameterisation(mycrystal)

  # Create an ExperimentList
  experiments = ExperimentList()
  experiments.append(Experiment(
        beam=mybeam, detector=mydetector, goniometer=mygonio,
        scan=myscan, crystal=mycrystal, imageset=None))

  # Build a prediction parameterisation
  pred_param = XYPhiPredictionParameterisation(experiments,
    detector_parameterisations = [det_param],
    beam_parameterisations = [s0_param],
    xl_orientation_parameterisations = [xlo_param],
    xl_unit_cell_parameterisations = [xluc_param])

  # Build a restraints parameterisation
  rp = RestraintsParameterisation(detector_parameterisations = [det_param],
    beam_parameterisations = [s0_param],
    xl_orientation_parameterisations = [xlo_param],
    xl_unit_cell_parameterisations = [xluc_param])

  # make a unit cell target: randomly perturbed copy of the true cell
  sigma = 1.
  uc = mycrystal.get_unit_cell().parameters()
  target_uc = [gauss(e, sigma) for e in uc]

  rp.add_restraints_to_target_xl_unit_cell(experiment_id=0, values=target_uc,
                                           sigma=[sigma]*6)

  # get analytical values and gradients
  vals, grads, weights = rp.get_residuals_gradients_and_weights()

  # get finite difference gradients (central differences, step 1e-7)
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)

  fd_grad=[]

  for i in range(len(deltas)):

    val = p_vals[i]

    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)

    rev_state, foo, bar = rp.get_residuals_gradients_and_weights()
    rev_state = flex.double(rev_state)

    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)

    fwd_state, foo, bar = rp.get_residuals_gradients_and_weights()
    fwd_state = flex.double(fwd_state)

    # restore the original parameter value before moving on
    p_vals[i] = val

    fd = (fwd_state - rev_state) / deltas[i]
    fd_grad.append(fd)

  # for comparison, fd_grad is a list of flex.doubles, each of which corresponds
  # to a column of the sparse matrix grads.
  for i, fd in enumerate(fd_grad):
    # extract dense column from the sparse matrix
    an = grads.col(i).as_dense_vector()

    assert approx_equal(an, fd, eps=1e-5)

  print "OK"
def test1():
  '''Simple test with a single triclinic crystal restrained to a target
  unit cell.

  NOTE(review): this is a reformatted duplicate of the identically named
  test above; only one definition survives at import time.
  '''
  from math import pi
  from random import gauss
  from dials.test.algorithms.refinement.setup_geometry import Extract
  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  #### Import model parameterisations
  from dials.algorithms.refinement.parameterisation.prediction_parameters import \
    XYPhiPredictionParameterisation
  from dials.algorithms.refinement.parameterisation.detector_parameters import \
    DetectorParameterisationSinglePanel
  from dials.algorithms.refinement.parameterisation.beam_parameters import \
    BeamParameterisation
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
    CrystalOrientationParameterisation, \
    CrystalUnitCellParameterisation

  overrides = """geometry.parameters.crystal.a.length.range = 10 50
geometry.parameters.crystal.b.length.range = 10 50
geometry.parameters.crystal.c.length.range = 10 50"""

  master_phil = parse("""
include scope dials.test.algorithms.refinement.geometry_phil
""", process_includes=True)

  models = Extract(master_phil, overrides)

  mydetector = models.detector
  mygonio = models.goniometer
  mycrystal = models.crystal
  mybeam = models.beam

  # Build a mock scan for a 72 degree sweep
  sweep_range = (0., pi / 5.)
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  myscan = sf.make_scan(image_range=(1, 720),
                        exposure_times=0.1,
                        oscillation=(0, 0.1),
                        epochs=range(720),
                        deg=True)

  # Create parameterisations of these models
  det_param = DetectorParameterisationSinglePanel(mydetector)
  s0_param = BeamParameterisation(mybeam, mygonio)
  xlo_param = CrystalOrientationParameterisation(mycrystal)
  xluc_param = CrystalUnitCellParameterisation(mycrystal)

  # Create an ExperimentList
  experiments = ExperimentList()
  experiments.append(
      Experiment(beam=mybeam,
                 detector=mydetector,
                 goniometer=mygonio,
                 scan=myscan,
                 crystal=mycrystal,
                 imageset=None))

  # Build a prediction parameterisation
  pred_param = XYPhiPredictionParameterisation(
      experiments,
      detector_parameterisations=[det_param],
      beam_parameterisations=[s0_param],
      xl_orientation_parameterisations=[xlo_param],
      xl_unit_cell_parameterisations=[xluc_param])

  # Build a restraints parameterisation
  rp = RestraintsParameterisation(
      detector_parameterisations=[det_param],
      beam_parameterisations=[s0_param],
      xl_orientation_parameterisations=[xlo_param],
      xl_unit_cell_parameterisations=[xluc_param])

  # make a unit cell target: randomly perturbed copy of the true cell
  sigma = 1.
  uc = mycrystal.get_unit_cell().parameters()
  target_uc = [gauss(e, sigma) for e in uc]

  rp.add_restraints_to_target_xl_unit_cell(experiment_id=0,
                                           values=target_uc,
                                           sigma=[sigma] * 6)

  # get analytical values and gradients
  vals, grads, weights = rp.get_residuals_gradients_and_weights()

  # get finite difference gradients (central differences, step 1e-7)
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)

  fd_grad = []

  for i in range(len(deltas)):

    val = p_vals[i]

    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)

    rev_state, foo, bar = rp.get_residuals_gradients_and_weights()
    rev_state = flex.double(rev_state)

    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)

    fwd_state, foo, bar = rp.get_residuals_gradients_and_weights()
    fwd_state = flex.double(fwd_state)

    # restore the original parameter value before moving on
    p_vals[i] = val

    fd = (fwd_state - rev_state) / deltas[i]
    fd_grad.append(fd)

  # for comparison, fd_grad is a list of flex.doubles, each of which corresponds
  # to a column of the sparse matrix grads.
  for i, fd in enumerate(fd_grad):
    # extract dense column from the sparse matrix
    an = grads.col(i).as_dense_vector()

    assert approx_equal(an, fd, eps=1e-5)

  print "OK"
def __call__(self, experiments, reflections):
  '''Refine each experiment's crystal independently, distributed over MPI.

  Rank 0 partitions the experiments (and their reflections, matched by the
  "id" column) into one chunk per rank; each rank refines its chunk and the
  refined experiments are gathered back on rank 0.

  Returns the updated experiments on rank 0, None on other ranks.
  '''
  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  params = self.working_phil.extract()

  if rank == 0:
    data = []
    size = comm.Get_size()
    # Divide as evenly as possible: the first `remainder` ranks get one
    # extra experiment each.
    chunk_size = len(experiments) // size
    remainder = len(experiments) % size
    pointer = 0
    self.working_phil.show()
    for i in xrange(size):
      if i < remainder:
        this_size = chunk_size + 1
      else:
        this_size = chunk_size
      sel_range = xrange(pointer, pointer + this_size)
      # select the reflections belonging to this rank's experiments
      sel = flex.bool(len(reflections))
      for exp_id in sel_range:
        sel |= reflections["id"] == exp_id
      data.append((
          range(pointer, pointer + this_size),
          experiments[pointer:pointer + this_size],
          reflections.select(sel),
      ))
      # FIX: previously advanced by 1 for remainder chunks, which made
      # successive chunks overlap; advance by the full chunk length.
      pointer += this_size
  else:
    data = None

  data = comm.scatter(data, root=0)

  for i, (iexp, exp) in enumerate(zip(data[0], data[1])):
    print("Refining crystal", iexp)
    # reflection subset for a single experiment
    refs = data[2].select(data[2]["id"] == iexp)
    refs["id"] = flex.size_t(len(refs), 0)

    # experiment list for a single experiment
    exps = ExperimentList()
    exps.append(exp)
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, refs, exps, verbosity=1)
    # do refinement
    refiner.run()
    refined_exps = refiner.get_experiments()
    # replace this experiment with the refined one
    data[1][i] = refined_exps[0]

  data = comm.gather(data, root=0)
  if rank == 0:
    for chunk in data:
      for iexp, experiment in zip(chunk[0], chunk[1]):
        experiments[iexp] = experiment
    return experiments
  else:
    # gather returns data only on the root rank
    assert data is None
def run(self):
    """Run the multi-crystal detector/crystal refinement macrocycles.

    Parses the command-line input, chooses or averages a reference
    detector, combines each input crystal with the reference beam and
    detector, then alternates detector refinement (phase 1) and
    per-crystal refinement (phase 2) for params.n_macrocycles cycles
    before writing the refined experiments (and optionally the
    reflections) to disk.
    """
    print "Parsing input"
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Configure the logging
    log.config(params.detector_phase.refinement.verbosity,
        info='dials.refine.log', debug='dials.refine.debug.log')

    # Try to obtain the models and data
    if not params.input.experiments:
        raise Sorry("No Experiments found in the input")
    if not params.input.reflections:
        raise Sorry("No reflection data found in the input")
    try:
        assert len(params.input.reflections) == len(params.input.experiments)
    except AssertionError:
        raise Sorry("The number of input reflections files does not match the "
            "number of input experiments")

    # set up global experiments and reflections lists
    from dials.array_family import flex
    reflections = flex.reflection_table()
    # NOTE(review): global_id appears unused in this method
    global_id = 0
    from dxtbx.model.experiment.experiment_list import ExperimentList
    experiments=ExperimentList()

    if params.reference_detector == "first":
        # Use the first experiment of the first experiment list as the
        # reference detector
        ref_exp = params.input.experiments[0].data[0]
    else:
        # Average all the detectors to generate a reference detector.
        # Only supported for a flat (non-hierarchical) detector model.
        assert params.detector_phase.refinement.parameterisation.detector.hierarchy_level == 0
        from scitbx.matrix import col
        panel_fasts = []
        panel_slows = []
        panel_oris = []
        for exp_wrapper in params.input.experiments:
            exp = exp_wrapper.data[0]
            if panel_oris:
                # Accumulate per-panel axis/origin sums across experiments
                for i, panel in enumerate(exp.detector):
                    panel_fasts[i] += col(panel.get_fast_axis())
                    panel_slows[i] += col(panel.get_slow_axis())
                    panel_oris[i] += col(panel.get_origin())
            else:
                # First experiment: initialise the accumulators
                for i, panel in enumerate(exp.detector):
                    panel_fasts.append(col(panel.get_fast_axis()))
                    panel_slows.append(col(panel.get_slow_axis()))
                    panel_oris.append(col(panel.get_origin()))

        ref_exp = copy.deepcopy(params.input.experiments[0].data[0])
        for i, panel in enumerate(ref_exp.detector):
            # Averaging the fast and slow axes can make them be
            # non-orthogonal. Fix by finding the vector that goes exactly
            # between them and rotate around their cross product 45 degrees
            # from that vector in either direction
            vf = panel_fasts[i]/len(params.input.experiments)
            vs = panel_slows[i]/len(params.input.experiments)
            c = vf.cross(vs)
            angle = vf.angle(vs, deg=True)
            v45 = vf.rotate(c, angle/2, deg=True)
            vf = v45.rotate(c, -45, deg=True)
            vs = v45.rotate(c, 45, deg=True)
            panel.set_frame(vf, vs, panel_oris[i]/len(params.input.experiments))

        print "Reference detector (averaged):", str(ref_exp.detector)

    # set the experiment factory that combines a crystal with the reference
    # beam and the reference detector
    experiment_from_crystal=ExperimentFromCrystal(ref_exp.beam, ref_exp.detector)

    # keep track of the number of refl per accepted experiment for a table
    nrefs_per_exp = []

    # loop through the input, building up the global lists
    for ref_wrapper, exp_wrapper in zip(params.input.reflections,
        params.input.experiments):
        refs = ref_wrapper.data
        exps = exp_wrapper.data

        # there might be multiple experiments already here. Loop through them
        for i, exp in enumerate(exps):
            # select the relevant reflections
            sel = refs['id'] == i
            sub_ref = refs.select(sel)

            ## DGW commented out as reflections.minimum_number_of_reflections no longer exists
            #if len(sub_ref) < params.crystals_phase.refinement.reflections.minimum_number_of_reflections:
            #  print "skipping experiment", i, "in", exp_wrapper.filename, "due to insufficient strong reflections in", ref_wrapper.filename
            #  continue

            # build an experiment with this crystal plus the reference models
            combined_exp = experiment_from_crystal(exp.crystal)

            # next experiment ID in series
            exp_id = len(experiments)

            # check this experiment: reject it if the RMSDs against the
            # reference geometry are too poor
            if not check_experiment(combined_exp, sub_ref):
                print "skipping experiment", i, "in", exp_wrapper.filename, "due to poor RMSDs"
                continue

            # set reflections ID
            sub_ref['id'] = flex.int(len(sub_ref), exp_id)

            # keep number of reflections for the table
            nrefs_per_exp.append(len(sub_ref))

            # obtain mm positions on the reference detector
            sub_ref = indexer_base.map_spots_pixel_to_mm_rad(sub_ref,
                combined_exp.detector, combined_exp.scan)

            # extend refl and experiments lists
            reflections.extend(sub_ref)
            experiments.append(combined_exp)

    # print number of reflections per accepted experiment
    from libtbx.table_utils import simple_table
    header = ["Experiment", "Nref"]
    rows = [(str(i), str(n)) for (i, n) in enumerate(nrefs_per_exp)]
    st = simple_table(rows, header)
    print "Number of reflections per experiment"
    print st.format()

    for cycle in range(params.n_macrocycles):

        print "MACROCYCLE %02d" % (cycle + 1)
        print "=============\n"
        # first run: multi experiment joint refinement of detector with fixed
        # beam and crystals
        print "PHASE 1"

        # SET THIS TEST TO FALSE TO REFINE WHOLE DETECTOR AS SINGLE JOB
        if params.detector_phase.refinement.parameterisation.detector.hierarchy_level > 0:
            experiments = detector_parallel_refiners(params.detector_phase, experiments, reflections)
        else:
            experiments = detector_refiner(params.detector_phase, experiments, reflections)

        # second run: refine each crystal against the now-refined detector
        print "PHASE 2"
        experiments = crystals_refiner(params.crystals_phase, experiments, reflections)

    # Save the refined experiments to file
    output_experiments_filename = params.output.experiments_filename
    print 'Saving refined experiments to {0}'.format(output_experiments_filename)
    from dxtbx.model.experiment.experiment_list import ExperimentListDumper
    dump = ExperimentListDumper(experiments)
    dump.as_json(output_experiments_filename)

    # Write out refined reflections, if requested
    if params.output.reflections_filename:
        print 'Saving refined reflections to {0}'.format(
            params.output.reflections_filename)
        reflections.as_pickle(params.output.reflections_filename)

    return