def run(args):
    assert len(args) == 2

    expts = load.experiment_list(args[0], check_format=False)
    refl = flex.reflection_table.from_file(args[1])
    params = phil_scope.fetch(source=phil.parse("")).extract()
    params.refinement.verbosity = 12
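    # request maximum verbosity from the refiner; with no logging configured yet, nothing is printed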

    # no output by default
    print("Starting refinement #1")
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, refl, expts)
    refiner.run()
    print("Finished refinement #1")

    # configure logging
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(levelname)s %(message)s")

    # we should now get logging output from refinement
    print("Starting refinement #2")
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, refl, expts)
    refiner.run()
    print("Finished refinement #2")

    # switch off logging for dials.algorithms.refinement
    logging.getLogger("dials.algorithms.refinement").setLevel(logging.ERROR)
    print("Starting refinement #3")
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, refl, expts)
    refiner.run()
    print("Finished refinement #3")
Example #2
def run(args):
  from dials.util import log
  import libtbx.load_env
  usage = "%s experiments.json indexed.pickle [options]" % libtbx.env.dispatcher_name

  from dials.util.options import OptionParser
  from dials.util.options import flatten_reflections
  from dials.util.options import flatten_experiments
  from dials.array_family import flex

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_experiments=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=False)

  # Configure the logging
  #log.config(info=params.output.log, debug=params.output.debug_log)

  from dials.util.version import dials_version
  logger.info(dials_version())

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    logger.info('The following parameters have been modified:\n')
    logger.info(diff_phil)

  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)
  assert(len(reflections) == 1)
  reflections = reflections[0]

  if len(experiments) == 0:
    parser.print_help()
    return

  #from dials.command_line import refine
  #params = refine.phil_scope.extract()
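  # keep only reflections assigned to a lattice during indexing (id == -1 marks unindexed spots)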
  indexed_reflections = reflections.select(reflections['id'] > -1)
  from dials.algorithms.refinement import RefinerFactory
  refiner = RefinerFactory.from_parameters_data_experiments(
    params, indexed_reflections, experiments)
  #refiner.run()
  rmsds = refiner.rmsds()
  import math
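  # rmsds[0] and rmsds[1] are the positional components; combine them into a single in-plane RMSD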
  xy_rmsds = math.sqrt(rmsds[0]**2 + rmsds[1]**2)

  print(rmsds)

  return
Example #3
  def __call__(self, experiments, reflections):

    self.working_phil.show()
    params = self.working_phil.extract()
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, reflections, experiments, verbosity=2)
    refiner.run()
    experiments = refiner.get_experiments()

    return experiments
Example #4
    def __call__(self, experiments, reflections):

        self.working_phil.show()
        params = self.working_phil.extract()
        refiner = RefinerFactory.from_parameters_data_experiments(
            params, reflections, experiments, verbosity=2
        )
        refiner.run()
        experiments = refiner.get_experiments()

        return experiments
Example #5
def refine(params, reflections, experiments, verbosity=0, debug_plots=False):
    detector = experiments.detectors()[0]

    from dials.algorithms.refinement import RefinerFactory

    refiner = RefinerFactory.from_parameters_data_experiments(params, reflections, experiments, verbosity=verbosity)

    outliers = None
    refined = refiner.run()
    if debug_plots:
        debug_plot_residuals(refiner)
    return refiner, refined, outliers
Example #6
def refine(params, reflections, experiments, verbosity=0, debug_plots=False):
    detector = experiments.detectors()[0]

    from dials.algorithms.refinement import RefinerFactory
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, reflections, experiments, verbosity=verbosity)

    outliers = None
    refined = refiner.run()
    if debug_plots:
        debug_plot_residuals(refiner)
    return refiner, refined, outliers
Example #7
def get_rmsds_obs_pred(observations, experiment):
  from dials.algorithms.spot_prediction import ray_intersection
  from dials.algorithms.indexing.indexer import master_params
  from dials.algorithms.refinement import RefinerFactory
  from dxtbx.model.experiment.experiment_list import ExperimentList
  master_params.refinement.reflections.close_to_spindle_cutoff = 0.001
  from dials.model.data import ReflectionList
  ref_list = ReflectionList.from_table(observations)
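  # intersect the predicted rays with the detector to obtain calculated (xyzcal) centroid positions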
  ref_list = ray_intersection(experiment.detector, ref_list)
  ref_table = ref_list.to_table()
  import copy
  reflections = copy.deepcopy(observations)
  reflections['xyzcal.mm'] = ref_table['xyzcal.mm']
  reflections['xyzcal.px'] = ref_table['xyzcal.px']

  # XXX hack to make it work for a single lattice
  reflections['id'] = flex.int(len(reflections), 0)
  refine = RefinerFactory.from_parameters_data_experiments(
    master_params, reflections, ExperimentList([experiment]), verbosity=0)
  return refine.rmsds()
Example #8
  def _setup_perturbation(self):
    '''Obtain a PredictionParameterisation object, get the values of the
    parameters and their units'''

    self._refiner = RefinerFactory.from_parameters_data_experiments(
      self.params, self.dummy_reflections, self.original_experiments)

    pr = self._refiner.get_param_reporter()
    units = [p.param_type for p in pr.get_params()]
    self._pp = self._refiner._pred_param
    self._old_vals = self._pp.get_param_vals()

    # construct list of sigmas according to parameter units
    self._sigmas = []
    for (u, val) in zip(units, self._old_vals):
      if '(mm)' in str(u):
        self._sigmas.append(self._sig_mm)
      elif '(mrad)' in str(u):
        self._sigmas.append(self._sig_mrad)
      else: # no recognised unit
        self._sigmas.append(self._frac_sig_unitless * val)
    return
Example #9
  def __call__(self, experiments, reflections):

    self.working_phil.show()
    params = self.working_phil.extract()

    for iexp, exp in enumerate(experiments):

      print "Refining crystal", iexp
      # reflection subset for a single experiment
      refs = reflections.select(reflections['id'] == iexp)
      refs['id'] = flex.size_t(len(refs),0)
      # experiment list for a single experiment
      exps=ExperimentList()
      exps.append(exp)
      refiner = RefinerFactory.from_parameters_data_experiments(
        params, refs, exps, verbosity=1)
      # do refinement
      refiner.run()
      refined_exps = refiner.get_experiments()
      # replace this experiment with the refined one
      experiments[iexp] = refined_exps[0]

    return experiments
Example #10
    def __call__(self, experiments, reflections):

        self.working_phil.show()
        params = self.working_phil.extract()

        for iexp, exp in enumerate(experiments):

            print("Refining crystal", iexp)
            # reflection subset for a single experiment
            refs = reflections.select(reflections["id"] == iexp)
            refs["id"] = flex.size_t(len(refs), 0)
            # experiment list for a single experiment
            exps = ExperimentList()
            exps.append(exp)
            refiner = RefinerFactory.from_parameters_data_experiments(
                params, refs, exps, verbosity=1
            )
            # do refinement
            refiner.run()
            refined_exps = refiner.get_experiments()
            # replace this experiment with the refined one
            experiments[iexp] = refined_exps[0]

        return experiments
Example #11
  def do_work(item):
    iexp, exp = item

    print "Refining crystal", iexp
    # reflection subset for a single experiment
    refs = reflections.select(reflections['id'] == iexp)
    refs['id'] = flex.int(len(refs),0)

    # DGW commented out as reflections.minimum_number_of_reflections no longer exists
    #if len(refs) < params.refinement.reflections.minimum_number_of_reflections:
    #  print "Not enough reflections to refine experiment"
    #  return

    # experiment list for a single experiment
    exps=ExperimentList()
    exps.append(exp)
    try:
      refiner = RefinerFactory.from_parameters_data_experiments(
        params, refs, exps)
      # do refinement
      refiner.run()
    except Exception as e:
      print("Error,", str(e))
      return
Example #12
def run():
  parser = OptionParser(
    phil = phil_scope)

  params, options = parser.parse_args(show_diff_phil=True)
  assert params.input.single_img is not None
  assert params.output_dir is not None

  # load the image
  img = dxtbx.load(params.input.single_img)
  imgset = MemImageSet([img])
  datablock = DataBlockFactory.from_imageset(imgset)[0]

  spotfinder = SpotFinderFactory.from_parameters(params)
  reflections = spotfinder(datablock)

  base_name = os.path.splitext(params.input.single_img)[0]
  reflections.as_pickle(os.path.join(params.output_dir, base_name + "_strong.pickle"))

  # DGW commented out as reflections.minimum_number_of_reflections no longer exists
  #if len(reflections) < params.refinement.reflections.minimum_number_of_reflections:
  #  print "Not enough spots to index"
  #  return

  # create the spot finder

  print "Spotfinder spots found:", len(reflections)

  if params.indexing.method == "fft3d":
    from dials.algorithms.indexing.fft3d import indexer_fft3d as indexer
  elif params.indexing.method == "fft1d":
    from dials.algorithms.indexing.fft1d import indexer_fft1d as indexer
  elif params.indexing.method == "real_space_grid_search":
    from dials.algorithms.indexing.real_space_grid_search \
         import indexer_real_space_grid_search as indexer
  try:
    idxr = indexer(reflections, [imgset], params=params.indexing)
  except (RuntimeError, Sorry) as e:
    print(str(e))
    return

  indexed = idxr.refined_reflections
  experiments = idxr.refined_experiments
  #from dxtbx.model.experiment.experiment_list import ExperimentListDumper
  #dump = ExperimentListDumper(experiments)
  #dump.as_json(os.path.join(params.output_dir, base_name + "_experiments.json"))
  indexed.as_pickle(os.path.join(params.output_dir, base_name + "_indexed.pickle"))

  refiner = RefinerFactory.from_parameters_data_experiments(
    params, indexed, experiments)

  refiner.run()
  refined_experiments = refiner.get_experiments()
  #dump = ExperimentListDumper(refined_experiments)
  #dump.as_json(os.path.join(params.output_dir, base_name + "_refined.json"))

  # Compute the profile model
  # Predict the reflections
  # Match the predictions with the reference
  # Create the integrator
  reference = indexed

  reference = process_reference(reference)
  profile_model = ProfileModelFactory.create(params, refined_experiments, reference)
  predicted = flex.reflection_table.from_predictions_multi(
    refined_experiments,
    dmin=params.prediction.dmin,
    dmax=params.prediction.dmax,
    margin=params.prediction.margin,
    force_static=params.prediction.force_static)
  predicted.match_with_reference(reference)
  integrator = IntegratorFactory.create(params, experiments, profile_model, predicted)

  # Integrate the reflections
  integrated = integrator.integrate()
  integrated.as_pickle(os.path.join(params.output_dir, base_name + "_integrated.pickle"))
Example #13
  def __call__(self, experiments, reflections):
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    params = self.working_phil.extract()

    if rank == 0:
      data = []
      size = comm.Get_size()
      chunk_size = len(experiments) // size
      remainder = len(experiments) % size
      pointer = 0

      self.working_phil.show()

      # split the experiment indices into near-equal chunks, one per rank;
      # ranks with index < remainder each receive one extra experiment
      for i in range(size):
        if i < remainder:
          sel_range = range(pointer, pointer + chunk_size + 1)
        else:
          sel_range = range(pointer, pointer + chunk_size)

        sel = flex.bool(len(reflections))
        for exp_id in sel_range:
          sel |= reflections['id'] == exp_id

        if i < remainder:
          data.append((range(pointer,pointer+chunk_size+1),experiments[pointer:pointer+chunk_size+1],reflections.select(sel)))
          pointer += 1
        else:
          data.append((range(pointer,pointer+chunk_size),experiments[pointer:pointer+chunk_size],reflections.select(sel)))
        pointer += chunk_size

    else:
      data = None

    data = comm.scatter(data, root=0)
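    # after the scatter, each rank holds its own (experiment indices, experiments, reflections) chunk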

    for i, (iexp, exp) in enumerate(zip(data[0],data[1])):

      print "Refining crystal", iexp
      # reflection subset for a single experiment
      refs = data[2].select(data[2]['id'] == iexp)
      refs['id'] = flex.size_t(len(refs),0)
      # experiment list for a single experiment
      exps=ExperimentList()
      exps.append(exp)
      refiner = RefinerFactory.from_parameters_data_experiments(
        params, refs, exps, verbosity=1)
      # do refinement
      refiner.run()
      refined_exps = refiner.get_experiments()
      # replace this experiment with the refined one
      data[1][i] = refined_exps[0]

    data = comm.gather(data, root=0)
    if rank == 0:
      for chunk in data:
        for iexp, experiment in zip(chunk[0], chunk[1]):
          experiments[iexp] = experiment

      return experiments
    else:
      assert data is None
Example #14
  def run(self):
    '''Execute the script.'''
    from time import time
    import pickle
    from logging import info
    from dials.util import log
    from dials.algorithms.refinement import RefinerFactory
    from dials.util.options import flatten_reflections, flatten_experiments

    start_time = time()

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)
    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)

    # Try to load the models and data
    nexp = len(experiments)
    if nexp == 0:
      print "No Experiments found in the input"
      self.parser.print_help()
      return
    if len(reflections) == 0:
      print "No reflection data found in the input"
      self.parser.print_help()
      return
    if len(reflections) > 1:
      raise Sorry("Only one reflections list can be imported at present")
    reflections = reflections[0]

    self.check_input(reflections)

    # Configure the logging
    log.config(info=params.output.log,
      debug=params.output.debug_log)
    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      info('The following parameters have been modified:\n')
      info(diff_phil)

    # Modify options if necessary
    if params.output.correlation_plot.filename is not None:
      params.refinement.refinery.track_parameter_correlation = True

    # Get the refiner
    info('Configuring refiner')
    refiner = RefinerFactory.from_parameters_data_experiments(params,
        reflections, experiments)

    # Refine the geometry
    if nexp == 1:
      info('Performing refinement of a single Experiment...')
    else:
      info('Performing refinement of {0} Experiments...'.format(nexp))

    # Refine and get the refinement history
    history = refiner.run()

    if params.output.centroids:
      info("Writing table of centroids to '{0}'".format(
        params.output.centroids))
      self.write_centroids_table(refiner, params.output.centroids)

    # Write scan-varying parameters to file, if there were any
    if params.output.parameter_table:
      scan = refiner.get_scan()
      if scan:
        text = refiner.get_param_reporter().varying_params_vs_image_number(
            scan.get_array_range())
        if text:
          info("Writing scan-varying parameter table to {0}".format(
            params.output.parameter_table))
          f = open(params.output.parameter_table,"w")
          f.write(text)
          f.close()
        else:
          info("No scan-varying parameter table to write")

    # get the refined experiments
    experiments = refiner.get_experiments()
    crystals = experiments.crystals()

    if len(crystals) == 1:
      # output the refined model for information
      info('')
      info('Final refined crystal model:')
      info(crystals[0])

    # Save the refined experiments to file
    output_experiments_filename = params.output.experiments
    info('Saving refined experiments to {0}'.format(output_experiments_filename))
    from dxtbx.model.experiment.experiment_list import ExperimentListDumper
    dump = ExperimentListDumper(experiments)
    dump.as_json(output_experiments_filename)

    # Save reflections with updated predictions if requested (allow to switch
    # this off if it is a time-consuming step)
    if params.output.reflections:
      # Update predictions for all indexed reflections
      info('Updating predictions for indexed reflections')
      preds = refiner.predict_for_indexed()

      # just copy over the columns of interest, leaving behind things
      # added by e.g. scan-varying refinement such as 'block', the
      # U, B and UB matrices and gradients.
      reflections['s1'] = preds['s1']
      reflections['xyzcal.mm'] = preds['xyzcal.mm']
      reflections['xyzcal.px'] = preds['xyzcal.px']
      if 'entering' in preds:
        reflections['entering'] = preds['entering']

      # set used_in_refinement and centroid_outlier flags
      assert len(preds) == len(reflections)
      reflections.unset_flags(flex.size_t_range(len(reflections)),
        reflections.flags.used_in_refinement | reflections.flags.centroid_outlier)
      mask = preds.get_flags(preds.flags.centroid_outlier)
      reflections.set_flags(mask, reflections.flags.centroid_outlier)
      mask = preds.get_flags(preds.flags.used_in_refinement)
      reflections.set_flags(mask, reflections.flags.used_in_refinement)

      info('Saving reflections with updated predictions to {0}'.format(
        params.output.reflections))
      if params.output.include_unused_reflections:
        reflections.as_pickle(params.output.reflections)
      else:
        sel = reflections.get_flags(reflections.flags.used_in_refinement)
        reflections.select(sel).as_pickle(params.output.reflections)

    # For debugging, if requested save matches to file
    if params.output.matches:
      matches = refiner.get_matches()
      info('Saving matches (use for debugging purposes) to {0}'.format(
        params.output.matches))
      matches.as_pickle(params.output.matches)

    # Correlation plot
    if params.output.correlation_plot.filename is not None:
      from os.path import splitext
      root, ext = splitext(params.output.correlation_plot.filename)
      if not ext: ext = ".pdf"

      steps = params.output.correlation_plot.steps
      if steps is None: steps = [history.get_nrows()-1]

      # extract individual column names or indices
      col_select = params.output.correlation_plot.col_select

      num_plots = 0
      for step in steps:
        fname_base = root + "_step%02d" % step
        plot_fname = fname_base + ext
        corrmat, labels = refiner.get_parameter_correlation_matrix(step, col_select)
        if [corrmat, labels].count(None) == 0:
          from dials.algorithms.refinement.refinement_helpers import corrgram
          plt = corrgram(corrmat, labels)
          if plt is not None:
            info('Saving parameter correlation plot to {}'.format(plot_fname))
            plt.savefig(plot_fname)
            num_plots += 1
          mat_fname = fname_base + ".pickle"
          with open(mat_fname, 'wb') as handle:
            py_mat = corrmat.as_scitbx_matrix() #convert to pickle-friendly form
            info('Saving parameter correlation matrix to {0}'.format(mat_fname))
            pickle.dump({'corrmat':py_mat, 'labels':labels}, handle)

      if num_plots == 0:
        msg = "Sorry, no parameter correlation plots were produced. Please set " \
              "track_parameter_correlation=True to ensure correlations are " \
              "tracked, and make sure correlation_plot.col_select is valid."
        info(msg)

    # Write out refinement history, if requested
    if params.output.history:
      with open(params.output.history, 'wb') as handle:
        info('Saving refinement step history to {0}'.format(
          params.output.history))
        pickle.dump(history, handle)

    # Log the total time taken
    info("\nTotal time taken: {0:.2f}s".format(time() - start_time))

    return
Example #15
    beam.fix=all
    detector.hierarchy_level=1
    sparse=True
  }
  target.gradient_calculation_blocksize=10000
  reflections.outlier.algorithm=tukey
  refinery.engine=LBFGScurvs
}""")
from dials.data.refinement import phil_scope as master_phil
working_phil = master_phil.fetch(
  sources=[user_phil])
working_phil.show()
params = working_phil.extract()
from dials.algorithms.refinement import RefinerFactory
refiner = RefinerFactory.from_parameters_data_experiments(
    params, reflections, experiments,
    verbosity=2)
refiner.run()

# save the refined experiments
from dxtbx.model.experiment.experiment_list import ExperimentListDumper
experiments = refiner.get_experiments()
dump = ExperimentListDumper(experiments)
experiments_filename = "refined_experiments.json"
dump.as_json(experiments_filename)
print "refined geometry written to {0}".format(experiments_filename)

# save reflections used in refinement
matches = refiner.get_matches()
reflections_filename = "refined_reflections.json"
matches.as_pickle(reflections_filename)
Example #16
def test4():
  '''Test group restraint with multiple crystals, and a stills refiner'''

  if not libtbx.env.has_module("dials_regression"):
    print "Skipping test2 in " + __file__ + " as dials_regression not present"
    return

  # The phil scope
  from dials.algorithms.refinement.refiner import phil_scope
  user_phil = parse('''
  refinement
  {
    parameterisation
    {
      crystal
      {
        unit_cell
        {
          restraints
          {
            tie_to_group
            {
              sigmas=1,0,2,0,0,0
              apply_to_all=true
            }
          }
        }
      }
    }
  }
  ''')

  working_phil = phil_scope.fetch(source=user_phil)
  working_params = working_phil.extract()

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use the multi stills test data
  data_dir = os.path.join(dials_regression, "refinement_test_data", "multi_stills")
  experiments_path = os.path.join(data_dir, "combined_experiments.json")
  pickle_path = os.path.join(data_dir, "combined_reflections.pickle")

  experiments = ExperimentListFactory.from_json_file(experiments_path,
                check_format=False)
  reflections = flex.reflection_table.from_pickle(pickle_path)

  refiner = RefinerFactory.from_parameters_data_experiments(working_params,
        reflections, experiments)

  # hack to extract the objects needed from the Refiner
  rp = refiner._target._restraints_parameterisation
  pred_param = refiner._pred_param

  # get analytical values and gradients
  vals, grads, weights = rp.get_residuals_gradients_and_weights()

  # get finite difference gradients
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)

  fd_grad=[]
  for i in range(len(deltas)):

    val = p_vals[i]

    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)

    rev_state, foo, bar = rp.get_residuals_gradients_and_weights()
    rev_state = flex.double(rev_state)

    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)

    fwd_state, foo, bar = rp.get_residuals_gradients_and_weights()
    fwd_state = flex.double(fwd_state)

    p_vals[i] = val

    fd = (fwd_state - rev_state) / deltas[i]
    fd_grad.append(fd)

  # for comparison, fd_grad is a list of flex.doubles, each of which corresponds
  # to the gradients of the residuals wrt to a single parameter.
  pnames = pred_param.get_param_names()
  for i, (pname, fd) in enumerate(zip(pnames, fd_grad)):
    # extract dense column from the sparse matrix
    an = grads.col(i).as_dense_vector()

    #print pname
    #print list(an.round(6))
    #print list(fd.round(6))
    #print
    assert approx_equal(an, fd, eps=1e-5)

  print "OK"
  return
Example #17
def run():
    parser = OptionParser(phil=phil_scope)

    params, options = parser.parse_args(show_diff_phil=True)
    assert params.input.single_img is not None
    assert params.output_dir is not None

    # load the image
    img = dxtbx.load(params.input.single_img)
    imgset = MemImageSet([img])
    datablock = DataBlockFactory.from_imageset(imgset)[0]

    spotfinder = SpotFinderFactory.from_parameters(params)
    reflections = spotfinder(datablock)

    base_name = os.path.splitext(params.input.single_img)[0]
    reflections.as_pickle(
        os.path.join(params.output_dir, base_name + "_strong.pickle"))

    # DGW commented out as reflections.minimum_number_of_reflections no longer exists
    # if len(reflections) < params.refinement.reflections.minimum_number_of_reflections:
    #  print "Not enough spots to index"
    #  return

    # create the spot finder

    print("Spotfinder spots found:", len(reflections))

    if params.indexing.method == "fft3d":
        from dials.algorithms.indexing.fft3d import indexer_fft3d as indexer
    elif params.indexing.method == "fft1d":
        from dials.algorithms.indexing.fft1d import indexer_fft1d as indexer
    elif params.indexing.method == "real_space_grid_search":
        from dials.algorithms.indexing.real_space_grid_search import (
            indexer_real_space_grid_search as indexer, )
    try:
        idxr = indexer(reflections, [imgset], params=params.indexing)
    except (RuntimeError, Sorry) as e:
        print(str(e))
        return

    indexed = idxr.refined_reflections
    experiments = idxr.refined_experiments
    # from dxtbx.model.experiment.experiment_list import ExperimentListDumper
    # dump = ExperimentListDumper(experiments)
    # dump.as_json(os.path.join(params.output_dir, base_name + "_experiments.json"))
    indexed.as_pickle(
        os.path.join(params.output_dir, base_name + "_indexed.pickle"))

    refiner = RefinerFactory.from_parameters_data_experiments(
        params, indexed, experiments)

    refiner.run()
    refined_experiments = refiner.get_experiments()
    # dump = ExperimentListDumper(refined_experiments)
    # dump.as_json(os.path.join(params.output_dir, base_name + "_refined.json"))

    # Compute the profile model
    # Predict the reflections
    # Match the predictions with the reference
    # Create the integrator
    reference = indexed

    reference = process_reference(reference)
    profile_model = ProfileModelFactory.create(params, refined_experiments,
                                               reference)
    predicted = flex.reflection_table.from_predictions_multi(
        refined_experiments,
        dmin=params.prediction.dmin,
        dmax=params.prediction.dmax,
        margin=params.prediction.margin,
        force_static=params.prediction.force_static,
    )
    predicted.match_with_reference(reference)
    integrator = IntegratorFactory.create(params, experiments, profile_model,
                                          predicted)

    # Integrate the reflections
    integrated = integrator.integrate()
    integrated.as_pickle(
        os.path.join(params.output_dir, base_name + "_integrated.pickle"))