Example #1
def do_import(filename):
    logger.info("Loading %s" % os.path.basename(filename))
    datablocks = DataBlockFactory.from_filenames([filename])
    if len(datablocks) == 0:
        try:
            datablocks = DataBlockFactory.from_json_file(filename)
        except ValueError:
            raise Abort("Could not load %s" % filename)

    if len(datablocks) == 0:
        raise Abort("Could not load %s" % filename)
    if len(datablocks) > 1:
        raise Abort("Got multiple datablocks from file %s" % filename)

    # Ensure the indexer and downstream applications treat this as a set of stills
    reset_sets = []

    from dxtbx.imageset import ImageSetFactory
    for imageset in datablocks[0].extract_imagesets():
        imageset = ImageSetFactory.imageset_from_anyset(imageset)
        imageset.set_scan(None)
        imageset.set_goniometer(None)
        reset_sets.append(imageset)

    return DataBlockFactory.from_imageset(reset_sets)[0]
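
A minimal usage sketch for the helper above (the CBF path is hypothetical, and the imports used by do_import are assumed to be in scope):

# Illustrative only: import a single still image and check the reset models.
stills_block = do_import("/path/to/still_0001.cbf")
for iset in stills_block.extract_imagesets():
    # do_import cleared these so downstream tools treat the data as stills
    assert iset.get_scan() is None
    assert iset.get_goniometer() is None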
def test_cbf_writer(image_file, dials_regression, run_in_tmpdir):
    filename = os.path.join(dials_regression, image_file)
    datablock = DataBlockFactory.from_filenames([filename])[0]
    imageset = datablock.extract_imagesets()[0]

    FormatCBFMini.as_file(
        imageset.get_detector(),
        imageset.get_beam(),
        imageset.get_goniometer(),
        imageset.get_scan(),
        imageset.get_raw_data(0)[0],
        "image_0001.cbf",
    )

    assert datablock.format_class()

    datablock2 = DataBlockFactory.from_filenames(["image_0001.cbf"])[0]
    imageset2 = datablock2.extract_imagesets()[0]

    tolerance = tolerance_phil_scope.extract().tolerance

    diff = SweepDiff(tolerance)
    print("\n".join(diff(imageset, imageset2)))

    assert BeamComparison()(imageset.get_beam(), imageset2.get_beam())
    assert DetectorComparison(origin_tolerance=tolerance.detector.origin)(
        imageset.get_detector(), imageset2.get_detector())
    assert GoniometerComparison()(imageset.get_goniometer(),
                                  imageset2.get_goniometer())
    s1 = imageset.get_scan()
    s2 = imageset2.get_scan()
    assert s1.get_exposure_times() == s2.get_exposure_times()
    assert s1.get_oscillation() == s2.get_oscillation()
    assert s1.get_image_range() == s2.get_image_range()
    assert imageset.get_raw_data(0) == imageset2.get_raw_data(0)
Example #3
  def tst_json(self):

    filenames = self.multiple_block_filenames()
    blocks1 = DataBlockFactory.from_filenames(filenames)
    blocks2 = self.encode_json_then_decode(blocks1)
    assert(len(blocks2) == len(blocks1))
    for b1, b2 in zip(blocks1, blocks2):
      assert(b1.format_class() == b2.format_class())
      assert(b1 == b2)
    assert(blocks1 == blocks2)

    filenames = self.multiple_block_filenames()
    blocks1 = DataBlockFactory.from_filenames(filenames)
    blocks2 = self.encode_json_then_decode(blocks1, check_format=False)
    assert(len(blocks2) == len(blocks1))
    for b1, b2 in zip(blocks1, blocks2):
      for im1, im2 in zip(b1.extract_imagesets(), b2.extract_imagesets()):
        assert(len(im1) == len(im2))
        if isinstance(im1, ImageSweep):
          assert(isinstance(im2, ImageSweep))
          assert(im1.get_beam() == im2.get_beam())
          assert(im1.get_detector() == im2.get_detector())
          assert(im1.get_goniometer() == im2.get_goniometer())
          assert(im1.get_scan() == im2.get_scan())
        else:
          assert(not isinstance(im2, ImageSweep))
          for i in xrange(len(im1)):
            assert(im1.get_beam(i) == im2.get_beam(i))
            assert(im1.get_detector(i) == im2.get_detector(i))

    print 'OK'
def test_translate(dials_regression, tmpdir):
    """Test as written in https://github.com/dials/dials/issues/471. This
  is pretty slow!"""

    tmpdir.chdir()
    dials_regression = "~/sw/cctbx/modules/dials_regression"

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "image_examples", "DLS_I04")
    image_path = os.path.join(data_dir, "grid_full_cbf_0005.cbf")

    # Generate distortion maps
    cmd = ("dials.generate_distortion_maps {0} "
           "dx=1 dy=2").format(image_path)
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    # Import without correction
    cmd = ("dials.import {0}").format(image_path)
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    db1 = DataBlockFactory.from_serialized_format('datablock.json')[0]
    det1 = db1.unique_detectors()[0]

    # Import with correction
    cmd = ("dials.import {0} dx=dx.pickle dy=dy.pickle "
           "output.datablock=corrected_datablock.json").format(image_path)
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    db2 = DataBlockFactory.from_serialized_format(
        'corrected_datablock.json')[0]
    det2 = db2.unique_detectors()[0]

    # FIXME, why doesn't db2 have dx, dy set?
    assert db2.extract_imagesets()[0].external_lookup.dx.filename
Example #5
  def tst_json(self):
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.imageset import ImageSweep

    filenames = self.multiple_block_filenames()
    blocks1 = DataBlockFactory.from_filenames(filenames)
    blocks2 = self.encode_json_then_decode(blocks1)
    assert(len(blocks2) == len(blocks1))
    for b1, b2 in zip(blocks1, blocks2):
      assert(b1.format_class() == b2.format_class())
      assert(b1 == b2)
    assert(blocks1 == blocks2)

    filenames = self.multiple_block_filenames()
    blocks1 = DataBlockFactory.from_filenames(filenames)
    blocks2 = self.encode_json_then_decode(blocks1, check_format=False)
    assert(len(blocks2) == len(blocks1))
    for b1, b2 in zip(blocks1, blocks2):
      for im1, im2 in zip(b1.extract_imagesets(), b2.extract_imagesets()):
        assert(len(im1) == len(im2))
        if isinstance(im1, ImageSweep):
          assert(isinstance(im2, ImageSweep))
          assert(im1.get_beam() == im2.get_beam())
          assert(im1.get_detector() == im2.get_detector())
          assert(im1.get_goniometer() == im2.get_goniometer())
          assert(im1.get_scan() == im2.get_scan())
        else:
          assert(not isinstance(im2, ImageSweep))
          for i in xrange(len(im1)):
            assert(im1.get_beam(i) == im2.get_beam(i))
            assert(im1.get_detector(i) == im2.get_detector(i))

    print 'OK'
def test_split_single_image_datablock(dials_regression, tmpdir):
    tmpdir.chdir()
    pytest.importorskip("h5py")
    sacla_file = os.path.join(
        dials_regression,
        "image_examples",
        "SACLA_MPCCD_Cheetah",
        "run266702-0-subset.h5",
    )
    db = DataBlockFactory.from_filenames([sacla_file])[0]
    assert db.num_images() == 4
    imageset = db.extract_imagesets()[0]
    subset = imageset[2:3]
    subblock = DataBlockFactory.from_imageset(subset)[0]
    assert subblock.num_images() == 1
    assert get_indices(subblock) == [2]

    dumped_filename = "split_datablock.json"
    dump = DataBlockDumper(subblock)
    dump.as_json(dumped_filename)

    db = DataBlockFactory.from_json_file(dumped_filename, check_format=True)[0]
    assert db.num_images() == 1
    assert get_indices(db) == [2]

    db = DataBlockFactory.from_json_file(dumped_filename,
                                         check_format=False)[0]
    assert db.num_images() == 1
    assert get_indices(db) == [2]
Example #7
 def loader(x):
   try:
     obj = DataBlockFactory.from_filenames([x])[0].extract_imagesets()[0]
   except IndexError:
     import dxtbx.datablock
     try:
       obj = DataBlockFactory.from_json_file(x)[0].extract_imagesets()[0]
     except dxtbx.datablock.InvalidDataBlockError:
       obj = ExperimentListFactory.from_json_file(x)[0].imageset
   return obj
Example #8
def do_import(filename):
    logger.info("Loading %s" % os.path.basename(filename))
    try:
        datablocks = DataBlockFactory.from_json_file(filename)
    except ValueError:
        datablocks = DataBlockFactory.from_filenames([filename])
    if len(datablocks) == 0:
        raise Abort("Could not load %s" % filename)
    if len(datablocks) > 1:
        raise Abort("Got multiple datablocks from file %s" % filename)
    return datablocks[0]
Example #9
def test_split_single_image_datablock(dials_data, tmpdir):
    tmpdir.chdir()
    pytest.importorskip("h5py")
    sacla_file = os.path.join(
        dials_data("image_examples"),
        "SACLA-MPCCD-run266702-0-subset.h5",
    )
    db = DataBlockFactory.from_filenames([sacla_file])[0]
    assert db.num_images() == 4
    imageset = db.extract_imagesets()[0]
    subset = imageset[2:3]
    subblock = DataBlockFactory.from_imageset(subset)[0]
    assert subblock.num_images() == 1
    assert get_indices(subblock) == [2]
Example #10
def test_combination_of_multiple_datablocks_and_strong_spots_files(
        dials_regression, tmpdir):
    tmpdir.chdir()

    path = os.path.join(dials_regression,
                        "centroid_test_data/centroid_####.cbf")

    # example combining two different spot-finding settings for the same dataset
    # e.g. for comparison with the reciprocal lattice viewer
    cmd = "dials.import template={0}".format(path)
    result = easy_run.fully_buffered(cmd).raise_if_errors()
    cmd = "dials.find_spots datablock.json output.reflections=strong1.pickle"
    result = easy_run.fully_buffered(cmd).raise_if_errors()
    cmd = ("dials.find_spots datablock.json sigma_strong=5 "
           "output.reflections=strong2.pickle")
    result = easy_run.fully_buffered(cmd).raise_if_errors()
    cmd = ("dev.dials.combine_datablocks datablock.json datablock.json "
           "strong1.pickle strong2.pickle")
    result = easy_run.fully_buffered(cmd).raise_if_errors()

    # load results
    comb_db = DataBlockFactory.from_json_file('combined_datablocks.json')[0]
    comb_strong = flex.reflection_table.from_pickle("combined_strong.pickle")

    # load reference models and reflections
    db = DataBlockFactory.from_json_file('datablock.json')[0]
    ref_detector = db.unique_detectors()[0]
    ref_beam = db.unique_beams()[0]
    ref_scan = db.unique_scans()[0]
    ref_goniometer = db.unique_goniometers()[0]
    strong1 = flex.reflection_table.from_pickle("strong1.pickle")
    strong2 = flex.reflection_table.from_pickle("strong2.pickle")

    # check the models have not been modified
    for imset in comb_db.extract_imagesets():
        assert imset.get_detector() == ref_detector
        assert imset.get_beam() == ref_beam
        assert imset.get_scan() == ref_scan
        assert imset.get_goniometer() == ref_goniometer

    # check the reflections are unaffected, except for the change in id
    s1 = comb_strong.select(comb_strong['id'] == 0)
    s2 = comb_strong.select(comb_strong['id'] == 1)
    s2['id'] = flex.size_t(len(s2), 0)
    for r1, r2 in zip(s1, strong1):
        assert r1 == r2
    for r1, r2 in zip(s2, strong2):
        assert r1 == r2
Example #11
  def to_datablocks(self):
    ''' Return the experiment list as a datablock list.
    This assumes that the experiment contains 1 datablock.'''
    from dxtbx.datablock import DataBlockFactory

    # Convert the experiment list to dict
    obj = self.to_dict()

    # Convert the dictionary to a datablock dictionary
    obj['__id__'] = 'DataBlock'
    for e in obj['experiment']:
      iid = e['imageset']
      imageset = obj['imageset'][iid]
      if 'beam' in e:
        imageset['beam'] = e['beam']
      if 'detector' in e:
        imageset['detector'] = e['detector']
      if 'goniometer' in e:
        imageset['goniometer'] = e['goniometer']
      if 'scan' in e:
        imageset['scan'] = e['scan']

    # Remove the experiments
    del obj['experiment']

    # Create the datablock
    return DataBlockFactory.from_dict([obj])
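
A short, hedged sketch of how this conversion might be used, assuming the method is available on the loaded ExperimentList; the JSON filename is a placeholder and the import path follows the one used elsewhere in these examples:

# Illustrative only: load an experiment list and convert it back to datablocks.
from dxtbx.model.experiment_list import ExperimentListFactory

experiments = ExperimentListFactory.from_json_file("experiments.json", check_format=False)
for block in experiments.to_datablocks():
    print("%d imageset(s) in datablock" % len(block.extract_imagesets()))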
Example #12
    def exercise_one_image(path, count_only_shadow, count_mask_shadow,
                           count_mask_no_shadow):

        from dxtbx.datablock import DataBlockFactory
        assert os.path.exists(path), path
        for shadowing in (libtbx.Auto, True, False):
            format_kwargs = {'dynamic_shadowing': shadowing}
            datablock = DataBlockFactory.from_filenames(
                [path], format_kwargs=format_kwargs)[0]
            imageset = datablock.extract_imagesets()[0]
            detector = imageset.get_detector()
            scan = imageset.get_scan()
            filename = imageset.get_path(0)
            masker = imageset.masker().format_class(
                filename, **format_kwargs).get_goniometer_shadow_masker()
            assert masker is not None
            mask = masker.get_mask(detector,
                                   scan_angle=scan.get_oscillation()[0])
            assert len(mask) == len(detector)
            # only shadowed pixels masked
            assert mask[0].count(False) == count_only_shadow, (
                mask[0].count(False), count_only_shadow)
            mask = imageset.get_mask(0)
            # dead pixels, pixels in gaps, etc also masked
            if shadowing is libtbx.Auto or shadowing is True:
                assert mask[0].count(False) == count_mask_shadow, (
                    mask[0].count(False), count_mask_shadow)
            else:
                assert mask[0].count(False) == count_mask_no_shadow, (
                    mask[0].count(False), count_mask_no_shadow)
Example #13
    def run(self):
        params, options = self.parser.parse_args(show_diff_phil=True)
        assert params.input.single_img is not None

        filebase = os.path.splitext(params.input.single_img)[0]

        for item in dir(params.output):
            value = getattr(params.output, item)
            try:
                if "%s" in value:
                    setattr(params.output, item, value % filebase)
            except Exception:
                pass

        self.params = params
        self.options = options

        # load the image
        img = dxtbx.load(params.input.single_img)
        imgset = MemImageSet([img])
        datablock = DataBlockFactory.from_imageset(imgset)[0]

        # Cannot export MemImageSets
        # if self.params.output.datablock_filename:
        # from dxtbx.datablock import DataBlockDumper
        # dump = DataBlockDumper(datablock)
        # dump.as_json(self.params.output.datablock_filename)

        observed = self.find_spots(datablock)
        experiments, indexed = self.index(datablock, observed)
        experiments = self.refine(experiments, indexed)
        integrated = self.integrate(experiments, indexed)
Example #14
  def run(self):

    from os.path import join, exists
    from libtbx import easy_run
    import os

    input_filename = join(self.path, "datablock.json")
    output_filename = "output_datablock.json"
    mask_filename = join(self.path, "lookup_mask.pickle")

    easy_run.fully_buffered(
      ['dials.apply_mask',
       'input.datablock=%s' % input_filename,
       'input.mask=%s' % mask_filename,
       'output.datablock=%s' % output_filename]).raise_if_errors()

    from dxtbx.datablock import DataBlockFactory
    datablocks = DataBlockFactory.from_json_file(output_filename)

    assert len(datablocks) == 1
    imagesets = datablocks[0].extract_imagesets()
    assert len(imagesets) == 1
    imageset = imagesets[0]
    assert imageset.external_lookup.mask.filename == mask_filename

    print 'OK'
Example #15
 def from_filenames(
     filenames,
     verbose=False,
     unhandled=None,
     compare_beam=None,
     compare_detector=None,
     compare_goniometer=None,
     scan_tolerance=None,
     format_kwargs=None,
     load_models=True,
 ):
     """Create a list of data blocks from a list of directory or file names."""
     experiments = ExperimentList()
     for db in DataBlockFactory.from_filenames(
         filenames,
         verbose=verbose,
         unhandled=unhandled,
         compare_beam=compare_beam,
         compare_detector=compare_detector,
         compare_goniometer=compare_goniometer,
         scan_tolerance=scan_tolerance,
         format_kwargs=format_kwargs,
     ):
         experiments.extend(
             ExperimentListFactory.from_datablock_and_crystal(db, None, load_models)
         )
     return experiments
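
A hedged example call of the wrapper above; the filenames are placeholders:

# Illustrative only: build an ExperimentList from a pair of image files.
filenames = ["image_0001.cbf", "image_0002.cbf"]
experiments = from_filenames(filenames, load_models=True)
print("%d experiment(s) built from %d file(s)" % (len(experiments), len(filenames)))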
Example #16
def test_experimentlist_factory_from_datablock():
    filenames = ["filename_%01d.cbf" % (i + 1) for i in range(0, 2)]

    imageset = Format.get_imageset(
        filenames,
        beam=Beam(),
        detector=Detector(),
        goniometer=Goniometer(),
        scan=Scan((1, 2), (0, 1)),
        as_sequence=True,
    )

    crystal = Crystal((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol="P1")

    datablock = DataBlockFactory.from_imageset(imageset)
    assert datablock[0].format_class()

    experiments = ExperimentListFactory.from_datablock_and_crystal(
        datablock, crystal)

    assert len(experiments) == 1
    assert experiments[0].imageset
    assert experiments[0].beam
    assert experiments[0].detector is not None
    assert experiments[0].goniometer
    assert experiments[0].scan
    assert experiments[0].crystal
Example #17
def run(args=None):
    args = args or sys.argv[1:]
    user_phil = []
    files = []
    for arg in args:
        if os.path.isfile(arg):
            files.append(arg)
        else:
            try:
                user_phil.append(parse(arg))
            except Exception:
                raise Sorry("Unrecognized argument %s" % arg)
    params = phil_scope.fetch(sources=user_phil).extract()

    fig = plt.figure()
    colormap = plt.cm.gist_ncar
    colors = [colormap(i) for i in np.linspace(0, 0.9, len(files))]
    for file_name, color in zip(files, colors):

        # read the data and get the detector models
        try:
            datablocks = DataBlockFactory.from_json_file(file_name,
                                                         check_format=False)
            detectors = sum((db.unique_detectors() for db in datablocks), [])
        except Exception:
            try:
                experiments = ExperimentListFactory.from_json_file(
                    file_name, check_format=False)
            except ValueError:
                experiments = ExperimentListFactory.from_filenames([file_name])
            detectors = experiments.detectors()
        if not params.plot_all_detectors:
            detectors = detectors[0:1]
        for detector in detectors:
            # plot the hierarchy
            if params.orthographic:
                ax = fig.gca()
            else:
                ax = fig.gca(projection="3d")
            plot_group(
                detector.hierarchy(),
                color,
                ax,
                orthographic=params.orthographic,
                show_origin_vectors=params.show_origin_vectors,
                panel_numbers=params.panel_numbers,
            )

    plt.xlabel("x")
    plt.ylabel("y")
    if params.orthographic:
        plt.axes().set_aspect("equal", "datalim")

    if params.pdf_file:
        pp = PdfPages(params.pdf_file)
        for i in plt.get_fignums():
            pp.savefig(plt.figure(i))
        pp.close()
    else:
        plt.show()
Example #18
 def run(self, idx, img):
     if os.path.isfile(self.term_file):
         raise IOTATermination('IOTA_TRACKER: Termination signal received!')
     else:
         datablock = DataBlockFactory.from_filenames([img])[0]
         observed = self.processor.find_spots(datablock=datablock)
         return [idx, int(len(observed)), img, None, None]
Example #19
  def tst_with_external_lookup(self):
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.imageset import ImageSweep
    from os.path import join

    filename = join(self.dials_regression, "centroid_test_data",
                    "datablock_with_lookup.json")
    blocks = DataBlockFactory.from_json_file(filename)
    assert(len(blocks) == 1)
    imageset = blocks[0].extract_imagesets()[0]
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)

    blocks = self.encode_json_then_decode(blocks)
    assert(len(blocks) == 1)
    imageset = blocks[0].extract_imagesets()[0]
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)

    print 'OK'
def test_experimentlist_factory_from_datablock():
  from dxtbx.model import Beam, Detector, Goniometer, Scan
  from dxtbx.datablock import DataBlockFactory
  from dxtbx.model import Crystal
  from dxtbx.format.Format import Format

  filenames = ["filename_%01d.cbf" % (i+1) for i in range(0, 2)]

  imageset = Format.get_imageset(
    filenames,
    beam = Beam(),
    detector = Detector(),
    goniometer = Goniometer(),
    scan = Scan((1,2), (0,1)),
    as_sweep=True)

  crystal = Crystal((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol="P1")

  datablock = DataBlockFactory.from_imageset(imageset)

  experiments = ExperimentListFactory.from_datablock_and_crystal(
    datablock, crystal)

  assert len(experiments) == 1
  assert experiments[0].imageset is not None
  assert experiments[0].beam is not None
  assert experiments[0].detector is not None
  assert experiments[0].goniometer is not None
  assert experiments[0].scan is not None
  assert experiments[0].crystal is not None
Example #21
  def load_reference_geometry(self):
    if self.params.input.reference_geometry is None: return

    try:
      ref_datablocks = DataBlockFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
    except Exception:
      ref_datablocks = None
    if ref_datablocks is None:
      from dxtbx.model.experiment_list import ExperimentListFactory
      try:
        ref_experiments = ExperimentListFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
      except Exception:
        try:
          import dxtbx
          img = dxtbx.load(self.params.input.reference_geometry)
        except Exception:
          raise Sorry("Couldn't load geometry file %s"%self.params.input.reference_geometry)
        else:
          self.reference_detector = img.get_detector()
      else:
        assert len(ref_experiments.detectors()) == 1
        self.reference_detector = ref_experiments.detectors()[0]
    else:
      assert len(ref_datablocks) == 1 and len(ref_datablocks[0].unique_detectors()) == 1
      self.reference_detector = ref_datablocks[0].unique_detectors()[0]
  def tst_from_datablock(self):
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.model.crystal import crystal_model

    imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 2))))
    imageset.set_beam(Beam())
    imageset.set_detector(Detector())
    imageset.set_goniometer(Goniometer())
    imageset.set_scan(Scan((1, 2), (0, 1)))

    crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=0)

    datablock = DataBlockFactory.from_imageset(imageset)

    experiments = ExperimentListFactory.from_datablock_and_crystal(
      datablock, crystal)

    assert(len(experiments) == 1)
    assert(experiments[0].imageset is not None)
    assert(experiments[0].beam is not None)
    assert(experiments[0].detector is not None)
    assert(experiments[0].goniometer is not None)
    assert(experiments[0].scan is not None)
    assert(experiments[0].crystal is not None)

    print 'OK'
Example #23
    def run(self):

        from os.path import join
        from libtbx import easy_run

        input_filename = join(self.path, "datablock.json")
        output_filename = "output_datablock.json"
        mask_filename = join(self.path, "lookup_mask.pickle")

        easy_run.fully_buffered([
            'dials.apply_mask',
            'input.datablock=%s' % input_filename,
            'input.mask=%s' % mask_filename,
            'output.datablock=%s' % output_filename
        ]).raise_if_errors()

        from dxtbx.datablock import DataBlockFactory
        datablocks = DataBlockFactory.from_json_file(output_filename)

        assert len(datablocks) == 1
        imagesets = datablocks[0].extract_imagesets()
        assert len(imagesets) == 1
        imageset = imagesets[0]
        assert imageset.external_lookup.mask.filename == mask_filename

        print 'OK'
Example #24
    def __init__(self):
        super(BigWidget, self).__init__()
        my_box = QVBoxLayout()
        top_box = QHBoxLayout()
        left_top_box = QVBoxLayout()
        right_top_box = QVBoxLayout()

        self.my_painter = ImgPainter()

        #json_file_path = "/home/luiso/dui/dui_test/only_9_img/dui_idials_tst_04/dials-1/1_import/datablock.json"
        json_file_path = "/home/luiso/dui/dui_test/X4_wide/test_02/dials-1/1_import/datablock.json"
        #json_file_path = "/home/lui/dui/dui_test/X4_wide/tst01/datablock.json"

        datablocks = DataBlockFactory.from_json_file(json_file_path)
        db = datablocks[0]
        self.my_sweep = db.extract_sweeps()[0]

        print "self.my_sweep.get_array_range() =", self.my_sweep.get_array_range()
        print "self.my_sweep.get_image_size() =", self.my_sweep.get_image_size()

        n_of_imgs = self.my_sweep.get_array_range()[1]
        print "n_of_imgs =", n_of_imgs

        self.palette_lst = ["hot ascend", "hot descend", "black2white", "white2black"]
        self.palette = self.palette_lst[0]
        self.img_num = 0
        self.set_img()


        img_select = QComboBox()

        for num in xrange(n_of_imgs):
            labl = "image number:" + str(num)
            img_select.addItem(labl)

        img_select.setCurrentIndex(0)
        img_select.currentIndexChanged.connect(self.img_changed_by_user)

        palette_select = QComboBox()

        for plt in self.palette_lst:
            palette_select.addItem(plt)

        palette_select.currentIndexChanged.connect(self.palette_changed_by_user)

        left_top_box.addWidget(palette_select)
        top_box.addLayout(left_top_box)

        right_top_box.addWidget(img_select)
        top_box.addLayout(right_top_box)

        my_box.addLayout(top_box)

        my_scrollable = QScrollArea()
        my_scrollable.setWidget(self.my_painter)

        my_box.addWidget(my_scrollable)

        self.setLayout(my_box)
        self.show()
def test_VMXi_rotation_scan():
    master_h5 = "/dls/mx/data/mx21314/mx21314-27/VMXi-AB0816/well_7/images/image_14364_master.h5"
    assert FormatNexus.understand(master_h5)

    datablocks = DataBlockFactory.from_filenames([master_h5])
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.get_format_class() == FormatNexus

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == (0.075, 0.075)
    assert panel.get_image_size() == (2068, 2162)
    assert panel.get_trusted_range() == (-1, 4096)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    assert panel.get_origin() == pytest.approx(
        (-78.05999999999999, 87.03, -194.5039999999999)
    )
    assert panel.get_distance() == pytest.approx(194.504)

    assert isinstance(gonio, Goniometer)
    assert gonio.get_rotation_axis() == (0, 1, 0)
    assert gonio.get_fixed_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)
    assert gonio.get_setting_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)

    assert scan.get_oscillation() == pytest.approx((-30, 0.1))
    assert scan.get_image_range() == (1, 600)

    assert beam.get_wavelength() == pytest.approx(0.979492)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
Example #26
    def to_datablocks(self):
        ''' Return the experiment list as a datablock list.
        This assumes that the experiment contains 1 datablock.'''
        from dxtbx.datablock import DataBlockFactory

        # Convert the experiment list to dict
        obj = self.to_dict()

        # Convert the dictionary to a datablock dictionary
        obj['__id__'] = 'DataBlock'
        for e in obj['experiment']:
            iid = e['imageset']
            imageset = obj['imageset'][iid]
            if 'beam' in e:
                imageset['beam'] = e['beam']
            if 'detector' in e:
                imageset['detector'] = e['detector']
            if 'goniometer' in e:
                imageset['goniometer'] = e['goniometer']
            if 'scan' in e:
                imageset['scan'] = e['scan']

        # Remove the experiments
        del obj['experiment']

        # Create the datablock
        return DataBlockFactory.from_dict([obj])
Example #27
def test_rotation_scan(master_h5):
    assert FormatNexusEigerDLS16MI04.understand(master_h5)

    datablocks = DataBlockFactory.from_filenames(
        [master_h5], format_kwargs={"dynamic_shadowing": True})
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.get_format_class() == FormatNexusEigerDLS16MI04

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == (0.075, 0.075)
    assert panel.get_image_size() == (4148, 4362)
    assert panel.get_trusted_range() == (-1, 65535)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    assert panel.get_origin() == pytest.approx(
        (-166.07661632390744, 172.5371934106162, -200.0))
    assert panel.get_distance() == 200

    assert len(gonio.get_axes()) == 3
    expected_axes = ((1, 0, 0), (0, 0, -1), (1, 0, 0))
    for a1, a2 in zip(gonio.get_axes(), expected_axes):
        assert a1 == pytest.approx(a2, abs=5e-2)
    assert gonio.get_scan_axis() == 2

    assert scan.get_oscillation() == (0, 0.2)
    assert scan.get_image_range() == (1, 900)

    assert beam.get_wavelength() == pytest.approx(0.979499)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
Example #28
def test_grid_scan():
    master_h5 = "/dls/i04/data/2019/cm23004-1/20190109/Eiger/grid/Thaum/Thau_5/Thau_5_1_master.h5"
    assert FormatNexusEigerDLS16MI04.understand(master_h5)

    datablocks = DataBlockFactory.from_filenames([master_h5])
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.get_format_class() == FormatNexusEigerDLS16MI04

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == (0.075, 0.075)
    assert panel.get_image_size() == (4148, 4362)
    assert panel.get_trusted_range() == (-1, 65535)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    assert panel.get_origin() == pytest.approx(
        (-167.44717577120824, 172.46833023184868, -350.0))
    assert panel.get_distance() == 350

    assert len(gonio.get_axes()) == 3
    expected_axes = ((1, 0, 0), (0, 0, -1), (1, 0, 0))
    for a1, a2 in zip(gonio.get_axes(), expected_axes):
        assert a1 == pytest.approx(a2, abs=5e-2)
    # assert gonio.get_scan_axis() == 2

    assert scan is None

    assert beam.get_wavelength() == pytest.approx(0.979499)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
Example #29
    def to_datablocks(self):
        """Return the experiment list as a datablock list.
        This assumes that the experiment contains 1 datablock."""
        from dxtbx.datablock import DataBlockFactory

        # Convert the experiment list to dict
        obj = self.to_dict()

        # Convert the dictionary to a datablock dictionary
        obj["__id__"] = "DataBlock"
        for e in obj["experiment"]:
            iid = e["imageset"]
            imageset = obj["imageset"][iid]
            if "beam" in e:
                imageset["beam"] = e["beam"]
            if "detector" in e:
                imageset["detector"] = e["detector"]
            if "goniometer" in e:
                imageset["goniometer"] = e["goniometer"]
            if "scan" in e:
                imageset["scan"] = e["scan"]

        # Remove the experiments
        del obj["experiment"]

        # Create the datablock
        return DataBlockFactory.from_dict([obj])
Example #30
    def ini_datablock(self, json_file_path):
        if(json_file_path != None):
            try:
                datablocks = DataBlockFactory.from_json_file(json_file_path)
                ##TODO check length of datablock for safety
                datablock = datablocks[0]
                self.my_sweep = datablock.extract_sweeps()[0]
                self.img_select.clear()

            except:
                print "Failed to load images from  datablock.json"

            try:
                print "self.my_sweep.get_array_range() =", self.my_sweep.get_array_range()
                n_of_imgs = len(self.my_sweep.indices())
                print "n_of_imgs =", n_of_imgs

                self.img_select.setMaximum(n_of_imgs)
                self.img_select.setMinimum(1)

                self.img_step.setMaximum(n_of_imgs / 2)
                self.img_step.setMinimum(1)

                self.num_of_imgs_to_add.setMaximum(n_of_imgs)
                self.num_of_imgs_to_add.setMinimum(1)

            except:
                print "Failed to set up IMG control dialog"

        self.btn_first_clicked()
        self.ini_contrast()
        self.set_img()
        QTimer.singleShot(1000, self.scale2border)
Example #31
  def tst_from_datablock(self):
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.model.crystal import crystal_model

    imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 2))))
    imageset.set_beam(Beam())
    imageset.set_detector(Detector())
    imageset.set_goniometer(Goniometer())
    imageset.set_scan(Scan((1, 2), (0, 1)))

    crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=0)

    datablock = DataBlockFactory.from_imageset(imageset)

    experiments = ExperimentListFactory.from_datablock_and_crystal(
      datablock, crystal)

    assert(len(experiments) == 1)
    assert(experiments[0].imageset is not None)
    assert(experiments[0].beam is not None)
    assert(experiments[0].detector is not None)
    assert(experiments[0].goniometer is not None)
    assert(experiments[0].scan is not None)
    assert(experiments[0].crystal is not None)

    print 'OK'
Example #32
  def tst_with_external_lookup(self):
    filename = join(self.dials_regression, "centroid_test_data",
                    "datablock_with_lookup.json")
    blocks = DataBlockFactory.from_json_file(filename)
    assert(len(blocks) == 1)
    imageset = blocks[0].extract_imagesets()[0]
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)

    blocks = self.encode_json_then_decode(blocks)
    assert(len(blocks) == 1)
    imageset = blocks[0].extract_imagesets()[0]
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)

    print 'OK'
Example #33
def test_spring8_ccp4_2018_zenodo_1443110_data03():
    # https://zenodo.org/record/1443110#.XD8bD5ynzmE
    master_h5 = "/dls/mx-scratch/rjgildea/zenodo/spring8-ccp4-2018/1443110/ccp4school2018_bl41xu/05/data03/data03_master.h5"
    assert FormatHDF5EigerNearlyNexusSPring8.understand(master_h5)

    datablocks = DataBlockFactory.from_filenames([master_h5])
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.get_format_class() == FormatHDF5EigerNearlyNexusSPring8

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == pytest.approx((0.075, 0.075))
    assert panel.get_image_size() == (4150, 4371)
    assert panel.get_trusted_range() == (-1, 2.094707e06)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    assert panel.get_origin() == pytest.approx((-151.939, 169.629, -180),
                                               abs=1e-3)
    assert panel.get_distance() == pytest.approx(180)

    assert isinstance(gonio, Goniometer)
    assert gonio.get_rotation_axis() == (-1, 0, 0)
    assert gonio.get_fixed_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)
    assert gonio.get_setting_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)

    assert scan.get_oscillation() == pytest.approx((-10, 1))
    assert scan.get_image_range() == (1, 180)

    assert beam.get_wavelength() == pytest.approx(1.28241, abs=1e-5)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
Example #34
def test_with_external_lookup(centroid_test_data):
    filename = os.path.join(centroid_test_data, "datablock_with_lookup.json")
    blocks = DataBlockFactory.from_json_file(filename)
    assert len(blocks) == 1
    imageset = blocks[0].extract_imagesets()[0]
    assert not imageset.external_lookup.mask.data.empty()
    assert not imageset.external_lookup.gain.data.empty()
    assert not imageset.external_lookup.pedestal.data.empty()
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.tile(0).data().all_eq(True)
    assert imageset.external_lookup.gain.data.tile(0).data().all_eq(1)
    assert imageset.external_lookup.pedestal.data.tile(0).data().all_eq(0)

    blocks = encode_json_then_decode(blocks)
    assert len(blocks) == 1
    imageset = blocks[0].extract_imagesets()[0]
    assert not imageset.external_lookup.mask.data.empty()
    assert not imageset.external_lookup.gain.data.empty()
    assert not imageset.external_lookup.pedestal.data.empty()
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.tile(0).data().all_eq(True)
    assert imageset.external_lookup.gain.data.tile(0).data().all_eq(1)
    assert imageset.external_lookup.pedestal.data.tile(0).data().all_eq(0)
Example #35
def test_units():
    master_h5 = "/dls/i04/data/2019/cm23004-1/20190114/Eiger/grid/Se_Thaum/Se_Thaum_12/Se_Thaum_12_2_master.h5"
    assert FormatNexusEigerDLS16MI04.understand(master_h5)

    datablocks = DataBlockFactory.from_filenames([master_h5])
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.get_format_class() == FormatNexusEigerDLS16MI04

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == (0.075, 0.075)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    # XXX Need to check and update expected values here, however the ones
    # dxtbx is currently generating from the file are very wrong
    assert panel.get_origin() == pytest.approx(
        (-167.35570274412459, 172.4729262553403, -339.9887931971389))
    assert panel.get_distance() == pytest.approx(339.9887931971389)

    assert scan is None

    assert beam.get_wavelength() == pytest.approx(0.979499)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
    assert panel.get_beam_centre_px(beam.get_s0()) == pytest.approx(
        (2231.41, 2299.64))
Example #36
def datablock_from_numpyarrays(image, detector, beam, mask=None):
    """
    So that one can do e.g.
    >> dblock = datablock_from_numpyarrays( image, detector, beam)
    >> refl = flex.reflection_table.from_observations(dblock, spot_finder_params)
    without having to utilize the hard disk

    :param image:  numpy array image, or list of numpy arrays
    :param mask:  numpy mask; should have the same shape as the image array
    :param detector: dxtbx detector model
    :param beam: dxtbx beam model
    :return: datablock for the image
    """
    if isinstance(image, list):
        image = np.array(image)
    if mask is not None:
        if isinstance(mask, list):
            mask = np.array(mask).astype(bool)
    I = FormatInMemory(image=image, mask=mask)
    reader = MemReader([I])
    #masker = MemMasker([I])
    iset_Data = ImageSetData(reader, None)  #, masker)
    iset = ImageSet(iset_Data)
    iset.set_beam(beam)
    iset.set_detector(detector)
    dblock = DataBlockFactory.from_imageset([iset])[0]
    return dblock
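
A sketch of one way this helper might be exercised with synthetic data. The dxtbx factory calls and import path below are assumptions that may differ between dxtbx versions, and the helper plus its module-level imports are assumed to be in scope:

# Illustrative only: fabricate simple models and an in-memory image.
import numpy as np
from dxtbx.model import BeamFactory, DetectorFactory

beam = BeamFactory.simple(1.0)  # wavelength in Angstrom
detector = DetectorFactory.simple(
    "PAD", 200.0, (105.0, 105.0), "+x", "-y", (0.1, 0.1), (2100, 2100))
image = np.random.poisson(10, size=(2100, 2100)).astype(np.float64)

dblock = datablock_from_numpyarrays(image, detector, beam)
print(dblock.num_images())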
Example #37
 def do_import(filename):
   info("Loading %s"%os.path.basename(filename))
   datablocks = DataBlockFactory.from_filenames([filename])
   if len(datablocks) == 0:
     raise Abort("Could not load %s"%filename)
   if len(datablocks) > 1:
     raise Abort("Got multiple datablocks from file %s"%filename)
   return datablocks[0]
Example #38
  def __init__(self,
               source_image=None,
               object_folder=None,
               gain = 0.32,
               params=None):
    '''Initialise the script.'''
    from dials.util.options import OptionParser
    from dxtbx.datablock import DataBlockFactory
    from dials.array_family import flex

    from iotbx.phil import parse
    from xfel.command_line.xfel_process import phil_scope

    phil_scope = parse('''
      include scope xfel.command_line.xtc_process.phil_scope
    ''', process_includes=True)

    sub_phil_scope = parse('''
      output {
        cxi_merge_picklefile = None
          .type = str
          .help = Output integration results for each color data to separate cctbx.xfel-style pickle files
      }
      indexing {
        stills {
          ewald_proximity_resolution_cutoff = 2.0
            .type = float
            .help = For calculating the area under the green curve, or the acceptable
            .help = volume of reciprocal space for spot prediction, use this high-resolution cutoff
        }
      }
      cxi_merge {
        include scope xfel.command_line.cxi_merge.master_phil
      }
    ''', process_includes=True)

    phil_scope.adopt_scope(sub_phil_scope)

    # Create the parser
    self.parser = OptionParser(
      phil=phil_scope,
      read_datablocks=True,
      read_datablocks_from_images=True)

    self.params = params
    self.img = [source_image]
    self.obj_base = object_folder
    self.phil = phil_scope.extract()
    with misc.Capturing() as junk_output:
      self.datablock = DataBlockFactory.from_filenames(self.img)[0]

    self.obj_filename = "int_{}".format(os.path.basename(self.img[0]))
    self.phil.output.cxi_merge_picklefile = os.path.join(self.obj_base, self.img[0])
Example #39
File: phil.py Project: dials/dials
 def from_string(self, s):
   from dxtbx.datablock import DataBlockFactory
   from os.path import exists
   from libtbx.utils import Sorry
   if s is None:
     return None
   if s not in self.cache:
     if not exists(s):
       raise Sorry('File %s does not exist' % s)
     self.cache[s] = FilenameDataWrapper(s,
       DataBlockFactory.from_json_file(s,
         check_format=self._check_format))
   return self.cache[s]
Example #40
def datablock(filename, check_format=True):
  ''' Load a given JSON or pickle file.

  Params:
    filename The input filename

  Returns:
    The datablock

  '''
  from dxtbx.datablock import DataBlockFactory
  return DataBlockFactory.from_serialized_format(
    filename, check_format=check_format)
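
For example (filename hypothetical), a serialized datablock can be reloaded without the original images present by turning off format checking:

# Illustrative only: reload a serialized datablock without touching the images.
datablocks = datablock("datablock.json", check_format=False)
print("%d datablock(s) loaded" % len(datablocks))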
Example #41
def work(filename, cl=[]):
  from dials.command_line.find_spots import phil_scope as params
  from dxtbx.datablock import DataBlockFactory
  from dials.array_family import flex
  interp = params.command_line_argument_interpreter()
  for cla in cl:
    params = params.fetch(interp.process(cla))
  datablock = DataBlockFactory.from_filenames([filename])[0]
  reflections = flex.reflection_table.from_observations(
    datablock, params.extract())
  detector = datablock.unique_detectors()[0]
  beam = datablock.unique_beams()[0]
  return analyse(reflections, detector, beam)
Example #42
def do_import(filename):
  logger.info("Loading %s"%os.path.basename(filename))
  try:
    datablocks = DataBlockFactory.from_json_file(filename)
  except ValueError:
    datablocks = DataBlockFactory.from_filenames([filename])
  if len(datablocks) == 0:
    raise Abort("Could not load %s"%filename)
  if len(datablocks) > 1:
    raise Abort("Got multiple datablocks from file %s"%filename)

  # Ensure the indexer and downstream applications treat this as a set of stills
  from dxtbx.imageset import ImageSet
  reset_sets = []

  for imageset in datablocks[0].extract_imagesets():
    imageset = ImageSet(imageset.reader(), imageset.indices())
    imageset._models = imageset._models
    imageset.set_scan(None)
    imageset.set_goniometer(None)
    reset_sets.append(imageset)

  return DataBlockFactory.from_imageset(reset_sets)[0]
Example #43
  def tst_pickling(self):

    from dxtbx.datablock import DataBlockFactory

    filenames = self.multiple_block_filenames()
    blocks1 = DataBlockFactory.from_filenames(filenames)
    blocks2 = self.pickle_then_unpickle(blocks1)
    assert(len(blocks2) == len(blocks1))
    for b1, b2 in zip(blocks1, blocks2):
      assert(b1.format_class() == b2.format_class())
      assert(b1 == b2)
    assert(blocks1 == blocks2)

    print 'OK'
Example #44
  def tst_create_multiple_sweeps(self):

    from dxtbx.datablock import DataBlockFactory

    filenames = self.multiple_sweep_filenames()
    blocks = DataBlockFactory.from_filenames(filenames)
    assert(len(blocks) == 1)
    assert(blocks[0].num_images() == 6)
    imageset = blocks[0].extract_imagesets()
    assert(len(imageset) == 2)
    sweeps = blocks[0].extract_sweeps()
    assert(len(sweeps) == 2)
    assert(len(sweeps[0]) == 3)
    assert(len(sweeps[1]) == 3)
    print 'OK'
Example #45
  def OnChooseDirectory (self, event) :
    dir_name = self.dir_ctrl.GetPhilValue()
    if (dir_name is not None) :
      from dxtbx.datablock import DataBlockFactory
      datablocks = DataBlockFactory.from_filenames([dir_name])
      imagesets = datablocks[0].extract_imagesets()

      self._imagesets = imagesets

      #from iotbx.detectors import identify_dataset
      #self._datasets = identify_dataset(dir_name)
      #choices = [ d.format() for d in self._datasets ]
      choices = [imgset.get_template() for imgset in self._imagesets]
      self.stack_ctrl.SetItems(choices)
      for i in range(len(choices)):
        self.stack_ctrl.SetSelection(i)
Example #46
  def __init__(self,
               source_image,
               object_folder,
               final_folder,
               final_filename,
               final,
               logfile,
               gain = 0.32,
               params=None):
    '''Initialise the script.'''
    from dxtbx.datablock import DataBlockFactory

    self.params = params

    # Read settings from the DIALS target (.phil) file
    # If none is provided, use default settings (and may God have mercy)
    if self.params.dials.target != None:
      with open(self.params.dials.target, 'r') as settings_file:
        settings_file_contents = settings_file.read()
      settings = parse(settings_file_contents)
      current_phil = phil_scope.fetch(sources=[settings])
      self.phil = current_phil.extract()
    else:
      self.phil = phil_scope.extract()

    # Set general file-handling settings
    file_basename = os.path.basename(source_image).split('.')[0]
    self.phil.output.datablock_filename = "{}/{}.json".format(object_folder, file_basename)
    self.phil.output.indexed_filename = "{}/{}_indexed.pickle".format(object_folder, file_basename)
    self.phil.output.strong_filename = "{}/{}_strong.pickle".format(object_folder, file_basename)
    self.phil.output.refined_experiments_filename = "{}/{}_refined_experiments.json".format(object_folder, file_basename)
    self.phil.output.integrated_filename = "{}/{}_integrated.pickle".format(object_folder, file_basename)
    self.phil.output.profile_filename = "{}/{}_profile.phil".format(object_folder, file_basename)
    self.phil.output.integration_pickle = final_filename
    self.int_log = logfile #"{}/int_{}.log".format(final_folder, file_basename)

    self.img = [source_image]
    self.obj_base = object_folder
    self.gain = gain
    self.fail = None
    self.frame = None
    self.final = final
    self.final['final'] = final_filename
    with misc.Capturing() as junk_output:
      self.datablock = DataBlockFactory.from_filenames(self.img)[0]
    self.obj_filename = "int_{}".format(os.path.basename(self.img[0]))
Example #47
    def ini_datablock(self, json_file_path):
        if json_file_path is not None:
            try:
                datablocks = DataBlockFactory.from_json_file(json_file_path)
                # TODO check length of datablock for safety
                datablock = datablocks[0]
                self.my_sweep = datablock.extract_sweeps()[0]
                self.img_select.clear()
            except BaseException as e:
                # We don't want to catch bare exceptions but don't know
                # what this was supposed to catch. Log it.
                logger.error(
                    "Caught unknown exception type %s: %s", type(e).__name__, e
                )
                logger.debug("Failed to load images from  datablock.json")

            try:
                logger.debug(
                    "self.my_sweep.get_array_range() = %s",
                    self.my_sweep.get_array_range(),
                )
                n_of_imgs = len(self.my_sweep.indices())
                logger.debug("n_of_imgs = %s", n_of_imgs)

                self.img_select.setMaximum(n_of_imgs)
                self.img_select.setMinimum(1)

                self.img_step.setMaximum(n_of_imgs / 2)
                self.img_step.setMinimum(1)

                self.num_of_imgs_to_add.setMaximum(n_of_imgs)
                self.num_of_imgs_to_add.setMinimum(1)

            except BaseException as e:
                # We don't want to catch bare exceptions but don't know
                # what this was supposed to catch. Log it.
                logger.error(
                    "Caught unknown exception type %s: %s", type(e).__name__, e
                )
                logger.debug("Failed to set up IMG control dialog")

        self.btn_first_clicked()
        self.ini_contrast()
        self.set_img()
        QTimer.singleShot(1000, self.scale2border)
Example #48
  def try_read_datablocks_from_images(self,
                                      args,
                                      verbose,
                                      compare_beam,
                                      compare_detector,
                                      compare_goniometer,
                                      scan_tolerance,
                                      format_kwargs):
    '''
    Try to import images.

    :param args: The input arguments
    :param verbose: Print verbose output
    :return: Unhandled arguments

    '''
    from dxtbx.datablock import DataBlockFactory
    from dials.phil import FilenameDataWrapper, DataBlockConverters
    from glob import glob

    # If filenames contain wildcards, expand
    args_new = []
    for arg in args:
      if "*" in arg:
        args_new.extend(glob(arg))
      else:
        args_new.append(arg)
    args = args_new

    unhandled = []
    datablocks = DataBlockFactory.from_filenames(
      args,
      verbose=verbose,
      unhandled=unhandled,
      compare_beam=compare_beam,
      compare_detector=compare_detector,
      compare_goniometer=compare_goniometer,
      scan_tolerance=scan_tolerance,
      format_kwargs=format_kwargs)
    if len(datablocks) > 0:
      filename = "<image files>"
      obj = FilenameDataWrapper(filename, datablocks)
      DataBlockConverters.cache[filename] = obj
      self.datablocks.append(obj)
    return unhandled
Example #49
  def tst_create_multiple_blocks(self):

    from dxtbx.datablock import DataBlockFactory

    filenames = self.multiple_block_filenames()
    blocks = DataBlockFactory.from_filenames(filenames, verbose=False)
    assert(len(blocks) == 22)

    # Block 1
    assert(blocks[0].num_images() == 9)
    imageset = blocks[0].extract_imagesets()
    assert(len(imageset) == 1)
    assert(len(imageset[0]) == 9)
    sweeps = blocks[0].extract_sweeps()
    assert(len(sweeps) == 1)
    assert(len(sweeps[0]) == 9)

    print 'OK'
Example #50
  def load_reference_geometry(self):
    if self.params.input.reference_geometry is None: return

    try:
      ref_datablocks = DataBlockFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
    except Exception:
      ref_datablocks = None
    if ref_datablocks is None:
      from dxtbx.model.experiment.experiment_list import ExperimentListFactory
      try:
        ref_experiments = ExperimentListFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
      except Exception:
        raise Sorry("Couldn't load geometry file %s"%self.params.input.reference_geometry)
      assert len(ref_experiments.detectors()) == 1
      self.reference_detector = ref_experiments.detectors()[0]
    else:
      assert len(ref_datablocks) == 1 and len(ref_datablocks[0].unique_detectors()) == 1
      self.reference_detector = ref_datablocks[0].unique_detectors()[0]
Example #51
  def __call__(self):
    '''
    Import the datablocks

    '''
    from dxtbx.datablock import DataBlockTemplateImporter
    from dxtbx.datablock import DataBlockFactory
    from dials.util.options import flatten_datablocks
    from libtbx.utils import Sorry

    # Get the datablocks
    datablocks = flatten_datablocks(self.params.input.datablock)

    # Check we have some filenames
    if len(datablocks) == 0:

      format_kwargs = {
        'dynamic_shadowing' : self.params.format.dynamic_shadowing
      }

      # Check if a template has been set and print help if not, otherwise try to
      # import the images based on the template input
      if len(self.params.input.template) > 0:
        importer = DataBlockTemplateImporter(
          self.params.input.template,
          max(self.params.verbosity-1, 0),
          format_kwargs=format_kwargs)
        datablocks = importer.datablocks
        if len(datablocks) == 0:
          raise Sorry('No datablocks found matching template %s' % self.params.input.template)
      elif len(self.params.input.directory) > 0:
        datablocks = DataBlockFactory.from_filenames(
          self.params.input.directory,
          max(self.params.verbosity-1, 0),
          format_kwargs=format_kwargs)
        if len(datablocks) == 0:
          raise Sorry('No datablocks found in directories %s' % self.params.input.directory)
      else:
        raise Sorry('No datablocks found')
    if len(datablocks) > 1:
      raise Sorry("More than 1 datablock found")

    # Return the datablocks
    return datablocks[0]
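
# A hedged standalone sketch of the template-import branch above: construct a
# DataBlockTemplateImporter directly from a template string and read back its
# datablocks. "image_####.cbf" is a hypothetical template; verbosity 0 keeps it quiet.
from dxtbx.datablock import DataBlockTemplateImporter
from libtbx.utils import Sorry

def datablocks_from_template(template):
  importer = DataBlockTemplateImporter([template], 0)
  if len(importer.datablocks) == 0:
    raise Sorry('No datablocks found matching template %s' % template)
  return importer.datablocks

# e.g. datablocks_from_template("image_####.cbf")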
Ejemplo n.º 52
0
  def __init__(self, img, gain, params):
    """ Initialization and data read-in
    """
    from dxtbx.datablock import DataBlockFactory

    self.gain = gain
    self.params = params

    # Read settings from the DIALS target (.phil) file
    # If none is provided, use default settings (and may God have mercy)
    if self.params.dials.target is not None:
      with open(self.params.dials.target, 'r') as settings_file:
        settings_file_contents = settings_file.read()
      settings = parse(settings_file_contents)
      current_phil = phil_scope.fetch(sources=[settings])
      self.phil = current_phil.extract()
    else:
      self.phil = phil_scope.extract()

    # Convert raw image into single-image datablock
    with misc.Capturing() as junk_output:
      self.datablock = DataBlockFactory.from_filenames([img])[0]
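
# A hedged follow-up sketch: once a raw image has been wrapped in a single-image
# datablock as above, the imageset and its models can be pulled out directly.
# "image.cbf" is a hypothetical filename.
from dxtbx.datablock import DataBlockFactory

datablock = DataBlockFactory.from_filenames(["image.cbf"])[0]
imageset = datablock.extract_imagesets()[0]
detector = imageset.get_detector()
beam = imageset.get_beam()
print("%d panel(s), wavelength %.4f A" % (len(detector), beam.get_wavelength()))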
Ejemplo n.º 53
0
  def tst_from_null_sweep(self):
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.imageset import NullReader, ImageSweep, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan

    sweep = ImageSweep(NullReader(SweepFileList("template_%2d.cbf", (0, 10))))
    sweep.set_beam(Beam((0, 0, 1)))
    sweep.set_detector(Detector())
    sweep.set_goniometer(Goniometer((1, 0, 0)))
    sweep.set_scan(Scan((1, 10), (0, 0.1)))

    # Create the datablock
    datablock = DataBlockFactory.from_imageset(sweep)
    assert(len(datablock) == 1)
    datablock = datablock[0]

    sweeps = datablock.extract_sweeps()
    assert(len(sweeps) == 1)
    assert(sweeps[0].get_beam() == sweep.get_beam())
    assert(sweeps[0].get_detector() == sweep.get_detector())
    assert(sweeps[0].get_goniometer() == sweep.get_goniometer())
    assert(sweeps[0].get_scan() == sweep.get_scan())

    print 'OK'
Ejemplo n.º 54
0
  
  filenames = []
  for arg in args:
    if "indexing.data" in arg:
      path = arg.split('=')[1]
      if os.path.isdir(path):
        for subfile in os.listdir(path):
          subpath = os.path.join(path, subfile)
          if os.path.isfile(subpath):
            filenames.append(subpath)
      else:
        filenames.append(path)

  print filenames

  datablock = DataBlockFactory.from_filenames(filenames)[0]
 
  observed = flex.reflection_table.from_observations(datablock, params)
  observed.as_pickle("strong.pickle")
  print "Number of observed reflections:", len(observed)
 
  working_params = copy.deepcopy(params)
  imagesets = datablock.extract_imagesets()

# old labelit
#  from spotfinder.applications.xfel import cxi_phil
#  horizons_phil = cxi_phil.cxi_versioned_extract(args)


  print "indexing..."
  t0 = clock()
Ejemplo n.º 55
0
def exercise_spotfinder():
    if not libtbx.env.has_module("dials_regression"):
        print "Skipping exercise_spotfinder: dials_regression not present"
        return

    data_dir = libtbx.env.find_in_repositories(relative_path="dials_regression/centroid_test_data", test=os.path.isdir)
    template = glob(os.path.join(data_dir, "centroid*.cbf"))
    args = ["dials.find_spots", " ".join(template), "output.reflections=spotfinder.pickle", "output.shoeboxes=True"]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        assert len(reflections) == 653, len(reflections)
        refl = reflections[0]
        assert approx_equal(refl["intensity.sum.value"], 42)
        assert approx_equal(refl["bbox"], (1398, 1400, 513, 515, 0, 1))
        assert approx_equal(refl["xyzobs.px.value"], (1399.1190476190477, 514.2142857142857, 0.5))
        assert "shoebox" in reflections
    print "OK"

    # now with a resolution filter
    args = [
        "dials.find_spots",
        "filter.d_min=2",
        "filter.d_max=15",
        " ".join(template),
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=False",
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        assert len(reflections) == 467, len(reflections)
        assert "shoebox" not in reflections
    print "OK"

    # now with more generous parameters
    args = [
        "dials.find_spots",
        "min_spot_size=3",
        "max_separation=3",
        " ".join(template),
        "output.reflections=spotfinder.pickle",
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        assert len(reflections) == 678, len(reflections)
    print "OK"

    # Now with a user defined mask
    template = glob(os.path.join(data_dir, "centroid*.cbf"))
    args = [
        "dials.find_spots",
        " ".join(template),
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=True",
        "lookup.mask=%s" % os.path.join(data_dir, "mask.pickle"),
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        from dxtbx.datablock import DataBlockFactory

        datablocks = DataBlockFactory.from_json_file(os.path.join(data_dir, "datablock.json"))
        assert len(datablocks) == 1
        imageset = datablocks[0].extract_imagesets()[0]
        detector = imageset.get_detector()
        beam = imageset.get_beam()
        for x, y, z in reflections["xyzobs.px.value"]:
            d = detector[0].get_resolution_at_pixel(beam.get_s0(), (x, y))
            assert d >= 3

    # Now with a region of interest
    template = glob(os.path.join(data_dir, "centroid*.cbf"))
    args = [
        "dials.find_spots",
        " ".join(template),
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=True",
        "region_of_interest=800,1200,800,1200",
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        x, y, z = reflections["xyzobs.px.value"].parts()
        assert x.all_ge(800)
        assert y.all_ge(800)
        assert x.all_lt(1200)
        assert y.all_lt(1200)

    print "OK"

    # now with XFEL stills
    data_dir = libtbx.env.find_in_repositories(
        relative_path="dials_regression/spotfinding_test_data", test=os.path.isdir
    )
    template = os.path.join(data_dir, "idx-s00-20131106040302615.cbf")
    args = ["dials.find_spots", template, "output.reflections=spotfinder.pickle"]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()
    assert os.path.exists("spotfinder.pickle")
    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
        assert len(reflections) == 2643, len(reflections)
    print "OK"
Ejemplo n.º 56
0
def index_from_files(f_one,f_two,f_three):

    import dxtbx
    from iotbx.phil import parse
    from dxtbx.datablock import DataBlockFactory
    from dials.array_family import flex
    from dials.algorithms.indexing.indexer import indexer_base
    from dials.util.options import OptionParser
    import copy
    import pickle          # used below to write crystal.pkl
    from time import time  # used below to time the indexing step

    phil_scope_str='''
        output {{
          shoeboxes = True
            .type = bool
            .help = Save the raw pixel values inside the reflection shoeboxes.
        }}
        include scope dials.algorithms.spot_finding.factory.phil_scope
        include scope dials.algorithms.indexing.indexer.index_only_phil_scope
        include scope dials.algorithms.refinement.refiner.phil_scope
        indexing.known_symmetry.unit_cell={0}
          .type = unit_cell
        indexing.known_symmetry.space_group={1}
          .type = space_group
          '''
    # target_cell and target_sg are assumed to be defined at module level
    phil_scope = parse(phil_scope_str.format(target_cell, target_sg), process_includes=True)
    #  from dials.util.options import OptionParser
    parser = OptionParser(phil=phil_scope)
    params, options = parser.parse_args(args=[], show_diff_phil=True)

    params.refinement.parameterisation.scan_varying = False
    params.indexing.method='real_space_grid_search'
    # params.indexing.method='fft3d'
    #  params.indexing.max_cell=800
    #  params.spotfinder.filter.min_spot_size=3
      
    filenames = [f_one,f_two,f_three]

    datablock = DataBlockFactory.from_filenames(filenames)[0]

    observed = flex.reflection_table.from_observations(datablock, params)
    observed.as_pickle("strong.pickle")
    print "Number of observed reflections:", len(observed)

    working_params = copy.deepcopy(params)
    imagesets = datablock.extract_imagesets()

    print "indexing..."
    t0 = time()
    # new dials, fix by Aaron
    idxr = indexer_base.from_parameters(observed, imagesets, params=params)
    idxr.index()
    tel = time()-t0
    print "done indexing (",tel," sec)"

    # new dials
    indexed = idxr.refined_reflections
    experiments = idxr.refined_experiments
    print experiments.crystals()[0]
    crystal_params = experiments.crystals()[0]
    with open('crystal.pkl', 'wb') as output:
        pickle.dump(crystal_params, output, pickle.HIGHEST_PROTOCOL)

    return
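
# A hedged usage sketch for the function above: pass three image paths (these
# names are hypothetical) and read back the pickled crystal model it writes.
# It assumes target_cell and target_sg are defined at module level, as noted above.
import pickle

index_from_files("still_0001.cbf", "still_0002.cbf", "still_0003.cbf")
with open("crystal.pkl", "rb") as fh:
  crystal = pickle.load(fh)
print(crystal)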
Ejemplo n.º 57
0
print mycrystal

# get a CS-PAD detector for testing
import os
import libtbx.load_env
dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)
data_dir = os.path.join(dials_regression, "refinement_test_data",
                        "hierarchy_test")
datablock_path = os.path.join(data_dir, "datablock.json")
assert os.path.exists(datablock_path)

# load models
from dxtbx.datablock import DataBlockFactory
datablock = DataBlockFactory.from_serialized_format(datablock_path, check_format=False)
im_set = datablock[0].extract_imagesets()[0]
from copy import deepcopy
cspad = deepcopy(im_set.get_detector())

# get also a hierarchical type P6M detector
data_dir = os.path.join(dials_regression, "refinement_test_data",
                        "metrology", "i03-2.5A-thaumatin-20140514-split")
datablock_path = os.path.join(data_dir, "datablock.json")
datablock = DataBlockFactory.from_serialized_format(datablock_path, check_format=False)
im_set = datablock[0].extract_imagesets()[0]
from copy import deepcopy
p6m = deepcopy(im_set.get_detector())
print p6m[0]

# get a P12M (cropped to middle 18 modules)
Ejemplo n.º 58
0
def test_refinement():
  '''Test a refinement run'''

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # Get a beam and detector from a datablock. This one has a CS-PAD, but that
  # is irrelevant
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "hierarchy_test")
  datablock_path = os.path.join(data_dir, "datablock.json")
  assert os.path.exists(datablock_path)

  # load models
  from dxtbx.datablock import DataBlockFactory
  datablock = DataBlockFactory.from_serialized_format(datablock_path, check_format=False)
  im_set = datablock[0].extract_imagesets()[0]
  from copy import deepcopy
  detector = deepcopy(im_set.get_detector())
  beam = im_set.get_beam()

  # Invent a crystal, goniometer and scan for this test
  from dxtbx.model.crystal import crystal_model
  crystal = crystal_model((40.,0.,0.) ,(0.,40.,0.), (0.,0.,40.),
                          space_group_symbol = "P1")
  orig_xl = deepcopy(crystal)

  from dxtbx.model.experiment import goniometer_factory
  goniometer = goniometer_factory.known_axis((1., 0., 0.))

  # Build a mock scan for a 180 degree sweep
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  scan = sf.make_scan(image_range = (1,1800),
                      exposure_times = 0.1,
                      oscillation = (0, 0.1),
                      epochs = range(1800),
                      deg = True)
  sweep_range = scan.get_oscillation_range(deg=False)
  im_width = scan.get_oscillation(deg=False)[1]
  assert sweep_range == (0., pi)
  assert approx_equal(im_width, 0.1 * pi / 180.)

  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  # Build an experiment list
  experiments = ExperimentList()
  experiments.append(Experiment(
        beam=beam, detector=detector, goniometer=goniometer,
        scan=scan, crystal=crystal, imageset=None))

  # simulate some reflections
  refs, _ = generate_reflections(experiments)

  # perturb the unit cell slightly (0.1 Angstrom changes to the cell lengths,
  # 0.1 degree changes to the alpha and beta angles)
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
    CrystalUnitCellParameterisation
  xluc_param = CrystalUnitCellParameterisation(crystal)
  xluc_p_vals = xluc_param.get_param_vals()
  cell_params = crystal.get_unit_cell().parameters()
  cell_params = [a + b for a, b in zip(cell_params, [0.1, -0.1, 0.1, 0.1,
                                                     -0.1, 0.0])]
  from cctbx.uctbx import unit_cell
  from rstbx.symmetry.constraints.parameter_reduction import \
      symmetrize_reduce_enlarge
  from scitbx import matrix
  new_uc = unit_cell(cell_params)
  newB = matrix.sqr(new_uc.fractionalization_matrix()).transpose()
  S = symmetrize_reduce_enlarge(crystal.get_space_group())
  S.set_orientation(orientation=newB)
  X = tuple([e * 1.e5 for e in S.forward_independent_parameters()])
  xluc_param.set_param_vals(X)

  # reparameterise the crystal at the perturbed geometry
  xluc_param = CrystalUnitCellParameterisation(crystal)

  # Dummy parameterisations for other models
  beam_param = None
  xlo_param = None
  det_param = None

  # parameterisation of the prediction equation
  from dials.algorithms.refinement.parameterisation.parameter_report import \
      ParameterReporter
  pred_param = TwoThetaPredictionParameterisation(experiments,
    det_param, beam_param, xlo_param, [xluc_param])
  param_reporter = ParameterReporter(det_param, beam_param,
                                     xlo_param, [xluc_param])

  # reflection manager
  refman = TwoThetaReflectionManager(refs, experiments, nref_per_degree=20,
    verbosity=2)

  # reflection predictor
  ref_predictor = TwoThetaExperimentsPredictor(experiments)

  # target function
  target = TwoThetaTarget(experiments, ref_predictor, refman, pred_param)

  # minimisation engine
  from dials.algorithms.refinement.engine \
    import LevenbergMarquardtIterations as Refinery
  refinery = Refinery(target = target,
                      prediction_parameterisation = pred_param,
                      log = None,
                      verbosity = 0,
                      track_step = False,
                      track_gradient = False,
                      track_parameter_correlation = False,
                      max_iterations = 20)

  # Refiner
  from dials.algorithms.refinement.refiner import Refiner
  refiner = Refiner(reflections=refs,
                    experiments=experiments,
                    pred_param=pred_param,
                    param_reporter=param_reporter,
                    refman=refman,
                    target=target,
                    refinery=refinery,
                    verbosity=1)

  history = refiner.run()

  # compare crystal with original crystal
  refined_xl = refiner.get_experiments()[0].crystal

  #print refined_xl
  assert refined_xl.is_similar_to(orig_xl, uc_rel_length_tolerance=0.001,
    uc_abs_angle_tolerance=0.01)

  #print "Unit cell esds:"
  #print refined_xl.get_cell_parameter_sd()

  return
def test1():

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use a datablock that contains a CS-PAD detector description
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "hierarchy_test")
  datablock_path = os.path.join(data_dir, "datablock.json")
  assert os.path.exists(datablock_path)

  # load models
  from dxtbx.datablock import DataBlockFactory
  datablock = DataBlockFactory.from_serialized_format(datablock_path, check_format=False)
  im_set = datablock[0].extract_imagesets()[0]
  from copy import deepcopy
  detector = deepcopy(im_set.get_detector())
  beam = im_set.get_beam()

  # we'll invent a crystal, goniometer and scan for this test
  from dxtbx.model.crystal import crystal_model
  crystal = crystal_model((40.,0.,0.) ,(0.,40.,0.), (0.,0.,40.),
                          space_group_symbol = "P1")

  from dxtbx.model.experiment import goniometer_factory
  goniometer = goniometer_factory.known_axis((1., 0., 0.))

  # Build a mock scan for a 180 degree sweep
  from dxtbx.model.scan import scan_factory
  sf = scan_factory()
  scan = sf.make_scan(image_range = (1,1800),
                      exposure_times = 0.1,
                      oscillation = (0, 0.1),
                      epochs = range(1800),
                      deg = True)
  sweep_range = scan.get_oscillation_range(deg=False)
  im_width = scan.get_oscillation(deg=False)[1]
  assert sweep_range == (0., pi)
  assert approx_equal(im_width, 0.1 * pi / 180.)

  from dxtbx.model.experiment.experiment_list import ExperimentList, Experiment

  # Build an experiment list
  experiments = ExperimentList()
  experiments.append(Experiment(
        beam=beam, detector=detector, goniometer=goniometer,
        scan=scan, crystal=crystal, imageset=None))

  # simulate some reflections
  refs, ref_predictor = generate_reflections(experiments)

  # move the detector quadrants apart by 2mm both horizontally and vertically
  from dials.algorithms.refinement.parameterisation \
    import DetectorParameterisationHierarchical
  det_param = DetectorParameterisationHierarchical(detector, level=1)
  det_p_vals = det_param.get_param_vals()
  p_vals = list(det_p_vals)
  p_vals[1] += 2
  p_vals[2] -= 2
  p_vals[7] += 2
  p_vals[8] += 2
  p_vals[13] -= 2
  p_vals[14] += 2
  p_vals[19] -= 2
  p_vals[20] -= 2
  det_param.set_param_vals(p_vals)

  # reparameterise the detector at the new perturbed geometry
  det_param = DetectorParameterisationHierarchical(detector, level=1)

  # parameterise other models
  from dials.algorithms.refinement.parameterisation.beam_parameters import \
      BeamParameterisation
  from dials.algorithms.refinement.parameterisation.crystal_parameters import \
      CrystalOrientationParameterisation, CrystalUnitCellParameterisation
  beam_param = BeamParameterisation(beam, goniometer)
  xlo_param = CrystalOrientationParameterisation(crystal)
  xluc_param = CrystalUnitCellParameterisation(crystal)

  # fix beam
  beam_param.set_fixed([True]*3)

  # fix crystal
  xluc_param.set_fixed([True]*6)
  xlo_param.set_fixed([True]*3)

  # parameterisation of the prediction equation
  from dials.algorithms.refinement.parameterisation.prediction_parameters import \
      XYPhiPredictionParameterisation
  from dials.algorithms.refinement.parameterisation.parameter_report import \
      ParameterReporter
  pred_param = XYPhiPredictionParameterisation(experiments,
    [det_param], [beam_param], [xlo_param], [xluc_param])
  param_reporter = ParameterReporter([det_param], [beam_param],
                                     [xlo_param], [xluc_param])

  # reflection manager and target function
  from dials.algorithms.refinement.target import \
    LeastSquaresPositionalResidualWithRmsdCutoff
  from dials.algorithms.refinement.reflection_manager import ReflectionManager
  refman = ReflectionManager(refs, experiments, nref_per_degree=20)

  # set a very tight rmsd target of 1/10000 of a pixel
  target = LeastSquaresPositionalResidualWithRmsdCutoff(experiments,
      ref_predictor, refman, pred_param, restraints_parameterisation=None,
      frac_binsize_cutoff=0.0001)

  # minimisation engine
  from dials.algorithms.refinement.engine \
    import LevenbergMarquardtIterations as Refinery
  refinery = Refinery(target = target,
                      prediction_parameterisation = pred_param,
                      log = None,
                      verbosity = 0,
                      track_step = False,
                      track_gradient = False,
                      track_parameter_correlation = False,
                      max_iterations = 20)

  # Refiner
  from dials.algorithms.refinement.refiner import Refiner
  refiner = Refiner(reflections=refs,
                    experiments=experiments,
                    pred_param=pred_param,
                    param_reporter=param_reporter,
                    refman=refman,
                    target=target,
                    refinery=refinery,
                    verbosity=0)

  history = refiner.run()
  assert history.reason_for_termination == "RMSD target achieved"

  #compare detector with original detector
  orig_det = im_set.get_detector()
  refined_det = refiner.get_experiments()[0].detector

  from scitbx import matrix
  import math
  for op, rp in zip(orig_det, refined_det):
    # compare the origin vectors by...
    o1 = matrix.col(op.get_origin())
    o2 = matrix.col(rp.get_origin())
    # ...their relative lengths
    assert approx_equal(
      math.fabs(o1.length() - o2.length()) / o1.length(), 0, eps=1e-5)
    # ...the angle between them
    assert approx_equal(o1.accute_angle(o2), 0, eps=1e-5)

  print "OK"
  return
from __future__ import division


if __name__ == '__main__':
  import sys
  from dxtbx.datablock import DataBlockFactory
  from dxtbx.model import ParallaxCorrectedPxMmStrategy
  datablocks = DataBlockFactory.from_args(sys.argv[1:])
  assert(len(datablocks) == 1)
  detectors = datablocks[0].unique_detectors()
  assert(len(detectors) == 1)
  detector = detectors[0]
  assert(len(detector) == 1)
  px_mm = detector[0].get_px_mm_strategy()
  assert(isinstance(px_mm, ParallaxCorrectedPxMmStrategy))
  print "Mu: %f mm^-1 " % px_mm.mu()
  print "t0: %f mm" % px_mm.t0()
  from matplotlib import pylab
  from scitbx.array_family import flex
  image_size = detector[0].get_image_size()[::-1]
  xcorr = flex.double(flex.grid(image_size))
  ycorr = flex.double(flex.grid(image_size))
  pixel_size = detector[0].get_pixel_size()
  for j in range(xcorr.all()[0]):
    for i in range(xcorr.all()[1]):
      x1, y1 = detector[0].pixel_to_millimeter((i,j))
      x0, y0 = i * pixel_size[0], j * pixel_size[1]
      xcorr[j,i] = x1 - x0
      ycorr[j,i] = y1 - y0
  vmin = min([flex.min(xcorr), flex.min(ycorr)])