def user_callback(self, dc, panel, wx):
    """Overlay rings at the experimental two-theta positions on the viewer.

    Draws transparent red circles centred on the beam position.  Ring radii
    follow from Bragg's law, two_theta = 2*asin(wavelength / (2*d)), mapped
    to pixels via the detector distance and pixel size.

    Args:
        dc: wx device context to draw on.
        panel: viewer panel providing the image, scale and settings.
        wx: the wxPython module (passed in by the caller).
    """
    center_x, center_y = panel._img.get_beam_center()
    xc, yc = panel._img.image_coords_as_screen_coords(center_x, center_y)
    dc.SetPen(wx.Pen('red'))
    dc.SetBrush(wx.TRANSPARENT_BRUSH)

    wavelength = panel._img._raw.wavelength  # should be this
    wavelength_from_avg_file = False
    if wavelength_from_avg_file:
        # go through hoops to get the proper wavelength corresponding to this run
        import pickle
        avepath = self.path.replace("stddev", "avg")
        # Context manager so the pickle file is closed promptly
        # (the original leaked the open file handle).
        with open(avepath, "rb") as handle:
            info = pickle.load(handle)
        wavelength = info["WAVELENGTH"]

    # Bragg's law: two_theta for each experimental d-spacing.
    twotheta = 2. * flex.asin(
        flex.double(len(self.two_theta_experimental), wavelength / 2.) /
        self.experimental_d)
    L_mm = panel.settings.distance * flex.tan(twotheta)
    L_pixels = L_mm / panel._img._raw.pixel_size

    # Plain loop (not a throwaway list comprehension) for the drawing side effect.
    for pxl in L_pixels:
        dc.DrawCircle(xc, yc, panel._img.get_scale() * pxl)
Beispiel #2
0
    def user_callback(self, dc, panel, wx):
        """Overlay rings at the experimental two-theta positions on the viewer.

        Draws transparent red circles centred on the beam position.  Ring
        radii follow from Bragg's law, two_theta = 2*asin(wavelength / (2*d)),
        mapped to pixels via the detector distance and pixel size.

        Args:
            dc: wx device context to draw on.
            panel: viewer panel providing the image, scale and settings.
            wx: the wxPython module (passed in by the caller).
        """
        center_x, center_y = panel._img.get_beam_center()
        xc, yc = panel._img.image_coords_as_screen_coords(center_x, center_y)
        dc.SetPen(wx.Pen('red'))
        dc.SetBrush(wx.TRANSPARENT_BRUSH)

        wavelength = panel._img._raw.wavelength  #should be this
        wavelength_from_avg_file = False
        if wavelength_from_avg_file:
            # go through hoops to get the proper wavelength corresponding to this run
            import pickle
            avepath = self.path.replace("stddev", "avg")
            # Context manager closes the file (original leaked the handle).
            with open(avepath, "rb") as handle:
                info = pickle.load(handle)
            wavelength = info["WAVELENGTH"]

        # Bragg's law: two_theta for each experimental d-spacing.
        twotheta = 2. * flex.asin(
            flex.double(len(self.two_theta_experimental), wavelength / 2.) /
            self.experimental_d)
        L_mm = panel.settings.distance * flex.tan(twotheta)
        L_pixels = L_mm / panel._img._raw.pixel_size

        # Plain loop for the drawing side effect (was a discarded list comp).
        for pxl in L_pixels:
            dc.DrawCircle(xc, yc, panel._img.get_scale() * pxl)
Beispiel #3
0
def run(args, imageset=None):
    """Radially average one or more diffraction images and report the profile.

    Args:
        args: either a list of command-line strings (phil ``key=value``
            assignments or bare file paths), or an already-extracted phil
            params object (detected as anything without ``len()``).
        imageset: optional pre-loaded imageset; when None, images are loaded
            from ``params.file_path``.

    Returns:
        (xvals, results) for the last image processed: x-axis values
        (two-theta, q or resolution, per ``params.x_axis``) and the radially
        averaged intensities.  NOTE(review): undefined if no image was
        processed — confirm callers always supply at least one image.
    """
    # Parse input
    try:
        len(args)
    except Exception:
        # args has no len() -- assume it is an already-extracted params object.
        params = args
    else:
        user_phil = []
        for arg in args:
            if "=" in arg:
                try:
                    user_phil.append(libtbx.phil.parse(arg))
                except RuntimeError as e:
                    raise Sorry("Unrecognized argument '%s' (error: %s)" %
                                (arg, str(e)))
            else:
                # Bare argument: treat it as a file path.
                try:
                    user_phil.append(
                        libtbx.phil.parse("""file_path=%s""" % arg))
                except ValueError:
                    raise Sorry("Unrecognized argument '%s'" % arg)
        params = master_phil.fetch(sources=user_phil).extract()
    if imageset is None:
        if (params.file_path is None or len(params.file_path) == 0
                or not all(os.path.isfile(f) for f in params.file_path)):
            master_phil.show()
            raise Usage(
                "file_path must be defined (either file_path=XXX, or the path alone)."
            )
    assert params.n_bins is not None
    assert params.verbose is not None
    assert params.output_bins is not None

    # Allow writing to a file instead of stdout
    if params.output_file is None:
        logger = sys.stdout
    else:
        logger = open(params.output_file, "w")
        logger.write("%s " % params.output_file)

    if params.show_plots:
        from matplotlib import pyplot as plt

        colormap = plt.cm.gist_ncar
        # NOTE(review): set_color_cycle was removed in matplotlib 3.x
        # (set_prop_cycle is the modern API) -- confirm target version.
        plt.gca().set_color_cycle(
            [colormap(i) for i in np.linspace(0, 0.9, len(params.file_path))])

    if params.mask is not None:
        params.mask = easy_pickle.load(params.mask)

    if imageset is None:
        iterable = params.file_path

        # Load a path as an image file, datablock json, or experiment list
        # json, in that order of preference.
        def load_func(x):
            try:
                obj = dxtbx.datablock.DataBlockFactory.from_filenames(
                    [x])[0].extract_imagesets()[0]
            except IndexError:
                try:
                    obj = dxtbx.datablock.DataBlockFactory.from_json_file(
                        x)[0].extract_imagesets()[0]
                except dxtbx.datablock.InvalidDataBlockError:
                    obj = ExperimentListFactory.from_json_file(x)[0].imageset
            return obj

    else:
        iterable = [imageset]

        # An imageset was supplied directly; nothing to load.
        def load_func(x):
            return x

    # Iterate over each file provided
    for item in iterable:
        iset = load_func(item)
        n_images = len(iset)
        if params.image_number is None:
            if params.max_images is None:
                subiterable = range(n_images)
            else:
                subiterable = range(0, min(params.max_images, n_images))
        else:
            subiterable = [params.image_number]
        for image_number in subiterable:
            beam = iset.get_beam(image_number)
            detector = iset.get_detector(image_number)
            s0 = col(beam.get_s0())

            # Search the detector for the panel farthest from the beam. The
            # number of bins in the radial average will be equal to the
            # farthest point from the beam on the detector, in pixels, unless
            # overridden at the command line
            panel_res = [p.get_max_resolution_at_corners(s0) for p in detector]
            farthest_panel = detector[panel_res.index(min(panel_res))]
            size2, size1 = farthest_panel.get_image_size()
            corners = [(0, 0), (size1 - 1, 0), (0, size2 - 1),
                       (size1 - 1, size2 - 1)]
            corners_lab = [
                col(farthest_panel.get_pixel_lab_coord(c)) for c in corners
            ]
            corner_two_thetas = [
                farthest_panel.get_two_theta_at_pixel(s0, c) for c in corners
            ]
            extent_two_theta = max(corner_two_thetas)
            max_corner = corners_lab[corner_two_thetas.index(extent_two_theta)]
            # Radial extent of the detector in pixels.
            extent = int(
                math.ceil(max_corner.length() * math.sin(extent_two_theta) /
                          max(farthest_panel.get_pixel_size())))
            # Convert to degrees for the bin-to-angle mapping below.
            extent_two_theta *= 180 / math.pi

            if params.n_bins < extent:
                params.n_bins = extent

            # These arrays will store the radial average info
            sums = flex.double(params.n_bins) * 0
            sums_sq = flex.double(params.n_bins) * 0
            counts = flex.int(params.n_bins) * 0

            all_data = iset[image_number]

            # Single-panel detectors return a bare array; normalize to tuple.
            if not isinstance(all_data, tuple):
                all_data = (all_data, )

            for tile, (panel, data) in enumerate(zip(detector, all_data)):
                if params.panel is not None and tile != params.panel:
                    continue

                if params.mask is None:
                    mask = flex.bool(flex.grid(data.focus()), True)
                else:
                    mask = params.mask[tile]

                if hasattr(data, "as_double"):
                    data = data.as_double()

                logger.flush()
                if params.verbose:
                    logger.write("Average intensity tile %d: %9.3f\n" %
                                 (tile, flex.mean(data)))
                    logger.write("N bins: %d\n" % params.n_bins)
                    logger.flush()

                x1, y1, x2, y2 = (
                    0,
                    0,
                    panel.get_image_size()[1],
                    panel.get_image_size()[0],
                )
                bc = panel.get_beam_centre_px(beam.get_s0())
                # Swap to (slow, fast) pixel order for radial_average.
                bc = int(round(bc[1])), int(round(bc[0]))

                # compute the average
                radial_average(
                    data,
                    mask,
                    bc,
                    sums,
                    sums_sq,
                    counts,
                    panel.get_pixel_size()[0],
                    panel.get_distance(),
                    (x1, y1),
                    (x2, y2),
                )

            # average the results, avoiding division by zero
            results = sums.set_selected(counts <= 0, 0)
            results /= counts.set_selected(counts <= 0, 1).as_double()

            if params.median_filter_size is not None:
                logger.write(
                    "WARNING, the median filter is not fully propagated to the variances\n"
                )
                from scipy.ndimage.filters import median_filter

                results = flex.double(
                    median_filter(results.as_numpy_array(),
                                  size=params.median_filter_size))

            # calculate standard devations
            stddev_sel = ((sums_sq - sums * results) >= 0) & (counts > 0)
            std_devs = flex.double(len(sums), 0)
            std_devs.set_selected(
                stddev_sel,
                (sums_sq.select(stddev_sel) -
                 sums.select(stddev_sel) * results.select(stddev_sel)) /
                counts.select(stddev_sel).as_double(),
            )
            std_devs = flex.sqrt(std_devs)

            # Map bin index to two-theta (degrees) and momentum transfer q.
            twotheta = (flex.double(range(len(results))) * extent_two_theta /
                        params.n_bins)
            q_vals = (4 * math.pi * flex.sin(math.pi * twotheta / 360) /
                      beam.get_wavelength())
            # nlmbda = 2dsin(theta)
            resolution = flex.double(len(twotheta), 0)
            nonzero = twotheta > 0
            # NOTE(review): Bragg's law gives d = lambda / (2*sin(theta));
            # flex.asin of an angle here looks like a bug (should be
            # flex.sin) -- confirm against upstream before relying on the
            # resolution axis.
            resolution.set_selected(
                nonzero,
                beam.get_wavelength() / (2 * flex.asin(
                    (math.pi / 180) * twotheta.select(nonzero) / 2)),
            )

            if params.low_max_two_theta_limit is None:
                subset = results
            else:
                subset = results.select(
                    twotheta >= params.low_max_two_theta_limit)

            max_result = flex.max(subset)

            if params.x_axis == "two_theta":
                xvals = twotheta
                max_x = twotheta[flex.first_index(results, max_result)]
            elif params.x_axis == "q":
                xvals = q_vals
                max_x = q_vals[flex.first_index(results, max_result)]
            elif params.x_axis == "resolution":
                xvals = resolution
                max_x = resolution[flex.first_index(results, max_result)]

            for i, r in enumerate(results):
                val = xvals[i]
                if params.output_bins and "%.3f" % r != "nan":
                    # logger.write("%9.3f %9.3f\n"%     (val,r))        #.xy  format for Rex.cell.
                    logger.write(
                        "%9.3f %9.3f %9.3f\n" %
                        (val, r, std_devs[i]))  # .xye format for GSASII
                # logger.write("%.3f %.3f %.3f\n"%(val,r,ds[i]))  # include calculated d spacings
            logger.write("Maximum %s: %f, value: %f\n" %
                         (params.x_axis, max_x, max_result))

            if params.show_plots:
                if params.plot_x_max is not None:
                    results = results.select(xvals <= params.plot_x_max)
                    xvals = xvals.select(xvals <= params.plot_x_max)
                if params.normalize:
                    plt.plot(
                        xvals.as_numpy_array(),
                        (results / flex.max(results)).as_numpy_array(),
                        "-",
                    )
                else:
                    plt.plot(xvals.as_numpy_array(), results.as_numpy_array(),
                             "-")
                if params.x_axis == "two_theta":
                    plt.xlabel("2 theta")
                elif params.x_axis == "q":
                    plt.xlabel("q")
                elif params.x_axis == "resolution":
                    plt.xlabel("Resolution ($\\AA$)")
                    plt.gca().set_xscale("log")
                    plt.gca().invert_xaxis()
                    plt.xlim(0, 50)
                plt.ylabel("Avg ADUs")
                if params.plot_y_max is not None:
                    plt.ylim(0, params.plot_y_max)

    if params.show_plots:
        # plt.legend([os.path.basename(os.path.splitext(f)[0]) for f in params.file_path], ncol=2)
        plt.show()

    return xvals, results
class DIALSSpfIdx(Thread):
  def __init__(self,
               img,
               index=None,
               termfile=None,
               paramfile=None,
               output_file=None,
               output_dir=None,
               backend='dials',
               action_code='spotfind',
               min_bragg=10,
               n_processors=1,
               verbose=False
               ):
    """Configure a single-image spotfind/index/integrate worker thread."""
    self.img = img
    self.backend = backend
    self.paramfile = paramfile
    self.termfile = termfile
    self.n_processors = n_processors
    self.index = index
    self.verbose = verbose
    self.min_bragg = min_bragg

    # Resolve the output destination as an absolute path, if one was given.
    if output_file is None:
      self.output = None
    elif output_dir is not None:
      self.output = os.path.join(os.path.abspath(output_dir), output_file)
    else:
      self.output = os.path.abspath(output_file)

    Thread.__init__(self)

    # Map the requested action onto the processing stages to run.  Unknown
    # action codes leave both flags unset, matching the elif-chain behavior.
    stage_flags = {
        'spotfind': (False, False),
        'index': (True, False),
        'integrate': (True, True),
    }
    if action_code in stage_flags:
      self.run_indexing, self.run_integration = stage_flags[action_code]

    # Initialize IOTA DIALS Processor parameters from the phil scope,
    # optionally overridden by a user-supplied parameter file.
    if self.backend.lower() == 'dials':
      if self.paramfile is None:
        self.dials_phil = phil_scope
      else:
        with open(self.paramfile, 'r') as phil_file:
          phil_string = phil_file.read()
        user_phil = ip.parse(phil_string)
        self.dials_phil = phil_scope.fetch(source=user_phil)
      self.params = self.dials_phil.extract()

    if self.backend == 'dials':
      self.processor = IOTADialsProcessor(params=self.params)


  def process_image(self):
    """Run spotfinding (and optionally indexing/integration) on self.img.

    Returns:
        (status, info, res, elapsed, err): status is a progress string
        ('spots found' / 'indexed' / 'integrated' / None on failure), info is
        [index, n_spots, img_path, space_group, unit_cell], res is a
        (d_max, d_min) resolution estimate, elapsed the wall time in seconds,
        and err the list of collected exceptions.

    Raises:
        IOTATermination: if the termination-signal file exists.
    """
    if os.path.isfile(self.termfile):
      raise IOTATermination('IOTA_TRACKER: Termination signal received!')
    else:
      with Capturing() as junk_output:
        err = []
        start = time.time()
        fail = False
        sg = None
        uc = None
        obs = None
        status = None
        res = 99
        try:
          datablock = DataBlockFactory.from_filenames([self.img])[0]
          observed = self.processor.find_spots(datablock=datablock)
          status = 'spots found'
        except Exception as e:  # was Python-2-only "except Exception, e"
          fail = True
          observed = []
          err.append(e)

        # TODO: Indexing / lattice determination very slow (how to speed up?)
        if self.run_indexing:
          if not fail:
            try:
              experiments, indexed = self.processor.index(
                datablock=datablock, reflections=observed)
            except Exception as e:
              fail = True
              err.append(e)

          if not fail:
            try:
              solution = self.processor.refine_bravais_settings(
                reflections=indexed, experiments=experiments)

              # Only reindex if higher-symmetry solution found
              if solution is not None:
                experiments, indexed = self.processor.reindex(
                  reflections=indexed,
                  experiments=experiments,
                  solution=solution)
              obs = experiments
              lat = experiments[0].crystal.get_space_group().info()
              sg = str(lat).replace(' ', '')
              status = 'indexed'

            except Exception as e:
              # Bind "e" here: the original used a bare "except Exception:"
              # and then appended an undefined name.
              fail = True
              err.append(e)

          if not fail:
            unit_cell = experiments[0].crystal.get_unit_cell().parameters()
            uc = ' '.join(['{:.1f}'.format(i) for i in unit_cell])

          if self.run_integration:
            if not fail:
              try:
                # Run refinement
                experiments, indexed = self.processor.refine(
                  experiments=experiments,
                  centroids=indexed)

                integrated = self.processor.integrate(experiments=experiments,
                                                      indexed=indexed)
                status = 'integrated'
              except Exception as e:
                # Bind "e" here as well (same undefined-name bug as above).
                fail = True
                err.append(e)

      if status == 'integrated':
        res = self.processor.frame['observations'][0].d_max_min()
      else:
        # Estimate resolution range directly from the observed spot positions.
        detector = datablock.unique_detectors()[0]
        beam = datablock.unique_beams()[0]

        s1 = flex.vec3_double()
        for i in range(len(observed)):  # range: valid on py2 and py3 (was xrange)
          s1.append(detector[observed['panel'][i]].get_pixel_lab_coord(
            observed['xyzobs.px.value'][i][0:2]))
        two_theta = s1.angle(beam.get_s0())
        # Bragg's law: d = wavelength / (2 * sin(two_theta / 2)).  The
        # original called flex.asin -- the arcsine of an angle -- which is
        # wrong and a domain error for two_theta > 2 rad.
        d = beam.get_wavelength() / (2 * flex.sin(two_theta / 2))
        res = (np.max(d), np.min(d))

      if len(observed) < self.min_bragg:
        res = (99, 99)

      elapsed = time.time() - start
      info = [self.index, len(observed), self.img, sg, uc]
      return status, info, res, elapsed, err
                                err.append('INTEGRATION ERROR: {}'.format(e))
                                pass

            if status == 'integrated':
                res = frame['observations'][0].d_max_min()
            else:
                detector = datablock.unique_detectors()[0]
                beam = datablock.unique_beams()[0]

                s1 = flex.vec3_double()
                for i in xrange(len(observed)):
                    s1.append(
                        detector[observed['panel'][i]].get_pixel_lab_coord(
                            observed['xyzobs.px.value'][i][0:2]))
                two_theta = s1.angle(beam.get_s0())
                d = beam.get_wavelength() / (2 * flex.asin(two_theta / 2))
                res = (np.max(d), np.min(d))

            if len(observed) < self.min_bragg:
                res = (99, 99)

        elapsed = time.time() - start
        info = [self.index, len(observed), self.img, sg, uc]
        return status, info, res, score, elapsed, err

    def run(self):
        errors = []
        n_spots = 0
        n_overloads = 0
        res = (99, 99)
        n_rings = 0
Beispiel #6
0
        3.909750772351232, 14.695435562625814, 7.826561485909148,
        9.46004259845192, 6.877868747512286, 12.434327938231078,
        3.087677172795908, 11.57943133769928, 4.304608777648071,
        14.758942891036405, 4.595167128766594, 9.767325535313098
    ])
    M = minimizer(d_i, psi_i, flex.double(), flex.double(), eta_rad, Deff)

    print("pos_neg output Deff/2, eta_deg ", 1. / M.x[0], M.x[1] * 180. / pi)

    from xfel.mono_simulation.max_like import minimizer as legacy_minimizer
    Q = legacy_minimizer(d_i, psi_i, eta_rad, Deff)

    print("legacy output Deff/2, eta_deg ", 1. / Q.x[0], Q.x[1] * 180. / pi)

    from matplotlib import pyplot as plt
    bragg = 2. * flex.asin(1.0 / d_i)
    model_psi_leg = d_i * Q.x[0] / 2. + Q.x[1] / 2.
    model_psi = d_i * M.x[0] / 2. + M.x[1] / 2.
    plt.plot(bragg, psi_i, "b.")
    plt.plot(bragg, model_psi, "r.")
    plt.plot(bragg, -model_psi, "r.")
    plt.plot(bragg, model_psi_leg, "g.")
    plt.plot(bragg, -model_psi_leg, "g.")

    plt.show()

    exit()
    ydata = flex.double()
    xdata = flex.double()
    yydata = flex.double()
    def process_image(self):
        """Run spotfinding (plus optional indexing/integration) on self.img.

        Returns:
            (status, info, res, score, elapsed, err): status is a progress
            string, info is [index, n_spots, img_path, space_group,
            unit_cell], res a (d_max, d_min) resolution estimate, score the
            number of indexed reflections, elapsed the wall time in seconds,
            and err the list of formatted error strings.

        Raises:
            IOTATermination: if the termination-signal file exists.
        """
        if os.path.isfile(self.termfile):
            raise IOTATermination('IOTA_TRACKER: Termination signal received!')
        else:
            with Capturing() as junk_output:
                # if True:
                err = []
                start = time.time()
                fail = False
                sg = None
                uc = None
                status = None
                score = 0
                try:
                    datablock = DataBlockFactory.from_filenames([self.img])[0]
                    observed = self.processor.find_spots(datablock=datablock)
                    status = 'spots found'
                except Exception as e:
                    fail = True
                    observed = []
                    err.append('SPOTFINDING ERROR: {}'.format(e))

                # TODO: Indexing / lattice determination very slow (how to speed up?)
                if self.run_indexing:
                    if not fail:
                        try:
                            experiments, indexed = self.processor.index(
                                datablock=datablock, reflections=observed)
                            score = len(indexed)
                        except Exception as e:
                            fail = True
                            err.append('INDEXING ERROR: {}'.format(e))

                    if not fail:
                        try:
                            solution = self.processor.refine_bravais_settings(
                                reflections=indexed, experiments=experiments)

                            # Only reindex if higher-symmetry solution found
                            if solution is not None:
                                experiments, indexed = self.processor.reindex(
                                    reflections=indexed,
                                    experiments=experiments,
                                    solution=solution)
                            obs = experiments
                            lat = experiments[0].crystal.get_space_group(
                            ).info()
                            sg = str(lat).replace(' ', '')
                            status = 'indexed'
                        except Exception as e:
                            fail = True
                            err.append('LATTICE ERROR: {}'.format(e))

                    if not fail:
                        unit_cell = experiments[0].crystal.get_unit_cell(
                        ).parameters()
                        uc = ' '.join(['{:.4f}'.format(i) for i in unit_cell])

                    if self.run_integration:
                        if not fail:
                            try:
                                # Run refinement
                                experiments, indexed = self.processor.refine(
                                    experiments=experiments, centroids=indexed)
                            except Exception as e:
                                fail = True
                                err.append('REFINEMENT ERROR: {}'.format(e))

                        if not fail:
                            try:
                                # (Removed leftover Python-2 "print experiments"
                                # / "print indexed" debug statements, which were
                                # a syntax error under Python 3 and whose output
                                # was swallowed by Capturing() anyway.)
                                integrated = self.processor.integrate(
                                    experiments=experiments, indexed=indexed)
                                frame = ConstructFrame(
                                    integrated, experiments[0]).make_frame()
                                status = 'integrated'
                            except Exception as e:
                                # NOTE(review): unlike the other handlers this
                                # one does not set fail=True -- kept as-is.
                                err.append('INTEGRATION ERROR: {}'.format(e))

            if status == 'integrated':
                res = frame['observations'][0].d_max_min()
            else:
                # Estimate resolution range from the observed spot positions.
                detector = datablock.unique_detectors()[0]
                beam = datablock.unique_beams()[0]

                s1 = flex.vec3_double()
                for i in range(len(observed)):  # range (was py2-only xrange)
                    s1.append(
                        detector[observed['panel'][i]].get_pixel_lab_coord(
                            observed['xyzobs.px.value'][i][0:2]))
                two_theta = s1.angle(beam.get_s0())
                # Bragg's law: d = wavelength / (2 * sin(two_theta / 2)).
                # The original called flex.asin (arcsine of an angle), which
                # is wrong and a domain error for two_theta > 2 rad.
                d = beam.get_wavelength() / (2 * flex.sin(two_theta / 2))
                res = (np.max(d), np.min(d))

            if len(observed) < self.min_bragg:
                res = (99, 99)

        elapsed = time.time() - start
        info = [self.index, len(observed), self.img, sg, uc]
        return status, info, res, score, elapsed, err