示例#1
0
    def beginjob(self, evt, env):
        """One-time initialisation from event or environment data.

        Called at an XTC configure transition.

        @param evt Event data object, a configure object
        @param env Environment object
        """

        super(mod_hitfind, self).beginjob(evt, env)
        self.set_up_hitfinder(env)

        # Nothing else to do unless database logging is enabled.
        if not self.m_db_logging:
            return

        from cxi_xdr_xes.cftbx.cspad_ana import db
        self.logger.info("Connecting to db...")
        connection = db.dbconnect(self.m_db_host, self.m_db_name,
                                  self.m_db_user, self.m_db_password)
        assert connection.open
        self.logger.info("Connected.")

        try:
            # TODO: beat the race condition and use db.get_next_trial_id if
            # this is not set or is zero or less
            self.trial = self.m_trial_id
            db.create_tables(connection, self.m_db_table_name)
        except Exception as e:
            self.logger.info("Couldn't create root tables: %s" % (e))
        connection.close()
def run(args):
  """Parse db phil arguments from the command line and list table metadata.

  @param args Sequence of command-line argument strings (phil fragments)
  @raises Sorry on unparseable arguments or a missing db module
  """
  master_phil = libtbx.phil.parse("""
    db {
      host = None
        .type = str
      name = None
        .type = str
      table_name = None
        .type = str
      user = None
        .type = str
      password = None
        .type = str
    }
  """)

  if (__name__ == "__main__") :
    user_phil = []
    for arg in args :
      try :
        user_phil.append(libtbx.phil.parse(arg))
      except RuntimeError as e :  # was Python 2 "except RuntimeError, e" syntax
        raise Sorry("Unrecognized argument '%s' (error: %s)" % (arg, str(e)))
    params = master_phil.fetch(sources=user_phil).extract()

    try:
      from cxi_xdr_xes.cftbx.cspad_ana import db as db
    except ImportError:
      # Fixed typo "Conact" -> "Contact" (matches the other run() variants).
      raise Sorry("Trial logging not supported for this installation. Contact the developers for access.")

    dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=params.db.password)
    db.list_db_metadata(dbobj, params.db.table_name)
示例#3
0
    def commit_progress_entries(self):
        """Flush buffered progress entries to the database.

        For each buffered entry, inserts the frame row first, then
        bulk-inserts the associated observation rows keyed to the new
        frame id.  Clears the buffer when done.
        """
        if len(self.buffered_progress_entries) > 0:
            print("Commiting %d entries to the db" %
                  len(self.buffered_progress_entries))

            from cxi_xdr_xes.cftbx.cspad_ana import db
            dbobj = db.dbconnect(self.m_db_host, self.m_db_name,
                                 self.m_db_user, self.m_db_password)
            cursor = dbobj.cursor()

            for entry in self.buffered_progress_entries:
                frame_sql, parameters, kwargs = entry['frame']

                # Insert the frame row; lastrowid is its auto-increment id.
                cursor.execute(frame_sql, parameters[0])
                frame_id = cursor.lastrowid

                _, _, kwargs = entry['observations']

                # Point every observation row at the frame just inserted.
                kwargs['frames_id'] = [frame_id] * len(kwargs['frames_id'])

                # Column names come from the kwargs keys; the values are
                # per-column lists, transposed into row tuples below.
                query = ("INSERT INTO `%s_observations` (" % self.m_db_experiment_tag) \
                        + ", ".join(kwargs) + ") values (" \
                        + ", ".join(["%s"] * len(kwargs)) + ")"
                try:
                    parameters = list(zip(*list(kwargs.values())))
                except TypeError:
                    # Scalar column values: a single row.
                    parameters = [list(kwargs.values())]
                cursor.executemany(query, parameters)

            dbobj.commit()
            cursor.close()
            dbobj.close()
            self.buffered_progress_entries = []
def run(args):
  """Connect to the experiment database described by phil parameters.

  Prompts interactively for a password when none is supplied.

  @param args Sequence of command-line argument strings
  @raises Usage when a required db parameter is missing
  @raises Sorry when the db module is unavailable or the connection fails
  """
  try:
    from cxi_xdr_xes.cftbx.cspad_ana import db as db
  except ImportError:
    raise Sorry("Trial logging not supported for this installation. Contact the developers for access.")

  phil = iotbx.phil.process_command_line(args=args, master_string=master_phil)
  params = phil.work.extract()

  if params.db.host is None:
    raise Usage("Please provide a host name")
  if params.db.name is None:
    raise Usage("Please provide a database name")
  if params.db.user is None:
    raise Usage("Please provide a user name")
  if params.db.password is None:
    import getpass
    password = getpass.getpass()
  else:
    password = params.db.password

  try:
    dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=password)
  except Exception as e:  # was Python 2 "except Exception, e" syntax
    raise Sorry(e)
示例#5
0
def run(args):
  """Connect to the experiment database described by phil parameters.

  Prompts interactively for a password when none is supplied.

  @param args Sequence of command-line argument strings
  @raises Usage when a required db parameter is missing
  @raises Sorry when the db module is unavailable or the connection fails
  """
  try:
    from cxi_xdr_xes.cftbx.cspad_ana import db as db
  except ImportError:
    raise Sorry("Trial logging not supported for this installation. Contact the developers for access.")

  phil = iotbx.phil.process_command_line(args=args, master_string=master_phil)
  params = phil.work.extract()

  if params.db.host is None:
    raise Usage("Please provide a host name")
  if params.db.name is None:
    raise Usage("Please provide a database name")
  if params.db.user is None:
    raise Usage("Please provide a user name")
  if params.db.password is None:
    import getpass
    password = getpass.getpass()
  else:
    password = params.db.password

  try:
    dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=password)
  except Exception as e:  # was Python 2 "except Exception, e" syntax
    raise Sorry(e)
示例#6
0
  def commit_progress_entries(self):
    """Flush buffered progress entries to the database.

    Inserts each buffered frame row, then bulk-inserts its observation
    rows keyed to the new frame id.  Clears the buffer when done.
    """
    if len(self.buffered_progress_entries) > 0:
      # print() call for Python 2/3 compatibility (was a print statement).
      print("Commiting %d entries to the db" % len(self.buffered_progress_entries))

      from cxi_xdr_xes.cftbx.cspad_ana import db
      dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
      cursor = dbobj.cursor()

      for entry in self.buffered_progress_entries:
        frame_sql, parameters, kwargs = entry['frame']

        # Insert the frame row; lastrowid is its auto-increment id.
        cursor.execute(frame_sql, parameters[0])
        frame_id = cursor.lastrowid

        _, _, kwargs = entry['observations']

        # Point every observation row at the frame just inserted.
        kwargs['frames_id'] = [frame_id] * len(kwargs['frames_id'])

        query = ("INSERT INTO `%s_observations` (" % self.m_db_experiment_tag) \
                + ", ".join(kwargs.keys()) + ") values (" \
                + ", ".join(["%s"] * len(kwargs.keys())) + ")"
        try:
          # Materialize for Python 3, where zip()/values() are lazy views.
          parameters = list(zip(*list(kwargs.values())))
        except TypeError:
          # Scalar column values: a single row.
          parameters = [list(kwargs.values())]
        cursor.executemany(query, parameters)

      dbobj.commit()
      cursor.close()
      dbobj.close()
      self.buffered_progress_entries = []
示例#7
0
  def beginjob(self, evt, env):
    """The beginjob() function does one-time initialisation from
    event- or environment data.  It is called at an XTC configure
    transition.

    @param evt Event data object, a configure object
    @param env Environment object
    """

    super(mod_hitfind, self).beginjob(evt, env)
    self.set_up_hitfinder()

    if self.m_db_logging:
      from cxi_xdr_xes.cftbx.cspad_ana import db
      self.logger.info("Connecting to db...")
      dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
      assert dbobj.open
      self.logger.info("Connected.")

      try:
        self.trial = self.m_trial_id # TODO: beat the race condition and use db.get_next_trial_id if
                                      # this is not set or is zero or less
        db.create_tables(dbobj, self.m_db_table_name)

      except Exception as e:  # was Python 2 "except Exception,e" syntax
        self.logger.info("Couldn't create root tables: %s"%(e))
      dbobj.close()
示例#8
0
def run(args):
  """Parse db phil arguments from the command line and list table metadata."""
  master_phil = libtbx.phil.parse("""
    db {
      host = None
        .type = str
      name = None
        .type = str
      table_name = None
        .type = str
      user = None
        .type = str
      password = None
        .type = str
    }
  """)

  if (__name__ == "__main__") :
    parsed_sources = []
    for argument in args :
      try :
        parsed_sources.append(libtbx.phil.parse(argument))
      except RuntimeError as err :
        raise Sorry("Unrecognized argument '%s' (error: %s)" % (argument, str(err)))
    params = master_phil.fetch(sources=parsed_sources).extract()

    try:
      from cxi_xdr_xes.cftbx.cspad_ana import db as db
    except ImportError:
      raise Sorry("Trial logging not supported for this installation. Conact the developers for access.")

    connection = db.dbconnect(host=params.db.host,
                              db=params.db.name,
                              username=params.db.user,
                              password=params.db.password)
    db.list_db_metadata(connection, params.db.table_name)
示例#9
0
  def commit_entries(self):
    if len(self.buffered_sql_entries) > 0:
      from cxi_xdr_xes.cftbx.cspad_ana import db
      dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
      cursor = dbobj.cursor()
      cmd = "INSERT INTO %s (trial,run,eventstamp,hitcount,distance,sifoil,wavelength,indexed,\
mosaic_block_rotation,mosaic_block_size,ewald_proximal_volume,spacegroup,\
cell_a,cell_b,cell_c,cell_alpha,cell_beta,cell_gamma,resolution,tags) VALUES "%(self.m_db_table_name)
      comma = ""
      for entry in self.buffered_sql_entries:
        cmd += comma + "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,'%s',%s,%s,%s,%s,%s,%s,%s,'%s')"%entry
        comma = ", "
      cursor.execute(cmd)
      dbobj.commit()
      dbobj.close()
      self.buffered_sql_entries = []
示例#10
0
  def commit_entries(self):
    """Write every buffered hit-rate row to the trial table in one INSERT."""
    if len(self.buffered_sql_entries) > 0:
      from cxi_xdr_xes.cftbx.cspad_ana import db
      dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
      cursor = dbobj.cursor()
      cmd = "INSERT INTO %s (trial,run,eventstamp,hitcount,distance,sifoil,wavelength,indexed,\
mosaic_block_rotation,mosaic_block_size,ewald_proximal_volume,spacegroup,\
cell_a,cell_b,cell_c,cell_alpha,cell_beta,cell_gamma,resolution,tags) VALUES "%(self.m_db_table_name)
      comma = ""
      # Append one "(...)" value tuple per buffered entry.
      # NOTE(review): values are interpolated directly into the SQL string,
      # not parameterized -- assumes entries come from trusted internal code.
      for entry in self.buffered_sql_entries:
        cmd += comma + "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,'%s',%s,%s,%s,%s,%s,%s,%s,'%s')"%entry
        comma = ", "
      cursor.execute(cmd)
      dbobj.commit()
      dbobj.close()
      self.buffered_sql_entries = []
示例#11
0
  def load_data (self):
    """Load (or incrementally refresh) per-run hit statistics from the db.

    Keeps at most the five most recent runs unless a run number or range
    was requested.  Returns True when any new rows were read.
    """
    ttop = time.time()
    # print() calls throughout for Python 2/3 compatibility
    # (were print statements).
    print("Loading data...")
    assert (self.trial_id is not None)

    import cxi_xdr_xes.cftbx.cspad_ana.db as cxidb
    db=cxidb.dbconnect()
    assert(db is not None and db.open)

    # retrieve the run IDs in this trial
    #t1 = time.time()
    cursor = db.cursor()
    #cursor.execute("SELECT DISTINCT(run) FROM %s WHERE trial = %s"%(cxidb.table_name,self.trial_id))
    cmd = "SELECT DISTINCT(run) FROM %s WHERE trial = %s"
    if self.params.run_num is not None:
      extra = " AND run = %s"%self.params.run_num
    elif self.params.run_min is not None and self.params.run_max is not None:
      extra = " AND run >= %s AND run <= %s"%(self.params.run_min, self.params.run_max)
    else:
      extra = " ORDER BY run DESC LIMIT 5"
    cursor.execute(cmd%(cxidb.table_name,self.trial_id) + extra)
    #t2 = time.time()
    #print "Runs queried in %.2fs" % (t2 - t1)

    if(self.full_data_load):
      self.runs = []
    if(len(self.runs) > 5):
      self.runs = self.runs[-5:]

    new_data = False

    for runId in cursor.fetchall():
      if self.full_data_load:
        run = Run(int(runId[0]))
        self.runs.append(run)
      else:
        # Reuse the existing Run object if we have seen this run before.
        foundit=False
        for runtest in self.runs:
          if runtest.runId == int(runId[0]):
            foundit = True
            run = runtest
            break
        if not foundit:
          print("New run: %s"%runId)
          run = Run(int(runId[0]))
          self.runs.append(run)

      #t1 = time.time()
      #print "Loading data from run %s" % (run.runId)
      if self.full_data_load or not hasattr(run, "latest_entry_id"):
        print("Full load")
        cursor.execute("SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
          WHERE trial = %s AND run = %s ORDER BY eventstamp"%(cxidb.table_name,self.trial_id,run.runId))
      else:
        # Only fetch rows newer than what we already have.
        print("Partial load")
        cursor.execute("SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
          WHERE trial = %s AND run = %s AND id > %s ORDER BY eventstamp"%(cxidb.table_name,self.trial_id,run.runId,run.latest_entry_id ))

      #t2 = time.time()
      #print "Query ran in %.2fs" % (t2 - t1)

      ids = flex.int()

      for id, eventstamp, hitcount, distance, sifoil, wavelength, indexed in cursor.fetchall():
        run.bragg_times.append(float(eventstamp))
        run.braggs.append(int(hitcount))
        ids.append(id)

        run.distances.append(float(distance))
        run.sifoils.append(float(sifoil))
        run.wavelengths.append(float(wavelength))
        run.indexed.append(bool(indexed))

      if len(ids) > 0:
        run.latest_entry_id = max(ids)
        new_data = True
        run.recalc_hits(self.params.average_window, self.params.hit_cutoff)


    self.total_width = 0
    for run in self.runs:
      # Hit-rate samples may arrive out of order; sort them by time.
      perm = flex.sort_permutation(run.hit_rates_times)
      run.hit_rates_times = run.hit_rates_times.select(perm)
      run.hit_rates = run.hit_rates.select(perm)

      self.total_width += run.width()

    self.cull_braggs()

    #self.full_data_load = False #always do a full load
    self.runs.sort(key=operator.attrgetter('runId'))
    tbot = time.time()
    print("Data loaded in %.2fs" % (tbot - ttop))
    return new_data
示例#12
0
def run(args):
  """Poll SLAC's run database and mirror new runs into the experiment db.

  Loops forever, sleeping 10 seconds between polls.

  @param args Sequence of command-line argument strings
  @raises Usage when a required db parameter is missing
  @raises Sorry when the db module is unavailable or the connection fails
  """
  try:
    from cxi_xdr_xes.cftbx.cspad_ana import db as db
  except ImportError:
    raise Sorry("Trial logging not supported for this installation. Contact the developers for access.")

  phil = iotbx.phil.process_command_line(args=args, master_string=master_phil)
  params = phil.work.extract()

  if params.db.host is None:
    raise Usage("Please provide a host name")
  if params.db.name is None:
    raise Usage("Please provide a database name")
  if params.db.user is None:
    raise Usage("Please provide a user name")
  if params.db.password is None:
    import getpass
    password = getpass.getpass()
  else:
    password = params.db.password

  try:
    dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=password)
  except Exception as e:
    raise Sorry(e)

  from xfel.xpp.simulate import file_table
  query = "https://pswww.slac.stanford.edu/ws-auth/dataexport/placed?exp_name=%s"%(params.experiment)

  # set up extra run tags, if provided
  if params.run_tags is not None:
    extra1 = ", tags"
    extra2 = ",'%s'"%params.run_tags
  else:
    extra1 = ""
    extra2 = ""

  while True:
    # Get the set of known runs in the experiment database
    cmd = "SELECT run from %s_runs"%params.experiment_tag
    cursor = dbobj.cursor()
    cursor.execute(cmd)

    # Get the set of runs from SLAC's database
    FT = file_table(params,query)

    # Find the delta
    known_runs = [int(entry[0]) for entry in cursor.fetchall()]
    unknown_runs = [run for run in FT.rundict if run not in known_runs]

    # print() call for Python 2/3 compatibility (was a print statement).
    print("%d new runs"%len(unknown_runs))

    # Enter any new runs into the experiment database
    if len(unknown_runs) > 0:
      cmd = "INSERT INTO %s_runs (run%s) VALUES "%(params.experiment_tag, extra1)
      comma = ""
      for run in unknown_runs:
        cmd += comma + "(%d%s)"%(run, extra2)
        comma = ", "

      cursor = dbobj.cursor()
      cursor.execute(cmd)
      dbobj.commit()

    time.sleep(10)
示例#13
0
def run(args):
    """Report per-run and total hit/indexing rates for a trial.

    @param args Command-line arguments: a bare integer trial id and/or
        phil assignments
    @raises Usage when trial_id is not given
    @raises Sorry on unparseable arguments or a missing db module
    """
    try:
        from cxi_xdr_xes.cftbx.cspad_ana import db as db
    except ImportError:
        raise Sorry(
            "Trial logging not supported for this installation. Conact the developers for access."
        )

    user_phil = []
    # TODO: replace this stuff with iotbx.phil.process_command_line_with_files
    # as soon as I can safely modify it
    for arg in args:
        #if (os.path.isdir(arg)) :
        #user_phil.append(libtbx.phil.parse("""status_dir=\"%s\"""" % arg))
        #elif (not "=" in arg) :
        if (not "=" in arg):
            try:
                user_phil.append(
                    libtbx.phil.parse("""trial_id=%d""" % int(arg)))
            except ValueError as e:
                raise Sorry("Unrecognized argument '%s'" % arg)
        else:
            try:
                user_phil.append(libtbx.phil.parse(arg))
            except RuntimeError as e:
                raise Sorry("Unrecognized argument '%s' (error: %s)" %
                            (arg, str(e)))
    params = master_phil.fetch(sources=user_phil).extract()
    if (params.trial_id is None):
        master_phil.show()
        raise Usage(
            "trial_id must be defined (either trial_id=XXX, or the integer " +
            "ID alone).")
    assert (params.hit_cutoff is not None) and (params.hit_cutoff > 0)

    extra_cmd = ""
    if params.run_start is not None:
        extra_cmd += "AND run >= %d" % params.run_start
    if params.run_end is not None:
        extra_cmd += "AND run <= %d" % params.run_end

    dbobj = db.dbconnect(host=params.db.host,
                         db=params.db.name,
                         username=params.db.user,
                         password=params.db.password)

    cursor = dbobj.cursor()
    cmd = "SELECT DISTINCT(run) FROM %s WHERE trial = %%s %s ORDER BY run" % (
        params.db.table_name, extra_cmd)
    cursor.execute(cmd, params.trial_id)

    frames_total = 0
    hits_total = 0
    indexed_total = 0

    for runId in cursor.fetchall():
        run = int(runId[0])
        cmd = "SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
        WHERE trial = %s AND run = %s"

        if params.db.tags is not None:
            for tag in params.db.tags.split(','):
                cmd += """ AND tags LIKE "%%{0}%%" """.format(tag)
        cursor.execute(cmd % (params.db.table_name, params.trial_id, run))

        # Tally frames, hits and indexed images for this run.
        numframes = numhits = numindexed = 0
        for id, eventstamp, hitcount, distance, sifoil, wavelength, indexed in cursor.fetchall(
        ):
            numframes += 1
            if hitcount >= params.hit_cutoff:
                numhits += 1
            if indexed:
                numindexed += 1

        # NOTE(review): under Python 2 these divisions truncate to ints.
        if numhits == 0:
            hitrate = 0
        else:
            hitrate = 100 * numhits / numframes
        if numindexed == 0:
            indexingrate = 0
        else:
            indexingrate = 100 * numindexed / numframes

        # print() call for Python 2/3 compatibility (was a print statement).
        print("Run: %3d, number of hits: %6d, number of frames: %6d, hitrate: %4.1f%%. Number indexed: %6d (%4.1f%%)" % (
            run, numhits, numframes, hitrate, numindexed, indexingrate))
        frames_total += numframes
        hits_total += numhits
        indexed_total += numindexed

    if hits_total == 0:
        hitrate = 0
    else:
        hitrate = 100 * hits_total / frames_total
    if indexed_total == 0:
        indexingrate = 0
    else:
        indexingrate = 100 * indexed_total / frames_total

    print("Totals: frames: %d, hits: %d (%4.1f%%), indexed: %d (%4.1f%%)" % (
        frames_total, hits_total, hitrate, indexed_total, indexingrate))
    dbobj.close()
示例#14
0
def run(args):
  """Poll the experiment database and submit processing jobs for new runs.

  For every active trial/rungroup combination, finds runs in range that have
  no job record yet, writes a hitfinder config, submits the job and records
  it.  Loops forever, sleeping 10 seconds between polls.

  @param args Sequence of command-line argument strings
  @raises Usage when a required db parameter is missing
  @raises Sorry when the db module is unavailable or the connection fails
  """
  try:
    from cxi_xdr_xes.cftbx.cspad_ana import db as db
  except ImportError:
    raise Sorry("Trial logging not supported for this installation. Contact the developers for access.")

  phil = iotbx.phil.process_command_line(args=args, master_string=master_phil)
  params = phil.work.extract()

  if params.db.host is None:
    raise Usage("Please provide a host name")
  if params.db.name is None:
    raise Usage("Please provide a database name")
  if params.db.user is None:
    raise Usage("Please provide a user name")
  if params.db.password is None:
    import getpass
    password = getpass.getpass()
  else:
    password = params.db.password

  while True:
    # print() calls for Python 2/3 compatibility (were print statements).
    print("Checking for new jobs to submit")
    # need to get a new connection each iteration to prevent caching
    try:
      dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=password)
    except Exception as e:  # was Python 2 "except Exception, e" syntax
      raise Sorry(e)

    # Get the set of known runs in the experiment database
    cmd = "SELECT run from %s_runs"%params.experiment_tag
    cursor = dbobj.cursor()
    cursor.execute(cmd)
    known_runs = [int(parse_entry(entry)[0]) for entry in cursor.fetchall()]

    # Get the list of active trials
    cmd = "SELECT trial_id, trial from %s_trials WHERE active = True"%params.experiment_tag
    cursor = dbobj.cursor()
    cursor.execute(cmd)
    entries = [parse_entry(entry) for entry in cursor.fetchall()]
    active_trial_ids = [int(entry[0]) for entry in entries]
    active_trials = [int(entry[1]) for entry in entries]

    for trial_id, trial in zip(active_trial_ids, active_trials):
      # Get the active rungroups for this trial
      cmd = "SELECT rungroups_id from %s_trial_rungroups WHERE trials_id = %d AND active = True"%(params.experiment_tag, trial_id)
      cursor = dbobj.cursor()
      cursor.execute(cmd)
      active_rungroup_ids = [int(parse_entry(entry)[0]) for entry in cursor.fetchall()]

      for rungroup_id in active_rungroup_ids:
        # Get the list of runs for this rungroup
        cmd = "SELECT startrun, endrun from %s_rungroups WHERE %s_rungroups.rungroup_id = %d"%(params.experiment_tag, params.experiment_tag, rungroup_id)
        cursor = dbobj.cursor()
        cursor.execute(cmd)

        assert cursor.rowcount == 1
        startrun, endrun = parse_entry(cursor.fetchall()[0])

        cmd = "SELECT run_id, run from %s_runs WHERE run >= %d"%(params.experiment_tag, startrun)
        if endrun is not None:
          cmd += " AND run <= %d"%endrun
        cursor = dbobj.cursor()
        cursor.execute(cmd)

        entries = [parse_entry(entry) for entry in cursor.fetchall()]
        run_ids = [int(entry[0]) for entry in entries]
        runs = [int(entry[1]) for entry in entries]

        # Find the submitted runs for this trial/rungroup combination
        cmd = "SELECT runs_id from %s_jobs WHERE trials_id = %d and rungroups_id = %d"%(params.experiment_tag, trial_id, rungroup_id)
        cursor = dbobj.cursor()
        cursor.execute(cmd)

        submitted_run_ids = [int(parse_entry(entry)[0]) for entry in cursor.fetchall()]

        # Submit new runs
        for run_id, run in zip(run_ids, runs):
          if run_id in submitted_run_ids:
            continue

          print("Submitting run %d into trial %d using rungroup %d"%(run_id, trial, rungroup_id))

          config_path = write_hitfind_cfg(params, dbobj, trial_id, trial, rungroup_id)
          if submit_job(params, dbobj, trial_id, trial, rungroup_id, run, config_path):
            pass

          cmd = "INSERT INTO %s_jobs (runs_id, trials_id, rungroups_id, status) VALUES (%d, %d, %d, '%s')"%(params.experiment_tag, run_id, trial_id, rungroup_id, "submitted")
          cursor = dbobj.cursor()
          cursor.execute(cmd)
          dbobj.commit()

    time.sleep(10)
示例#15
0
      except RuntimeError, e :
        raise Sorry("Unrecognized argument '%s' (error: %s)" % (arg, str(e)))
  params = master_phil.fetch(sources=user_phil).extract()
  if (params.trial_id is None) :
    master_phil.show()
    raise Usage("trial_id must be defined (either trial_id=XXX, or the integer "+
      "ID alone).")
  assert (params.hit_cutoff is not None) and (params.hit_cutoff > 0)

  extra_cmd = ""
  if params.run_start is not None:
    extra_cmd += "AND run >= %d" % params.run_start
  if params.run_end is not None:
    extra_cmd += "AND run <= %d" % params.run_end

  dbobj = db.dbconnect(host=params.db.host, db=params.db.name, username=params.db.user, password=params.db.password)


  cursor = dbobj.cursor()
  cmd = "SELECT DISTINCT(run) FROM %s WHERE trial = %%s %s ORDER BY run"%(params.db.table_name, extra_cmd)
  cursor.execute(cmd, params.trial_id)

  frames_total = 0
  hits_total = 0
  indexed_total = 0

  for runId in cursor.fetchall():
    run = int(runId[0])
    cmd = "SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
        WHERE trial = %s AND run = %s"
    if params.db.tags is not None:
示例#16
0
  def event(self, evt, env):
    """The event() function is called for every L1Accept transition.
    XXX more?

    Previously, common-mode correction was applied only after initial
    threshold filtering.  Since the common_mode class applies the
    (lengthy) common-mode correction immediately after reading the
    image from the stream, this optimisation is currently not
    (elegantly) doable.

    @param evt Event data object, a configure object
    @param env Environment object
    """

    super(mod_hitfind, self).event(evt, env)
    if (evt.get("skip_event")):
      return

    # This module only applies to detectors for which a distance is
    # available.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
      self.nfail += 1
      self.logger.warning("event(): no distance, shot skipped")
      evt.put(skip_event_flag(), "skip_event")
      return

    device = cspad_tbx.address_split(self.address)[2]

    # ***** HITFINDING ***** XXX For hitfinding it may be interesting
    # to look at the fraction of subzero pixels in the dark-corrected
    # image.
    if (self.m_threshold is not None):
      # If a threshold value is given it can be applied in one of three ways:
      #    1.  Apply it over the whole image
      if (self.m_roi is None and self.m_distl_min_peaks is None):
        vmax = flex.max(self.cspad_img)
        if (vmax < self.m_threshold):
          if not self.m_negate_hits:
            # Tell downstream modules to skip this event if the threshold was not met.
            evt.put(skip_event_flag(), "skip_event")
            return
        elif self.m_negate_hits:
          evt.put(skip_event_flag(), "skip_event")
          return

      #    2. Apply threshold over a rectangular region of interest.
      elif (self.m_roi is not None):
        vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                       self.m_roi[0]:self.m_roi[1]])
        if (vmax < self.m_threshold):
          if not self.m_negate_hits:
            evt.put(skip_event_flag(), "skip_event")
            return
        elif self.m_negate_hits:
          evt.put(skip_event_flag(), "skip_event")
          return

      #    3. Determine the spotfinder spots within the central ASICS, and accept the
      #       image as a hit if there are m_distl_min_peaks exceeding m_threshold.
      #       As a further requirement, the peaks must exceed 2.5 * the 90-percentile
      #       pixel value of the central ASICS.  This filter was added to avoid high-background
      #       false positives.
      elif (self.m_distl_min_peaks is not None):
        if device == 'marccd':
          self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
          self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
        elif device == 'Rayonix':
          self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
          self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

        peak_heights,outvalue = self.distl_filter(
          self.address,
          self.cspad_img.iround(), # XXX correct?
          distance,
          self.timestamp,
          self.wavelength)
        if ('permissive' in self.m_distl_flags):
          number_of_accepted_peaks = (peak_heights > self.m_threshold).count(True)
        else:
          number_of_accepted_peaks = ((peak_heights > self.m_threshold).__and__(outvalue==0)).count(True)

        sec,ms = cspad_tbx.evt_time(evt)
        evt_time = sec + ms/1000
        self.stats_logger.info("BRAGG %.3f %d" %(evt_time, number_of_accepted_peaks))

        skip_event = False
        if number_of_accepted_peaks < self.m_distl_min_peaks:
          self.logger.info("Subprocess %02d: Spotfinder NO  HIT image #%05d @ %s; %d spots > %d" %(
            env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))

          if not self.m_negate_hits:
            skip_event = True
        else:
          self.logger.info("Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d" %(
            env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))

          if self.m_negate_hits:
            skip_event = True

        if skip_event:
          if self.m_db_logging:
            # log misses to the database
            self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time, number_of_accepted_peaks, distance,
                              self.sifoil, self.wavelength, False, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, self.m_db_tags))
          evt.put(skip_event_flag(), "skip_event")
          return
        # the indexer will log this hit when it is ran. Bug: if the spotfinder is ran by itself, this
        # hit will not be logged in the db.
        evt.put(number_of_accepted_peaks, 'sfspots')

    self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))

    # See r17537 of mod_average.py.
    if device == 'Cspad':
      pixel_size = cspad_tbx.pixel_size
      saturated_value = cspad_tbx.cspad_saturated_value
    elif device == 'marccd':
      pixel_size = evt.get("marccd_pixel_size")
      saturated_value = evt.get("marccd_saturated_value")
    elif device == 'Rayonix':
      pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
      saturated_value = rayonix_tbx.rayonix_saturated_value

    d = cspad_tbx.dpack(
      active_areas=self.active_areas,
      address=self.address,
      beam_center_x=pixel_size * self.beam_center[0],
      beam_center_y=pixel_size * self.beam_center[1],
      data=self.cspad_img.iround(), # XXX ouch!
      distance=distance,
      pixel_size=pixel_size,
      saturated_value=saturated_value,
      timestamp=self.timestamp,
      wavelength=self.wavelength,
      xtal_target=self.m_xtal_target)

    if (self.m_dispatch == "index"):
      import sys
      from xfel.cxi.integrate_image_api import integrate_one_image
      info = integrate_one_image(d,
                                 integration_dirname  = self.m_integration_dirname,
                                 integration_basename = self.m_integration_basename)
      sys.stdout = sys.__stdout__
      sys.stderr = sys.__stderr__

      indexed = info is not None
      if indexed and self.m_progress_logging:
        # integration pickle dictionary is available here as info.last_saved_best
        if info.last_saved_best["identified_isoform"] is not None:
          #print info.last_saved_best.keys()
          from cxi_xdr_xes.cftbx.cspad_ana import db
          dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
          cursor = dbobj.cursor()
          if info.last_saved_best["identified_isoform"] in self.isoforms:
            PM, indices, miller_id = self.isoforms[info.last_saved_best["identified_isoform"]]
          else:
            from xfel.xpp.progress_support import progress_manager
            PM = progress_manager(info.last_saved_best,self.m_db_experiment_tag, self.m_trial_id, self.m_rungroup_id, evt.run())
            indices, miller_id = PM.get_HKL(cursor)
            # cache these as they don't change for a given isoform
            self.isoforms[info.last_saved_best["identified_isoform"]] = PM, indices, miller_id
          if self.m_sql_buffer_size > 1:
            self.queue_progress_entry(PM.scale_frame_detail(self.timestamp,cursor,do_inserts=False))
          else:
            PM.scale_frame_detail(self.timestamp,cursor,do_inserts=True)
            dbobj.commit()
            cursor.close()
            dbobj.close()

      if self.m_db_logging:
        sec,ms = cspad_tbx.evt_time(evt)
        evt_time = sec + ms/1000
        sfspots = evt.get('sfspots')
        if sfspots is None:
          if indexed:
            n_spots = len(info.spotfinder_results.images[info.frames[0]]['spots_total'])
          else:
            n_spots = 0
        else:
          n_spots = sfspots

        if indexed:
          mosaic_bloc_rotation = info.last_saved_best.get('ML_half_mosaicity_deg', [0])[0]
          mosaic_block_size = info.last_saved_best.get('ML_domain_size_ang', [0])[0]
          ewald_proximal_volume = info.last_saved_best.get('ewald_proximal_volume', [0])[0]

          obs = info.last_saved_best['observations'][0]
          cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = obs.unit_cell().parameters()
          pointgroup = info.last_saved_best['pointgroup']
          resolution = obs.d_min()
        else:
          # Fixed: was "spacegroup = ... = 0", leaving pointgroup (used in
          # queue_entry below) undefined and raising NameError on unindexed shots.
          mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
            cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

        self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time, n_spots, distance,
                          self.sifoil, self.wavelength, indexed, mosaic_bloc_rotation,
                          mosaic_block_size, ewald_proximal_volume, pointgroup, cell_a,
                          cell_b, cell_c, cell_alpha, cell_beta, cell_gamma, resolution,
                          self.m_db_tags))

      if (not indexed):
        evt.put(skip_event_flag(), "skip_event")
        return

    elif (self.m_dispatch == "nop"):
      pass

    elif (self.m_dispatch == "view"): #interactive image viewer

      args = ["indexing.data=dummy"]
      detector_format_version = detector_format_function(
        self.address, evt.GetTime())
      if detector_format_version is not None:
        # Fixed format string: was "...=%" (incomplete conversion specifier,
        # ValueError at runtime); matches the "spots" branch below.
        args += ["distl.detector_format_version=%s" % detector_format_version]

      from xfel.phil_preferences import load_cxi_phil
      horizons_phil = load_cxi_phil(self.m_xtal_target, args)
      horizons_phil.indexing.data = d

      from xfel.cxi import display_spots
      display_spots.parameters.horizons_phil = horizons_phil
      display_spots.wrapper_of_callback().display(horizons_phil.indexing.data)

    elif (self.m_dispatch == "spots"): #interactive spotfinder viewer

      args = ["indexing.data=dummy"]
      detector_format_version = detector_format_function(
        self.address, evt.GetTime())
      if detector_format_version is not None:
        args += ["distl.detector_format_version=%s" % detector_format_version]

      from xfel.phil_preferences import load_cxi_phil
      horizons_phil = load_cxi_phil(self.m_xtal_target, args)
      horizons_phil.indexing.data = d

      from xfel.cxi import display_spots
      display_spots.parameters.horizons_phil = horizons_phil

      from rstbx.new_horizons.index import pre_indexing_validation,pack_names
      pre_indexing_validation(horizons_phil)
      imagefile_arguments = pack_names(horizons_phil)
      horizons_phil.persist.show()
      from spotfinder.applications import signal_strength
      info = signal_strength.run_signal_strength_core(horizons_phil,imagefile_arguments)

      work = display_spots.wrapper_of_callback(info)
      work.display_with_callback(horizons_phil.indexing.data)

    elif (self.m_dispatch == "write_dict"):
      self.logger.warning(
        "event(): deprecated dispatch 'write_dict', use mod_dump instead")
      if (self.m_out_dirname  is not None or
          self.m_out_basename is not None):
        cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

    # Diagnostic message emitted only when all the processing is done.
    if (env.subprocess() >= 0):
      self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                       (env.subprocess(), self.nshots, self.timestamp))
    else:
      self.logger.info("Accepted #%05d @ %s" %
                       (self.nshots, self.timestamp))
示例#17
0
def application(params, loop=True):
    """Poll the trial database and print per-isoform merging statistics.

    For every run tag and isoform, builds a miller set from the HKLs logged
    so far and reports completeness and multiplicity, both overall and for
    the highest-resolution shell.

    @param params Extracted phil parameters (db credentials, run_tags,
                  resolution, n_bins)
    @param loop   If False, do a single pass and return the results dict;
                  otherwise poll forever, sleeping 10 s between passes
    """
    from cxi_xdr_xes.cftbx.cspad_ana import db as cxidb
    dbobj = cxidb.dbconnect(params.db.host, params.db.name, params.db.user,
                            params.db.password)
    cursor = dbobj.cursor()
    PM = progress_manager(params, cursor)
    PM.setup_isoforms(cursor)
    PM.setup_runtags(cursor)
    isoforms = PM.isoforms
    # Drop the connection between passes; a fresh one is opened each
    # iteration below so results are not served from a stale connection.
    del dbobj

    while 1:
        # New connection every pass to prevent caching.
        dbobj = cxidb.dbconnect(params.db.host, params.db.name, params.db.user,
                                params.db.password)
        cursor = dbobj.cursor()

        results = {}
        print "Looking for data..."

        for tag in params.run_tags.split(','):
            for isoform in isoforms:
                M = PM.get_HKL(cursor, isoform=isoform, run_tags=tag)
                cell = isoforms[isoform]['cell']
                miller_set = mset(
                    anomalous_flag=False,
                    crystal_symmetry=symmetry(
                        unit_cell=cell,
                        space_group_symbol=isoforms[isoform]['lookup_symbol']),
                    indices=M)
                miller_set.show_comprehensive_summary()

                miller_set.setup_binner(d_min=params.resolution,
                                        n_bins=params.n_bins)
                given = miller_set.binner().counts_given()
                ccomplete = miller_set.binner().counts_complete()
                for i_bin in miller_set.binner().range_used():
                    sel = miller_set.binner().selection(i_bin)
                    self_sel = miller_set.select(sel)
                    d_max, d_min = self_sel.d_max_min()
                    compl = self_sel.completeness(d_max=d_max)

                    n_ref = sel.count(True)
                    if ccomplete[i_bin] == 0.:
                        multiplicity = 0.
                    else:
                        # res_highest and multiplicity intentionally keep the
                        # values from the last non-empty bin; they are stored
                        # below as the highest-resolution-shell statistics.
                        res_highest = d_min
                        multiplicity = given[i_bin] / ccomplete[i_bin]
                    d_range = miller_set.binner().bin_legend(
                        i_bin=i_bin, show_bin_number=False, show_counts=True)
                    fmt = "%3d: %-24s %4.2f %6d mult=%4.2f"
                    print fmt % (i_bin, d_range, compl, n_ref, multiplicity)
                print
                if len(tag) > 0:
                    key = "%s %s" % (tag, isoform)
                else:
                    key = isoform
                # Restrict the per-bin count arrays to the bins actually used
                # before computing the overall multiplicity.
                given_used = flex.int(given).select(
                    flex.size_t(miller_set.binner().range_used()))
                ccomplete_used = flex.int(ccomplete).select(
                    flex.size_t(miller_set.binner().range_used()))
                results[key] = dict(multiplicity=flex.sum(given_used) /
                                    flex.sum(ccomplete_used),
                                    completeness=miller_set.completeness(),
                                    multiplicity_highest=multiplicity,
                                    completeness_highest=compl,
                                    resolution_highest=res_highest)
        del dbobj
        if not loop:
            return results
        time.sleep(10)
示例#18
0
  def load_data (self):
    """Load (or incrementally refresh) hit/index statistics for the current
    trial from the database.

    On a full load (self.full_data_load) the run list is rebuilt from
    scratch; otherwise only rows newer than each run's latest_entry_id are
    fetched and appended.  At most the five most recent runs are kept.

    Returns True if any new rows were read, False otherwise.
    """
    ttop = time.time()
    print "Loading data..."
    assert (self.trial_id is not None)

    import cxi_xdr_xes.cftbx.cspad_ana.db as cxidb
    db=cxidb.dbconnect()
    assert(db is not None and db.open)

    # retrieve the run IDs in this trial
    #t1 = time.time()
    cursor = db.cursor()
    #cursor.execute("SELECT DISTINCT(run) FROM %s WHERE trial = %s"%(cxidb.table_name,self.trial_id))
    # Restrict the run query by explicit run number, by a run range, or
    # (default) to the five most recent runs.
    cmd = "SELECT DISTINCT(run) FROM %s WHERE trial = %s"
    if self.params.run_num is not None:
      extra = " AND run = %s"%self.params.run_num
    elif self.params.run_min is not None and self.params.run_max is not None:
      extra = " AND run >= %s AND run <= %s"%(self.params.run_min, self.params.run_max)
    else:
      extra = " ORDER BY run DESC LIMIT 5"
    cursor.execute(cmd%(cxidb.table_name,self.trial_id) + extra)
    #t2 = time.time()
    #print "Runs queried in %.2fs" % (t2 - t1)

    # A full load discards any cached runs; otherwise cap the cache at the
    # five most recent.
    if(self.full_data_load):
      self.runs = []
    if(len(self.runs) > 5):
      self.runs = self.runs[-5:]

    new_data = False

    for runId in cursor.fetchall():
      if self.full_data_load:
        run = Run(int(runId[0]))
        self.runs.append(run)
      else:
        # Incremental refresh: reuse the cached Run object if we already
        # track this run id, otherwise start tracking it.
        foundit=False
        for runtest in self.runs:
          if runtest.runId == int(runId[0]):
            foundit = True
            run = runtest
            break
        if not foundit:
          print "New run: %s"%runId
          run = Run(int(runId[0]))
          self.runs.append(run)

      #t1 = time.time()
      #print "Loading data from run %s" % (run.runId)
      if self.full_data_load or not hasattr(run, "latest_entry_id"):
        print "Full load"
        cursor.execute("SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
          WHERE trial = %s AND run = %s ORDER BY eventstamp"%(cxidb.table_name,self.trial_id,run.runId))
      else:
        # Partial load: only rows inserted since the last refresh.
        print "Partial load"
        cursor.execute("SELECT id, eventstamp, hitcount, distance, sifoil, wavelength, indexed FROM %s \
          WHERE trial = %s AND run = %s AND id > %s ORDER BY eventstamp"%(cxidb.table_name,self.trial_id,run.runId,run.latest_entry_id ))

      #t2 = time.time()
      #print "Query ran in %.2fs" % (t2 - t1)

      ids = flex.int()

      for id, eventstamp, hitcount, distance, sifoil, wavelength, indexed in cursor.fetchall():
        run.bragg_times.append(float(eventstamp))
        run.braggs.append(int(hitcount))
        ids.append(id)

        run.distances.append(float(distance))
        run.sifoils.append(float(sifoil))
        run.wavelengths.append(float(wavelength))
        run.indexed.append(bool(indexed))

      if len(ids) > 0:
        # Remember the newest row id so the next call can do a partial load.
        run.latest_entry_id = max(ids)
        new_data = True
        run.recalc_hits(self.params.average_window, self.params.hit_cutoff)


    self.total_width = 0
    for run in self.runs:
      # Hit-rate series may arrive out of order; sort by time before use.
      perm = flex.sort_permutation(run.hit_rates_times)
      run.hit_rates_times = run.hit_rates_times.select(perm)
      run.hit_rates = run.hit_rates.select(perm)

      self.total_width += run.width()

    self.cull_braggs()

    #self.full_data_load = False #always do a full load
    self.runs.sort(key=operator.attrgetter('runId'))
    tbot = time.time()
    print "Data loaded in %.2fs" % (tbot - ttop)
    return new_data
示例#19
0
def application(params, loop = True):
  """Poll the trial database and print per-isoform merging statistics.

  For every run tag and isoform, builds a miller set from the HKLs logged so
  far and reports completeness and multiplicity, overall and for the highest
  resolution shell.

  @param params Extracted phil parameters (db credentials, run_tags,
                resolution, n_bins)
  @param loop   If False, do a single pass and return the results dict;
                otherwise poll forever, sleeping 10 s between passes
  """
  from cxi_xdr_xes.cftbx.cspad_ana import db as cxidb
  dbobj = cxidb.dbconnect(params.db.host, params.db.name, params.db.user, params.db.password)
  cursor = dbobj.cursor()
  PM = progress_manager(params, cursor)
  PM.setup_isoforms(cursor)
  PM.setup_runtags(cursor)
  isoforms = PM.isoforms
  # Drop the connection; a fresh one is opened on every pass below so
  # results are not served from a stale connection.
  del dbobj

  while 1:
    dbobj = cxidb.dbconnect(params.db.host, params.db.name, params.db.user, params.db.password)
    cursor = dbobj.cursor()

    results = {}
    print "Looking for data..."

    for tag in params.run_tags.split(','):
      for isoform in isoforms:
        M = PM.get_HKL(cursor,isoform=isoform,run_tags=tag)
        cell = isoforms[isoform]['cell']
        miller_set = mset(anomalous_flag = False, crystal_symmetry=symmetry(unit_cell=cell, space_group_symbol=isoforms[isoform]['lookup_symbol']), indices=M)
        miller_set.show_comprehensive_summary()

        miller_set.setup_binner(d_min=params.resolution, n_bins=params.n_bins)
        given = miller_set.binner().counts_given()
        ccomplete = miller_set.binner().counts_complete()
        for i_bin in miller_set.binner().range_used():
            sel         = miller_set.binner().selection(i_bin)
            self_sel    = miller_set.select(sel)
            d_max,d_min = self_sel.d_max_min()
            compl       = self_sel.completeness(d_max = d_max)

            n_ref       = sel.count(True)
            if ccomplete[i_bin] == 0.:
              multiplicity = 0.
            else:
              # res_highest and multiplicity intentionally keep the values
              # from the last non-empty bin; they are stored below as the
              # highest-resolution-shell statistics.
              res_highest   = d_min
              multiplicity  = given[i_bin]/ccomplete[i_bin]
            d_range     = miller_set.binner().bin_legend(
                   i_bin = i_bin, show_bin_number = False, show_counts = True)
            fmt = "%3d: %-24s %4.2f %6d mult=%4.2f"
            print fmt % (i_bin,d_range,compl,n_ref,
                          multiplicity)
        print
        if len(tag) > 0:
          key = "%s %s"%(tag, isoform)
        else:
          key = isoform
        # Restrict the per-bin count arrays to the bins actually used before
        # computing the overall multiplicity.
        given_used = flex.int(given).select(flex.size_t(miller_set.binner().range_used()))
        ccomplete_used = flex.int(ccomplete).select(flex.size_t(miller_set.binner().range_used()))
        results[key] = dict(
          multiplicity = flex.sum(given_used)/flex.sum(ccomplete_used),
          completeness = miller_set.completeness(),
          multiplicity_highest = multiplicity,
          completeness_highest = compl,
          resolution_highest = res_highest
        )
    del dbobj
    if not loop:
      return results
    time.sleep(10)
示例#20
0
def run(args):
    """Daemon loop: watch the experiment database for runs that belong to an
    active trial/rungroup combination but have not yet been submitted, and
    submit a processing job for each.

    Polls forever, reconnecting each iteration, sleeping 10 s between passes.

    @param args Command-line arguments parsed against master_phil
    """
    try:
        from cxi_xdr_xes.cftbx.cspad_ana import db as db
    except ImportError:
        raise Sorry(
            "Trial logging not supported for this installation. Contact the developers for access."
        )

    phil = iotbx.phil.process_command_line(args=args,
                                           master_string=master_phil)
    params = phil.work.extract()

    if params.db.host is None:
        raise Usage("Please provide a host name")
    if params.db.name is None:
        raise Usage("Please provide a database name")
    if params.db.user is None:
        raise Usage("Please provide a user name")
    if params.db.password is None:
        # Prompt interactively rather than requiring the password on the
        # command line.
        import getpass
        password = getpass.getpass()
    else:
        password = params.db.password

    while True:
        print "Checking for new jobs to submit"
        # need to get a new connection each iteration to prevent caching
        try:
            dbobj = db.dbconnect(host=params.db.host,
                                 db=params.db.name,
                                 username=params.db.user,
                                 password=password)
        except Exception as e:
            raise Sorry(e)

        # Get the set of known runs in the experiment database
        # NOTE(review): queries are built by %-interpolation; inputs come
        # from phil parameters and this database itself, but parameterized
        # queries would be safer — TODO confirm inputs are trusted.
        cmd = "SELECT run from %s_runs" % params.experiment_tag
        cursor = dbobj.cursor()
        cursor.execute(cmd)
        known_runs = [
            int(parse_entry(entry)[0]) for entry in cursor.fetchall()
        ]

        # Get the list of active trials
        cmd = "SELECT trial_id, trial from %s_trials WHERE active = True" % params.experiment_tag
        cursor = dbobj.cursor()
        cursor.execute(cmd)
        entries = [parse_entry(entry) for entry in cursor.fetchall()]
        active_trial_ids = [int(entry[0]) for entry in entries]
        active_trials = [int(entry[1]) for entry in entries]

        for trial_id, trial in zip(active_trial_ids, active_trials):
            # Get the active rungroups for this trial
            cmd = "SELECT rungroups_id from %s_trial_rungroups WHERE trials_id = %d AND active = True" % (
                params.experiment_tag, trial_id)
            cursor = dbobj.cursor()
            cursor.execute(cmd)
            active_rungroup_ids = [
                int(parse_entry(entry)[0]) for entry in cursor.fetchall()
            ]

            for rungroup_id in active_rungroup_ids:
                # Get the list of runs for this rungroup
                cmd = "SELECT startrun, endrun from %s_rungroups WHERE %s_rungroups.rungroup_id = %d" % (
                    params.experiment_tag, params.experiment_tag, rungroup_id)
                cursor = dbobj.cursor()
                cursor.execute(cmd)

                assert cursor.rowcount == 1
                startrun, endrun = parse_entry(cursor.fetchall()[0])

                # An open-ended rungroup has endrun None: take every run
                # from startrun onwards.
                cmd = "SELECT run_id, run from %s_runs WHERE run >= %d" % (
                    params.experiment_tag, startrun)
                if endrun is not None:
                    cmd += " AND run <= %d" % endrun
                cursor = dbobj.cursor()
                cursor.execute(cmd)

                entries = [parse_entry(entry) for entry in cursor.fetchall()]
                run_ids = [int(entry[0]) for entry in entries]
                runs = [int(entry[1]) for entry in entries]

                # Find the submitted runs for this trial/rungroup combination
                cmd = "SELECT runs_id from %s_jobs WHERE trials_id = %d and rungroups_id = %d" % (
                    params.experiment_tag, trial_id, rungroup_id)
                cursor = dbobj.cursor()
                cursor.execute(cmd)

                submitted_run_ids = [
                    int(parse_entry(entry)[0]) for entry in cursor.fetchall()
                ]

                # Submit new runs
                for run_id, run in zip(run_ids, runs):
                    if run_id in submitted_run_ids:
                        continue

                    print "Submitting run %d into trial %d using rungroup %d" % (
                        run_id, trial, rungroup_id)

                    config_path = write_hitfind_cfg(params, dbobj, trial_id,
                                                    trial, rungroup_id)
                    # The job is recorded as submitted regardless of the
                    # submit_job return value.
                    if submit_job(params, dbobj, trial_id, trial, rungroup_id,
                                  run, config_path):
                        pass

                    cmd = "INSERT INTO %s_jobs (runs_id, trials_id, rungroups_id, status) VALUES (%d, %d, %d, '%s')" % (
                        params.experiment_tag, run_id, trial_id, rungroup_id,
                        "submitted")
                    cursor = dbobj.cursor()
                    cursor.execute(cmd)
                    dbobj.commit()

        time.sleep(10)
示例#21
0
    def event(self, evt, env):
        """The event() function is called for every L1Accept transition.
    XXX more?

    Previously, common-mode correction was applied only after initial
    threshold filtering.  Since the common_mode class applies the
    (lengthy) common-mode correction immediately after reading the
    image from the stream, this optimisation is currently not
    (elegantly) doable.

    @param evt Event data object, a configure object
    @param env Environment object
    """

        super(mod_hitfind, self).event(evt, env)
        if (evt.get("skip_event")):
            return

        # This module only applies to detectors for which a distance is
        # available.
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self.nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return

        device = cspad_tbx.address_split(self.address)[2]

        # ***** HITFINDING ***** XXX For hitfinding it may be interesting
        # to look at the fraction of subzero pixels in the dark-corrected
        # image.
        if (self.m_threshold is not None):
            # If a threshold value is given it can be applied in one of three ways:
            #    1.  Apply it over the whole image
            if (self.m_roi is None and self.m_distl_min_peaks is None):
                vmax = flex.max(self.cspad_img)
                if (vmax < self.m_threshold):
                    if not self.m_negate_hits:
                        # Tell downstream modules to skip this event if the threshold was not met.
                        evt.put(skip_event_flag(), "skip_event")
                        return
                elif self.m_negate_hits:
                    evt.put(skip_event_flag(), "skip_event")
                    return

            #    2. Apply threshold over a rectangular region of interest.
            elif (self.m_roi is not None):
                vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                               self.m_roi[0]:self.m_roi[1]])
                if (vmax < self.m_threshold):
                    if not self.m_negate_hits:
                        evt.put(skip_event_flag(), "skip_event")
                        return
                elif self.m_negate_hits:
                    evt.put(skip_event_flag(), "skip_event")
                    return

            #    3. Determine the spotfinder spots within the central ASICS, and accept the
            #       image as a hit if there are m_distl_min_peaks exceeding m_threshold.
            #       As a further requirement, the peaks must exceed 2.5 * the 90-percentile
            #       pixel value of the central ASICS.  This filter was added to avoid high-background
            #       false positives.
            elif (self.m_distl_min_peaks is not None):
                if device == 'marccd':
                    self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
                    self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
                elif device == 'Rayonix':
                    self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
                    self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

                peak_heights, outvalue = self.distl_filter(
                    self.address,
                    self.cspad_img.iround(),  # XXX correct?
                    distance,
                    self.timestamp,
                    self.wavelength)
                if ('permissive' in self.m_distl_flags):
                    number_of_accepted_peaks = (peak_heights >
                                                self.m_threshold).count(True)
                else:
                    # Strict mode also requires the spotfinder's outvalue
                    # flag to be clear for a peak to count.
                    number_of_accepted_peaks = ((
                        peak_heights > self.m_threshold).__and__(
                            outvalue == 0)).count(True)

                sec, ms = cspad_tbx.evt_time(evt)
                evt_time = sec + ms / 1000
                self.stats_logger.info("BRAGG %.3f %d" %
                                       (evt_time, number_of_accepted_peaks))

                skip_event = False
                if number_of_accepted_peaks < self.m_distl_min_peaks:
                    self.logger.info(
                        "Subprocess %02d: Spotfinder NO  HIT image #%05d @ %s; %d spots > %d"
                        % (env.subprocess(), self.nshots, self.timestamp,
                           number_of_accepted_peaks, self.m_threshold))

                    if not self.m_negate_hits:
                        skip_event = True
                else:
                    self.logger.info(
                        "Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d"
                        % (env.subprocess(), self.nshots, self.timestamp,
                           number_of_accepted_peaks, self.m_threshold))

                    if self.m_negate_hits:
                        skip_event = True

                if skip_event:
                    if self.m_db_logging:
                        # log misses to the database
                        self.queue_entry(
                            (self.trial, evt.run(), "%.3f" % evt_time,
                             number_of_accepted_peaks, distance, self.sifoil,
                             self.wavelength, False, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                             0, 0, self.m_db_tags))
                    evt.put(skip_event_flag(), "skip_event")
                    return
                # the indexer will log this hit when it is ran. Bug: if the spotfinder is ran by itself, this
                # hit will not be logged in the db.
                evt.put(number_of_accepted_peaks, 'sfspots')

        self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                         (env.subprocess(), self.nshots, self.timestamp))

        # See r17537 of mod_average.py.
        if device == 'Cspad':
            pixel_size = cspad_tbx.pixel_size
            saturated_value = cspad_tbx.cspad_saturated_value
        elif device == 'marccd':
            pixel_size = evt.get("marccd_pixel_size")
            saturated_value = evt.get("marccd_saturated_value")
        elif device == 'Rayonix':
            pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
            saturated_value = rayonix_tbx.rayonix_saturated_value

        # Pack the image and its metadata into the dictionary consumed by
        # the dispatch branches below.
        d = cspad_tbx.dpack(
            active_areas=self.active_areas,
            address=self.address,
            beam_center_x=pixel_size * self.beam_center[0],
            beam_center_y=pixel_size * self.beam_center[1],
            data=self.cspad_img.iround(),  # XXX ouch!
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=saturated_value,
            timestamp=self.timestamp,
            wavelength=self.wavelength,
            xtal_target=self.m_xtal_target)

        if (self.m_dispatch == "index"):
            import sys
            from xfel.cxi.integrate_image_api import integrate_one_image
            info = integrate_one_image(
                d,
                integration_dirname=self.m_integration_dirname,
                integration_basename=self.m_integration_basename)
            # integrate_one_image may redirect stdout/stderr; restore them.
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__

            indexed = info is not None and hasattr(info, 'spotfinder_results')
            if self.m_progress_logging:
                if self.m_db_version == 'v1':
                    if indexed:
                        # integration pickle dictionary is available here as info.last_saved_best
                        if info.last_saved_best[
                                "identified_isoform"] is not None:
                            #print info.last_saved_best.keys()
                            from cxi_xdr_xes.cftbx.cspad_ana import db
                            dbobj = db.dbconnect(self.m_db_host,
                                                 self.m_db_name,
                                                 self.m_db_user,
                                                 self.m_db_password)
                            cursor = dbobj.cursor()
                            if info.last_saved_best[
                                    "identified_isoform"] in self.isoforms:
                                PM, indices, miller_id = self.isoforms[
                                    info.last_saved_best["identified_isoform"]]
                            else:
                                from xfel.xpp.progress_support import progress_manager
                                PM = progress_manager(info.last_saved_best,
                                                      self.m_db_experiment_tag,
                                                      self.m_trial_id,
                                                      self.m_rungroup_id,
                                                      evt.run())
                                indices, miller_id = PM.get_HKL(cursor)
                                # cache these as they don't change for a given isoform
                                self.isoforms[info.last_saved_best[
                                    "identified_isoform"]] = PM, indices, miller_id
                            if self.m_sql_buffer_size > 1:
                                self.queue_progress_entry(
                                    PM.scale_frame_detail(self.timestamp,
                                                          cursor,
                                                          do_inserts=False))
                            else:
                                PM.scale_frame_detail(self.timestamp,
                                                      cursor,
                                                      do_inserts=True)
                                dbobj.commit()
                                cursor.close()
                                dbobj.close()
                elif self.m_db_version == 'v2':
                    key_low = 'cctbx.xfel.radial_average.two_theta_low'
                    key_high = 'cctbx.xfel.radial_average.two_theta_high'
                    tt_low = evt.get(key_low)
                    tt_high = evt.get(key_high)

                    from xfel.ui.db.dxtbx_db import log_frame
                    if indexed:
                        n_spots = len(info.spotfinder_results.images[
                            info.frames[0]]['spots_total'])
                    else:
                        sfspots = evt.get('sfspots')
                        if sfspots is None:
                            if info is None or not isinstance(info, int):
                                n_spots = 0
                            else:
                                n_spots = info
                        else:
                            n_spots = sfspots

                    if indexed:
                        known_setting = info.horizons_phil.known_setting
                        indexed_setting = info.organizer.info[
                            'best_integration']['counter']
                        if known_setting is None or known_setting == indexed_setting:
                            from xfel.command_line.frame_unpickler import construct_reflection_table_and_experiment_list
                            c = construct_reflection_table_and_experiment_list(
                                info.last_saved_best,
                                None,
                                pixel_size,
                                proceed_without_image=True)
                            c.assemble_experiments()
                            c.assemble_reflections()
                            log_frame(c.experiment_list, c.reflections,
                                      self.db_params, evt.run(), n_spots,
                                      self.timestamp, tt_low, tt_high)
                        else:
                            print(
                                "Not logging %s, wrong bravais setting (expecting %d, got %d)"
                                % (self.timestamp, known_setting,
                                   indexed_setting))
                    else:
                        log_frame(None, None, self.db_params, evt.run(),
                                  n_spots, self.timestamp, tt_low, tt_high)

            if self.m_db_logging:
                sec, ms = cspad_tbx.evt_time(evt)
                evt_time = sec + ms / 1000
                sfspots = evt.get('sfspots')
                if sfspots is None:
                    if indexed:
                        n_spots = len(info.spotfinder_results.images[
                            info.frames[0]]['spots_total'])
                    else:
                        n_spots = 0
                else:
                    n_spots = sfspots

                if indexed:
                    mosaic_bloc_rotation = info.last_saved_best.get(
                        'ML_half_mosaicity_deg', [0])[0]
                    mosaic_block_size = info.last_saved_best.get(
                        'ML_domain_size_ang', [0])[0]
                    ewald_proximal_volume = info.last_saved_best.get(
                        'ewald_proximal_volume', [0])[0]

                    obs = info.last_saved_best['observations'][0]
                    cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = obs.unit_cell(
                    ).parameters()
                    pointgroup = info.last_saved_best['pointgroup']
                    resolution = obs.d_min()
                else:
                    # Not indexed: zero all crystal statistics.  This must
                    # bind 'pointgroup' (read by queue_entry below); the
                    # previous code bound 'spacegroup' instead, leaving
                    # 'pointgroup' undefined and raising NameError here.
                    mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
                      cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

                self.queue_entry(
                    (self.trial, evt.run(), "%.3f" % evt_time, n_spots,
                     distance, self.sifoil, self.wavelength, indexed,
                     mosaic_bloc_rotation, mosaic_block_size,
                     ewald_proximal_volume, pointgroup, cell_a, cell_b, cell_c,
                     cell_alpha, cell_beta, cell_gamma, resolution,
                     self.m_db_tags))

            if (not indexed):
                evt.put(skip_event_flag(), "skip_event")
                return

        elif (self.m_dispatch == "nop"):
            pass

        elif (self.m_dispatch == "view"):  #interactive image viewer

            args = ["indexing.data=dummy"]
            detector_format_version = detector_format_function(
                self.address, evt.GetTime())
            if detector_format_version is not None:
                # Fixed format string: was "...=%" (no conversion type),
                # which raises ValueError; "%s" matches the parallel code in
                # the "spots" dispatch below.
                args += [
                    "distl.detector_format_version=%s" % detector_format_version
                ]

            from xfel.phil_preferences import load_cxi_phil
            horizons_phil = load_cxi_phil(self.m_xtal_target, args)
            horizons_phil.indexing.data = d

            from xfel.cxi import display_spots
            display_spots.parameters.horizons_phil = horizons_phil
            display_spots.wrapper_of_callback().display(
                horizons_phil.indexing.data)

        elif (self.m_dispatch == "spots"):  #interactive spotfinder viewer

            args = ["indexing.data=dummy"]
            detector_format_version = detector_format_function(
                self.address, evt.GetTime())
            if detector_format_version is not None:
                args += [
                    "distl.detector_format_version=%s" %
                    detector_format_version
                ]

            from xfel.phil_preferences import load_cxi_phil
            horizons_phil = load_cxi_phil(self.m_xtal_target, args)
            horizons_phil.indexing.data = d

            from xfel.cxi import display_spots
            display_spots.parameters.horizons_phil = horizons_phil

            from rstbx.new_horizons.index import pre_indexing_validation, pack_names
            pre_indexing_validation(horizons_phil)
            imagefile_arguments = pack_names(horizons_phil)
            horizons_phil.persist.show()
            from spotfinder.applications import signal_strength
            info = signal_strength.run_signal_strength_core(
                horizons_phil, imagefile_arguments)

            work = display_spots.wrapper_of_callback(info)
            work.display_with_callback(horizons_phil.indexing.data)

        elif (self.m_dispatch == "write_dict"):
            self.logger.warning(
                "event(): deprecated dispatch 'write_dict', use mod_dump instead"
            )
            if (self.m_out_dirname is not None
                    or self.m_out_basename is not None):
                cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

        # Diagnostic message emitted only when all the processing is done.
        if (env.subprocess() >= 0):
            self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                             (env.subprocess(), self.nshots, self.timestamp))
        else:
            self.logger.info("Accepted #%05d @ %s" %
                             (self.nshots, self.timestamp))