def beginjob(self, evt, env):
  """The beginjob() function does one-time initialisation from event-
  or environment data.  It is called at an XTC configure transition.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(average_mixin, self).beginjob(evt, env)

  if self.dark_img is not None and self.hot_threshold is not None:
    # Scale the configured hot-pixel threshold by the median dark
    # level, and report how many pixels exceed it.
    self.hot_threshold *= flex.median(self.dark_img.as_1d())
    self.logger.info("HOT THRESHOLD: %.2f" % self.hot_threshold)
    self.logger.info("Number of pixels above hot threshold: %i" %
                     (self.dark_img > self.hot_threshold).count(True))

  self._nfail = 0
  self._nmemb = 0

  # The time_base metadata item is a two-long array of seconds and
  # milliseconds, and it must be recorded only once.  It indicates
  # the base time, which is subtracted from all per-shot times.
  # Assuming an average run length of ten minutes, five minutes past
  # the start of a run is a good base time.
  #
  # The check-then-set must happen under the lock; try/finally
  # guarantees the lock is released even if evt_time() raises
  # (previously an exception here would leave the lock held forever).
  self._lock.acquire()
  try:
    if 'time_base' not in self._metadata:
      self._metadata['time_base'] = (cspad_tbx.evt_time(evt)[0] + 5 * 60, 500)
  finally:
    self._lock.release()
def event(self, evt, env):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.  Read the evr codes for
  this event and save the timestamp if it has an evr code we are
  looking for.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  if evt.get("skip_event"):
    return

  detector = psana.Detector('evr0')
  fired_codes = detector.eventCodes(evt)

  # Human-readable timestamp of this shot.
  ts = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))

  logging.info("mod_event_code, rank: %d, timestamp: %s, code_list: %s" % (
    self.rank, ts, ",".join(["%d" % c for c in fired_codes])))

  # Record the timestamp under every alist whose configured event
  # code fired on this shot.
  for alist_name, wanted_code in zip(self.alist_names, self.event_codes):
    if wanted_code in fired_codes:
      self.timestamps_d[alist_name].append(ts)
def event(self, evt, env):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  self.nshots += 1
  if evt.get("skip_event"):
    return

  # Both selection modes need the shot time; shots without a
  # timestamp are always skipped.
  t = cspad_tbx.evt_time(evt)
  if t is None:
    self.logger.warning(
      "event(): no timestamp, shot skipped. Shot: %s" % self.nshots)
    evt.put(skip_event_flag(), "skip_event")
    return

  # A shot matches either the explicit timestamp list or the
  # configured interval; self.negate inverts the selection.
  if self.timestamps_list is not None:
    matched = t in self.timestamps_list
  else:
    matched = self._tir(t, self.timestamps_interval)
  if (self.negate and matched) or (not self.negate and not matched):
    evt.put(skip_event_flag(), "skip_event")
    return

  self.logger.info("event(): event accepted. Shot: %s, TS: %s" %
                   (self.nshots, cspad_tbx.evt_timestamp(t)))
  self.naccepted += 1
def event(self, evt, env):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  self.nshots += 1
  if evt.get("skip_event"):
    return

  # Get time as a (seconds, milliseconds) tuple.
  t = cspad_tbx.evt_time(evt)
  if t is None:
    self.logger.warning("event(): no timestamp, shot skipped")
    evt.put(skip_event_flag(), "skip_event")
    return

  # Update laser status before classifying the shot.
  self.laser_1.set_status(cspad_tbx.env_laser_status(env, laser_id=1), t)
  self.laser_4.set_status(cspad_tbx.env_laser_status(env, laser_id=4), t)

  t1 = self.laser_1.ms_since_last_status_change(t)
  t4 = self.laser_4.ms_since_last_status_change(t)

  # Classify the shot.  If laser 4 is on, or either laser changed
  # state less than self._wait ms ago, the shot is "other".
  # Otherwise laser 1 on means "light" and laser 1 off means "dark"
  # (an exhaustive dichotomy).
  laser_1_settled = t1 is not None and t1 < self._wait
  laser_4_settled = t4 is not None and t4 < self._wait
  if self.laser_4.status or laser_4_settled or laser_1_settled:
    category = "other"
  elif self.laser_1.status:
    category = "light"
  else:
    category = "dark"

  if self._filter != category:
    evt.put(skip_event_flag(), "skip_event")
    return

  self.naccepted += 1
def event(self, evt, env):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  A shot is kept when its timestamp is in self.timestamps_list (list
  mode) or falls inside self.timestamps_interval (interval mode);
  self.negate inverts the selection in either mode.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  self.nshots += 1
  if (evt.get("skip_event")):
    return
  if (self.timestamps_list is not None):
    # List mode: compare the shot time against the explicit list.
    t = cspad_tbx.evt_time(evt)
    if (t is None):
      # Shots without a timestamp cannot be matched and are skipped.
      self.logger.warning("event(): no timestamp, shot skipped. Shot: %s"%self.nshots)
      evt.put(skip_event_flag(), "skip_event")
      return
    elif (self.negate and t in self.timestamps_list):
      evt.put(skip_event_flag(), "skip_event")
      return
    elif (not self.negate and t not in self.timestamps_list):
      evt.put(skip_event_flag(), "skip_event")
      return
  else:
    # Interval mode: _tir() tests whether the shot time falls inside
    # self.timestamps_interval.
    t = cspad_tbx.evt_time(evt)
    if (t is None):
      self.logger.warning("event(): no timestamp, shot skipped. Shot: %s"%self.nshots)
      evt.put(skip_event_flag(), "skip_event")
      return
    if (self.negate and self._tir(t, self.timestamps_interval)):
      evt.put(skip_event_flag(), "skip_event")
      return
    elif (not self.negate and not self._tir(t, self.timestamps_interval)):
      evt.put(skip_event_flag(), "skip_event")
      return
  # Shot survived the filter.
  self.logger.info("event(): event accepted. Shot: %s, TS: %s"%(self.nshots,cspad_tbx.evt_timestamp(t)))
  self.naccepted += 1
def _filter_event(self, evt, flux_min, flux_max, peak_ratio_min, peak_ratio_max):
  """Decide whether a spectrometer shot passes the flux and
  peak-to-flux filters.

  @param evt             psana event holding the camera frame
  @param flux_min        Reject shots with summed flux below this value
  @param flux_max        Reject shots with summed flux at or above this value
  @param peak_ratio_min  Reject shots whose peak/flux ratio is below this value
  @param peak_ratio_max  Reject shots whose peak/flux ratio is at or above this value
  @return Tuple (accepted, data, spectrum, dc_offset, dark-subtracted
          frame, raw frame); all entries but the flag are None when the
          shot is rejected
  """
  all_data = evt.get(psana.Camera.FrameV1, self.src)
  if all_data is None:
    print("No data")
    return False, None, None, None, None, None
  print(cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)), end=' ')

  data = np.array(all_data.data16().astype(np.int32))
  if self.dark_pickle is None:
    # No dark frame provided: subtract zeros.
    self.dark_pickle = np.zeros(data.shape)

  if self.bg_roi is None:
    dc_offset = None
    dc_offset_mean = 0.0
  else:
    # Mean dark-subtracted level in the background ROI (dc: direct current).
    xmin, xmax, ymin, ymax = self.bg_roi
    dc_offset = data[ymin:ymax, xmin:xmax] - self.dark_pickle[ymin:ymax, xmin:xmax]
    dc_offset_mean = np.mean(dc_offset)

  if self.roi is None:
    xmin = 0
    ymin = 0
    ymax, xmax = data.shape
  else:
    xmin, xmax, ymin, ymax = self.roi
  data_signal = data[ymin:ymax, xmin:xmax] - self.dark_pickle[ymin:ymax, xmin:xmax]
  data = data_signal - dc_offset_mean

  # make a 1D trace
  spectrum = np.sum(data, 0)
  flux = np.sum(spectrum)

  peak_max = None
  if self.peak_range is None:
    print("Run", evt.run(), "flux:", flux)
  else:
    peak = spectrum[self.peak_range[0] - xmin:self.peak_range[1] - xmin]
    peak_max = np.sum(peak)
    print("Run", evt.run(), "peak max:", peak_max, "at", np.argmax(peak) + xmin,
          "px", "flux:", flux, "m/f:", peak_max / flux)

  if flux_min is not None and flux < flux_min:
    return False, None, None, None, None, None
  if flux_max is not None and flux >= flux_max:
    return False, None, None, None, None, None
  # The peak-ratio filters are only meaningful when a peak range was
  # configured.  Previously peak_max was referenced here even when
  # self.peak_range was None, raising NameError.
  if peak_max is not None:
    if peak_ratio_min is not None and peak_max / flux < peak_ratio_min:
      return False, None, None, None, None, None
    if peak_ratio_max is not None and peak_max / flux >= peak_ratio_max:
      return False, None, None, None, None, None

  all_data = all_data.data16().astype(np.int32)
  return True, data, spectrum, dc_offset, all_data - self.dark_pickle, all_data
def event(self, evt, evn):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  @param evt Event data object, a configure object
  @param evn Environment object
  """
  if evt.get("skip_event"):
    return
  self.n_total += 1
  ts = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
  # filter_event() returns a tuple whose first element is the
  # accept/reject flag.
  accept = self.filter.filter_event(evt, self.selected_filter)[0]

  if not accept:
    # Python 2 print statements converted to print() calls for
    # consistency with the Python 3 code in this file.
    print("Skipping event", ts, ": didn't pass filter", self.selected_filter)
    evt.put(skip_event_flag(), "skip_event")
    return

  print("Accepting event", ts, ": passed filter", self.selected_filter)
  self.n_accepted += 1
def event(self, evt, evn):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  @param evt Event data object, a configure object
  @param evn Environment object
  """
  if evt.get("skip_event"):
    return
  self.n_total += 1
  ts = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
  # filter_event() returns a tuple whose first element is the
  # accept/reject flag.
  accept = self.filter.filter_event(evt, self.selected_filter)[0]

  if not accept:
    # Python 2 print statements converted to print() calls for
    # consistency with the Python 3 code in this file.
    print("Skipping event", ts, ": didn't pass filter", self.selected_filter)
    evt.put(skip_event_flag(), "skip_event")
    return

  print("Accepting event", ts, ": passed filter", self.selected_filter)
  self.n_accepted += 1
def _filter_event(self, evt, flux_min, flux_max, peak_ratio_min, peak_ratio_max):
  """Decide whether a spectrometer shot passes the flux and
  peak-to-flux filters.

  @param evt             psana event holding the camera frame
  @param flux_min        Reject shots with summed flux below this value
  @param flux_max        Reject shots with summed flux at or above this value
  @param peak_ratio_min  Reject shots whose peak/flux ratio is below this value
  @param peak_ratio_max  Reject shots whose peak/flux ratio is at or above this value
  @return Tuple (accepted, data, spectrum, dc_offset, dark-subtracted
          frame, raw frame); all entries but the flag are None when the
          shot is rejected
  """
  all_data = evt.get(psana.Camera.FrameV1, self.src)
  if all_data is None:
    # Python 2 print statements converted to print() calls for
    # consistency with the Python 3 code in this file.
    print("No data")
    return False, None, None, None, None, None
  print(cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)), end=' ')

  if self.bg_roi is None:
    dc_offset = None
    dc_offset_mean = 0.0
  else:
    # Mean dark-subtracted level in the background ROI (dc: direct current).
    xmin, xmax, ymin, ymax = self.bg_roi
    dc_offset = np.array(all_data.data16().astype(np.int32))[ymin:ymax, xmin:xmax] - \
      self.dark_pickle[ymin:ymax, xmin:xmax]
    dc_offset_mean = np.mean(dc_offset)

  data = np.array(all_data.data16().astype(np.int32))
  if self.roi is None:
    xmin = 0
    ymin = 0
    ymax, xmax = data.shape
  else:
    xmin, xmax, ymin, ymax = self.roi
  data_signal = data[ymin:ymax, xmin:xmax] - self.dark_pickle[ymin:ymax, xmin:xmax]
  data = data_signal - dc_offset_mean

  # make a 1D trace
  spectrum = np.sum(data, 0)
  flux = np.sum(spectrum)

  peak_max = None
  if self.peak_range is None:
    print("Run", evt.run(), "flux:", flux)
  else:
    peak = spectrum[self.peak_range[0] - xmin:self.peak_range[1] - xmin]
    peak_max = np.sum(peak)
    print("Run", evt.run(), "peak max:", peak_max, "at", np.argmax(peak) + xmin,
          "px", "flux:", flux, "m/f:", peak_max / flux)

  if flux_min is not None and flux < flux_min:
    return False, None, None, None, None, None
  if flux_max is not None and flux >= flux_max:
    return False, None, None, None, None, None
  # The peak-ratio filters are only meaningful when a peak range was
  # configured.  Previously peak_max was referenced here even when
  # self.peak_range was None, raising NameError.
  if peak_max is not None:
    if peak_ratio_min is not None and peak_max / flux < peak_ratio_min:
      return False, None, None, None, None, None
    if peak_ratio_max is not None and peak_max / flux >= peak_ratio_max:
      return False, None, None, None, None, None

  all_data = all_data.data16().astype(np.int32)
  return True, data, spectrum, dc_offset, all_data - self.dark_pickle, all_data
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.
  Computes basic statistics (skewness, kurtosis) of the dark-masked
  pixels, optionally restricted to self.roi, and logs them against the
  shot time.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(mod_xes, self).event(evt, env)
  if evt.get("skip_event"):
    return

  if self.roi is not None:
    # Restrict statistics to the region of interest, keeping only
    # pixels whose dark standard deviation was positive.
    pixels = self.cspad_img[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
    dark_mask = self.dark_mask[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
    pixels = pixels.as_1d().select(dark_mask.as_1d())
  else:
    pixels = self.cspad_img.as_1d().select(self.dark_mask.as_1d())

  stats = scitbx.math.basic_statistics(pixels.as_double())

  s, ms = cspad_tbx.evt_time(evt)
  # Use float division: under Python 2's integer division ms/1000
  # truncated to 0, discarding the sub-second part of the timestamp.
  evt_time = s + ms / 1000.0
  self.stats_logger.info("SKEWNESS %.3f %s" % (evt_time, stats.skew))
  self.stats_logger.info("KURTOSIS %.3f %s" % (evt_time, stats.kurtosis))
def beginjob(self, evt, env):
  """The beginjob() function does one-time initialisation from event-
  or environment data.  It is called at an XTC configure transition.

  Loads the optional dark, gain and mask images and determines the
  active areas and beam centre for the configured detector address.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(common_mode_correction, self).beginjob(evt, env)

  # Load the dark image and ensure it is signed and at least 32 bits
  # wide, since it will be used for differencing.  If a dark image
  # is provided, a standard deviation image is required, and all the
  # ADU scales must match up.
  #
  # XXX Can we zap all the ADU_SCALE stuff?
  #
  # XXX Do we really need to store the substituted values in the
  # instance here?  At least self._dark_path is referenced later on.
  #
  # Note that this will load the dark, standard deviation and gain
  # once (SEVERAL TIMES) for each process, but the gain is that we
  # can do substitutions.  But it is only done at the beginning of
  # the job.
  self.dark_img = None
  if self._dark_path is not None:
    self._dark_path = cspad_tbx.getOptEvalOrString(
      cspad_tbx.pathsubst(self._dark_path, evt, env))
    assert self._dark_stddev_path is not None
    dark_dict = easy_pickle.load(self._dark_path)
    #assert "ADU_SCALE" not in dark_dict # force use of recalculated dark
    self.dark_img = dark_dict['DATA']
    assert isinstance(self.dark_img, flex.double)
    self._dark_stddev_path = cspad_tbx.getOptEvalOrString(
      cspad_tbx.pathsubst(self._dark_stddev_path, evt, env))
    self.dark_stddev = easy_pickle.load(self._dark_stddev_path)['DATA']
    assert isinstance(self.dark_stddev, flex.double)
    # Pixels whose dark standard deviation is zero are considered
    # dead and excluded by this mask.
    self.dark_mask = (self.dark_stddev > 0)

  # Load the optional gain map.  With gain_map_level set, the map is
  # binarised: zero entries become gain_map_level, non-zero entries
  # become 1.
  self.gain_map = None
  if self._gain_map_path is not None:
    self._gain_map_path = cspad_tbx.getOptEvalOrString(
      cspad_tbx.pathsubst(self._gain_map_path, evt, env))
    self.gain_map = easy_pickle.load(self._gain_map_path)['DATA']
    if self.gain_map_level is not None:
      sel = flex.bool([bool(f) for f in self.gain_map])
      sel.reshape(flex.grid(self.gain_map.focus()))
      self.gain_map = self.gain_map.set_selected(~sel, self.gain_map_level)
      self.gain_map = self.gain_map.set_selected(sel, 1)
    assert isinstance(self.gain_map, flex.double)

  # Load the optional mask image; it may be double or int typed.
  self.mask_img = None
  if self._mask_path is not None:
    self._mask_path = cspad_tbx.getOptEvalOrString(
      cspad_tbx.pathsubst(self._mask_path, evt, env))
    self.mask_img = easy_pickle.load(self._mask_path)['DATA']
    assert isinstance(self.mask_img, flex.double) \
      or isinstance(self.mask_img, flex.int)

  # Determine beam centre and active areas per detector address.
  if self.address == 'XppGon-0|marccd-0':
    # mod_mar.py will set these during its event function
    self.active_areas = None
    self.beam_center = None
  elif self.address == 'XppEndstation-0|Rayonix-0' or \
       self.address == 'MfxEndstation-0|Rayonix-0':
    # Rayonix: geometry derives from the binned detector dimensions
    # and the user-supplied beam centre; optionally crop to the
    # largest square centred on the beam.
    assert self.override_beam_x is not None
    assert self.override_beam_y is not None
    from xfel.cxi.cspad_ana import rayonix_tbx
    maxx, maxy = rayonix_tbx.get_rayonix_detector_dimensions(self.bin_size)
    if self.crop_rayonix:
      bx = int(round(self.override_beam_x))
      by = int(round(self.override_beam_y))
      minsize = min([bx,by,maxx-bx,maxy-by])
      self.beam_center = minsize,minsize
      self.active_areas = flex.int([0,0,2*minsize,2*minsize])
      self.rayonix_crop_slice = slice(by-minsize,by+minsize), slice(bx-minsize,bx+minsize)
    else:
      self.beam_center = self.override_beam_x,self.override_beam_y
      self.active_areas = flex.int([0,0,maxx,maxy])
  elif self.address == 'XppGon-0|Cspad-0':
    # XPP CSPAD: look up the active areas for the detector format
    # version in effect at this event's timestamp.
    evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
    timestamp = cspad_tbx.evt_timestamp(evt_time) # human readable format
    from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
    from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
    version_lookup = detector_format_version(self.address, reverse_timestamp(timestamp)[0])
    assert version_lookup is not None
    self.active_areas = xpp_active_areas[version_lookup]['active_areas']
    self.beam_center = [1765 // 2, 1765 // 2]
  else:
    # Default: derive beam centre and active areas from the psana
    # configuration and the optical metrology sections.
    (self.beam_center, self.active_areas) = cspad_tbx.cbcaa(
      cspad_tbx.getConfig(self.address, env), self.sections)
def average(argv=None):
  """Average images from an LCLS run over MPI and write the mean,
  standard deviation and maximum (optionally minimum) projections as
  image pickles or CSPAD CBFs.

  @param argv Command-line argument list (defaults to sys.argv[1:])
  """
  if argv == None:
    argv = sys.argv[1:]

  try:
    from mpi4py import MPI
  except ImportError:
    raise Sorry("MPI not found")

  # NOTE(review): the usage text's original line breaks were lost in
  # this copy; the content below is unchanged but the wrapping is
  # reconstructed.
  command_line = (libtbx.option_parser.option_parser(usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f] [-g gain_mask_value] [--min] [--minpath minpath]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name)
                .option(None, "--as_pickle", "-p",
                        action="store_true", default=False, dest="as_pickle",
                        help="Write results as image pickle files instead of cbf files")
                .option(None, "--raw_data", "-R",
                        action="store_true", default=False, dest="raw_data",
                        help="Disable psana corrections such as dark pedestal subtraction or common mode (cbf only)")
                .option(None, "--background_pickle", "-B",
                        default=None, dest="background_pickle",
                        help="")
                .option(None, "--config", "-c",
                        type="string", default=None, dest="config", metavar="PATH",
                        help="psana config file")
                .option(None, "--experiment", "-x",
                        type="string", default=None, dest="experiment",
                        help="experiment name (eg cxi84914)")
                .option(None, "--run", "-r",
                        type="int", default=None, dest="run",
                        help="run number")
                .option(None, "--address", "-a",
                        type="string", default="CxiDs2.0:Cspad.0", dest="address",
                        help="detector address name (eg CxiDs2.0:Cspad.0)")
                .option(None, "--detz_offset", "-d",
                        type="float", default=None, dest="detz_offset",
                        help="offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)")
                .option(None, "--outputdir", "-o",
                        type="string", default=".", dest="outputdir", metavar="PATH",
                        help="Optional path to output directory for output files")
                .option(None, "--averagebase", "-A",
                        type="string", default="{experiment!l}_avg-r{run:04d}",
                        dest="averagepath", metavar="PATH",
                        help="Path to output average image without extension. String substitution allowed")
                .option(None, "--stddevbase", "-S",
                        type="string", default="{experiment!l}_stddev-r{run:04d}",
                        dest="stddevpath", metavar="PATH",
                        help="Path to output standard deviation image without extension. String substitution allowed")
                .option(None, "--maxbase", "-M",
                        type="string", default="{experiment!l}_max-r{run:04d}",
                        dest="maxpath", metavar="PATH",
                        help="Path to output maximum projection image without extension. String substitution allowed")
                .option(None, "--numevents", "-n",
                        type="int", default=None, dest="numevents",
                        help="Maximum number of events to process. Default: all")
                .option(None, "--skipevents", "-s",
                        type="int", default=0, dest="skipevents",
                        help="Number of events in the beginning of the run to skip. Default: 0")
                .option(None, "--verbose", "-v",
                        action="store_true", default=False, dest="verbose",
                        help="Print more information about progress")
                .option(None, "--pickle-optical-metrology", "-m",
                        action="store_true", default=False, dest="pickle_optical_metrology",
                        help="If writing pickle files, use the optical metrology in the experiment's calib directory")
                .option(None, "--bin_size", "-b",
                        type="int", default=None, dest="bin_size",
                        help="Rayonix detector bin size")
                .option(None, "--override_beam_x", "-X",
                        type="float", default=None, dest="override_beam_x",
                        help="Rayonix detector beam center x coordinate")
                .option(None, "--override_beam_y", "-Y",
                        type="float", default=None, dest="override_beam_y",
                        help="Rayonix detector beam center y coordinate")
                .option(None, "--calib_dir", "-C",
                        type="string", default=None, dest="calib_dir", metavar="PATH",
                        help="calibration directory")
                .option(None, "--pickle_calib_dir", "-P",
                        type="string", default=None, dest="pickle_calib_dir", metavar="PATH",
                        help="pickle calibration directory specification. Replaces --calib_dir functionality.")
                .option(None, "--xtc_dir", "-D",
                        type="string", default=None, dest="xtc_dir", metavar="PATH",
                        help="xtc stream directory")
                .option(None, "--use_ffb", "-f",
                        action="store_true", default=False, dest="use_ffb",
                        help="Use the fast feedback filesystem at LCLS. Only for the active experiment!")
                .option(None, "--gain_mask_value", "-g",
                        type="float", default=None, dest="gain_mask_value",
                        help="Ratio between low and high gain pixels, if CSPAD in mixed-gain mode. Only used in CBF averaging mode.")
                .option(None, "--min", None,
                        action="store_true", default=False, dest="do_minimum_projection",
                        help="Output a minimum projection")
                .option(None, "--minpath", None,
                        type="string", default="{experiment!l}_min-r{run:04d}",
                        dest="minpath", metavar="PATH",
                        help="Path to output minimum image without extension. String substitution allowed")
                ).process(args=argv)

  # Required options must all be present; otherwise show usage.
  if len(command_line.args) > 0 or \
     command_line.options.as_pickle is None or \
     command_line.options.experiment is None or \
     command_line.options.run is None or \
     command_line.options.address is None or \
     command_line.options.detz_offset is None or \
     command_line.options.averagepath is None or \
     command_line.options.stddevpath is None or \
     command_line.options.maxpath is None or \
     command_line.options.pickle_optical_metrology is None:
    command_line.parser.show_help()
    return

  # set this to sys.maxint to analyze all events
  if command_line.options.numevents is None:
    maxevents = sys.maxsize
  else:
    maxevents = command_line.options.numevents

  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  size = comm.Get_size()

  if command_line.options.config is not None:
    psana.setConfigFile(command_line.options.config)
  dataset_name = "exp=%s:run=%d:smd" % (command_line.options.experiment,
                                        command_line.options.run)
  if command_line.options.xtc_dir is not None:
    if command_line.options.use_ffb:
      raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
    dataset_name += ":dir=%s" % command_line.options.xtc_dir
  elif command_line.options.use_ffb:
    # as ffb is only at SLAC, ok to hardcode /reg/d here
    dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
      command_line.options.experiment[0:3], command_line.options.experiment)
  if command_line.options.calib_dir is not None:
    psana.setOption('psana.calib-dir', command_line.options.calib_dir)
  ds = psana.DataSource(dataset_name)
  address = command_line.options.address
  src = psana.Source('DetInfo(%s)' % address)
  # Per-rank event counter, as a length-1 array so MPI can reduce it.
  nevent = np.array([0.])

  if command_line.options.background_pickle is not None:
    background = easy_pickle.load(
      command_line.options.background_pickle)['DATA'].as_numpy_array()

  for run in ds.runs():
    runnumber = run.run()

    if not command_line.options.as_pickle:
      psana_det = psana.Detector(address, ds.env())

    # list of all events
    if command_line.options.skipevents > 0:
      print("Skipping first %d events" % command_line.options.skipevents)
    elif "Rayonix" in command_line.options.address:
      print("Skipping first image in the Rayonix detector")  # Shuttering issue
      command_line.options.skipevents = 1

    for i, evt in enumerate(run.events()):
      # Round-robin distribution of events over MPI ranks.
      if i % size != rank: continue
      if i < command_line.options.skipevents: continue
      if i >= maxevents: break
      if i % 10 == 0: print('Rank', rank, 'processing event', i)
      #print "Event #",rank*mylength+i," has id:",evt.get(EventId)

      # Fetch the detector data in the form appropriate for the
      # detector and the requested output mode.
      if 'Rayonix' in command_line.options.address or 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
        data = evt.get(psana.Camera.FrameV1, src)
        if data is None:
          print("No data")
          continue
        data = data.data16().astype(np.float64)
      elif command_line.options.as_pickle:
        data = evt.get(psana.ndarray_float64_3, src, 'image0')
      else:
        # get numpy array, 32x185x388
        from xfel.cftbx.detector.cspad_cbf_tbx import get_psana_corrected_data
        if command_line.options.raw_data:
          data = get_psana_corrected_data(psana_det, evt, use_default=False,
                                          dark=False, common_mode=None,
                                          apply_gain_mask=False,
                                          per_pixel_gain=False)
        else:
          if command_line.options.gain_mask_value is None:
            data = get_psana_corrected_data(psana_det, evt, use_default=True)
          else:
            data = get_psana_corrected_data(psana_det, evt, use_default=False,
                                            dark=True, common_mode=None,
                                            apply_gain_mask=True,
                                            gain_mask_value=command_line.options.gain_mask_value,
                                            per_pixel_gain=False)
      if data is None:
        print("No data")
        continue

      if command_line.options.background_pickle is not None:
        data -= background

      # Accumulate distance and wavelength sums; the 'x in locals()'
      # tests initialise each accumulator on the first contributing
      # event of this rank.
      if 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
        distance = np.array([0.0])
        wavelength = np.array([1.0])
      else:
        d = cspad_tbx.env_distance(address, run.env(),
                                   command_line.options.detz_offset)
        if d is None:
          print("No distance, using distance",
                command_line.options.detz_offset)
          assert command_line.options.detz_offset is not None
          if 'distance' not in locals():
            distance = np.array([command_line.options.detz_offset])
          else:
            distance += command_line.options.detz_offset
        else:
          if 'distance' in locals():
            distance += d
          else:
            distance = np.array([float(d)])

        # NOTE(review): when the wavelength is missing on a later
        # event nothing is added, so the per-event sums can count
        # fewer terms than nevent -- confirm this is intended.
        w = cspad_tbx.evt_wavelength(evt)
        if w is None:
          print("No wavelength")
          if 'wavelength' not in locals():
            wavelength = np.array([1.0])
        else:
          if 'wavelength' in locals():
            wavelength += w
          else:
            wavelength = np.array([w])

      t = cspad_tbx.evt_time(evt)
      if t is None:
        print("No timestamp, skipping shot")
        continue
      if 'timestamp' in locals():
        timestamp += t[0] + (t[1] / 1000)
      else:
        timestamp = np.array([t[0] + (t[1] / 1000)])

      # Running sum, sum of squares, maximum (and optional minimum)
      # projections; initialised from the first event's data.
      if 'sum' in locals():
        sum += data
      else:
        sum = np.array(data, copy=True)
      if 'sumsq' in locals():
        sumsq += data * data
      else:
        sumsq = data * data
      if 'maximum' in locals():
        maximum = np.maximum(maximum, data)
      else:
        maximum = np.array(data, copy=True)

      if command_line.options.do_minimum_projection:
        if 'minimum' in locals():
          minimum = np.minimum(minimum, data)
        else:
          minimum = np.array(data, copy=True)

      nevent += 1

    #sum the images across mpi cores
    if size > 1:
      print("Synchronizing rank", rank)

    totevent = np.zeros(nevent.shape)
    comm.Reduce(nevent, totevent)

    if rank == 0 and totevent[0] == 0:
      raise Sorry("No events found in the run")

    sumall = np.zeros(sum.shape).astype(sum.dtype)
    comm.Reduce(sum, sumall)

    sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
    comm.Reduce(sumsq, sumsqall)

    maxall = np.zeros(maximum.shape).astype(maximum.dtype)
    comm.Reduce(maximum, maxall, op=MPI.MAX)

    if command_line.options.do_minimum_projection:
      minall = np.zeros(maximum.shape).astype(minimum.dtype)
      comm.Reduce(minimum, minall, op=MPI.MIN)

    waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
    comm.Reduce(wavelength, waveall)

    distall = np.zeros(distance.shape).astype(distance.dtype)
    comm.Reduce(distance, distall)

    timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
    comm.Reduce(timestamp, timeall)

    if rank == 0:
      if size > 1:
        print("Synchronized")

      # Accumulating floating-point numbers introduces errors,
      # which may cause negative variances.  Since a two-pass
      # approach is unacceptable, the standard deviation is
      # clamped at zero.
      mean = sumall / float(totevent[0])
      variance = (sumsqall / float(totevent[0])) - (mean**2)
      variance[variance < 0] = 0
      stddev = np.sqrt(variance)

      wavelength = waveall[0] / totevent[0]
      distance = distall[0] / totevent[0]
      pixel_size = cspad_tbx.pixel_size
      saturated_value = cspad_tbx.cspad_saturated_value
      # Convert the mean epoch time back to a (seconds, milliseconds)
      # tuple and then to a human-readable timestamp.
      timestamp = timeall[0] / totevent[0]
      timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
      timestamp = cspad_tbx.evt_timestamp(timestamp)

      if command_line.options.as_pickle:
        extension = ".pickle"
      else:
        extension = ".cbf"

      dest_paths = [
        cspad_tbx.pathsubst(command_line.options.averagepath + extension,
                            evt, ds.env()),
        cspad_tbx.pathsubst(command_line.options.stddevpath + extension,
                            evt, ds.env()),
        cspad_tbx.pathsubst(command_line.options.maxpath + extension,
                            evt, ds.env())
      ]
      if command_line.options.do_minimum_projection:
        dest_paths.append(
          cspad_tbx.pathsubst(command_line.options.minpath + extension,
                              evt, ds.env()))

      dest_paths = [
        os.path.join(command_line.options.outputdir, path)
        for path in dest_paths
      ]
      if 'Rayonix' in command_line.options.address:
        # Rayonix: write image pickles with Rayonix geometry.
        all_data = [mean, stddev, maxall]
        if command_line.options.do_minimum_projection:
          all_data.append(minall)
        from xfel.cxi.cspad_ana import rayonix_tbx
        pixel_size = rayonix_tbx.get_rayonix_pixel_size(
          command_line.options.bin_size)
        beam_center = [
          command_line.options.override_beam_x,
          command_line.options.override_beam_y
        ]
        active_areas = flex.int([0, 0, mean.shape[1], mean.shape[0]])
        split_address = cspad_tbx.address_split(address)
        old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]
        for data, path in zip(all_data, dest_paths):
          print("Saving", path)
          d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=flex.double(data),
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=rayonix_tbx.rayonix_saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)
          easy_pickle.dump(path, d)
      elif 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
        # Spectrometer cameras: minimal pickle metadata.
        all_data = [mean, stddev, maxall]
        split_address = cspad_tbx.address_split(address)
        old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]
        if command_line.options.do_minimum_projection:
          all_data.append(minall)
        for data, path in zip(all_data, dest_paths):
          d = cspad_tbx.dpack(address=old_style_address,
                              data=flex.double(data),
                              distance=distance,
                              pixel_size=0.1,
                              timestamp=timestamp,
                              wavelength=wavelength)
          print("Saving", path)
          easy_pickle.dump(path, d)
      elif command_line.options.as_pickle:
        # CSPAD image pickles: assemble quadrant data into full
        # images using either XPP active areas or optical metrology.
        split_address = cspad_tbx.address_split(address)
        old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]

        xpp = 'xpp' in address.lower()
        if xpp:
          evt_time = cspad_tbx.evt_time(evt)  # tuple of seconds, milliseconds
          timestamp = cspad_tbx.evt_timestamp(evt_time)  # human readable format
          from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
          from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
          version_lookup = detector_format_version(
            old_style_address, reverse_timestamp(timestamp)[0])
          assert version_lookup is not None
          active_areas = xpp_active_areas[version_lookup]['active_areas']
          beam_center = [1765 // 2, 1765 // 2]
        else:
          if command_line.options.pickle_calib_dir is not None:
            metro_path = command_line.options.pickle_calib_dir
          elif command_line.options.pickle_optical_metrology:
            from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
            metro_path = get_calib_file_path(run.env(), address, run)
          else:
            metro_path = libtbx.env.find_in_repositories(
              "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
          sections = parse_calib.calib2sections(metro_path)
          beam_center, active_areas = cspad_tbx.cbcaa(
            cspad_tbx.getConfig(address, ds.env()), sections)

        # Adapter presenting a (quad index, data) pair through the
        # interface cspad_tbx image assembly expects.
        class fake_quad(object):
          def __init__(self, q, d):
            self.q = q
            self.d = d

          def quad(self):
            return self.q

          def data(self):
            return self.d

        if xpp:
          quads = [
            fake_quad(i, mean[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          mean = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                     active_areas, quads=quads)
          mean = flex.double(mean.astype(np.float64))

          quads = [
            fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          stddev = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                       active_areas, quads=quads)
          stddev = flex.double(stddev.astype(np.float64))

          quads = [
            fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          maxall = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                       active_areas, quads=quads)
          maxall = flex.double(maxall.astype(np.float64))

          if command_line.options.do_minimum_projection:
            quads = [
              fake_quad(i, minall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
            ]
            minall = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                         active_areas, quads=quads)
            minall = flex.double(minall.astype(np.float64))
        else:
          quads = [
            fake_quad(i, mean[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          mean = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                         quads=quads)
          mean = flex.double(mean.astype(np.float64))

          quads = [
            fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          stddev = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                           quads=quads)
          stddev = flex.double(stddev.astype(np.float64))

          quads = [
            fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          maxall = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                           quads=quads)
          maxall = flex.double(maxall.astype(np.float64))

          if command_line.options.do_minimum_projection:
            quads = [
              fake_quad(i, minall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
            ]
            minall = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                             quads=quads)
            minall = flex.double(minall.astype(np.float64))

        all_data = [mean, stddev, maxall]
        if command_line.options.do_minimum_projection:
          all_data.append(minall)

        for data, path in zip(all_data, dest_paths):
          print("Saving", path)

          d = cspad_tbx.dpack(active_areas=active_areas,
                              address=old_style_address,
                              beam_center_x=pixel_size * beam_center[0],
                              beam_center_y=pixel_size * beam_center[1],
                              data=data,
                              distance=distance,
                              pixel_size=pixel_size,
                              saturated_value=saturated_value,
                              timestamp=timestamp,
                              wavelength=wavelength)

          easy_pickle.dump(path, d)
      else:
        # load a header only cspad cbf from the slac metrology
        from xfel.cftbx.detector import cspad_cbf_tbx
        import pycbf
        base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, address)
        if base_dxtbx is None:
          raise Sorry("Couldn't load calibration file for run %d" % run.run())

        all_data = [mean, stddev, maxall]
        if command_line.options.do_minimum_projection:
          all_data.append(minall)

        for data, path in zip(all_data, dest_paths):
          print("Saving", path)
          cspad_img = cspad_cbf_tbx.format_object_from_data(
            base_dxtbx, data, distance, wavelength, timestamp, address,
            round_to_int=False)
          cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
            pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
nevents = min(len(times), max_events) # chop the list into pieces, depending on rank. This assigns each process # events such that the get every Nth event where N is the number of processes mytimes = [times[i] for i in xrange(nevents) if (i + rank) % size == 0] for i, t in enumerate(mytimes): evt = run.event(t) accepted, data, spectrum, dc_offset, all_data, all_data_raw = spf.filter_event( evt, filter_name) if not accepted: continue print cspad_tbx.evt_timestamp( cspad_tbx.evt_time(evt)), "Publishing data for event", i #header = "Event %d, m/f: %7.7f, f: %d"%(i, peak_max/flux, flux) header = "Event %d" % (i) if rank == 0: fee = Image(header, "FEE", data) # make a 2D plot publish.send("FEE", fee) # send to the display spectrumplot = XYPlot(header, 'summed 1D trace', range(data.shape[1]), spectrum) # make a 1D plot publish.send("SPECTRUM", spectrumplot) # send to the display fee = Image(header, "DC_OFFSET", dc_offset) # make a 2D plot publish.send("DC_OFFSET", fee) # send to the display
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.
  For now, log error and set bogus value to allow stuff to
  continue -- must check for the bogosity later

  XXX The dead time of the detector complicates checking how often
  things are updated!  Move this to the ring buffer?

  @param evt Event data object, a configure object
  @param env Environment object
  """
  from pyana.event import Event
  from acqiris_ext import acqiris_integrate, apd_hitfind

  super(mod_ledge, self).event(evt, env)
  if evt.status() != Event.Normal:
    pass  # XXX return -- Never skip because arrays will end up
          # different length, so ignore this?

  # Get the time of the event, in fractional seconds since the
  # epoch.  This is needed for all subsequent history-keeping, and
  # is hence determined first.  XXX Is history-keeping even
  # justified?
  time = cspad_tbx.evt_time(evt)
  if time is None:
    time = float("nan")
  else:
    # time is a (seconds, milliseconds) pair -- see the base-time
    # handling in beginjob().
    time = time[0] + time[1] / 1e3
  self._timestamp.append(time)

  # The repetition rate is currently just used for sanity checking.
  repetition_rate = cspad_tbx.evt_repetition_rate(evt)
  if repetition_rate is None:
    repetition_rate = float("nan")
  self._repetition_rate.append(repetition_rate)

  # Get the I0.  No need to warn about it here, it will be done once
  # the image is written out.
  I0 = cspad_tbx.evt_pulse_energy(evt)
  if I0 is None:
    I0 = float("nan")
  self._I0.append(I0)

  # Get the FEE energy.  Average the two readings before and after
  # attenuation separately.  XXX What are the units?  It look like
  # it could be mJ?
  # NOTE(review): 0.5 * sum(...) can never be None, so the two
  # is-None fallbacks below are dead code -- the None check would
  # have to be on evt.getFeeGasDet() itself.  Left as-is.
  fee_before = 0.5 * sum(evt.getFeeGasDet()[0:2])
  if fee_before is None:
    fee_before = float("nan")
  self._fee_before.append(fee_before)

  fee_after = 0.5 * sum(evt.getFeeGasDet()[2:4])
  if fee_after is None:
    fee_after = float("nan")
  self._fee_after.append(fee_after)

  # XXX Just a check: this is what xtcexplorer does:
  fee_energy = evt.get(xtc.TypeId.Type.Id_FEEGasDetEnergy)
  if fee_energy is not None:
    assert (
      evt.getFeeGasDet()[0] == fee_energy.f_11_ENRC
      and evt.getFeeGasDet()[1] == fee_energy.f_12_ENRC
      and evt.getFeeGasDet()[2] == fee_energy.f_21_ENRC
      and evt.getFeeGasDet()[3] == fee_energy.f_22_ENRC
    )

  """
  # For Bill: expect 84240 data points for r0054
  #
  # grep "^BILL_POINT" | cut -d' ' -f2,3,4,5,6 > t.dat
  # gnuplot> m=0.1 ; k=-0.01e-8; f(x) = k * x + m
  # gnuplot> fit f(x) "t.dat" using ($3):($5) via k,m
  if not hasattr(self, '_gmd_seqno'):
    self._gmd_seqno = 0
  gmd = evt.get(key=xtc.TypeId.Type.Id_GMD)
  if gmd is None:
    return
  acq_apd = evt.getAcqValue('SxrEndstation-0|Acqiris-1', 0, env)
  if acq_apd is not None and acq_apd.waveform() is not None:
    w = acq_apd.waveform()
    baseline = numpy.mean(w[0:(w.shape[0] / 5)])
    peak = numpy.min(w[(w.shape[0] / 5):w.shape[0]])
    self._gmd_seqno += 1
    print "BILL_POINT %d %s %s %s %s" % (self._gmd_seqno,
                                         repr(gmd.fBgValuePerSample),
                                         repr(gmd.fCorrectedSumPerPulse),
                                         repr(gmd.fRelativeEnergyPerPulse),
                                         repr(peak - baseline))
  return
  """

  """
  # XXX Record injector motion--note that they cannot be added--see
  # Ray's email.
  injector_micos_xyz = cspad_tbx.env_pv3_get(
    env,
    ['SXR:EXP:MZM:%02d:ENCPOSITIONGET' % i for i in [1, 2, 3]])
  if injector_micos_xyz is None:
    self.logger.error("No micos injector motor positions")
    injector_micos_xyz = (float('nan'), float('nan'), float('nan'))
  self._injector_micos_xyz.append(injector_micos_xyz)

  injector_rough_xyz = cspad_tbx.env_pv3_get(
    env,
    ['SXR:EXP:MMS:%02d.RBV' % i for i in [1, 2, 3]])
  if injector_rough_xyz is None:
    self.logger.error("No rough injector motor positions")
    injector_rough_xyz = (float('nan'), float('nan'), float('nan'))
  self._injector_rough_xyz.append(injector_rough_xyz)

  # Injector power supplies XXX There is a third PSU, no?
  #
  # The -5kV supply
  # SXR:EXP:SHV:VHS6:CH0:VoltageMeasure
  # SXR:EXP:SHV:VHS6:CH0:CurrentMeasure
  #
  # The plus 5kV supply
  # SXR:EXP:SHV:VHS2:CH0:VoltageMeasure
  # SXR:EXP:SHV:VHS2:CH0:CurrentMeasure
  injector_plus_current = cspad_tbx.env_pv1_get(
    env, 'SXR:EXP:SHV:VHS6:CH0:CurrentMeasure')
  if injector_plus_current is None:
    self.logger.error("No plus-motor current")
    injector_plus_current = -1
  self._injector_plus_current.append(injector_plus_current)

  injector_plus_voltage = cspad_tbx.env_pv1_get(
    env, 'SXR:EXP:SHV:VHS6:CH0:VoltageMeasure')
  if injector_plus_voltage is None:
    self.logger.error("No plus-motor voltage")
    injector_plus_voltage = -1
  self._injector_plus_voltage.append(injector_plus_voltage)

  injector_minus_current = cspad_tbx.env_pv1_get(
    env, 'SXR:EXP:SHV:VHS2:CH0:CurrentMeasure')
  if injector_minus_current is None:
    self.logger.error("No minus-motor current")
    injector_minus_current = -1
  self._injector_minus_current.append(injector_minus_current)

  injector_minus_voltage = cspad_tbx.env_pv1_get(
    env, 'SXR:EXP:SHV:VHS2:CH0:VoltageMeasure')
  if injector_minus_voltage is None:
    self.logger.error("No minus-motor voltage")
    injector_minus_voltage = -1
  self._injector_minus_voltage.append(injector_minus_voltage)
  """

  """
  # The spectrometer motor positions are just used for sanity
  # checking.
  spectrometer_xyz = cspad_tbx.env_spectrometer_xyz_sxr(env)
  if spectrometer_xyz is None:
    self.logger.error("No spectrometer motor positions")
    spectrometer_xyz = (float('nan'), float('nan'), float('nan'))
  self._spectrometer_xyz.append(spectrometer_xyz)
  """

  # Get the pulse energy after monochromator, and fall back on the
  # pre-monochromator energy if the former is absent.  Record in
  # list for mean and stddev.  XXX Verify that the wavelength after
  # the monochromator is updated at around 1 Hz.
  #
  # For the publication an offset and scale were calibrated.
  wavelength = cspad_tbx.env_wavelength_sxr(evt, env)
  if wavelength is None:
    wavelength = cspad_tbx.evt_wavelength(evt)
  if wavelength is None:
    energy = float("nan")
  else:
    # hc in eV * Angstrom.
    energy = 12398.4187 / wavelength
  self._energy.append(energy)
  self._history_energy.push(time, energy)  # XXX Not necessary?!

  """
  # Laser shutters XXX need to sort out laser numbering XXX Laser
  # power stuff?  XXX Position of polarizer/analyser
  shutters = cspad_tbx.env_laser_shutters(env)
  #print "Got shutters", shutters
  """

  # Read out the diode traces from the via the Acqiris.  XXX In any
  # case, the APD and the more sensitive Opto Diode in the monitor
  # tank (i.e. the transmission diode) should be anti-correlated, so
  # check it!  The entire trace always covers 10 us.  XXX Could this
  # be figured out from xtc.TypeId.Type.Id_AcqConfig?
  #
  # XXX This appears to be suboptimal: look at the
  # skewness-transform for the APD to sort this out.
  acq_apd = evt.getAcqValue("SxrEndstation-0|Acqiris-1", 0, env)
  acq_apd_integral = float("nan")
  if acq_apd is not None:
    waveform = acq_apd.waveform()
    if waveform is not None:
      # With a 40k-point trace, one should integrate from 18200 to
      # 18400.
      waveform = waveform.flatten()
      nmemb = len(waveform) // 200
      if nmemb > 0:
        acq_apd_integral = acqiris_integrate(
          flex.double(waveform), 91 * nmemb, 100 * nmemb, nmemb)
  self._acq_apd_integral.append(acq_apd_integral)

  # NOTE(review): if evt.expNum() is neither 208 nor 363,
  # acq_opto_diode is never bound and the test below raises
  # NameError.  Presumably only those two experiments ever reach
  # this module -- confirm before reuse.
  if evt.expNum() == 208:
    # Opto diode address for L632.
    acq_opto_diode = evt.getAcqValue("SxrEndstation-0|Acqiris-1", 1, env)
  elif evt.expNum() == 363:
    # Opto diode address for LB68.
    acq_opto_diode = evt.getAcqValue("SxrEndstation-0|Acqiris-2", 2, env)
  acq_opto_diode_integral = float("nan")
  if acq_opto_diode is not None:
    waveform = acq_opto_diode.waveform()
    if waveform is not None:
      # With a 40k-point trace, one should integrate from 16000 to
      # 24000.  With a 20k-point trace, a suitable integration
      # region is bounded by 8000 and 12000.  There is no need for
      # thresholding, because the integral of the Opto Diode will
      # not be used for hit finding.  XXX What are the "misses" we
      # record on the Opto Diode?  XXX The direct beam is completely
      # gone after it hits the sample, because soft X-rays.
      waveform = waveform.flatten()
      nmemb = len(waveform) // 5
      if nmemb > 0:
        acq_opto_diode_integral = acqiris_integrate(
          flex.double(waveform), 2 * nmemb, 4 * nmemb, nmemb)
  self._acq_opto_diode_integral.append(acq_opto_diode_integral)

  # Sanity check: verify that the timestamps for the two Acqiris
  # traces are similar enough.
  # NOTE(review): "numpy.any(...) < 1e-6" compares a bool against
  # 1e-6; the intent looks like numpy.all(abs(diff) < 1e-6) --
  # confirm before touching.
  if acq_apd is not None and acq_opto_diode is not None:
    assert (
      len(acq_apd.timestamps()) == len(acq_opto_diode.timestamps())
      and numpy.any(numpy.abs(acq_apd.timestamps()
                              - acq_opto_diode.timestamps())) < 1e-6
    )
  # self.logger.info("DIODE INTEGRALS: %f %f %f" % (I0, acq_apd_integral, acq_opto_diode_integral))

  """
  import matplotlib.pyplot as plt

  hit_array_apd = apd_hitfind(
    flex.double(acq_apd.waveform()),
    len(acq_apd.waveform()) // 5)
  hit_array_opto_diode = apd_hitfind(
    flex.double(acq_opto_diode.waveform()),
    len(acq_opto_diode.waveform()) // 5)

  fig = plt.figure()
  ax = fig.add_subplot(111)
  #ax.plot(
  #  range(len(acq_apd.timestamps())), acq_apd.waveform())
  ax.plot(
    range(len(acq_opto_diode.timestamps())), acq_opto_diode.waveform()[0, :])
  plt.show()

  fig = plt.figure()
  ax = fig.add_subplot(111)
  #ax.plot(
  #  acq_apd.timestamps()[0:len(hit_array_apd)], hit_array)
  ax.plot(
    acq_opto_diode.timestamps()[0:len(hit_array_opto_diode)], hit_array)
  plt.show()
  """

  # Determine whether the beam hit the sample, and register the
  # outcome.  If not using any diodes for hit-finding, every shot is
  # assumed to be a hit.  XXX Unfortunately, this crucial piece is
  # very unreliable.  The threshold for the APD needs to be
  # verified--inspect all the histograms.  XXX hitfind_flags is
  # probable better as a module parameter.
  # hitfind_flags = 0x3
  hitfind_flags = 0
  hit = False
  if not hitfind_flags:
    hit = True
  elif hitfind_flags & 0x1 and acq_apd_integral > 0.2:
    hit = True
  self._hit.append(hit)

  # Always proceed all the way through (even if some shots have
  # invalid values of e.g. I0) because images are precious.  XXX
  # Must reset counters before returning!  XXX What about skipping
  # all of the above if display is True?
  if self.cspad_img is not None:
    self._nframes += 1

    """
    # The spectrometer should not move!
    t = (self._spectrometer_xyz -
         self._spectrometer_xyz.mean()).rms_length()
    print "Spectrometer displacement", t

    # Fine/rough motor position deviations from the mean.  See Ray's
    # email.
    t = (self._injector_micos_xyz -
         self._injector_micos_xyz.mean()).rms_length()
    print "Injector micos displacement", t

    t = (self._injector_rough_xyz -
         self._injector_rough_xyz.mean()).rms_length()
    print "Injector rough displacement", t

    # Injector motor position means and deviations
    if self._injector_plus_current.size() > 1:
      t = flex.mean_and_variance(self._injector_plus_current)
      print "Injector plus current mean %10e stddev %10e" % \
        (t.mean(), t.unweighted_sample_standard_deviation())
    if self._injector_plus_voltage.size() > 1:
      t = flex.mean_and_variance(self._injector_plus_voltage)
      print "Injector plus voltage mean %10e stddev %10e" % \
        (t.mean(), t.unweighted_sample_standard_deviation())

    if self._injector_minus_current.size() > 1:
      t = flex.mean_and_variance(self._injector_minus_current)
      print "Injector minus current mean %10e stddev %10e" % \
        (t.mean(), t.unweighted_sample_standard_deviation())
    if self._injector_minus_voltage.size() > 1:
      t = flex.mean_and_variance(self._injector_minus_voltage)
      print "Injector minus voltage mean %10e stddev %10e" % \
        (t.mean(), t.unweighted_sample_standard_deviation())
    """

    # Energy statistics are collected from all shots, regardless of
    # whether they are hits or not.  Since this statistic mentions
    # the frame number, it should be reported first.  XXX The energy
    # should have a really small standard deviation.  Check
    # self._energy.size() and self._history_energy.frequency()  XXX
    # verify that it works for one data point.
    (energy_mean, energy_stddev, energy_nmemb, n) = self._filtered_stats(
      lambda x: not math.isnan(x) and x > 0, self._energy)
    if n > 0:
      self.logger.warning("%d shots have undefined energy" % n)

    (I0_mean, I0_stddev, I0_nmemb, n) = self._filtered_stats(
      lambda x: not math.isnan(x), self._I0)
    if n > 0:
      self.logger.warning("%d shots have undefined I0" % n)

    self.logger.info(
      "Frame %d: E=%.3f+/-%.3f (N=%d) I0=%.0f+/-%.0f (N=%d)" %
      (self._nframes,
       energy_mean, energy_stddev, energy_nmemb,
       I0_mean, I0_stddev, I0_nmemb))

    # Sanity check: unless changed while integrating the frame, the
    # repetition rate should have a standard deviation of zero.
    dt = self._timestamp[-1] - self._timestamp[0]
    rr_mean = rr_observed = rr_stddev = 0
    if dt > 0:
      rr_observed = (len(self._timestamp) - 1) / dt
      # Python 2: filter() returns a list here, so len() is valid.
      rr = filter(lambda x: not math.isnan(x) and x > 0,
                  self._repetition_rate)
      if len(rr) > 1:
        rr_stats = flex.mean_and_variance(flex.double(rr))
        rr_mean = rr_stats.mean()
        rr_stddev = rr_stats.unweighted_sample_standard_deviation()
    self.logger.info(
      "Repetition rate: %.3f Hz (observed), %.3f+/-%.3f Hz (expected)" %
      (rr_observed, rr_mean, rr_stddev))

    # Compare observed and configured exposure time.
    # NOTE(review): config.exposureTime() in the log call below is
    # evaluated even when config is None, which would raise -- the
    # guard only protects the exposure_time computation.
    config = cspad_tbx.getConfig(self.address, env)
    exposure_time = 0
    if config is not None and dt > 0 and len(self._timestamp) > 0:
      exposure_time = dt * (len(self._timestamp) + 1) / len(self._timestamp)
    self.logger.info(
      "Exposure time: %.3f s (observed), %.3f s (configured)" %
      (exposure_time, config.exposureTime()))

    # Compute the leading dead time, the time between starting the
    # readout of the previous frame and the arrival of the shot
    # immediately following it.  This is an interesting statistic,
    # no matter what.  XXX Maybe look at its distribution?
    dead_time = 0
    if rr_observed > 0 and hasattr(self, "_previous_readout_time"):
      dead_time = self._timestamp[0] - self._previous_readout_time - 1 / rr_observed
      if math.isnan(dead_time):
        dead_time = 0
    self.logger.info("Dead time: %.3f s" % dead_time)
    self._previous_readout_time = self._timestamp[-1]
    assert time == self._timestamp[-1]  # XXX ZAP once one run survives it!

    # Flag blank images (i.e. images that had no hits), because
    # these may interesting for background subtraction.
    # NOTE(review): 100 * hits / self._hit.size() is integer
    # division under Python 2, so the %.2f%% hit rate is truncated
    # to a whole percent.
    hits = self._hit.count(True)
    self.logger.info("Hit rate: %d/%d (%.2f%%)" % (
      hits, self._hit.size(), 100 * hits / self._hit.size()))
    if hits == 0:
      self.logger.info("Frame %d is blank" % self._nframes)

    # Get the normalisation factor by summing up I0 for all hits.
    # Invalid and non-positive values of I0 are treated as zeroes.
    # XXX Make this kind of summing a function of its own.
    I0 = sum(filter(lambda x: not math.isnan(x) and x > 0,
                    self._I0.select(self._hit)))
    I0_all = sum(filter(lambda x: not math.isnan(x) and x > 0,
                        self._I0))

    fee_before_all = sum(filter(lambda x: not math.isnan(x) and x > 0,
                                self._fee_before))
    fee_after_all = sum(filter(lambda x: not math.isnan(x) and x > 0,
                               self._fee_after))

    # Register the template to the image and locate the regions of
    # interest based on the registration parameters.  XXX Should
    # also give contrast: fit 2D-Gaussian to peak and report its
    # standard deviations and fit?
    if self._template is not None:
      gamma = lewis(self._template, self.cspad_img)
      p = flex.max_index(gamma)
      peak = (p // gamma.focus()[1] - self._template.focus()[0] + 1,
              p % gamma.focus()[1] - self._template.focus()[1] + 1)

      # """ ### REFERENCE CHECK ###
      from os.path import dirname, isdir, join
      from scipy import io

      mat_dirname = dirname(
        cspad_tbx.pathsubst(self._mat_path, evt, env,
                            frame_number=self._nframes))
      if not isdir(mat_dirname):
        makedirs(mat_dirname)

      io.savemat(
        file_name=join(mat_dirname, "cross-check-%05d.mat" % self._nframes),
        mdict=dict(
          image=self.cspad_img.as_numpy_array(),
          template=self._template.as_numpy_array(),
          gamma=gamma.as_numpy_array(),
          peak=numpy.array(peak)),
        appendmat=False,
        do_compression=True,
        oned_as="column")

      return
      ### REFERENCE CHECK ###
      # """
    else:
      # Alternative: position everything with respect to the frame
      # origin.
      peak = (0, 0)

    # XXX Come up with a better way to handle the offsets!  They
    # really do depend on the template, and should therefore be
    # packaged with it.
    self.logger.info("Template registration anchor point (%d, %d)" %
                     (peak[0], peak[1]))

    roi = []
    if evt.expNum() == 208:
      # Regions of interest for L632 (experiment number 208).  XXX
      # Could perhaps migrate the template matching here instead?

      # The left, middle, and right manganese signals.  XXX Extend the
      # rightmost ROI three pixels in upward direction (see runs 145
      # and onwards, also note narrower slit)?
      roi.append((peak[0] + 59, peak[1] - 24, 12, 5))
      roi.append((peak[0] + 61, peak[1] + 28, 12, 4))
      roi.append((peak[0] + 61, peak[1] + 79, 12, 5))

      # Two background regions between the manganese spots, with the
      # same total area as the signal.
      roi.append((peak[0] + 62, peak[1] + 1, 8, 8))
      roi.append((peak[0] + 63, peak[1] + 51, 8, 8))

      # The left and right direct reflections from the Si substrate
      # (i.e. the areas between the zone plates).  These were the
      # features used for template registration.
      roi.append((peak[0], peak[1], 40, 10))
      roi.append((peak[0], peak[1] + 50, 40, 9))

      # Spot between the direct reflections.  XXX What is this?
      roi.append((peak[0] + 1, peak[1] + 23, 22, 13))

      # The horizontal slit, where the direct reflection occurs.  This
      # is fixed.  XXX Verify this!
      roi.append((22, 0, 41, 128))

      # Background stripe, below the manganese spots.  This is fixed
      # to the bottom of the detector.
      roi.append((104, 0, 20, 128))

    elif evt.expNum() == 363:
      # Regions of interest for LB68 (experiment number 363).
      # 0-pixel are active, 255-pixel are inactive
      from scipy.misc import imread

      # Dec 5, 2013 (09:00 - 21:00): initial estimates from r0010
      """
      roi.append((peak[0] + 14, peak[1] + 138 + 23, 25, 50 - 25))
      roi.append((peak[0] + 45, peak[1] + 138 + 23, 25, 50 - 25))
      roi.append((peak[0] + 78, peak[1] + 137 + 23, 25, 50 - 25))
      roi.append((peak[0] + 111, peak[1] + 137 + 23, 25, 50 - 25))
      roi.append((peak[0] + 144, peak[1] + 137 + 23, 25, 50 - 25))
      roi.append((peak[0] + 177, peak[1] + 136 + 23, 25, 50 - 25))
      roi.append((peak[0] + 210, peak[1] + 136 + 23, 25, 50 - 25))
      roi.append((peak[0] + 243, peak[1] + 136 + 23, 25, 50 - 25))
      roi.append((peak[0] + 278, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 312, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 344, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 376, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 408, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 442, peak[1] + 135 + 23, 25, 50 - 25))
      roi.append((peak[0] + 475, peak[1] + 135 + 23, 25, 50 - 25))
      """

      # Dec 6, 2013 (09:00 - 21:00): rough estimates
      """
      roi.append((peak[0] + 0, peak[1] + 25, 512, 25))    # bkg
      roi.append((peak[0] + 0, peak[1] + 135, 512, 25))   # oxygen
      roi.append((peak[0] + 0, peak[1] + 160, 512, 25))   # signal
      roi.append((peak[0] + 0, peak[1] + 300, 512, 130))  # zeroth order
      """

      # Dec 7, 2013 (09:00 - 21:00): overlap between oxygen and
      # signal.  Will loose some signal.
      """
      roi.append((peak[0] + 0, peak[1] + 25, 512, 25))    # bkg
      roi.append((peak[0] + 0, peak[1] + 135, 512, 50))   # oxygen
      roi.append((peak[0] + 0, peak[1] + 185, 512, 40))   # signal
      roi.append((peak[0] + 0, peak[1] + 270, 512, 170))  # zeroth order
      """

      """
      # Dec 7 2013 (09:00 - 21:00): binary masks stored in PNG
      # images.
      roi.append((peak[0] + 0, peak[1] + 25, 512, 25))    # bkg
      roi.append((peak[0] + 0, peak[1] + 135, 512, 25))   # oxygen
      #roi_image = flex.float(
      #  imread('/reg/neh/home1/hattne/myrelease/LB68-r0039-max-mask.png',
      #         flatten=True))
      #roi_image = flex.float(
      #  imread('/reg/neh/home1/hattne/myrelease/LB68-r0039-std-mask.png',
      #         flatten=True))
      roi_image = flex.float(
        imread('/reg/neh/home1/hattne/myrelease/LB68-r0052-avg-mask.png',
               flatten=True))
      roi_image = (255 - roi_image)

      #roi.append((0, 0, self.cspad_img.focus()[0], self.cspad_img.focus()[1]))
      roi.append(roi_image)

      roi.append((peak[0] + 0, peak[1] + 270, 512, 170))  # zeroth order
      """

      # Dec 9, 2013 (09:00 - 21:00)
      # """
      roi.append((peak[0] + 0, peak[1] + 25, 512, 25))    # bkg
      roi.append((peak[0] + 0, peak[1] + 135, 512, 25))   # oxygen
      # roi.append((peak[0] + 0, peak[1] + 160, 512, 25))   # signal
      roi_image = flex.float(
        imread("/reg/neh/home1/hattne/myrelease/LB68-r0067-max-mask.png",
               flatten=True))
      roi.append(roi_image)

      roi.append((peak[0] + 0, peak[1] + 240, 512, 180))  # zeroth order
      # """

    else:
      self.logger.error(
        "No regions of interest for %s (experiment number %d)" %
        (env.experiment(), evt.expNum()))

    # Clip the regions of interest to the actual image.  If the ROI
    # does not overlap with the image at all, set its width and
    # height to zero.  XXX Do the integration here as well?
    for i in range(len(roi)):
      if not isinstance(roi[i], tuple):
        continue

      r = roi[i]
      if (r[0] + r[2] < 0 or r[0] >= self.cspad_img.focus()[0] or
          r[1] + r[3] < 0 or r[1] >= self.cspad_img.focus()[1]):
        roi[i] = (r[0], r[1], 0, 0)
        continue

      r = roi[i]
      if r[0] < 0:
        roi[i] = (0, r[1], r[2] + r[0], r[3])

      r = roi[i]
      if r[1] < 0:
        roi[i] = (r[0], 0, r[2], r[3] + r[1])

      r = roi[i]
      if r[0] + r[2] > self.cspad_img.focus()[0]:
        roi[i] = (r[0], r[1], self.cspad_img.focus()[0] - r[0], r[3])

      r = roi[i]
      if r[1] + r[3] > self.cspad_img.focus()[1]:
        roi[i] = (r[0], r[1], r[2], self.cspad_img.focus()[1] - r[1])

    # Sum up intensities in all regions of interest, and keep track
    # of the actual number of pixels summed.  The common_mode module
    # takes care of dark-subtraction.  XXX Would like to estimate
    # sigma for spot, like in spotfinder/LABELIT.
    I = flex.double(len(roi))
    I_nmemb = flex.int(len(roi))
    for i in range(len(roi)):
      if isinstance(roi[i], flex.float):
        # Mask-image ROI: pixels below 128 in the mask are active.
        sel = roi[i].as_1d() < 128
        I[i] = flex.sum(self.cspad_img.as_1d().select(sel))
        I_nmemb[i] = sel.count(True)
        continue

      if roi[i][2] <= 0 or roi[i][3] <= 0:
        I[i] = 0
        I_nmemb[i] = 0
      else:
        I[i] = flex.sum(self.cspad_img.matrix_copy_block(
          i_row=roi[i][0],
          i_column=roi[i][1],
          n_rows=roi[i][2],
          n_columns=roi[i][3]))
        I_nmemb[i] = roi[i][2] * roi[i][3]
        """
        # Sanity check: white out the region of interest.
        self.cspad_img.matrix_paste_block_in_place(
          block=flex.double(flex.grid(roi[i][2], roi[i][3])),
          i_row=roi[i][0],
          i_column=roi[i][1])
        """

    acq_apd_sum = sum(filter(lambda x: not math.isnan(x) and x > 0,
                             self._acq_apd_integral.select(self._hit)))
    acq_opto_diode_sum = sum(filter(
      lambda x: not math.isnan(x) and x > 0,
      self._acq_opto_diode_integral.select(self._hit)))
    acq_apd_sum_all = sum(filter(lambda x: not math.isnan(x) and x > 0,
                                 self._acq_apd_integral))
    acq_opto_diode_sum_all = sum(filter(lambda x: not math.isnan(x) and x > 0,
                                        self._acq_opto_diode_integral))

    # Append the data point to the stream: shots, hits, energy, and
    # I.  XXX OrderedDict requires Python 2.7, could fall back on
    # regular Dict at the price of non-deterministic column order.
    from collections import OrderedDict
    csv_dict = OrderedDict([
      ("n_frames", self._hit.size()),
      ("n_hits", hits),
      ("I0", I0),
      ("I0_all", I0_all),
      ("fee_before_all", fee_before_all),
      ("fee_after_all", fee_after_all),
      ("energy_mean", energy_mean),
      ("acq_apd_sum", acq_apd_sum),
      ("acq_apd_sum_all", acq_apd_sum_all),
      ("acq_opto_diode_sum", acq_opto_diode_sum),
      ("acq_opto_diode_sum_all", acq_opto_diode_sum_all)])
    for (i, item) in enumerate(zip(roi, I, I_nmemb)):
      # NOTE(review): assumes at most four ROIs in this fixed order;
      # a fifth would raise IndexError on the name tuple.
      key = "roi_" + ("bkg", "oxygen", "manganese", "zeroth_order")[i]
      csv_dict["%s_nmemb" % key] = item[2]
      if isinstance(item[0], tuple):
        csv_dict["%s_ss_start" % key] = item[0][0]
        csv_dict["%s_fs_start" % key] = item[0][1]
        csv_dict["%s_ss_size" % key] = item[0][2]
        csv_dict["%s_fs_size" % key] = item[0][3]
      else:
        csv_dict["%s_ss_start" % key] = 0
        csv_dict["%s_fs_start" % key] = 0
        csv_dict["%s_ss_size" % key] = item[0].focus()[0]
        csv_dict["%s_fs_size" % key] = item[0].focus()[1]
      csv_dict["%s_I" % key] = item[1]

    # XXX assert that keys match up with what's in the file already?
    # Or exploit the error-reporting mechanism already implemented?
    # Write the header.  XXX How to control the order of the
    # columns?
    if not hasattr(self, "_csv"):
      from csv import DictWriter
      self._csv = DictWriter(self._stream_table, csv_dict.keys())
      self._csv.writerow({key: key for key in csv_dict.keys()})
    self._csv.writerow(csv_dict)

    # Output the non-normalised image and all other relevant data to
    # a binary MATLAB file.  XXX What if scipy is not available?
    from os import makedirs, path
    from scipy import io
    mat_path = cspad_tbx.pathsubst(self._mat_path, evt, env,
                                   frame_number=self._nframes)
    if not path.isdir(path.dirname(mat_path)):
      makedirs(path.dirname(mat_path))

    io.savemat(
      file_name=mat_path,
      mdict=dict(
        DATA=self.cspad_img.as_numpy_array(),
        DIODES=numpy.array((acq_apd_sum, acq_apd_sum_all,
                            acq_opto_diode_sum, acq_opto_diode_sum_all)),
        ENERGY=energy_mean,
        HITS=numpy.array((hits, self._hit.size())),
        I0=numpy.array((I0, I0_all)),
        INTENSITIES=numpy.array(I),
        ROIS=numpy.array([r for r in roi if isinstance(r, tuple)])),
      appendmat=False,
      do_compression=True,
      oned_as="column")

    # Optionally update the image in the viewer.  See mod_view.
    if self._display:
      from time import localtime, strftime

      # Copy over regions of interest to shared multiprocessing
      # array.  XXX Flip to honour wxPython convention.
      for i in range(len(roi)):
        if not isinstance(roi[i], tuple):
          continue
        self._roi[4 * i + 0] = roi[i][1]
        self._roi[4 * i + 1] = roi[i][0]
        self._roi[4 * i + 2] = roi[i][3]
        self._roi[4 * i + 3] = roi[i][2]

      time_str = strftime("%H:%M:%S", localtime(evt.getTime().seconds()))
      title = "r%04d@%s: frame %d on %s" % (
        evt.run(), time_str, self._nframes, self.address)

      # XXX No distance in the Andor experiment.  So don't bother
      # with the fictional beam center, distance, and saturation
      # value?  See also mod_average.endjob()
      img_obj = (dict(BEAM_CENTER=(0, 0),
                      DATA=self.cspad_img,
                      DETECTOR_ADDRESS=self.address,
                      DISTANCE=10,  # XXX Evil kludge to keep dxtbx happy!
                      PIXEL_SIZE=13.5e-3,  # XXX Hard-coded, again!
                      SATURATED_VALUE=10000,
                      TIME_TUPLE=cspad_tbx.evt_time(evt),
                      WAVELENGTH=12398.4187 / energy),
                 title)

      while not self._queue.empty():
        if not self._proc.is_alive():
          evt.setStatus(Event.Stop)
          return
      while True:
        try:
          self._queue.put(img_obj, timeout=1)
          break
        except Exception:  # Queue.Full:
          pass

    self._reset_counters()
    return
def event(self, evt, evn): """The event() function puts a "skip_event" object with value @c True into the event if the shot is to be skipped. @param evt Event data object, a configure object @param env Environment object """ #import pdb; pdb.set_trace() if (evt.get("skip_event")): return # check if FEE data is one or two dimensional data = evt.get(Camera.FrameV1, self.src) if data is None: one_D = True data = evt.get(Bld.BldDataSpectrometerV1, self.src) else: one_D = False # get event timestamp timestamp = cspad_tbx.evt_timestamp( cspad_tbx.evt_time(evt)) # human readable format if data is None: self.nnodata += 1 #self.logger.warning("event(): No spectrum data") evt.put(skip_event_flag(), "skip_event") if timestamp is None: evt.put(skip_event_flag(), "skip_event") self.logger.warning("event(): No TIMESTAMP, skipping shot") elif data is not None: self.nshots += 1 # get data as array and split into two half to find each peak if one_D: data = np.array(data.hproj().astype(np.float64)) if 'dark' in locals(): data = data - self.dark spectrum = data spectrum1 = data[:data.shape[0] // 2] spectrum2 = data[data.shape[0] // 2:] else: data = np.array(data.data16().astype(np.float64)) if 'dark' in locals(): data = data - self.dark data_split1 = data[:, :data.shape[1] // 2] data_split2 = data[:, data.shape[1] // 2:] # make a 1D trace of entire spectrum and each half to find peaks spectrum = np.sum(data, 0) / data.shape[0] spectrum1 = np.sum(data_split1, 0) / data_split1.shape[0] spectrum2 = np.sum(data_split2, 0) / data_split2.shape[0] peak_one = np.max(spectrum1) peak_two = np.max(spectrum2) peak_one_position = np.argmax(spectrum1) peak_two_position = np.argmax(spectrum2) + len(spectrum2) # define the limits of the regions between the two peaks peak_one_lower_limit = self.peak_one_position_min - self.peak_one_width peak_one_upper_limit = self.peak_one_position_max + self.peak_one_width peak_two_lower_limit = self.peak_two_position_min - self.peak_two_width peak_two_upper_limit 
= self.peak_two_position_max + self.peak_two_width # the x-coordinate of the weighted center of peak region weighted_peak_one_positions = [] for i in range(peak_one_lower_limit, peak_one_upper_limit): weighted_peak_one_positions.append(spectrum[i] * i) weighted_sum_peak_one = sum(weighted_peak_one_positions) weighted_peak_one_center_position = weighted_sum_peak_one // sum( spectrum[peak_one_lower_limit:peak_one_upper_limit]) weighted_peak_two_positions = [] for i in range(peak_two_lower_limit, peak_two_upper_limit): weighted_peak_two_positions.append(spectrum[i] * i) weighted_sum_peak_two = sum(weighted_peak_two_positions) weighted_peak_two_center_position = weighted_sum_peak_two // sum( spectrum[peak_two_lower_limit:peak_two_upper_limit]) # normalized integrated peaks int_peak_one = np.sum( spectrum[peak_one_lower_limit:self.peak_one_position_max] ) / len(spectrum[peak_one_lower_limit:self.peak_one_position_max]) int_peak_two = np.sum( spectrum[peak_two_lower_limit:self.peak_two_position_max] ) / len(spectrum[peak_two_lower_limit:self.peak_two_position_max]) # normalized integrated regions between the peaks int_left_region = np.sum(spectrum[0:peak_one_lower_limit]) / len( spectrum[0:peak_one_lower_limit]) int_middle_region = np.sum( spectrum[peak_one_upper_limit:peak_two_lower_limit]) / len( spectrum[peak_one_upper_limit:peak_two_lower_limit]) int_right_region = np.sum(spectrum[peak_two_upper_limit:]) / len( spectrum[:peak_two_upper_limit]) # now to do the filtering if peak_one / peak_two < self.peak_ratio or peak_one / peak_two > 1 / self.peak_ratio: print "event(): too low" evt.put(skip_event_flag(), "skip_event") return if weighted_peak_two_center_position < self.peak_two_position_min or weighted_peak_two_center_position > self.peak_two_position_max: print "event(): out of range high energy peak" evt.put(skip_event_flag(), "skip_event") return if weighted_peak_one_center_position < self.peak_one_position_min or weighted_peak_one_center_position > 
self.peak_one_position_max: print "event(): out of range low energy peak" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_left_region / int_peak_one > self.normalized_peak_to_noise_ratio): print "event(): noisy left of low energy peak" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_middle_region / int_peak_one > self.normalized_peak_to_noise_ratio): print "event(): noisy middle" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_middle_region / int_peak_two > self.normalized_peak_to_noise_ratio): print "event(): noisy middle" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_right_region / int_peak_two > self.normalized_peak_to_noise_ratio): print "event(): noisy right of high energy peak" evt.put(skip_event_flag(), "skip_event") return #iron edge at 738 pixels on FFE detetor if one_D and (spectrum[self.iron_edge_position] >= spectrum[weighted_peak_one_center_position]): print "event(): peak at iron edge" evt.put(skip_event_flag(), "skip_event") return if one_D and (spectrum[self.iron_edge_position] >= spectrum[weighted_peak_two_center_position]): print "event(): peak at iron edge" evt.put(skip_event_flag(), "skip_event") return #self.logger.info("TIMESTAMP %s accepted" %timestamp) self.naccepted += 1 self.ntwo_color += 1 print "%d Two Color shots" % self.ntwo_color
def event(self,evt,evn):
  """The event() function puts a "skip_event" object with value @c True
  into the event if the shot is to be skipped.

  Reads a spectrometer trace (2D camera frame, or 1D FEE spectrum when
  no camera frame is present), subtracts an optional dark, locates two
  spectral peak regions by weighted centroid, and skips the event
  unless the normalized peak ratio and the peak-to-background cuts
  pass.  Accepted shots increment naccepted/ntwo_color.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  # NOTE(review): the parameter is spelled `evn` although the docstring
  # documents `env`; the framework passes it positionally so behavior is
  # unaffected, but the spelling should be confirmed/unified.
  #import pdb; pdb.set_trace()
  if (evt.get("skip_event")):
    return
  # check if FEE data is one or two dimensional
  data = evt.get(Camera.FrameV1, self.src)
  if data is None:
    one_D = True
    data = evt.get(Bld.BldDataSpectrometerV1, self.src)
  else:
    one_D = False
  # get event timestamp
  timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)) # human readable format

  if data is None:
    # no spectrum data at all: count it and mark the shot skipped
    self.nnodata +=1
    #self.logger.warning("event(): No spectrum data")
    evt.put(skip_event_flag(),"skip_event")

  if timestamp is None:
    evt.put(skip_event_flag(),"skip_event")
    #self.logger.warning("event(): No TIMESTAMP, skipping shot")

  elif data is not None:
    self.nshots +=1
    # get data as array and split into two half to find each peak
    if one_D:
      # filtering out outlier spikes in FEE data: values that wrapped
      # past the 32-bit range are shifted back down by 2**32
      data = np.array(data.hproj().astype(np.float64))
      for i in range(len(data)):
        if data[i]>1000000000:
          data[i]=data[i]-(2**32)
      if self.dark is not None:
        data = data - self.dark
      spectrum = data
      spectrum1 = data[:data.shape[0]//2]
      spectrum2 = data[data.shape[0]//2:]
    else:
      data = np.array(data.data16().astype(np.int32))
      if self.dark is not None:
        data = data - self.dark
      data = np.double(data)
      data_split1 = data[:,:data.shape[1]//2]
      data_split2 = data[:,data.shape[1]//2:]

      # make a 1D trace of entire spectrum and each half to find peaks
      spectrum = np.sum(data,0)/data.shape[0]
      spectrum1 = np.sum(data_split1,0)/data_split1.shape[0]
      spectrum2 = np.sum(data_split2,0)/data_split2.shape[0]

    if not one_D:
      # the x-coordinate of the weighted center of peak region
      weighted_peak_one_positions = []
      for i in range(self.peak_one_range_min,self.peak_one_range_max):
        weighted_peak_one_positions.append(spectrum[i]*i)
      weighted_sum_peak_one = np.sum(weighted_peak_one_positions)
      weighted_peak_one_center_position = weighted_sum_peak_one/np.sum(spectrum[self.peak_one_range_min:self.peak_one_range_max])

      weighted_peak_two_positions = []
      for i in range(self.peak_two_range_min,self.peak_two_range_max):
        weighted_peak_two_positions.append(spectrum[i]*i)
      weighted_sum_peak_two = np.sum(weighted_peak_two_positions)
      weighted_peak_two_center_position = weighted_sum_peak_two/np.sum(spectrum[self.peak_two_range_min:self.peak_two_range_max])

      # normalized integrated regions between the peaks
      # NOTE(review): the weighted centroids above are floats (true
      # division of numpy sums); using them directly as slice bounds
      # relies on implicit truncation and raises TypeError on modern
      # numpy -- confirm and cast to int if this path is still in use.
      int_left_region = np.sum(spectrum[:weighted_peak_two_center_position/2])
      int_left_region_norm = np.sum(spectrum[:weighted_peak_two_center_position/2])/len(spectrum[:weighted_peak_two_center_position/2])
      int_right_region = np.sum(spectrum[self.peak_two_range_max:])
      int_right_region_norm = np.sum(spectrum[self.peak_two_range_max:])/len(spectrum[self.peak_two_range_max:])

      # normalized integrated peaks: a window of one nominal peak width
      # centred on each weighted centroid
      int_peak_one = np.sum(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
      int_peak_one_norm = np.sum(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])/len(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
      int_peak_two = np.sum(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])
      int_peak_two_norm = np.sum(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])/len(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])

    if not one_D:
      # reject shots whose peak ratio or background levels fail the cuts
      if int_peak_one_norm/int_peak_two_norm > self.peak_ratio:
        print("event(): inflection peak too high")
        evt.put(skip_event_flag(), "skip_event")
        return
      if int_left_region_norm > self.normalized_peak_to_noise_ratio*int_peak_two_norm:
        print("event(): noisy left of low energy peak")
        evt.put(skip_event_flag(), "skip_event")
        return
      if int_right_region_norm > self.normalized_peak_to_noise_ratio*int_peak_two_norm:
        print("event(): noisy right of high energy peak")
        evt.put(skip_event_flag(), "skip_event")
        return
    #self.logger.info("TIMESTAMP %s accepted" %timestamp)
    self.naccepted += 1
    self.ntwo_color += 1
    print("%d Remote shot" %self.ntwo_color)
    print("%s Remote timestamp" %timestamp)
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.
  XXX more?

  Runs the configured hitfinder on the current image, optionally logs
  the result to a database, then dispatches the image according to
  self.m_dispatch ("index", "nop", "view", "spots" or the deprecated
  "write_dict").  Shots that fail the hitfinder (or, with
  m_negate_hits, shots that pass it) are marked with "skip_event" so
  downstream modules ignore them.

  Previously, common-mode correction was applied only after initial
  threshold filtering.  Since the common_mode class applies the
  (lengthy) common-mode correction immediately after reading the
  image from the stream, this optimisation is currently not
  (elegantly) doable.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(mod_hitfind, self).event(evt, env)
  if (evt.get("skip_event")):
    return

  # This module only applies to detectors for which a distance is
  # available.
  distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
  if distance is None:
    self.nfail += 1
    self.logger.warning("event(): no distance, shot skipped")
    evt.put(skip_event_flag(), "skip_event")
    return

  device = cspad_tbx.address_split(self.address)[2]

  # ***** HITFINDING ***** XXX For hitfinding it may be interesting
  # to look at the fraction of subzero pixels in the dark-corrected
  # image.
  if (self.m_threshold is not None):
    # If a threshold value is given it can be applied in one of three ways:
    #    1. Apply it over the whole image
    if (self.m_roi is None and self.m_distl_min_peaks is None):
      vmax = flex.max(self.cspad_img)
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          # Tell downstream modules to skip this event if the threshold was not met.
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return

    #    2. Apply threshold over a rectangular region of interest.
    elif (self.m_roi is not None):
      vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                     self.m_roi[0]:self.m_roi[1]])
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return

    #    3. Determine the spotfinder spots within the central ASICS, and accept the
    #       image as a hit if there are m_distl_min_peaks exceeding m_threshold.
    #       As a further requirement, the peaks must exceed 2.5 * the 90-percentile
    #       pixel value of the central ASICS.  This filter was added to avoid
    #       high-background false positives.
    elif (self.m_distl_min_peaks is not None):
      if device == 'marccd':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
      elif device == 'Rayonix':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

      peak_heights, outvalue = self.distl_filter(
        self.address,
        self.cspad_img.iround(), # XXX correct?
        distance,
        self.timestamp,
        self.wavelength)
      if ('permissive' in self.m_distl_flags):
        number_of_accepted_peaks = (peak_heights > self.m_threshold).count(True)
      else:
        # non-permissive: additionally require the spotfinder's outlier
        # flag (outvalue) to be clear for each peak
        number_of_accepted_peaks = ((peak_heights > self.m_threshold).__and__(
          outvalue == 0)).count(True)

      sec, ms = cspad_tbx.evt_time(evt)
      evt_time = sec + ms / 1000
      self.stats_logger.info("BRAGG %.3f %d" % (evt_time, number_of_accepted_peaks))

      skip_event = False
      if number_of_accepted_peaks < self.m_distl_min_peaks:
        self.logger.info(
          "Subprocess %02d: Spotfinder NO HIT image #%05d @ %s; %d spots > %d" %
          (env.subprocess(), self.nshots, self.timestamp,
           number_of_accepted_peaks, self.m_threshold))
        if not self.m_negate_hits:
          skip_event = True
      else:
        self.logger.info(
          "Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d" %
          (env.subprocess(), self.nshots, self.timestamp,
           number_of_accepted_peaks, self.m_threshold))
        if self.m_negate_hits:
          skip_event = True

      if skip_event:
        if self.m_db_logging:
          # log misses to the database
          self.queue_entry(
            (self.trial, evt.run(), "%.3f" % evt_time,
             number_of_accepted_peaks, distance, self.sifoil,
             self.wavelength, False,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, self.m_db_tags))
        evt.put(skip_event_flag(), "skip_event")
        return
      # the indexer will log this hit when it is ran. Bug: if the spotfinder is ran by
      # itself, this hit will not be logged in the db.
      evt.put(number_of_accepted_peaks, 'sfspots')

  self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                   (env.subprocess(), self.nshots, self.timestamp))

  # See r17537 of mod_average.py.
  if device == 'Cspad':
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
  elif device == 'marccd':
    pixel_size = evt.get("marccd_pixel_size")
    saturated_value = evt.get("marccd_saturated_value")
  elif device == 'Rayonix':
    pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
    saturated_value = rayonix_tbx.rayonix_saturated_value

  d = cspad_tbx.dpack(
    active_areas=self.active_areas,
    address=self.address,
    beam_center_x=pixel_size * self.beam_center[0],
    beam_center_y=pixel_size * self.beam_center[1],
    data=self.cspad_img.iround(), # XXX ouch!
    distance=distance,
    pixel_size=pixel_size,
    saturated_value=saturated_value,
    timestamp=self.timestamp,
    wavelength=self.wavelength,
    xtal_target=self.m_xtal_target)

  if (self.m_dispatch == "index"):
    import sys
    from xfel.cxi.integrate_image_api import integrate_one_image
    info = integrate_one_image(
      d,
      integration_dirname=self.m_integration_dirname,
      integration_basename=self.m_integration_basename)
    # integrate_one_image redirects stdout/stderr; restore them here
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__

    indexed = info is not None and hasattr(info, 'spotfinder_results')
    if self.m_progress_logging:
      if self.m_db_version == 'v1':
        if indexed:
          # integration pickle dictionary is available here as info.last_saved_best
          if info.last_saved_best["identified_isoform"] is not None:
            #print info.last_saved_best.keys()
            from cxi_xdr_xes.cftbx.cspad_ana import db
            dbobj = db.dbconnect(self.m_db_host, self.m_db_name,
                                 self.m_db_user, self.m_db_password)
            cursor = dbobj.cursor()
            if info.last_saved_best["identified_isoform"] in self.isoforms:
              PM, indices, miller_id = self.isoforms[
                info.last_saved_best["identified_isoform"]]
            else:
              from xfel.xpp.progress_support import progress_manager
              PM = progress_manager(info.last_saved_best,
                                    self.m_db_experiment_tag,
                                    self.m_trial_id,
                                    self.m_rungroup_id,
                                    evt.run())
              indices, miller_id = PM.get_HKL(cursor)
              # cache these as they don't change for a given isoform
              self.isoforms[info.last_saved_best["identified_isoform"]] = \
                PM, indices, miller_id
            if self.m_sql_buffer_size > 1:
              self.queue_progress_entry(
                PM.scale_frame_detail(self.timestamp, cursor, do_inserts=False))
            else:
              PM.scale_frame_detail(self.timestamp, cursor, do_inserts=True)
              dbobj.commit()
              cursor.close()
              dbobj.close()
      elif self.m_db_version == 'v2':
        key_low = 'cctbx.xfel.radial_average.two_theta_low'
        key_high = 'cctbx.xfel.radial_average.two_theta_high'
        tt_low = evt.get(key_low)
        tt_high = evt.get(key_high)

        from xfel.ui.db.dxtbx_db import log_frame
        if indexed:
          n_spots = len(info.spotfinder_results.images[
            info.frames[0]]['spots_total'])
        else:
          sfspots = evt.get('sfspots')
          if sfspots is None:
            if info is None or not isinstance(info, int):
              n_spots = 0
            else:
              n_spots = info
          else:
            n_spots = sfspots

        if indexed:
          known_setting = info.horizons_phil.known_setting
          indexed_setting = info.organizer.info['best_integration']['counter']
          if known_setting is None or known_setting == indexed_setting:
            from xfel.command_line.frame_unpickler import construct_reflection_table_and_experiment_list
            c = construct_reflection_table_and_experiment_list(
              info.last_saved_best, None, pixel_size,
              proceed_without_image=True)
            c.assemble_experiments()
            c.assemble_reflections()
            log_frame(c.experiment_list, c.reflections, self.db_params,
                      evt.run(), n_spots, self.timestamp, tt_low, tt_high)
          else:
            print(
              "Not logging %s, wrong bravais setting (expecting %d, got %d)" %
              (self.timestamp, known_setting, indexed_setting))
        else:
          log_frame(None, None, self.db_params, evt.run(), n_spots,
                    self.timestamp, tt_low, tt_high)

    if self.m_db_logging:
      sec, ms = cspad_tbx.evt_time(evt)
      evt_time = sec + ms / 1000
      sfspots = evt.get('sfspots')
      if sfspots is None:
        if indexed:
          n_spots = len(info.spotfinder_results.images[
            info.frames[0]]['spots_total'])
        else:
          n_spots = 0
      else:
        n_spots = sfspots

      if indexed:
        mosaic_bloc_rotation = info.last_saved_best.get(
          'ML_half_mosaicity_deg', [0])[0]
        mosaic_block_size = info.last_saved_best.get(
          'ML_domain_size_ang', [0])[0]
        ewald_proximal_volume = info.last_saved_best.get(
          'ewald_proximal_volume', [0])[0]

        obs = info.last_saved_best['observations'][0]
        cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = \
          obs.unit_cell().parameters()
        pointgroup = info.last_saved_best['pointgroup']
        resolution = obs.d_min()
      else:
        # BUGFIX: this chain previously assigned `spacegroup = 0` while the
        # queue_entry tuple below reads `pointgroup`, so every non-indexed
        # shot raised NameError here.  Bind `pointgroup` instead.
        mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
          cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

      self.queue_entry(
        (self.trial, evt.run(), "%.3f" % evt_time, n_spots, distance,
         self.sifoil, self.wavelength, indexed, mosaic_bloc_rotation,
         mosaic_block_size, ewald_proximal_volume, pointgroup, cell_a,
         cell_b, cell_c, cell_alpha, cell_beta, cell_gamma, resolution,
         self.m_db_tags))

    if (not indexed):
      evt.put(skip_event_flag(), "skip_event")
      return

  elif (self.m_dispatch == "nop"):
    pass

  elif (self.m_dispatch == "view"): #interactive image viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      # BUGFIX: was "distl.detector_format_version=%" (missing `s`
      # conversion), which raises ValueError at format time; the parallel
      # "spots" dispatch below already used %s.
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d

    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil
    display_spots.wrapper_of_callback().display(horizons_phil.indexing.data)

  elif (self.m_dispatch == "spots"): #interactive spotfinder viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d

    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil

    from rstbx.new_horizons.index import pre_indexing_validation, pack_names
    pre_indexing_validation(horizons_phil)
    imagefile_arguments = pack_names(horizons_phil)
    horizons_phil.persist.show()
    from spotfinder.applications import signal_strength
    info = signal_strength.run_signal_strength_core(
      horizons_phil, imagefile_arguments)

    work = display_spots.wrapper_of_callback(info)
    work.display_with_callback(horizons_phil.indexing.data)

  elif (self.m_dispatch == "write_dict"):
    self.logger.warning(
      "event(): deprecated dispatch 'write_dict', use mod_dump instead")
    if (self.m_out_dirname is not None or
        self.m_out_basename is not None):
      cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

  # Diagnostic message emitted only when all the processing is done.
  if (env.subprocess() >= 0):
    self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))
  else:
    self.logger.info("Accepted #%05d @ %s" %
                     (self.nshots, self.timestamp))
def average(argv=None):
  """Compute per-pixel mean, standard deviation and maximum projection of
  a run's detector images under MPI, then write them out as image
  pickles or CSPAD CBFs.

  Each MPI rank processes every size-th event of the run, accumulating a
  running sum, sum of squares and element-wise maximum; the partial
  accumulators are combined with MPI Reduce and rank 0 writes the three
  output images.

  @param argv Command-line argument list; defaults to sys.argv[1:]
  """
  if argv == None:
    argv = sys.argv[1:]

  try:
    from mpi4py import MPI
  except ImportError:
    raise Sorry("MPI not found")

  command_line = (libtbx.option_parser.option_parser(
    usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of
experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and
the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name)
                .option(None, "--as_pickle", "-p",
                        action="store_true",
                        default=False,
                        dest="as_pickle",
                        help="Write results as image pickle files instead of cbf files")
                .option(None, "--config", "-c",
                        type="string",
                        default=None,
                        dest="config",
                        metavar="PATH",
                        help="psana config file")
                .option(None, "--experiment", "-x",
                        type="string",
                        default=None,
                        dest="experiment",
                        help="experiment name (eg cxi84914)")
                .option(None, "--run", "-r",
                        type="int",
                        default=None,
                        dest="run",
                        help="run number")
                .option(None, "--address", "-a",
                        type="string",
                        default="CxiDs2.0:Cspad.0",
                        dest="address",
                        help="detector address name (eg CxiDs2.0:Cspad.0)")
                .option(None, "--detz_offset", "-d",
                        type="float",
                        default=None,
                        dest="detz_offset",
                        help="offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)")
                .option(None, "--outputdir", "-o",
                        type="string",
                        default=".",
                        dest="outputdir",
                        metavar="PATH",
                        help="Optional path to output directory for output files")
                .option(None, "--averagebase", "-A",
                        type="string",
                        default="{experiment!l}_avg-r{run:04d}",
                        dest="averagepath",
                        metavar="PATH",
                        help="Path to output average image without extension. String substitution allowed")
                .option(None, "--stddevbase", "-S",
                        type="string",
                        default="{experiment!l}_stddev-r{run:04d}",
                        dest="stddevpath",
                        metavar="PATH",
                        help="Path to output standard deviation image without extension. String substitution allowed")
                .option(None, "--maxbase", "-M",
                        type="string",
                        default="{experiment!l}_max-r{run:04d}",
                        dest="maxpath",
                        metavar="PATH",
                        help="Path to output maximum projection image without extension. String substitution allowed")
                .option(None, "--numevents", "-n",
                        type="int",
                        default=None,
                        dest="numevents",
                        help="Maximum number of events to process. Default: all")
                .option(None, "--skipevents", "-s",
                        type="int",
                        default=0,
                        dest="skipevents",
                        help="Number of events in the beginning of the run to skip. Default: 0")
                .option(None, "--verbose", "-v",
                        action="store_true",
                        default=False,
                        dest="verbose",
                        help="Print more information about progress")
                .option(None, "--pickle-optical-metrology", "-m",
                        action="store_true",
                        default=False,
                        dest="pickle_optical_metrology",
                        help="If writing pickle files, use the optical metrology in the experiment's calib directory")
                .option(None, "--bin_size", "-b",
                        type="int",
                        default=None,
                        dest="bin_size",
                        help="Rayonix detector bin size")
                .option(None, "--override_beam_x", "-X",
                        type="float",
                        default=None,
                        dest="override_beam_x",
                        help="Rayonix detector beam center x coordinate")
                .option(None, "--override_beam_y", "-Y",
                        type="float",
                        default=None,
                        dest="override_beam_y",
                        help="Rayonix detector beam center y coordinate")
                .option(None, "--calib_dir", "-C",
                        type="string",
                        default=None,
                        dest="calib_dir",
                        metavar="PATH",
                        help="calibration directory")
                .option(None, "--xtc_dir", "-D",
                        type="string",
                        default=None,
                        dest="xtc_dir",
                        metavar="PATH",
                        help="xtc stream directory")
                .option(None, "--use_ffb", "-f",
                        action="store_true",
                        default=False,
                        dest="use_ffb",
                        help="Use the fast feedback filesystem at LCLS. Only for the active experiment!")
                ).process(args=argv)

  # Required options; show help and bail if any is missing.
  if len(command_line.args) > 0 or \
     command_line.options.as_pickle is None or \
     command_line.options.experiment is None or \
     command_line.options.run is None or \
     command_line.options.address is None or \
     command_line.options.detz_offset is None or \
     command_line.options.averagepath is None or \
     command_line.options.stddevpath is None or \
     command_line.options.maxpath is None or \
     command_line.options.pickle_optical_metrology is None:
    command_line.parser.show_help()
    return

  # set this to sys.maxint to analyze all events
  if command_line.options.numevents is None:
    maxevents = sys.maxint
  else:
    maxevents = command_line.options.numevents

  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  size = comm.Get_size()

  if command_line.options.config is not None:
    psana.setConfigFile(command_line.options.config)
  dataset_name = "exp=%s:run=%d:idx"%(command_line.options.experiment, command_line.options.run)
  if command_line.options.xtc_dir is not None:
    if command_line.options.use_ffb:
      raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
    dataset_name += ":dir=%s"%command_line.options.xtc_dir
  elif command_line.options.use_ffb:
    # as ffb is only at SLAC, ok to hardcode /reg/d here
    dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc"%(command_line.options.experiment[0:3],command_line.options.experiment)
  ds = psana.DataSource(dataset_name)
  address = command_line.options.address
  src = psana.Source('DetInfo(%s)'%address)
  if not command_line.options.as_pickle:
    # psana Detector interface applies run-dependent calibrations for CBFs
    psana_det = psana.Detector(address, ds.env())

  nevent = np.array([0.])

  for run in ds.runs():
    runnumber = run.run()
    # list of all events
    if command_line.options.skipevents > 0:
      print "Skipping first %d events"%command_line.options.skipevents

    times = run.times()[command_line.options.skipevents:]
    nevents = min(len(times),maxevents)
    # chop the list into pieces, depending on rank.  This assigns each process
    # events such that the get every Nth event where N is the number of processes
    mytimes = [times[i] for i in xrange(nevents) if (i+rank)%size == 0]

    for i in xrange(len(mytimes)):
      if i%10==0: print 'Rank',rank,'processing event',rank*len(mytimes)+i,', ',i,'of',len(mytimes)
      evt = run.event(mytimes[i])
      #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
      if 'Rayonix' in command_line.options.address:
        data = evt.get(Camera.FrameV1,src)
        if data is None:
          print "No data"
          continue
        data=data.data16().astype(np.float64)
      elif command_line.options.as_pickle:
        data = evt.get(psana.ndarray_float64_3, src, 'image0')
      else:
        # get numpy array, 32x185x388
        data = psana_det.calib(evt) # applies psana's complex run-dependent calibrations
      if data is None:
        print "No data"
        continue

      d = cspad_tbx.env_distance(address, run.env(), command_line.options.detz_offset)
      if d is None:
        print "No distance, skipping shot"
        continue
      # accumulators are created lazily on the first accepted shot; the
      # 'name' in locals() test distinguishes first use from later ones
      if 'distance' in locals():
        distance += d
      else:
        distance = np.array([float(d)])

      w = cspad_tbx.evt_wavelength(evt)
      if w is None:
        print "No wavelength, skipping shot"
        continue
      if 'wavelength' in locals():
        wavelength += w
      else:
        wavelength = np.array([w])

      t = cspad_tbx.evt_time(evt)
      if t is None:
        print "No timestamp, skipping shot"
        continue
      if 'timestamp' in locals():
        timestamp += t[0] + (t[1]/1000)
      else:
        timestamp = np.array([t[0] + (t[1]/1000)])

      # running sum, sum of squares, and element-wise maximum
      if 'sum' in locals():
        sum+=data
      else:
        sum=np.array(data, copy=True)
      if 'sumsq' in locals():
        sumsq+=data*data
      else:
        sumsq=data*data
      if 'maximum' in locals():
        maximum=np.maximum(maximum,data)
      else:
        maximum=np.array(data, copy=True)

      nevent += 1

  #sum the images across mpi cores
  # NOTE(review): if a rank accepted zero events, `sum`/`sumsq`/`maximum`
  # etc. are unbound here and the Reduce calls raise NameError -- confirm
  # whether that case can occur in practice.
  if size > 1:
    print "Synchronizing rank", rank
  totevent = np.zeros(nevent.shape)
  comm.Reduce(nevent,totevent)

  if rank == 0 and totevent[0] == 0:
    raise Sorry("No events found in the run")

  sumall = np.zeros(sum.shape).astype(sum.dtype)
  comm.Reduce(sum,sumall)

  sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
  comm.Reduce(sumsq,sumsqall)

  maxall = np.zeros(maximum.shape).astype(maximum.dtype)
  comm.Reduce(maximum,maxall, op=MPI.MAX)

  waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
  comm.Reduce(wavelength,waveall)

  distall = np.zeros(distance.shape).astype(distance.dtype)
  comm.Reduce(distance,distall)

  timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
  comm.Reduce(timestamp,timeall)

  if rank==0:
    if size > 1:
      print "Synchronized"

    # Accumulating floating-point numbers introduces errors,
    # which may cause negative variances.  Since a two-pass
    # approach is unacceptable, the standard deviation is
    # clamped at zero.
    mean = sumall / float(totevent[0])
    variance = (sumsqall / float(totevent[0])) - (mean**2)
    variance[variance < 0] = 0
    stddev = np.sqrt(variance)

    # averages of the per-shot scalars over all accepted events
    wavelength = waveall[0] / totevent[0]
    distance = distall[0] / totevent[0]
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
    timestamp = timeall[0] / totevent[0]
    # split the mean epoch into (seconds, milliseconds) for evt_timestamp
    timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
    timestamp = cspad_tbx.evt_timestamp(timestamp)

    if command_line.options.as_pickle:
      extension = ".pickle"
    else:
      extension = ".cbf"
    dest_paths = [cspad_tbx.pathsubst(command_line.options.averagepath + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.stddevpath + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.maxpath + extension, evt, ds.env())]
    dest_paths = [os.path.join(command_line.options.outputdir, path) for path in dest_paths]

    if 'Rayonix' in command_line.options.address:
      # Rayonix: write image pickles with detector dimensions from bin size
      from xfel.cxi.cspad_ana import rayonix_tbx
      pixel_size = rayonix_tbx.get_rayonix_pixel_size(command_line.options.bin_size)
      beam_center = [command_line.options.override_beam_x,command_line.options.override_beam_y]
      detector_dimensions = rayonix_tbx.get_rayonix_detector_dimensions(command_line.options.bin_size)
      active_areas = flex.int([0,0,detector_dimensions[0],detector_dimensions[1]])
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]
      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path
        d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=flex.double(data),
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=rayonix_tbx.rayonix_saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)
        easy_pickle.dump(path, d)
    elif command_line.options.as_pickle:
      # CSPAD image pickles: assemble the 2D image from quadrants using
      # either XPP active areas or optical metrology sections
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]

      xpp = 'xpp' in address.lower()
      if xpp:
        evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
        timestamp = cspad_tbx.evt_timestamp(evt_time) # human readable format
        from xfel.detector_formats import detector_format_version, reverse_timestamp
        from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
        version_lookup = detector_format_version(old_style_address, reverse_timestamp(timestamp)[0])
        assert version_lookup is not None
        active_areas = xpp_active_areas[version_lookup]['active_areas']
        beam_center = [1765 // 2, 1765 // 2]
      else:
        if command_line.options.calib_dir is not None:
          metro_path = command_line.options.calib_dir
        elif command_line.options.pickle_optical_metrology:
          from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
          metro_path = get_calib_file_path(run.env(), address, run)
        else:
          metro_path = libtbx.env.find_in_repositories("xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
        sections = parse_calib.calib2sections(metro_path)
        beam_center, active_areas = cspad_tbx.cbcaa(
          cspad_tbx.getConfig(address, ds.env()), sections)

      # minimal stand-in for the psana quad object expected by the
      # image-assembly helpers below
      class fake_quad(object):
        def __init__(self, q, d):
          self.q = q
          self.d = d

        def quad(self):
          return self.q

        def data(self):
          return self.d

      if xpp:
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        maxall = flex.double(maxall.astype(np.float64))
      else:
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        maxall = flex.double(maxall.astype(np.float64))

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path

        d = cspad_tbx.dpack(
          active_areas=active_areas,
          address=old_style_address,
          beam_center_x=pixel_size * beam_center[0],
          beam_center_y=pixel_size * beam_center[1],
          data=data,
          distance=distance,
          pixel_size=pixel_size,
          saturated_value=saturated_value,
          timestamp=timestamp,
          wavelength=wavelength)

        easy_pickle.dump(path, d)
    else:
      # load a header only cspad cbf from the slac metrology
      from xfel.cftbx.detector import cspad_cbf_tbx
      import pycbf
      base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, address)
      if base_dxtbx is None:
        raise Sorry("Couldn't load calibration file for run %d"%run.run())

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path
        cspad_img = cspad_cbf_tbx.format_object_from_data(base_dxtbx, data, distance, wavelength, timestamp, address)
        cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
          pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
def event(self,evt,evn): """The event() function puts a "skip_event" object with value @c True into the event if the shot is to be skipped. @param evt Event data object, a configure object @param env Environment object """ #import pdb; pdb.set_trace() if (evt.get("skip_event")): return # check if FEE data is one or two dimensional data = evt.get(Camera.FrameV1, self.src) if data is None: one_D = True data = evt.get(Bld.BldDataSpectrometerV1, self.src) else: one_D = False # get event timestamp timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)) # human readable format if data is None: self.nnodata +=1 #self.logger.warning("event(): No spectrum data") evt.put(skip_event_flag(),"skip_event") if timestamp is None: evt.put(skip_event_flag(),"skip_event") self.logger.warning("event(): No TIMESTAMP, skipping shot") elif data is not None: self.nshots +=1 # get data as array and split into two half to find each peak if one_D: data = np.array(data.hproj().astype(np.float64)) if 'dark' in locals(): data = data - self.dark spectrum = data spectrum1 = data[:data.shape[0]//2] spectrum2 = data[data.shape[0]//2:] else: data = np.array(data.data16().astype(np.float64)) if 'dark' in locals(): data = data - self.dark data_split1 = data[:,:data.shape[1]//2] data_split2 = data[:,data.shape[1]//2:] # make a 1D trace of entire spectrum and each half to find peaks spectrum = np.sum(data,0)/data.shape[0] spectrum1 = np.sum(data_split1,0)/data_split1.shape[0] spectrum2 = np.sum(data_split2,0)/data_split2.shape[0] peak_one = np.max(spectrum1) peak_two = np.max(spectrum2) peak_one_position = np.argmax(spectrum1) peak_two_position = np.argmax(spectrum2) + len(spectrum2) # define the limits of the regions between the two peaks peak_one_lower_limit = self.peak_one_position_min - self.peak_one_width peak_one_upper_limit = self.peak_one_position_max + self.peak_one_width peak_two_lower_limit = self.peak_two_position_min - self.peak_two_width peak_two_upper_limit = 
self.peak_two_position_max + self.peak_two_width # the x-coordinate of the weighted center of peak region weighted_peak_one_positions = [] for i in xrange(peak_one_lower_limit,peak_one_upper_limit): weighted_peak_one_positions.append(spectrum[i]*i) weighted_sum_peak_one = sum(weighted_peak_one_positions) weighted_peak_one_center_position = weighted_sum_peak_one//sum(spectrum[peak_one_lower_limit:peak_one_upper_limit]) weighted_peak_two_positions = [] for i in xrange(peak_two_lower_limit,peak_two_upper_limit): weighted_peak_two_positions.append(spectrum[i]*i) weighted_sum_peak_two = sum(weighted_peak_two_positions) weighted_peak_two_center_position = weighted_sum_peak_two//sum(spectrum[peak_two_lower_limit:peak_two_upper_limit]) # normalized integrated peaks int_peak_one = np.sum(spectrum[peak_one_lower_limit:self.peak_one_position_max])/len(spectrum[peak_one_lower_limit:self.peak_one_position_max]) int_peak_two = np.sum(spectrum[peak_two_lower_limit:self.peak_two_position_max])/len(spectrum[peak_two_lower_limit:self.peak_two_position_max]) # normalized integrated regions between the peaks int_left_region = np.sum(spectrum[0:peak_one_lower_limit])/len(spectrum[0:peak_one_lower_limit]) int_middle_region = np.sum(spectrum[peak_one_upper_limit:peak_two_lower_limit])/len(spectrum[peak_one_upper_limit:peak_two_lower_limit]) int_right_region = np.sum(spectrum[peak_two_upper_limit:])/len(spectrum[:peak_two_upper_limit]) # now to do the filtering if peak_one/peak_two < self.peak_ratio or peak_one/peak_two > 1/self.peak_ratio: print "event(): too low" evt.put(skip_event_flag(), "skip_event") return if weighted_peak_two_center_position < self.peak_two_position_min or weighted_peak_two_center_position > self.peak_two_position_max: print "event(): out of range high energy peak" evt.put(skip_event_flag(), "skip_event") return if weighted_peak_one_center_position < self.peak_one_position_min or weighted_peak_one_center_position > self.peak_one_position_max: print "event(): out of 
range low energy peak" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_left_region/int_peak_one > self.normalized_peak_to_noise_ratio): print "event(): noisy left of low energy peak" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_middle_region/int_peak_one > self.normalized_peak_to_noise_ratio): print "event(): noisy middle" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_middle_region/int_peak_two > self.normalized_peak_to_noise_ratio): print "event(): noisy middle" evt.put(skip_event_flag(), "skip_event") return if not one_D and (int_right_region/int_peak_two > self.normalized_peak_to_noise_ratio): print "event(): noisy right of high energy peak" evt.put(skip_event_flag(), "skip_event") return #iron edge at 738 pixels on FFE detetor if one_D and (spectrum[self.iron_edge_position]>=spectrum[weighted_peak_one_center_position]): print "event(): peak at iron edge" evt.put(skip_event_flag(), "skip_event") return if one_D and (spectrum[self.iron_edge_position]>=spectrum[weighted_peak_two_center_position]): print "event(): peak at iron edge" evt.put(skip_event_flag(), "skip_event") return #self.logger.info("TIMESTAMP %s accepted" %timestamp) self.naccepted += 1 self.ntwo_color += 1 print "%d Two Color shots" %self.ntwo_color
def run(self):
  """Process all images assigned to this MPI rank.

  Parses the command line, opens the indexed psana data source for the
  requested experiment/run, round-robins the run's events over the MPI
  ranks, and writes each accepted shot either as an image pickle or as
  a CBF file, depending on params.format.file_format.
  """
  params, options = self.parser.parse_args(show_diff_phil=True)

  # All three of experiment, run number and detector address are mandatory.
  if params.input.experiment is None or params.input.run_num is None or params.input.address is None:
    raise Usage(self.usage)

  # Exactly one of the two output formats must be fully configured.
  if params.format.file_format == "cbf":
    if params.format.cbf.detz_offset is None:
      raise Usage(self.usage)
  elif params.format.file_format == "pickle":
    if params.format.pickle.cfg is None:
      raise Usage(self.usage)
  else:
    raise Usage(self.usage)

  if not os.path.exists(params.output.output_dir):
    raise Sorry("Output path not found:" + params.output.output_dir)

  # Save the parameters
  self.params = params
  self.options = options

  from mpi4py import MPI
  comm = MPI.COMM_WORLD
  rank = comm.Get_rank() # each process in MPI has a unique id, 0-indexed
  size = comm.Get_size() # size: number of processes running in this job

  # set up psana.  Pickle mode needs the psana module pipeline configured
  # before the data source is opened.
  if params.format.file_format == "pickle":
    psana.setConfigFile(params.format.pickle.cfg)

  # ":idx" requests the indexed (random-access) XTC mode.
  dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment, params.input.run_num)
  ds = psana.DataSource(dataset_name)

  if params.format.file_format == "cbf":
    src = psana.Source("DetInfo(%s)" % params.input.address)
    psana_det = psana.Detector(params.input.address, ds.env())

  # set this to sys.maxint to analyze all events
  if params.dispatch.max_events is None:
    max_events = sys.maxint
  else:
    max_events = params.dispatch.max_events

  for run in ds.runs():
    if params.format.file_format == "cbf":
      # load a header only cspad cbf from the slac metrology
      base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, params.input.address)
      if base_dxtbx is None:
        raise Sorry("Couldn't load calibration file for run %d" % run.run())

      if params.format.cbf.gain_mask_value is not None:
        gain_mask = psana_det.gain_mask(gain=params.format.cbf.gain_mask_value)

    # list of all events
    times = run.times()
    nevents = min(len(times), max_events)
    # chop the list into pieces, depending on rank.  This assigns each process
    # events such that the get every Nth event where N is the number of processes
    mytimes = [times[i] for i in xrange(nevents) if (i + rank) % size == 0]

    for i in xrange(len(mytimes)):
      evt = run.event(mytimes[i])
      id = evt.get(psana.EventId)
      print "Event #", i, " has id:", id

      timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)) # human readable format
      if timestamp is None:
        print "No timestamp, skipping shot"
        continue
      # Flatten "YYYY-MM-DDTHH:MM:SS.mmm" into "YYYYMMDDHHMMSSmmm" for the
      # output file name.
      t = timestamp
      s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
      print "Processing shot", s

      if params.format.file_format == "pickle":
        if evt.get("skip_event"):
          print "Skipping event", id
          continue
        # the data needs to have already been processed and put into the event by psana
        data = evt.get(params.format.pickle.out_key)
        if data is None:
          print "No data"
          continue

        # set output paths according to the templates
        path = os.path.join(params.output.output_dir, "shot-" + s + ".pickle")
        print "Saving", path
        easy_pickle.dump(path, data)

      elif params.format.file_format == "cbf":
        # get numpy array, 32x185x388
        data = psana_det.calib(evt) # applies psana's complex run-dependent calibrations

        if params.format.cbf.gain_mask_value is not None:
          # apply gain mask
          data *= gain_mask

        distance = cspad_tbx.env_distance(params.input.address, run.env(), params.format.cbf.detz_offset)
        if distance is None:
          print "No distance, skipping shot"
          continue

        if self.params.format.cbf.override_energy is None:
          wavelength = cspad_tbx.evt_wavelength(evt)
          if wavelength is None:
            print "No wavelength, skipping shot"
            continue
        else:
          # hc = 12398.4187 eV*Angstrom converts energy (eV) to wavelength.
          wavelength = 12398.4187 / self.params.format.cbf.override_energy

        # stitch together the header, data and metadata into the final dxtbx format object
        cspad_img = cspad_cbf_tbx.format_object_from_data(
          base_dxtbx, data, distance, wavelength, timestamp, params.input.address
        )
        path = os.path.join(params.output.output_dir, "shot-" + s + ".cbf")
        print "Saving", path

        # write the file
        import pycbf
        cspad_img._cbf_handle.write_widefile(
          path, pycbf.CBF, pycbf.MIME_HEADERS | pycbf.MSG_DIGEST | pycbf.PAD_4K, 0
        )
    run.end()
  ds.end()
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.  XXX
  more?  Performs hitfinding (threshold over full image, ROI, or distl
  spotfinder peaks), then dispatches the packed image dictionary to the
  indexer, a viewer, or a pickle writer according to self.m_dispatch.

  Previously, common-mode correction was applied only after initial
  threshold filtering.  Since the common_mode class applies the
  (lengthy) common-mode correction immediately after reading the image
  from the stream, this optimisation is currently not (elegantly)
  doable.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(mod_hitfind, self).event(evt, env)
  if (evt.get("skip_event")):
    return

  # This module only applies to detectors for which a distance is
  # available.
  distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
  if distance is None:
    self.nfail += 1
    self.logger.warning("event(): no distance, shot skipped")
    evt.put(skip_event_flag(), "skip_event")
    return

  device = cspad_tbx.address_split(self.address)[2]

  # ***** HITFINDING ***** XXX For hitfinding it may be interesting
  # to look at the fraction of subzero pixels in the dark-corrected
  # image.
  if (self.m_threshold is not None):
    # If a threshold value is given it can be applied in one of three ways:
    #    1. Apply it over the whole image
    if (self.m_roi is None and self.m_distl_min_peaks is None):
      vmax = flex.max(self.cspad_img)
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          # Tell downstream modules to skip this event if the threshold was not met.
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return

    #    2. Apply threshold over a rectangular region of interest.
    elif (self.m_roi is not None):
      vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                     self.m_roi[0]:self.m_roi[1]])
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return

    #    3. Determine the spotfinder spots within the central ASICS, and accept the
    #       image as a hit if there are m_distl_min_peaks exceeding m_threshold.
    #       As a further requirement, the peaks must exceed 2.5 * the 90-percentile
    #       pixel value of the central ASICS.  This filter was added to avoid
    #       high-background false positives.
    elif (self.m_distl_min_peaks is not None):
      if device == 'marccd':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
      elif device == 'Rayonix':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

      peak_heights,outvalue = self.distl_filter(
        self.address,
        self.cspad_img.iround(), # XXX correct?
        distance,
        self.timestamp,
        self.wavelength)
      if ('permissive' in self.m_distl_flags):
        number_of_accepted_peaks = (peak_heights > self.m_threshold).count(True)
      else:
        number_of_accepted_peaks = ((peak_heights > self.m_threshold).__and__(outvalue==0)).count(True)

      sec,ms = cspad_tbx.evt_time(evt)
      # BUGFIX: ms is an integer count of milliseconds; under Python 2 the
      # original "ms/1000" truncated to whole seconds although the value
      # is logged with millisecond precision ("%.3f").
      evt_time = sec + ms/1000.0
      self.stats_logger.info("BRAGG %.3f %d" %(evt_time, number_of_accepted_peaks))

      skip_event = False
      if number_of_accepted_peaks < self.m_distl_min_peaks:
        self.logger.info("Subprocess %02d: Spotfinder NO HIT image #%05d @ %s; %d spots > %d" %(
          env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))
        if not self.m_negate_hits:
          skip_event = True
      else:
        self.logger.info("Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d" %(
          env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))
        if self.m_negate_hits:
          skip_event = True

      if skip_event:
        if self.m_db_logging:
          # log misses to the database
          self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time,
                            number_of_accepted_peaks, distance,
                            self.sifoil, self.wavelength, False,
                            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                            self.m_db_tags))
        evt.put(skip_event_flag(), "skip_event")
        return

      # the indexer will log this hit when it is ran.  Bug: if the spotfinder
      # is ran by itself, this hit will not be logged in the db.
      evt.put(number_of_accepted_peaks, 'sfspots')

  self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                   (env.subprocess(), self.nshots, self.timestamp))

  # See r17537 of mod_average.py.
  if device == 'Cspad':
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
  elif device == 'marccd':
    pixel_size = evt.get("marccd_pixel_size")
    saturated_value = evt.get("marccd_saturated_value")
  elif device == 'Rayonix':
    pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
    saturated_value = rayonix_tbx.rayonix_saturated_value

  d = cspad_tbx.dpack(
    active_areas=self.active_areas,
    address=self.address,
    beam_center_x=pixel_size * self.beam_center[0],
    beam_center_y=pixel_size * self.beam_center[1],
    data=self.cspad_img.iround(), # XXX ouch!
    distance=distance,
    pixel_size=pixel_size,
    saturated_value=saturated_value,
    timestamp=self.timestamp,
    wavelength=self.wavelength,
    xtal_target=self.m_xtal_target)

  if (self.m_dispatch == "index"):
    import sys
    from xfel.cxi.integrate_image_api import integrate_one_image
    info = integrate_one_image(d,
                               integration_dirname  = self.m_integration_dirname,
                               integration_basename = self.m_integration_basename)
    # integrate_one_image redirects the standard streams; restore them.
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__

    indexed = info is not None
    if indexed and self.m_progress_logging:
      # integration pickle dictionary is available here as info.last_saved_best
      if info.last_saved_best["identified_isoform"] is not None:
        #print info.last_saved_best.keys()
        from cxi_xdr_xes.cftbx.cspad_ana import db
        dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
        cursor = dbobj.cursor()
        if info.last_saved_best["identified_isoform"] in self.isoforms:
          PM, indices, miller_id = self.isoforms[info.last_saved_best["identified_isoform"]]
        else:
          from xfel.xpp.progress_support import progress_manager
          PM = progress_manager(info.last_saved_best, self.m_db_experiment_tag,
                                self.m_trial_id, self.m_rungroup_id, evt.run())
          indices, miller_id = PM.get_HKL(cursor)
          # cache these as they don't change for a given isoform
          self.isoforms[info.last_saved_best["identified_isoform"]] = PM, indices, miller_id
        if self.m_sql_buffer_size > 1:
          self.queue_progress_entry(PM.scale_frame_detail(self.timestamp, cursor, do_inserts=False))
        else:
          PM.scale_frame_detail(self.timestamp, cursor, do_inserts=True)
          dbobj.commit()
        cursor.close()
        dbobj.close()

    if self.m_db_logging:
      sec,ms = cspad_tbx.evt_time(evt)
      # BUGFIX: same integer-division truncation as above.
      evt_time = sec + ms/1000.0
      sfspots = evt.get('sfspots')
      if sfspots is None:
        if indexed:
          n_spots = len(info.spotfinder_results.images[info.frames[0]]['spots_total'])
        else:
          n_spots = 0
      else:
        n_spots = sfspots

      if indexed:
        mosaic_bloc_rotation = info.last_saved_best.get('ML_half_mosaicity_deg', [0])[0]
        mosaic_block_size = info.last_saved_best.get('ML_domain_size_ang', [0])[0]
        ewald_proximal_volume = info.last_saved_best.get('ewald_proximal_volume', [0])[0]

        obs = info.last_saved_best['observations'][0]
        cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = obs.unit_cell().parameters()
        pointgroup = info.last_saved_best['pointgroup']
        resolution = obs.d_min()
      else:
        # BUGFIX: the original zeroed a never-read name "spacegroup" while
        # queue_entry below reads "pointgroup", raising NameError for every
        # non-indexed shot with db logging enabled.
        mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
          cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

      self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time, n_spots, distance,
                        self.sifoil, self.wavelength, indexed, mosaic_bloc_rotation,
                        mosaic_block_size, ewald_proximal_volume, pointgroup, cell_a,
                        cell_b, cell_c, cell_alpha, cell_beta, cell_gamma, resolution,
                        self.m_db_tags))

    if (not indexed):
      evt.put(skip_event_flag(), "skip_event")
      return

  elif (self.m_dispatch == "nop"):
    pass

  elif (self.m_dispatch == "view"): #interactive image viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      # BUGFIX: the original format string was "...=%", which raises
      # ValueError (incomplete format); the "spots" branch below shows the
      # intended "%s".
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d
    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil
    display_spots.wrapper_of_callback().display(horizons_phil.indexing.data)

  elif (self.m_dispatch == "spots"): #interactive spotfinder viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d
    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil
    from rstbx.new_horizons.index import pre_indexing_validation,pack_names
    pre_indexing_validation(horizons_phil)
    imagefile_arguments = pack_names(horizons_phil)
    horizons_phil.persist.show()
    from spotfinder.applications import signal_strength
    info = signal_strength.run_signal_strength_core(horizons_phil,imagefile_arguments)
    work = display_spots.wrapper_of_callback(info)
    work.display_with_callback(horizons_phil.indexing.data)

  elif (self.m_dispatch == "write_dict"):
    self.logger.warning(
      "event(): deprecated dispatch 'write_dict', use mod_dump instead")
    if (self.m_out_dirname is not None or
        self.m_out_basename is not None):
      cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

  # Diagnostic message emitted only when all the processing is done.
  if (env.subprocess() >= 0):
    self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))
  else:
    self.logger.info("Accepted #%05d @ %s" %
                     (self.nshots, self.timestamp))
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.

  Applies the image filters selected in self.flags (skew rejection,
  inactive/hot/gain/noise masking, sigma scaling, optional pickle
  output), then folds the shot into the running max/sum/sum-of-squares
  accumulators used to produce the average images.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(average_mixin, self).event(evt, env)
  if evt.get('skip_event'):
    return

  # Get the distance for the detectors that should have it, and set
  # it to NaN for those that should not.
  if self.detector == 'CxiDs1' or \
     self.detector == 'CxiDs2' or \
     self.detector == 'CxiDsd' or \
     self.detector == 'XppGon':
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
      self._nfail += 1
      self.logger.warning("event(): no distance, shot skipped")
      evt.put(skip_event_flag(), 'skip_event')
      return
  else:
    distance = float('nan')

  if ("skew" in self.flags):
    # Take out inactive pixels
    if self.roi is not None:
      pixels = self.cspad_img[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
      dark_mask = self.dark_mask[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
      pixels = pixels.as_1d().select(dark_mask.as_1d())
    else:
      pixels = self.cspad_img.as_1d().select(self.dark_mask.as_1d()).as_double()
    stats = scitbx.math.basic_statistics(pixels.as_double())
    #stats.show()
    self.logger.info("skew: %.3f" %stats.skew)
    self.logger.info("kurtosis: %.3f" %stats.kurtosis)
    if 0:
      # Dead debug block.  NOTE(review): "flex_cspad_img" is not defined in
      # this scope — this would raise NameError if ever enabled; presumably
      # it should be self.cspad_img.
      from matplotlib import pyplot
      hist_min, hist_max = flex.min(flex_cspad_img.as_double()), flex.max(flex_cspad_img.as_double())
      print hist_min, hist_max
      n_slots = 100
      n, bins, patches = pyplot.hist(flex_cspad_img.as_1d().as_numpy_array(), bins=n_slots, range=(hist_min, hist_max))
      pyplot.show()

    # XXX This skew threshold probably needs fine-tuning
    skew_threshold = 0.35
    if stats.skew < skew_threshold:
      self._nfail += 1
      self.logger.warning("event(): skew < %f, shot skipped" % skew_threshold)
      evt.put(skip_event_flag(), 'skip_event')
      return
    #self.cspad_img *= stats.skew

  if ("inactive" in self.flags):
    # Pixels with non-positive dark standard deviation carry no signal.
    self.cspad_img.set_selected(self.dark_stddev <= 0, 0)

  if ("noelastic" in self.flags):
    ELASTIC_THRESHOLD = self.elastic_threshold
    self.cspad_img.set_selected(self.cspad_img > ELASTIC_THRESHOLD, 0)

  if self.hot_threshold is not None:
    # Mask pixels that are hot in the dark image.
    HOT_THRESHOLD = self.hot_threshold
    self.cspad_img.set_selected(self.dark_img > HOT_THRESHOLD, 0)

  if self.gain_map is not None and self.gain_threshold is not None:
    # XXX comparing each pixel to a moving average would probably be better
    # since the gain should vary approximately smoothly over different areas
    # of the detector
    GAIN_THRESHOLD = self.gain_threshold
    #self.logger.debug(
    #  "rejecting: %i" %(self.gain_map > GAIN_THRESHOLD).count(True))
    self.cspad_img.set_selected(self.gain_map > GAIN_THRESHOLD, 0)

  if ("nonoise" in self.flags):
    NOISE_THRESHOLD = self.noise_threshold
    self.cspad_img.set_selected(self.cspad_img < NOISE_THRESHOLD, 0)

  if ("sigma_scaling" in self.flags):
    self.do_sigma_scaling()

  if ("symnoise" in self.flags):
    # Zero the band of pixels within +/- symnoise_threshold of zero.
    SYMNOISE_THRESHOLD = self.symnoise_threshold
    self.cspad_img.set_selected((-SYMNOISE_THRESHOLD < self.cspad_img) & (
      self.cspad_img < SYMNOISE_THRESHOLD), 0)

  if ("output" in self.flags):
    # Append this shot's packed image to a per-module pickle stream.
    try:
      import cPickle as pickle
    except ImportError:
      import pickle
    import os
    if (not os.path.isdir(self.pickle_dirname)):
      os.makedirs(self.pickle_dirname)
    flexdata = flex.int(self.cspad_img.astype(numpy.int32))
    d = cspad_tbx.dpack(
      address=self.address,
      data=flexdata,
      timestamp=cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
    )
    # "ab" so successive events accumulate in the same stream file.
    G = open(os.path.join(".",self.pickle_dirname)+"/"+self.pickle_basename, "ab")
    pickle.dump(d,G,pickle.HIGHEST_PROTOCOL)
    G.close()

  if self.photon_threshold is not None and self.two_photon_threshold is not None:
    self.do_photon_counting()

  if self.background_path is not None:
    self.cspad_img -= self.background_img

  # t and self._sum_time are a two-long arrays of seconds and
  # milliseconds which hold time with respect to the base time.
  t = [t1 - t2 for (t1, t2) in zip(cspad_tbx.evt_time(evt),
                                   self._metadata['time_base'])]
  if self._nmemb == 0:
    # The peers metadata item is a bit field where a bit is set if
    # the partial sum from the corresponding worker process is
    # pending.  If this is the first frame a worker process sees,
    # set its corresponding bit in the bit field since it will
    # contribute a partial sum.
    if env.subprocess() >= 0:
      self._lock.acquire()
      if 'peers' in self._metadata.keys():
        self._metadata['peers'] |= (1 << env.subprocess())
      else:
        self._metadata['peers'] = (1 << env.subprocess())
      self._lock.release()

    # First member: initialise the accumulators from this shot.
    self._sum_distance = distance
    self._sum_time = (t[0], t[1])
    self._sum_wavelength = self.wavelength

    if self._have_max:
      self._max_img = self.cspad_img.deep_copy()
    if self._have_mean:
      self._sum_img = self.cspad_img.deep_copy()
    if self._have_std:
      self._ssq_img = flex.pow2(self.cspad_img)

  else:
    # Subsequent members: fold this shot into the running accumulators.
    self._sum_distance += distance
    self._sum_time = (self._sum_time[0] + t[0], self._sum_time[1] + t[1])
    self._sum_wavelength += self.wavelength

    if self._have_max:
      # Per-pixel running maximum.
      sel = (self.cspad_img > self._max_img).as_1d()
      self._max_img.as_1d().set_selected(
        sel, self.cspad_img.as_1d().select(sel))
    if self._have_mean:
      self._sum_img += self.cspad_img
    if self._have_std:
      self._ssq_img += flex.pow2(self.cspad_img)

  self._nmemb += 1
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.

  The event() function does not log shots skipped due to
  incompleteness in order to keep the output streams clean.  Instead,
  the number of skipped shots is reported by endjob().  Complete shots
  contribute to the detz/Si-foil change log and extrema, and to the
  shifted wavelength sums used for mean/variance estimation.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  if evt.get("skip_event"):
    return

  # XXX This hardcodes the address for the front detector!
  detz = cspad_tbx.env_detz('CxiDs1-0|Cspad-0', env)
  sifoil = cspad_tbx.env_sifoil(env)
  timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
  wavelength = cspad_tbx.evt_wavelength(evt)

  # Tally each missing quantity separately, then drop the shot if
  # anything at all was absent.
  if detz is None:
    self.m_no_detz += 1
  if sifoil is None:
    self.m_no_sifoil += 1
  if timestamp is None:
    self.m_no_timestamp += 1
  if wavelength is None:
    self.m_no_wavelength += 1
  if detz is None or sifoil is None or timestamp is None or wavelength is None:
    self.m_nfail += 1
    return

  # Log any change of the detector position and track its extrema.
  if detz != self.m_detz:
    if self.m_detz is None:
      self.m_logger.info("%s: initial detz % 8.4f" % (timestamp, detz))
    else:
      self.m_logger.info("%s: detz % 8.4f -> % 8.4f" %
                         (timestamp, self.m_detz, detz))
    self.m_detz = detz
  if self.m_detz_max is None or detz > self.m_detz_max:
    self.m_detz_max = detz
  if self.m_detz_min is None or detz < self.m_detz_min:
    self.m_detz_min = detz

  # Identical bookkeeping for the Si-foil thickness.
  if sifoil != self.m_sifoil:
    if self.m_sifoil is None:
      self.m_logger.info("%s: initial Si-foil % 8d" % (timestamp, sifoil))
    else:
      self.m_logger.info("%s: Si-foil % 8d -> % 8d" %
                         (timestamp, self.m_sifoil, sifoil))
    self.m_sifoil = sifoil
  if self.m_sifoil_max is None or sifoil > self.m_sifoil_max:
    self.m_sifoil_max = sifoil
  if self.m_sifoil_min is None or sifoil < self.m_sifoil_min:
    self.m_sifoil_min = sifoil

  # Accumulate the sum and the squared sum of the shifted wavelength.
  # The shift is taken as the first wavelength encountered.  This may
  # be more accurate than accumulating raw values [Chan et al. (1983)
  # Am. Stat. 37, 242-247].
  if self.m_nmemb == 0:
    self.m_wavelength_shift = wavelength
  delta = wavelength - self.m_wavelength_shift
  if self.m_nmemb == 0:
    self.m_wavelength_sum = delta
    self.m_wavelength_sumsq = delta**2
    self.m_nmemb = 1
  else:
    self.m_wavelength_sum += delta
    self.m_wavelength_sumsq += delta**2
    self.m_nmemb += 1
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.

  Applies the image filters selected in self.flags (skew rejection,
  inactive/hot/gain/noise masking, sigma scaling, optional pickle
  output), then folds the shot into the running max/sum/sum-of-squares
  accumulators used to produce the average images.

  @param evt Event data object, a configure object
  @param env Environment object
  """
  super(average_mixin, self).event(evt, env)
  if evt.get('skip_event'):
    return

  # Get the distance for the detectors that should have it, and set
  # it to NaN for those that should not.
  if self.detector == 'CxiDs1' or \
     self.detector == 'CxiDs2' or \
     self.detector == 'CxiDsd' or \
     self.detector == 'XppGon':
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
      self._nfail += 1
      self.logger.warning("event(): no distance, shot skipped")
      evt.put(skip_event_flag(), 'skip_event')
      return
  else:
    distance = float('nan')

  if ("skew" in self.flags):
    # Take out inactive pixels
    if self.roi is not None:
      pixels = self.cspad_img[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
      dark_mask = self.dark_mask[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
      pixels = pixels.as_1d().select(dark_mask.as_1d())
    else:
      pixels = self.cspad_img.as_1d().select(self.dark_mask.as_1d()).as_double()
    stats = scitbx.math.basic_statistics(pixels.as_double())
    #stats.show()
    self.logger.info("skew: %.3f" %stats.skew)
    self.logger.info("kurtosis: %.3f" %stats.kurtosis)
    if 0:
      # Dead debug block.  NOTE(review): "flex_cspad_img" is not defined in
      # this scope — this would raise NameError if ever enabled; presumably
      # it should be self.cspad_img.
      from matplotlib import pyplot
      hist_min, hist_max = flex.min(flex_cspad_img.as_double()), flex.max(flex_cspad_img.as_double())
      print hist_min, hist_max
      n_slots = 100
      n, bins, patches = pyplot.hist(flex_cspad_img.as_1d().as_numpy_array(), bins=n_slots, range=(hist_min, hist_max))
      pyplot.show()

    # XXX This skew threshold probably needs fine-tuning
    skew_threshold = 0.35
    if stats.skew < skew_threshold:
      self._nfail += 1
      self.logger.warning("event(): skew < %f, shot skipped" % skew_threshold)
      evt.put(skip_event_flag(), 'skip_event')
      return
    #self.cspad_img *= stats.skew

  if ("inactive" in self.flags):
    # Pixels with non-positive dark standard deviation carry no signal.
    self.cspad_img.set_selected(self.dark_stddev <= 0, 0)

  if ("noelastic" in self.flags):
    ELASTIC_THRESHOLD = self.elastic_threshold
    self.cspad_img.set_selected(self.cspad_img > ELASTIC_THRESHOLD, 0)

  if self.hot_threshold is not None:
    # Mask pixels that are hot in the dark image.
    HOT_THRESHOLD = self.hot_threshold
    self.cspad_img.set_selected(self.dark_img > HOT_THRESHOLD, 0)

  if self.gain_map is not None and self.gain_threshold is not None:
    # XXX comparing each pixel to a moving average would probably be better
    # since the gain should vary approximately smoothly over different areas
    # of the detector
    GAIN_THRESHOLD = self.gain_threshold
    #self.logger.debug(
    #  "rejecting: %i" %(self.gain_map > GAIN_THRESHOLD).count(True))
    self.cspad_img.set_selected(self.gain_map > GAIN_THRESHOLD, 0)

  if ("nonoise" in self.flags):
    NOISE_THRESHOLD = self.noise_threshold
    self.cspad_img.set_selected(self.cspad_img < NOISE_THRESHOLD, 0)

  if ("sigma_scaling" in self.flags):
    self.do_sigma_scaling()

  if ("symnoise" in self.flags):
    # Zero the band of pixels within +/- symnoise_threshold of zero.
    SYMNOISE_THRESHOLD = self.symnoise_threshold
    self.cspad_img.set_selected((-SYMNOISE_THRESHOLD < self.cspad_img) & (
      self.cspad_img < SYMNOISE_THRESHOLD), 0)

  if ("output" in self.flags):
    # Append this shot's packed image to a per-module pickle stream.
    import pickle,os
    if (not os.path.isdir(self.pickle_dirname)):
      os.makedirs(self.pickle_dirname)
    flexdata = flex.int(self.cspad_img.astype(numpy.int32))
    d = cspad_tbx.dpack(
      address=self.address,
      data=flexdata,
      timestamp=cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
    )
    # "ab" so successive events accumulate in the same stream file.
    G = open(os.path.join(".",self.pickle_dirname)+"/"+self.pickle_basename, "ab")
    pickle.dump(d,G,pickle.HIGHEST_PROTOCOL)
    G.close()

  if self.photon_threshold is not None and self.two_photon_threshold is not None:
    self.do_photon_counting()

  if self.background_path is not None:
    self.cspad_img -= self.background_img

  # t and self._sum_time are a two-long arrays of seconds and
  # milliseconds which hold time with respect to the base time.
  t = [t1 - t2 for (t1, t2) in zip(cspad_tbx.evt_time(evt),
                                   self._metadata['time_base'])]
  if self._nmemb == 0:
    # The peers metadata item is a bit field where a bit is set if
    # the partial sum from the corresponding worker process is
    # pending.  If this is the first frame a worker process sees,
    # set its corresponding bit in the bit field since it will
    # contribute a partial sum.
    if env.subprocess() >= 0:
      self._lock.acquire()
      if 'peers' in self._metadata.keys():
        self._metadata['peers'] |= (1 << env.subprocess())
      else:
        self._metadata['peers'] = (1 << env.subprocess())
      self._lock.release()

    # First member: initialise the accumulators from this shot.
    self._sum_distance = distance
    self._sum_time = (t[0], t[1])
    self._sum_wavelength = self.wavelength

    if self._have_max:
      self._max_img = self.cspad_img.deep_copy()
    if self._have_mean:
      self._sum_img = self.cspad_img.deep_copy()
    if self._have_std:
      self._ssq_img = flex.pow2(self.cspad_img)

  else:
    # Subsequent members: fold this shot into the running accumulators.
    self._sum_distance += distance
    self._sum_time = (self._sum_time[0] + t[0], self._sum_time[1] + t[1])
    self._sum_wavelength += self.wavelength

    if self._have_max:
      # Per-pixel running maximum.
      sel = (self.cspad_img > self._max_img).as_1d()
      self._max_img.as_1d().set_selected(
        sel, self.cspad_img.as_1d().select(sel))
    if self._have_mean:
      self._sum_img += self.cspad_img
    if self._have_std:
      self._ssq_img += flex.pow2(self.cspad_img)

  self._nmemb += 1
# Script fragment: iterate over a psana data source and collect Rayonix
# frames together with per-shot beam metadata from the EPICS store.
ds = psana.DataSource(dsname)
det = PyDetector(psana.Source('rayonix'),ds.env())
images=[]
for ievt,evt in enumerate(ds.events()):
  if det.raw(evt) is not None:
    image = det.raw(evt)
    image = flex.int(image.astype(np.intc))
    # Convert the photon energy PV (eV) to wavelength in Angstrom
    # (hc ~ 12398 eV*A).
    lambda_wavelength = 12398./(ds.env().epicsStore().value("SIOC:SYS0:ML00:AO627"))
    pixelSize = (170./3840.)*2. #standard 2x2 binning
    saturation=65535 #maybe thus the trustable value should be lower?
    #eventTime=evt.get(psana.EventId).time()[0]+evt.get(psana.EventId).time()[1]*1e-9
    # Determine timestamp using cspad_tbx
    evt_time = cspad_tbx.evt_time(evt)
    evt_timestamp = cspad_tbx.evt_timestamp(evt_time)
    #this assumes that we will get the correct numbers for one position
    #then we will use the robot position to get relative numbers assuming
    #the beam center stays about the same
    robX=ds.env().epicsStore().value("robot_x")
    robY=ds.env().epicsStore().value("robot_y")
    robZ=ds.env().epicsStore().value("robot_z")
    #beamX=602.015+(ds.env().epicsStore().value("robot_x"))
    #beamY=51.682+(ds.env().epicsStore().value("robot_y"))
    #beamDist=-563.083+60.+ (ds.env().epicsStore().value("robot_z"))
    # Hard-coded beam centre in mm; the robot-relative computation above is
    # commented out.  NOTE(review): units/provenance of 87.96/85.57 are not
    # visible here — confirm against the beamline configuration.
    beamX = 87.96
    beamY = 85.57
def event(self,evt,evn):
    """The event() function puts a "skip_event" object with value @c True
    into the event if the shot is to be skipped.

    Reads the FEE spectrometer (2D Camera.FrameV1 or 1D
    BldDataSpectrometerV1), subtracts an optional dark, locates the two
    energy peaks by weighted centroid and rejects shots that fail the
    configured peak-ratio / noise criteria.  Accepted shots increment
    self.naccepted and self.ntwo_color.

    @param evt Event data object, a configure object
    @param evn Environment object (note: parameter is spelled "evn" in the
               signature; unused in this body)
    """
    #import pdb; pdb.set_trace()
    if (evt.get("skip_event")):
        return
    # check if FEE data is one or two dimensional
    data = evt.get(Camera.FrameV1, self.src)
    if data is None:
        one_D = True
        data = evt.get(Bld.BldDataSpectrometerV1, self.src)
    else:
        one_D = False
    # get event timestamp
    timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)) # human readable format

    if data is None:
        self.nnodata +=1
        #self.logger.warning("event(): No spectrum data")
        evt.put(skip_event_flag(),"skip_event")

    if timestamp is None:
        evt.put(skip_event_flag(),"skip_event")
        #self.logger.warning("event(): No TIMESTAMP, skipping shot")

    elif data is not None:
        self.nshots +=1
        # get data as array and split into two half to find each peak
        if one_D:
            # filtering out outlier spikes in FEE data: values above 1e9 are
            # 32-bit wrap-around artefacts, fold them back by 2**32
            data = np.array(data.hproj().astype(np.float64))
            for i in xrange(len(data)):
                if data[i]>1000000000:
                    data[i]=data[i]-(2**32)
            if self.dark is not None:
                data = data - self.dark
            spectrum = data
            spectrum1 = data[:data.shape[0]//2]
            spectrum2 = data[data.shape[0]//2:]
        else:
            data = np.array(data.data16().astype(np.int32))
            if self.dark is not None:
                data = data - self.dark
            data = np.double(data)
            data_split1 = data[:,:data.shape[1]//2]
            data_split2 = data[:,data.shape[1]//2:]
            # make a 1D trace of entire spectrum and each half to find peaks
            spectrum = np.sum(data,0)/data.shape[0]
            spectrum1 = np.sum(data_split1,0)/data_split1.shape[0]
            spectrum2 = np.sum(data_split2,0)/data_split2.shape[0]

        if not one_D:
            # the x-coordinate of the weighted center of peak region
            weighted_peak_one_positions = []
            for i in xrange(self.peak_one_range_min,self.peak_one_range_max):
                weighted_peak_one_positions.append(spectrum[i]*i)
            weighted_sum_peak_one = np.sum(weighted_peak_one_positions)
            weighted_peak_one_center_position = weighted_sum_peak_one/np.sum(spectrum[self.peak_one_range_min:self.peak_one_range_max])
            weighted_peak_two_positions = []
            for i in xrange(self.peak_two_range_min,self.peak_two_range_max):
                weighted_peak_two_positions.append(spectrum[i]*i)
            weighted_sum_peak_two = np.sum(weighted_peak_two_positions)
            weighted_peak_two_center_position = weighted_sum_peak_two/np.sum(spectrum[self.peak_two_range_min:self.peak_two_range_max])

            # normalized integrated regions between the peaks.
            # NOTE(review): the slice bounds below are floats (true division
            # of numpy scalars); old numpy truncated float indices, modern
            # numpy raises -- needs int() if this Py2-era code is revived.
            int_left_region = np.sum(spectrum[:weighted_peak_two_center_position/2])
            int_left_region_norm = np.sum(spectrum[:weighted_peak_two_center_position/2])/len(spectrum[:weighted_peak_two_center_position/2])
            int_right_region = np.sum(spectrum[self.peak_two_range_max:])
            int_right_region_norm = np.sum(spectrum[self.peak_two_range_max:])/len(spectrum[self.peak_two_range_max:])

            # normalized integrated peaks: a window of +/- half the
            # configured peak range around each weighted center
            int_peak_one = np.sum(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
            int_peak_one_norm = np.sum(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])/len(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
            int_peak_two = np.sum(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])
            int_peak_two_norm = np.sum(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])/len(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])

        if not one_D:
            # reject if the inflection peak (one) is too strong relative to
            # the remote peak (two)
            if int_peak_one_norm/int_peak_two_norm > self.peak_ratio:
                print "event(): inflection peak too high"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_left_region_norm > self.normalized_peak_to_noise_ratio*int_peak_two_norm:
                print "event(): noisy left of low energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_right_region_norm > self.normalized_peak_to_noise_ratio*int_peak_two_norm:
                print "event(): noisy right of high energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
        #self.logger.info("TIMESTAMP %s accepted" %timestamp)
        self.naccepted += 1
        self.ntwo_color += 1
        print "%d Remote shot" %self.ntwo_color
        print "%s Remote timestamp" %timestamp
def event(self, evt, evn):
    """The event() function puts a "skip_event" object with value @c True
    into the event if the shot is to be skipped.

    Reads the FEE spectrometer (2D Camera.FrameV1 or 1D
    BldDataSpectrometerV1), subtracts an optional dark, then filters the
    shot: for 2D data by peak ratio, peak position and noise around the
    two weighted peak centers; for 1D data by intensity at the iron edge
    (7112 eV, pixel 738).  Accepted shots increment self.naccepted and
    self.ntwo_color.

    Fixes relative to the previous revision: all slice bounds are now
    integers (float indices raise TypeError under Python 3 / modern
    numpy), and a verbatim-duplicated "noisy middle" check was removed.

    @param evt Event data object, a configure object
    @param evn Environment object (unused)
    """
    if (evt.get("skip_event")):
        return
    # check if FEE data is one or two dimensional
    data = evt.get(Camera.FrameV1, self.src)
    if data is None:
        one_D = True
        data = evt.get(Bld.BldDataSpectrometerV1, self.src)
    else:
        one_D = False
    # get event timestamp (human readable format)
    timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))

    if data is None:
        self.nnodata += 1
        #self.logger.warning("event(): No spectrum data")
        evt.put(skip_event_flag(), "skip_event")

    if timestamp is None:
        evt.put(skip_event_flag(), "skip_event")
        #self.logger.warning("event(): No TIMESTAMP, skipping shot")

    elif data is not None:
        self.nshots += 1
        # get data as array and split into two halves to find each peak
        if one_D:
            data = np.array(data.hproj().astype(np.float64))
            # filter out outlier spikes in FEE data: values above 1e9 are
            # 32-bit wrap-around artefacts, fold them back by 2**32
            for i in range(len(data)):
                if data[i] > 1000000000:
                    data[i] = data[i] - (2**32)
            if self.dark is not None:
                data = data - self.dark
            spectrum = data
            spectrum1 = data[:data.shape[0] // 2]
            spectrum2 = data[data.shape[0] // 2:]
        else:
            data = np.array(data.data16().astype(np.int32))
            if self.dark is not None:
                data = data - self.dark
            data_split1 = data[:, :data.shape[1] // 2]
            data_split2 = data[:, data.shape[1] // 2:]
            # make a 1D trace of the entire spectrum and of each half
            spectrum = np.sum(data, 0) / data.shape[0]
            spectrum1 = np.sum(data_split1, 0) / data_split1.shape[0]
            spectrum2 = np.sum(data_split2, 0) / data_split2.shape[0]

        if not one_D:
            # Weighted center (x-coordinate) of each configured peak window.
            win_one = spectrum[self.peak_one_range_min:self.peak_one_range_max]
            win_two = spectrum[self.peak_two_range_min:self.peak_two_range_max]
            weighted_sum_peak_one = sum(
                spectrum[i] * i
                for i in range(self.peak_one_range_min, self.peak_one_range_max))
            weighted_sum_peak_two = sum(
                spectrum[i] * i
                for i in range(self.peak_two_range_min, self.peak_two_range_max))
            # int(): numpy float // numpy float is still a float, and floats
            # are not valid slice indices.
            peak_one_center = int(weighted_sum_peak_one // np.sum(win_one))
            peak_two_center = int(weighted_sum_peak_two // np.sum(win_two))
            half_one = len(win_one) // 2
            half_two = len(win_two) // 2

            # normalized integrated regions left of, between and right of
            # the two peak windows
            left_region = spectrum[0:peak_one_center - half_one]
            middle_region = spectrum[peak_one_center + half_one:
                                     peak_two_center - half_two]
            right_region = spectrum[peak_two_center + half_two:]
            int_left_region_norm = np.sum(left_region) / len(left_region)
            int_middle_region_norm = np.sum(middle_region) / len(middle_region)
            int_right_region_norm = np.sum(right_region) / len(right_region)

            # normalized integrated peaks: +/- half a window width around
            # each weighted center
            peak_one_region = spectrum[peak_one_center - half_one:
                                       peak_one_center + half_one]
            peak_two_region = spectrum[peak_two_center - half_two:
                                       peak_two_center + half_two]
            int_peak_one_norm = np.sum(peak_one_region) / len(peak_one_region)
            int_peak_two_norm = np.sum(peak_two_region) / len(peak_two_region)
        else:
            # convert eV range of iron edge (7112 eV) to pixels
            # (0.059 eV/pixel dispersion, edge at pixel 738); int() because
            # these are used as slice indices
            metal_edge_max = int((self.forbidden_range_eV / 2.) / 0.059 + 738)
            metal_edge_min = int((-self.forbidden_range_eV / 2.) / 0.059 + 738)
            int_metal_region = np.sum(spectrum[metal_edge_min:metal_edge_max])
            # peak regions on either side of the edge
            int_peak_one = np.sum(spectrum[0:metal_edge_min])
            int_peak_two = np.sum(spectrum[metal_edge_max:])

        # now do the filtering
        if not one_D:
            if min(int_peak_one_norm, int_peak_two_norm) / max(
                    int_peak_one_norm, int_peak_two_norm) < self.peak_ratio:
                print("event(): too low")
                evt.put(skip_event_flag(), "skip_event")
                return
            # the high-energy peak (argmax of the right half, offset by
            # len(spectrum2)) must fall inside its window
            high_peak_pos = np.argmax(spectrum2) + len(spectrum2)
            if high_peak_pos > (peak_two_center + half_two) or \
               high_peak_pos < (peak_two_center - half_two):
                print("event(): out of range high energy peak")
                evt.put(skip_event_flag(), "skip_event")
                return
            # the low-energy peak must fall inside its window
            low_peak_pos = np.argmax(spectrum1)
            if low_peak_pos > (peak_one_center + half_one) or \
               low_peak_pos < (peak_one_center - half_one):
                print("event(): out of range low energy peak")
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_left_region_norm / int_peak_one_norm > self.normalized_peak_to_noise_ratio:
                print("event(): noisy left of low energy peak")
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_middle_region_norm / int_peak_one_norm > self.normalized_peak_to_noise_ratio:
                print("event(): noisy middle")
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_right_region_norm / int_peak_two_norm > self.normalized_peak_to_noise_ratio:
                print("event(): noisy right of high energy peak")
                evt.put(skip_event_flag(), "skip_event")
                return
        else:
            # filter for cxih8015: iron edge at 738 pixels on FEE detector
            if int_metal_region >= 0.10 * int_peak_two:
                print("event(): high intensity at metal edge")
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_metal_region >= 0.10 * int_peak_one:
                print("event(): high intensity at metal edge")
                evt.put(skip_event_flag(), "skip_event")
                return
            if min(int_peak_one, int_peak_two) / max(
                    int_peak_one, int_peak_two) < self.peak_ratio:
                print("event(): peak ratio too low")
                evt.put(skip_event_flag(), "skip_event")
                return
        #self.logger.info("TIMESTAMP %s accepted" %timestamp)
        self.naccepted += 1
        self.ntwo_color += 1
        print("%d Two Color shots" % self.ntwo_color)
def event(self, evt, env):
    """Read the marccd image at self._path, crop it around the beam
    center and store the data plus per-shot metadata in the event.

    Skips the event (puts "skip_event") when no path is set, the path
    does not match the configured template, the path contains "phi",
    the file never appears on disk, or no dxtbx Format can read it.
    On success, resets self._path to None so the image is converted
    only once per calibration cycle.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    from dxtbx.format.Registry import Registry
    from os.path import exists
    from time import sleep

    # Nop if there is no image.  For experiments configured to have
    # exactly one event per calibration cycle, this should never
    # happen.
    if self._path is None:
        evt.put(skip_event_flag(), "skip_event")
        return

    # Skip this event if the template isn't in the path
    if self._template is not None and not True in [t in self._path for t in self._template.split(',')]:
        evt.put(skip_event_flag(), "skip_event")
        return

    # "phi" in the filename marks rotation-series images -- skip them.
    # NOTE(review): assumption inferred from the substring test only;
    # confirm against the experiment's file-naming scheme.
    if "phi" in self._path:
        evt.put(skip_event_flag(), "skip_event")
        return

    # Wait for the image to appear in the file system, probing for it
    # at exponentially increasing delays (1 s, 2 s, ...).  The t_tot > 1
    # guard means the wait gives up after roughly three seconds total.
    t = 1
    t_tot = 0
    if not exists(self._path):
        self._logger.info("Waiting for path %s"%self._path)
    while not exists(self._path):
        if t_tot > 1:
            self._logger.info("Timeout waiting for path %s"%self._path)
            evt.put(skip_event_flag(), "skip_event")
            self._logger.info("Image not found: %s"%self._path)
            return
        sleep(t)
        t_tot += t
        t *= 2

    # Find a matching Format object and instantiate it using the
    # given path.  If the Format object does not understand the image,
    # try determining a new format.  XXX Emits "Couldn't create a
    # detector model for this image".
    if self._fmt is None:
        self._fmt = Registry.find(self._path)
        if self._fmt is None:
            evt.put(skip_event_flag(), "skip_event")
            return
    img = self._fmt(self._path)
    if img is None:
        # cached format failed -- redetermine once, then give up
        self._fmt = Registry.find(self._path)
        if self._fmt is None:
            evt.put(skip_event_flag(), "skip_event")
            return
        img = self._fmt(self._path)
        if img is None:
            evt.put(skip_event_flag(), "skip_event")
            return
    self._logger.info(
        "Reading %s using %s" % (self._path, self._fmt.__name__))

    # Get the raw image data and convert to double precision floating
    # point array.  XXX Why will img.get_raw_data() not work, like it
    # does in print_header?
    db = img.get_detectorbase()
    db.readHeader()
    db.read()
    data = db.get_raw_data().as_double()

    # Get the pixel size and store it for common_mode.py; marccd pixels
    # must be square
    detector = img.get_detector()[0]
    ps = detector.get_pixel_size()
    assert ps[0] == ps[1]
    pixel_size = ps[0]
    evt.put(ps[0],"marccd_pixel_size")
    evt.put(detector.get_trusted_range()[1],"marccd_saturated_value")
    evt.put(detector.get_distance(),"marccd_distance")

    # If the beam center isn't provided in the config file, get it from the
    # image.  It will probably be wrong.
    if self._beam_x is None or self._beam_y is None:
        self._beam_x, self._beam_y = detector.get_beam_centre_px(img.get_beam().get_s0())
        self._beam_x = int(round(self._beam_x))
        self._beam_y = int(round(self._beam_y))

    # Crop the data so that the beam center is in the center of the image:
    # use the largest square centered on the beam that fits in the frame
    maxy, maxx = data.focus()
    minsize = min([self._beam_x,self._beam_y,maxx-self._beam_x,maxy-self._beam_y])
    data = data[self._beam_y-minsize:self._beam_y+minsize,self._beam_x-minsize:self._beam_x+minsize]
    evt.put((minsize,minsize),"marccd_beam_center")
    evt.put(flex.int([0,0,minsize*2,minsize*2]),"marccd_active_areas")

    # Store the image in the event.
    evt.put(data, self._address)
    # Store the .mmcd file name in the event
    evt.put(self._mccd_name, "mccd_name")

    evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
    timestamp = cspad_tbx.evt_timestamp(evt_time) # human readable format
    self._logger.info("converted %s to pickle with timestamp %s" %(self._path, timestamp))

    # This should not be necessary as the machine is configured with
    # one event per calibration cycle.
    self._path = None
def event(self,evt,evn):
    """The event() function puts a "skip_event" object with value @c True
    into the event if the shot is to be skipped.

    Two-color FEE spectrometer filter (Python-2 dialect).  2D data are
    filtered by peak ratio, peak position and noise around the two
    weighted peak centers; 1D data are filtered by intensity at the iron
    edge (7112 eV, pixel 738).  Accepted shots increment self.naccepted
    and self.ntwo_color.

    @param evt Event data object, a configure object
    @param evn Environment object (note: parameter is spelled "evn" in the
               signature; unused in this body)
    """
    #import pdb; pdb.set_trace()
    if (evt.get("skip_event")):
        return
    # check if FEE data is one or two dimensional
    data = evt.get(Camera.FrameV1, self.src)
    if data is None:
        one_D = True
        data = evt.get(Bld.BldDataSpectrometerV1, self.src)
    else:
        one_D = False
    # get event timestamp
    timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)) # human readable format

    if data is None:
        self.nnodata +=1
        #self.logger.warning("event(): No spectrum data")
        evt.put(skip_event_flag(),"skip_event")

    if timestamp is None:
        evt.put(skip_event_flag(),"skip_event")
        #self.logger.warning("event(): No TIMESTAMP, skipping shot")

    elif data is not None:
        self.nshots +=1
        # get data as array and split into two half to find each peak
        if one_D:
            # filtering out outlier spikes in FEE data: values above 1e9 are
            # 32-bit wrap-around artefacts, fold them back by 2**32
            data = np.array(data.hproj().astype(np.float64))
            for i in xrange(len(data)):
                if data[i]>1000000000:
                    data[i]=data[i]-(2**32)
            if self.dark is not None:
                data = data - self.dark
            spectrum = data
            spectrum1 = data[:data.shape[0]//2]
            spectrum2 = data[data.shape[0]//2:]
        else:
            data = np.array(data.data16().astype(np.int32))
            if self.dark is not None:
                data = data - self.dark
            data_split1 = data[:,:data.shape[1]//2]
            data_split2 = data[:,data.shape[1]//2:]
            # make a 1D trace of entire spectrum and each half to find peaks
            spectrum = np.sum(data,0)/data.shape[0]
            spectrum1 = np.sum(data_split1,0)/data_split1.shape[0]
            spectrum2 = np.sum(data_split2,0)/data_split2.shape[0]

        if not one_D:
            # the x-coordinate of the weighted center of peak region
            weighted_peak_one_positions = []
            for i in xrange(self.peak_one_range_min,self.peak_one_range_max):
                weighted_peak_one_positions.append(spectrum[i]*i)
            weighted_sum_peak_one = sum(weighted_peak_one_positions)
            weighted_peak_one_center_position = weighted_sum_peak_one//sum(spectrum[self.peak_one_range_min:self.peak_one_range_max])
            weighted_peak_two_positions = []
            for i in xrange(self.peak_two_range_min,self.peak_two_range_max):
                weighted_peak_two_positions.append(spectrum[i]*i)
            weighted_sum_peak_two = sum(weighted_peak_two_positions)
            weighted_peak_two_center_position = weighted_sum_peak_two//sum(spectrum[self.peak_two_range_min:self.peak_two_range_max])

            # normalized integrated regions between the peaks.
            # NOTE(review): the slice bounds below mix floor-divided centers
            # with float half-window widths (true division), so the indices
            # are floats; old numpy truncated them, modern numpy raises --
            # needs int() if this Py2-era code is revived.
            int_left_region_norm = np.sum(spectrum[0:(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])/len(spectrum[0:(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
            int_middle_region_norm = np.sum(spectrum[(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])/len(spectrum[(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])
            int_right_region_norm = np.sum(spectrum[(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):])/len(spectrum[(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):])

            # normalized integrated peaks: +/- half a window width around
            # each weighted center
            int_peak_one_norm = np.sum(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])/len(spectrum[(weighted_peak_one_center_position-len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2):(weighted_peak_one_center_position+len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)])
            int_peak_two_norm = np.sum(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])/len(spectrum[(weighted_peak_two_center_position-len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2):(weighted_peak_two_center_position+len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)])
        else:
            # convert eV range of iron edge (7112 eV) to pixels
            # (0.059 eV/pixel dispersion, edge at pixel 738)
            metal_edge_max=(self.forbidden_range_eV/2.)/0.059+738
            metal_edge_min=(-self.forbidden_range_eV/2.)/0.059+738
            int_metal_region = np.sum(spectrum[metal_edge_min:metal_edge_max])
            #peak one region integrate:
            int_peak_one=np.sum(spectrum[0:metal_edge_min])
            int_peak_two=np.sum(spectrum[metal_edge_max:])

        # now to do the filtering
        if not one_D:
            if min(int_peak_one_norm,int_peak_two_norm)/max(int_peak_one_norm,int_peak_two_norm) < self.peak_ratio:
                print "event(): too low"
                evt.put(skip_event_flag(), "skip_event")
                return
            # high-energy peak (argmax of right half, offset by
            # len(spectrum2)) must lie within its window
            if (np.argmax(spectrum2)+len(spectrum2)) > (weighted_peak_two_center_position+(len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)) or (np.argmax(spectrum2)+len(spectrum2)) < (weighted_peak_two_center_position-(len(spectrum[self.peak_two_range_min:self.peak_two_range_max])/2)):
                print "event(): out of range high energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
            # low-energy peak must lie within its window
            if np.argmax(spectrum1) > (weighted_peak_one_center_position+(len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)) or np.argmax(spectrum1) < (weighted_peak_one_center_position-(len(spectrum[self.peak_one_range_min:self.peak_one_range_max])/2)):
                print "event(): out of range low energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_left_region_norm/int_peak_one_norm > self.normalized_peak_to_noise_ratio:
                print "event(): noisy left of low energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_middle_region_norm/int_peak_one_norm > self.normalized_peak_to_noise_ratio:
                print "event(): noisy middle"
                evt.put(skip_event_flag(), "skip_event")
                return
            # NOTE(review): this check is a verbatim duplicate of the one
            # directly above and can never fire
            if int_middle_region_norm/int_peak_one_norm > self.normalized_peak_to_noise_ratio:
                print "event(): noisy middle"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_right_region_norm/int_peak_two_norm > self.normalized_peak_to_noise_ratio:
                print "event(): noisy right of high energy peak"
                evt.put(skip_event_flag(), "skip_event")
                return
        else:
            # filter for cxih8015
            #iron edge at 738 pixels on FFE detetor
            if int_metal_region>=0.10*int_peak_two:
                print "event(): high intensity at metal edge"
                evt.put(skip_event_flag(), "skip_event")
                return
            if int_metal_region>=0.10*int_peak_one:
                print "event(): high intensity at metal edge"
                evt.put(skip_event_flag(), "skip_event")
                return
            if min(int_peak_one,int_peak_two)/max(int_peak_one,int_peak_two) < self.peak_ratio:
                print "event(): peak ratio too low"
                evt.put(skip_event_flag(), "skip_event")
                return
        #self.logger.info("TIMESTAMP %s accepted" %timestamp)
        self.naccepted += 1
        self.ntwo_color += 1
        print "%d Two Color shots" %self.ntwo_color
def run(args):
    """Parse phil arguments, open a psana data source and publish FEE
    spectrometer plots for every event that passes the selected filter.

    For each accepted event the function publishes (via psmon) a 2D FEE
    image, a summed 1D spectrum trace, the DC offset and the raw and
    corrected full-detector images.

    Fix relative to the previous revision: comparison with None uses
    ``is None`` instead of ``== None`` (PEP 8; ``==`` may be overloaded).

    @param args List of command-line arguments: phil file names and/or
                individual phil assignments
    """
    user_phil = []
    for arg in args:
        if os.path.isfile(arg):
            try:
                user_phil.append(parse(file_name=arg))
            except Exception as e:
                print(str(e))
                raise Sorry("Couldn't parse phil file %s" % arg)
        else:
            try:
                user_phil.append(parse(arg))
            except Exception as e:
                print(str(e))
                raise Sorry("Couldn't parse argument %s" % arg)
    params = phil_scope.fetch(sources=user_phil).extract()

    # cxid9114, source fee: FeeHxSpectrometer.0:Opal1000.1, downstream: CxiDg3.0:Opal1000.0
    # cxig3614, source fee: FeeHxSpectrometer.0:OrcaFl40.0
    src = psana.Source(params.spectra_filter.detector_address)

    dataset_name = "exp=%s:run=%s:idx" % (params.experiment, params.runs)
    print("Dataset string:", dataset_name)
    ds = psana.DataSource(dataset_name)
    spf = spectra_filter(params)

    # Default to the first configured filter when none was selected.
    if params.selected_filter is None:
        filter_name = params.spectra_filter.filter[0].name
    else:
        filter_name = params.selected_filter

    # Single-process defaults (no MPI here).
    rank = 0
    size = 1
    max_events = sys.maxsize

    for run in ds.runs():
        print("starting run", run.run())
        # list of all events
        times = run.times()
        if params.skip_events is not None:
            times = times[params.skip_events:]
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns each
        # process events such that they get every Nth event where N is the
        # number of processes
        mytimes = [times[i] for i in range(nevents) if (i + rank) % size == 0]

        for i, t in enumerate(mytimes):
            evt = run.event(t)
            accepted, data, spectrum, dc_offset, all_data, all_data_raw = spf.filter_event(
                evt, filter_name)
            if not accepted:
                continue

            print(cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)),
                  "Publishing data for event", i)

            #header = "Event %d, m/f: %7.7f, f: %d"%(i, peak_max/flux, flux)
            header = "Event %d" % (i)

            if rank == 0:
                fee = Image(header, "FEE", data)  # make a 2D plot
                publish.send("FEE", fee)  # send to the display

                spectrumplot = XYPlot(header, 'summed 1D trace',
                                      list(range(data.shape[1])),
                                      spectrum)  # make a 1D plot
                publish.send("SPECTRUM", spectrumplot)  # send to the display

                fee = Image(header, "DC_OFFSET", dc_offset)  # make a 2D plot
                publish.send("DC_OFFSET", fee)  # send to the display
                fee = Image(header, "ALL_FEE", all_data)  # make a 2D plot
                publish.send("ALL_FEE", fee)  # send to the display
                fee = Image(header, "ALL_FEE_RAW", all_data_raw)  # make a 2D plot
                publish.send("ALL_FEE_RAW", fee)  # send to the display
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    XXX Since the viewer is now running in a parallel process, the
    averaging here is now the bottleneck.

    Accumulates a rolling sum of images and, every self.nupdate shots,
    pushes the current average (with detector metadata) to the viewer
    process through self._queue.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    from pyana.event import Event
    # NOTE(review): this increments self.n_shots but the update schedule
    # below reads self.nshots -- confirm both attributes exist and which
    # one is authoritative.
    self.n_shots += 1

    super(mod_view, self).event(evt, env)
    if evt.status() != Event.Normal or evt.get('skip_event'): # XXX transition
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self.nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
    else:
        distance = float('nan')

    # Stop the pyana job if the viewer process has exited.
    if not self._proc.is_alive():
        evt.setStatus(Event.Stop)

    # Early return if the next update to the viewer is more than
    # self.ncollate shots away.  XXX Since the common_mode.event()
    # function does quite a bit of processing, the savings are
    # probably not so big.
    next_update = (self.nupdate - 1) - (self.nshots - 1) % self.nupdate
    if (self.ncollate > 0 and next_update >= self.ncollate):
        return

    if self.sigma_scaling:
        self.do_sigma_scaling()

    if self.photon_counting:
        self.do_photon_counting()

    # Trim the disabled section from the Sc1 detector image.  XXX This
    # is a bit of a kludge, really.
    # if (self.address == "CxiSc1-0|Cspad2x2-0"):
    #     self.cspad_img = self.cspad_img[185:2 * 185, :]

    # Update the sum of the valid images, starting a new collation if
    # appropriate.  This guarantees self.nvalid > 0.
    if (self.nvalid == 0 or self.ncollate > 0 and self.nvalid >= self.ncollate):
        self.img_sum = self.cspad_img
        self.nvalid = 1
    else:
        self.img_sum += self.cspad_img
        self.nvalid += 1

    # Update the viewer to display the current average image, and
    # start a new collation, if appropriate.
    if (next_update == 0):
        from time import localtime, strftime
        time_str = strftime("%H:%M:%S", localtime(evt.getTime().seconds()))
        title = "r%04d@%s: average of %d last images on %s" \
            % (evt.run(), time_str, self.nvalid, self.address)

        # See also mod_average.py.
        device = cspad_tbx.address_split(self.address)[2]
        if device == 'Cspad':
            beam_center = self.beam_center
            pixel_size = cspad_tbx.pixel_size
            saturated_value = cspad_tbx.cspad_saturated_value
        elif device == 'marccd':
            beam_center = tuple(t // 2 for t in self.img_sum.focus())
            pixel_size = 0.079346
            saturated_value = 2**16 - 1

        # Wait for the viewer process to empty the queue before feeding
        # it a new image, and ensure not to hang if the viewer process
        # exits.  Because of multithreading/multiprocessing semantics,
        # self._queue.empty() is unreliable.
        fmt = _Format(BEAM_CENTER=beam_center,
                      DATA=self.img_sum / self.nvalid,
                      DETECTOR_ADDRESS=self.address,
                      DISTANCE=distance,
                      PIXEL_SIZE=pixel_size,
                      SATURATED_VALUE=saturated_value,
                      TIME_TUPLE=cspad_tbx.evt_time(evt),
                      WAVELENGTH=self.wavelength)
        while not self._queue.empty():
            if not self._proc.is_alive():
                evt.setStatus(Event.Stop)
                return
        while True:
            try:
                # retry until the viewer drains the queue
                self._queue.put((fmt, title), timeout=1)
                break
            except Exception:
                pass

        if (self.ncollate > 0):
            self.nvalid = 0
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    XXX Since the viewer is now running in a parallel process, the
    averaging here is now the bottleneck.

    Maintains a running image sum and, once every self.nupdate shots,
    hands the averaged image and its metadata to the viewer process via
    self._queue.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    from pyana.event import Event
    # NOTE(review): increments self.n_shots while self.nshots is read
    # below for the update schedule -- verify the two attributes.
    self.n_shots += 1

    super(mod_view, self).event(evt, env)
    if evt.status() != Event.Normal or evt.get(
            'skip_event'):  # XXX transition
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env,
                                          self._detz_offset)
        if distance is None:
            self.nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
    else:
        distance = float('nan')

    # Stop the job if the viewer process has gone away.
    if not self._proc.is_alive():
        evt.setStatus(Event.Stop)

    # Early return if the next update to the viewer is more than
    # self.ncollate shots away.  XXX Since the common_mode.event()
    # function does quite a bit of processing, the savings are
    # probably not so big.
    next_update = (self.nupdate - 1) - (self.nshots - 1) % self.nupdate
    if (self.ncollate > 0 and next_update >= self.ncollate):
        return

    if self.sigma_scaling:
        self.do_sigma_scaling()

    if self.photon_counting:
        self.do_photon_counting()

    # Trim the disabled section from the Sc1 detector image.  XXX This
    # is a bit of a kludge, really.
    # if (self.address == "CxiSc1-0|Cspad2x2-0"):
    #     self.cspad_img = self.cspad_img[185:2 * 185, :]

    # Update the sum of the valid images, starting a new collation if
    # appropriate.  This guarantees self.nvalid > 0.
    if (self.nvalid == 0 or self.ncollate > 0 and self.nvalid >= self.ncollate):
        self.img_sum = self.cspad_img
        self.nvalid = 1
    else:
        self.img_sum += self.cspad_img
        self.nvalid += 1

    # Update the viewer to display the current average image, and
    # start a new collation, if appropriate.
    if (next_update == 0):
        from time import localtime, strftime
        time_str = strftime("%H:%M:%S", localtime(evt.getTime().seconds()))
        title = "r%04d@%s: average of %d last images on %s" \
            % (evt.run(), time_str, self.nvalid, self.address)

        # See also mod_average.py.
        device = cspad_tbx.address_split(self.address)[2]
        if device == 'Cspad':
            beam_center = self.beam_center
            pixel_size = cspad_tbx.pixel_size
            saturated_value = cspad_tbx.cspad_saturated_value
        elif device == 'marccd':
            beam_center = tuple(t // 2 for t in self.img_sum.focus())
            pixel_size = 0.079346
            saturated_value = 2**16 - 1

        # Wait for the viewer process to empty the queue before feeding
        # it a new image, and ensure not to hang if the viewer process
        # exits.  Because of multithreading/multiprocessing semantics,
        # self._queue.empty() is unreliable.
        fmt = _Format(BEAM_CENTER=beam_center,
                      DATA=self.img_sum / self.nvalid,
                      DETECTOR_ADDRESS=self.address,
                      DISTANCE=distance,
                      PIXEL_SIZE=pixel_size,
                      SATURATED_VALUE=saturated_value,
                      TIME_TUPLE=cspad_tbx.evt_time(evt),
                      WAVELENGTH=self.wavelength)
        while not self._queue.empty():
            if not self._proc.is_alive():
                evt.setStatus(Event.Stop)
                return
        while True:
            try:
                # keep retrying until the viewer drains the queue
                self._queue.put((fmt, title), timeout=1)
                break
            except Exception:
                pass

        if (self.ncollate > 0):
            self.nvalid = 0
def run(self):
    """Process all images assigned to this MPI rank.

    Parses the command-line/phil parameters, opens the psana data
    source, slices the run's event list round-robin across MPI ranks,
    and writes each accepted shot out either as a pickle or as a CBF
    (cspad or rayonix mode).  Raises Usage on missing required
    parameters and Sorry on missing paths/calibrations.
    """
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Required identification of the data set.
    if params.input.experiment is None or \
       params.input.run_num is None or \
       params.input.address is None:
        raise Usage(self.usage)

    # Per-format required parameters; any other format is an error.
    if params.format.file_format == "cbf":
        if params.format.cbf.detz_offset is None:
            raise Usage(self.usage)
    elif params.format.file_format == "pickle":
        if params.input.cfg is None:
            raise Usage(self.usage)
    else:
        raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    # Environment variable redirect for CBFLib temporary CBF_TMP_XYZ
    # file output.
    if params.format.file_format == "cbf":
        if params.output.tmp_output_dir is None:
            tmp_dir = os.path.join(params.output.output_dir, '.tmp')
        else:
            tmp_dir = os.path.join(params.output.tmp_output_dir, '.tmp')
        if not os.path.exists(tmp_dir):
            with show_mail_on_error():
                try:
                    os.makedirs(tmp_dir)
                    # Can fail if running multiprocessed - that's OK if
                    # the folder was created.
                except OSError as e:  # In Python 2, a FileExistsError is just an OSError
                    if e.errno != errno.EEXIST:  # If this OSError is not a FileExistsError
                        raise
        os.environ['CBF_TMP_DIR'] = tmp_dir

    # Save the parameters.
    self.params = params
    self.options = options

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    # set up psana
    if params.input.cfg is not None:
        psana.setConfigFile(params.input.cfg)

    if params.input.calib_dir is not None:
        psana.setOption('psana.calib-dir', params.input.calib_dir)

    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                          params.input.run_num)
    if params.input.xtc_dir is not None:
        dataset_name = "exp=%s:run=%s:idx:dir=%s" % (
            params.input.experiment, params.input.run_num,
            params.input.xtc_dir)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
        # NOTE(review): src is assigned but not used below -- presumably
        # kept for its side effects or historical reasons; verify.
        src = psana.Source('DetInfo(%s)' % params.input.address)
        psana_det = psana.Detector(params.input.address, ds.env())

    # No cap given means analyze all events (sys.maxsize sentinel).
    if params.dispatch.max_events is None:
        max_events = sys.maxsize
    else:
        max_events = params.dispatch.max_events

    for run in ds.runs():
        if params.format.file_format == "cbf":
            if params.format.cbf.mode == "cspad":
                # load a header only cspad cbf from the slac metrology
                base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                    run, params.input.address)
                if base_dxtbx is None:
                    raise Sorry("Couldn't load calibration file for run %d" %
                                run.run())
            elif params.format.cbf.mode == "rayonix":
                # load a header only rayonix cbf from the input parameters
                detector_size = rayonix_tbx.get_rayonix_detector_dimensions(
                    ds.env())
                base_dxtbx = rayonix_tbx.get_dxtbx_from_params(
                    params.format.cbf.rayonix, detector_size)

        # list of all events
        times = run.times()
        if params.dispatch.selected_events:
            # Keep only the events whose human-readable timestamp was
            # explicitly requested.
            times = [
                t for t in times
                if cspad_tbx.evt_timestamp((t.seconds(),
                                            t.nanoseconds() / 1e6)) in
                params.input.timestamp
            ]
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns
        # each process events such that they get every Nth event, where
        # N is the number of processes.
        mytimes = [
            times[i] for i in range(nevents) if (i + rank) % size == 0
        ]

        for i in range(len(mytimes)):
            evt = run.event(mytimes[i])
            id = evt.get(psana.EventId)
            print("Event #", i, " has id:", id)

            timestamp = cspad_tbx.evt_timestamp(
                cspad_tbx.evt_time(evt))  # human readable format
            if timestamp is None:
                print("No timestamp, skipping shot")
                continue

            # Honor skip flags stored either as a value or as a key.
            if evt.get("skip_event") or "skip_event" in [
                    key.key() for key in evt.keys()
            ]:
                print("Skipping event", timestamp)
                continue

            # Compact the timestamp into the file-name form
            # YYYYMMDDhhmmssfff.
            t = timestamp
            s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[
                17:19] + t[20:23]
            print("Processing shot", s)

            if params.format.file_format == "pickle":
                if evt.get("skip_event"):
                    print("Skipping event", id)
                    continue
                # the data needs to have already been processed and put
                # into the event by psana
                data = evt.get(params.format.pickle.out_key)
                if data is None:
                    print("No data")
                    continue

                # set output paths according to the templates
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".pickle")
                print("Saving", path)
                easy_pickle.dump(path, data)
            elif params.format.file_format == "cbf":
                if params.format.cbf.mode == "cspad":
                    # get numpy array, 32x185x388
                    data = cspad_cbf_tbx.get_psana_corrected_data(
                        psana_det,
                        evt,
                        use_default=False,
                        dark=True,
                        common_mode=None,
                        apply_gain_mask=params.format.cbf.cspad.
                        gain_mask_value is not None,
                        gain_mask_value=params.format.cbf.cspad.
                        gain_mask_value,
                        per_pixel_gain=False)
                    distance = cspad_tbx.env_distance(
                        params.input.address, run.env(),
                        params.format.cbf.detz_offset)
                elif params.format.cbf.mode == "rayonix":
                    data = rayonix_tbx.get_data_from_psana_event(
                        evt, params.input.address)
                    # Rayonix has no per-event distance reading; use the
                    # configured offset directly.
                    distance = params.format.cbf.detz_offset

                if distance is None:
                    print("No distance, skipping shot")
                    continue

                if self.params.format.cbf.override_energy is None:
                    wavelength = cspad_tbx.evt_wavelength(evt)
                    if wavelength is None:
                        print("No wavelength, skipping shot")
                        continue
                else:
                    # Convert the override energy (eV) to wavelength
                    # (Angstrom) -- hc = 12398.4187 eV*A.
                    wavelength = 12398.4187 / self.params.format.cbf.override_energy

                # stitch together the header, data and metadata into the
                # final dxtbx format object
                if params.format.cbf.mode == "cspad":
                    image = cspad_cbf_tbx.format_object_from_data(
                        base_dxtbx,
                        data,
                        distance,
                        wavelength,
                        timestamp,
                        params.input.address,
                        round_to_int=False)
                elif params.format.cbf.mode == "rayonix":
                    image = rayonix_tbx.format_object_from_data(
                        base_dxtbx, data, distance, wavelength, timestamp,
                        params.input.address)
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".cbf")
                print("Saving", path)

                # write the file
                import pycbf
                image._cbf_handle.write_widefile(path.encode(), pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

        run.end()
    ds.end()
def run(self):
    """Process all images assigned to this MPI rank (legacy Python 2
    variant).

    Parses parameters, opens the psana data source, slices the run's
    event list round-robin across MPI ranks, and writes each shot out
    as a pickle or cspad CBF.  Raises Usage on missing required
    parameters and Sorry on missing paths/calibrations.
    """
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Required identification of the data set.
    if params.input.experiment is None or \
       params.input.run_num is None or \
       params.input.address is None:
        raise Usage(self.usage)

    # Per-format required parameters; any other format is an error.
    if params.format.file_format == "cbf":
        if params.format.cbf.detz_offset is None:
            raise Usage(self.usage)
    elif params.format.file_format == "pickle":
        if params.format.pickle.cfg is None:
            raise Usage(self.usage)
    else:
        raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    # Save the parameters.
    self.params = params
    self.options = options

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    # set up psana
    if params.format.file_format == "pickle":
        psana.setConfigFile(params.format.pickle.cfg)

    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                          params.input.run_num)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
        # NOTE(review): src is assigned but not used below -- presumably
        # kept for its side effects or historical reasons; verify.
        src = psana.Source('DetInfo(%s)' % params.input.address)
        psana_det = psana.Detector(params.input.address, ds.env())

    # No cap given means analyze all events (sys.maxint sentinel;
    # Python 2 only).
    if params.dispatch.max_events is None:
        max_events = sys.maxint
    else:
        max_events = params.dispatch.max_events

    for run in ds.runs():
        if params.format.file_format == "cbf":
            # load a header only cspad cbf from the slac metrology
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                run, params.input.address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" %
                            run.run())

        # list of all events
        times = run.times()
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns
        # each process events such that they get every Nth event, where
        # N is the number of processes.
        mytimes = [
            times[i] for i in xrange(nevents) if (i + rank) % size == 0
        ]

        for i in xrange(len(mytimes)):
            evt = run.event(mytimes[i])
            id = evt.get(psana.EventId)
            print "Event #", i, " has id:", id

            timestamp = cspad_tbx.evt_timestamp(
                cspad_tbx.evt_time(evt))  # human readable format
            if timestamp is None:
                print "No timestamp, skipping shot"
                continue
            # Compact the timestamp into the file-name form
            # YYYYMMDDhhmmssfff.
            t = timestamp
            s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[
                17:19] + t[20:23]
            print "Processing shot", s

            if params.format.file_format == "pickle":
                if evt.get("skip_event"):
                    print "Skipping event", id
                    continue
                # the data needs to have already been processed and put
                # into the event by psana
                data = evt.get(params.format.pickle.out_key)
                if data is None:
                    print "No data"
                    continue

                # set output paths according to the templates
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".pickle")
                print "Saving", path
                easy_pickle.dump(path, data)
            elif params.format.file_format == "cbf":
                # get numpy array, 32x185x388
                data = cspad_cbf_tbx.get_psana_corrected_data(
                    psana_det,
                    evt,
                    use_default=False,
                    dark=True,
                    common_mode=None,
                    apply_gain_mask=params.format.cbf.gain_mask_value
                    is not None,
                    gain_mask_value=params.format.cbf.gain_mask_value,
                    per_pixel_gain=False)
                distance = cspad_tbx.env_distance(
                    params.input.address, run.env(),
                    params.format.cbf.detz_offset)
                if distance is None:
                    print "No distance, skipping shot"
                    continue

                if self.params.format.cbf.override_energy is None:
                    wavelength = cspad_tbx.evt_wavelength(evt)
                    if wavelength is None:
                        print "No wavelength, skipping shot"
                        continue
                else:
                    # Convert the override energy (eV) to wavelength
                    # (Angstrom) -- hc = 12398.4187 eV*A.
                    wavelength = 12398.4187 / self.params.format.cbf.override_energy

                # stitch together the header, data and metadata into the
                # final dxtbx format object
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx, data, distance, wavelength, timestamp,
                    params.input.address)
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".cbf")
                print "Saving", path

                # write the file
                import pycbf
                cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

        run.end()
    ds.end()
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    Reads per-shot beam metadata (Si-foil thickness, timestamp,
    wavelength, pulse length, beam charge, injector position, laser
    status) into instance attributes.  If a required quantity is
    missing and beam-status checking is enabled, increments the failure
    counter and marks the event with a "skip_event" flag for downstream
    modules.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    # Increase the event counter, even if this event is to be skipped.
    self.nshots += 1
    if (evt.get("skip_event")):
        return

    sifoil = cspad_tbx.env_sifoil(env)
    if self.check_beam_status and sifoil is None:
        self.nfail += 1
        self.logger.warning("event(): no Si-foil thickness, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return
    # A Si-foil change mid-run is suspicious but not fatal; log it.
    if (self.sifoil is not None and self.sifoil != sifoil):
        self.logger.warning(
            "event(): Si-foil changed mid-run: % 8i -> % 8d" %
            (self.sifoil, sifoil))
        self.sifoil = sifoil
    if self.verbose:
        self.logger.info("Si-foil thickness: %i" % sifoil)

    self.evt_time = cspad_tbx.evt_time(
        evt)  # tuple of seconds, milliseconds
    self.timestamp = cspad_tbx.evt_timestamp(
        self.evt_time)  # human readable format
    if (self.timestamp is None):
        self.nfail += 1
        self.logger.warning("event(): no timestamp, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return
    if self.verbose:
        self.logger.info(self.timestamp)

    if self.override_energy is None:
        self.wavelength = cspad_tbx.evt_wavelength(evt, self.delta_k)
        if self.wavelength is None:
            if self.check_beam_status:
                self.nfail += 1
                self.logger.warning("event(): no wavelength, shot skipped")
                evt.put(skip_event_flag(), "skip_event")
                return
            else:
                # Best-effort mode: record 0 rather than skipping.
                self.wavelength = 0
    else:
        # Convert the override energy (eV) to wavelength (Angstrom) --
        # hc = 12398.4187 eV*A.
        self.wavelength = 12398.4187 / self.override_energy
    if self.verbose:
        self.logger.info("Wavelength: %.4f" % self.wavelength)

    self.pulse_length = cspad_tbx.evt_pulse_length(evt)
    if self.pulse_length is None:
        if self.check_beam_status:
            self.nfail += 1
            self.logger.warning("event(): no pulse length, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
        else:
            # Best-effort mode: record 0 rather than skipping.
            self.pulse_length = 0
    if self.verbose:
        self.logger.info("Pulse length: %s" % self.pulse_length)

    self.beam_charge = cspad_tbx.evt_beam_charge(evt)
    if self.beam_charge is None:
        if self.check_beam_status:
            self.nfail += 1
            self.logger.warning("event(): no beam charge, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
        else:
            # Best-effort mode: record 0 rather than skipping.
            self.beam_charge = 0
    if self.verbose:
        self.logger.info("Beam charge: %s" % self.beam_charge)

    self.injector_xyz = cspad_tbx.env_injector_xyz(env)
    #if self.injector_xyz is not None:
    #self.logger.info("injector_z: %i" %self.injector_xyz[2].value)

    # Track laser on/off state and how long it has been in that state.
    self.laser_1_status.set_status(
        cspad_tbx.env_laser_status(env, laser_id=1), self.evt_time)
    self.laser_4_status.set_status(
        cspad_tbx.env_laser_status(env, laser_id=4), self.evt_time)
    self.laser_1_ms_since_change = self.laser_1_status.ms_since_last_status_change(
        self.evt_time)
    self.laser_4_ms_since_change = self.laser_4_status.ms_since_last_status_change(
        self.evt_time)
    if self.verbose:
        if self.laser_1_ms_since_change is not None:
            self.logger.info("ms since laser 1 status change: %i" %
                             self.laser_1_ms_since_change)
        if self.laser_4_ms_since_change is not None:
            self.logger.info("ms since laser 4 status change: %i" %
                             self.laser_4_ms_since_change)
        if self.laser_1_status is not None and self.laser_1_status.status is not None:
            self.logger.info("Laser 1 status: %i" %
                             int(self.laser_1_status.status))
        if self.laser_4_status is not None and self.laser_4_status.status is not None:
            self.logger.info("Laser 4 status: %i" %
                             int(self.laser_4_status.status))
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    Reads per-shot beam metadata (Si-foil thickness, timestamp,
    wavelength, pulse length, beam charge, injector position, laser
    status) into instance attributes.  If a required quantity is
    missing and beam-status checking is enabled, increments the failure
    counter and marks the event with a "skip_event" flag for downstream
    modules.  NOTE(review): this is a near-duplicate of another event()
    in this file -- consider consolidating.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    # Increase the event counter, even if this event is to be skipped.
    self.nshots += 1
    if (evt.get("skip_event")):
        return

    sifoil = cspad_tbx.env_sifoil(env)
    if self.check_beam_status and sifoil is None:
        self.nfail += 1
        self.logger.warning("event(): no Si-foil thickness, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return
    # A Si-foil change mid-run is suspicious but not fatal; log it.
    if (self.sifoil is not None and self.sifoil != sifoil):
        self.logger.warning("event(): Si-foil changed mid-run: % 8i -> % 8d" % (self.sifoil, sifoil))
        self.sifoil = sifoil
    if self.verbose:
        self.logger.info("Si-foil thickness: %i" %sifoil)

    self.evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
    self.timestamp = cspad_tbx.evt_timestamp(self.evt_time) # human readable format
    if (self.timestamp is None):
        self.nfail += 1
        self.logger.warning("event(): no timestamp, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return
    if self.verbose:
        self.logger.info(self.timestamp)

    if self.override_energy is None:
        self.wavelength = cspad_tbx.evt_wavelength(evt, self.delta_k)
        if self.wavelength is None:
            if self.check_beam_status:
                self.nfail += 1
                self.logger.warning("event(): no wavelength, shot skipped")
                evt.put(skip_event_flag(), "skip_event")
                return
            else:
                # Best-effort mode: record 0 rather than skipping.
                self.wavelength = 0
    else:
        # Convert the override energy (eV) to wavelength (Angstrom) --
        # hc = 12398.4187 eV*A.
        self.wavelength = 12398.4187/self.override_energy
    if self.verbose:
        self.logger.info("Wavelength: %.4f" %self.wavelength)

    self.pulse_length = cspad_tbx.evt_pulse_length(evt)
    if self.pulse_length is None:
        if self.check_beam_status:
            self.nfail += 1
            self.logger.warning("event(): no pulse length, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
        else:
            # Best-effort mode: record 0 rather than skipping.
            self.pulse_length = 0
    if self.verbose:
        self.logger.info("Pulse length: %s" %self.pulse_length)

    self.beam_charge = cspad_tbx.evt_beam_charge(evt)
    if self.beam_charge is None:
        if self.check_beam_status:
            self.nfail += 1
            self.logger.warning("event(): no beam charge, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
        else:
            # Best-effort mode: record 0 rather than skipping.
            self.beam_charge = 0
    if self.verbose:
        self.logger.info("Beam charge: %s" %self.beam_charge)

    self.injector_xyz = cspad_tbx.env_injector_xyz(env)
    #if self.injector_xyz is not None:
    #self.logger.info("injector_z: %i" %self.injector_xyz[2].value)

    # Track laser on/off state and how long it has been in that state.
    self.laser_1_status.set_status(cspad_tbx.env_laser_status(env, laser_id=1), self.evt_time)
    self.laser_4_status.set_status(cspad_tbx.env_laser_status(env, laser_id=4), self.evt_time)
    self.laser_1_ms_since_change = self.laser_1_status.ms_since_last_status_change(self.evt_time)
    self.laser_4_ms_since_change = self.laser_4_status.ms_since_last_status_change(self.evt_time)
    if self.verbose:
        if self.laser_1_ms_since_change is not None:
            self.logger.info("ms since laser 1 status change: %i" %self.laser_1_ms_since_change)
        if self.laser_4_ms_since_change is not None:
            self.logger.info("ms since laser 4 status change: %i" %self.laser_4_ms_since_change)
        if self.laser_1_status is not None and self.laser_1_status.status is not None:
            self.logger.info("Laser 1 status: %i" %int(self.laser_1_status.status))
        if self.laser_4_status is not None and self.laser_4_status.status is not None:
            self.logger.info("Laser 4 status: %i" %int(self.laser_4_status.status))
times = times[params.skip_events:] nevents = min(len(times),max_events) # chop the list into pieces, depending on rank. This assigns each process # events such that the get every Nth event where N is the number of processes mytimes = [times[i] for i in xrange(nevents) if (i+rank)%size == 0] for i, t in enumerate(mytimes): evt = run.event(t) accepted, data, spectrum, dc_offset, all_data, all_data_raw = spf.filter_event(evt, filter_name) if not accepted: continue print cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt)), "Publishing data for event", i #header = "Event %d, m/f: %7.7f, f: %d"%(i, peak_max/flux, flux) header = "Event %d"%(i) if rank == 0: fee = Image(header, "FEE", data) # make a 2D plot publish.send("FEE", fee) # send to the display spectrumplot = XYPlot(header, 'summed 1D trace', range(data.shape[1]), spectrum) # make a 1D plot publish.send("SPECTRUM", spectrumplot) # send to the display fee = Image(header, "DC_OFFSET", dc_offset) # make a 2D plot publish.send("DC_OFFSET", fee) # send to the display fee = Image(header, "ALL_FEE", all_data) # make a 2D plot publish.send("ALL_FEE", fee) # send to the display