def __init__(self, image_file, **kwargs):
    """Initialise the format class for *image_file*.

    Loads psana calibration data for the run (pedestals, gain mask),
    caches the static pixel mask, indexes the events in the data source
    and records the detector distance.

    @param image_file locator file accepted by self.understand()
    @param kwargs passed through to FormatXTCCspad.__init__
    """
    # NOTE(review): assert is stripped under python -O; raise instead if
    # this check must always run.
    assert (self.understand(image_file))
    FormatXTCCspad.__init__(self, image_file, locator_scope=d9114_locator_scope, **kwargs)
    self._ds = FormatXTC._get_datasource(image_file, self.params)
    self.run_number = self.params.run[0]
    self.cspad = psana.Detector(self.params.detector_address[0])
    # Per-pixel dark (pedestal) values and boolean gain map for this run.
    self.dark = self.cspad.pedestals(self.run_number).astype(np.float64)
    self.gain = self.cspad.gain_mask(self.run_number) == 1.
    if CSPAD_MASK is not None:
        self.cspad_mask = CSPAD_MASK
    else:
        # No static mask supplied: treat every pixel as usable.
        self.cspad_mask = np.ones_like(self.gain)
    # Accesses a psana-private attribute for the nominal gain factor.
    self.nominal_gain_val = self.cspad._gain_mask_factor
    self.populate_events()
    self.n_images = len(self.times)
    # NOTE(review): self.params is re-read here after already having been
    # used above (presumably set by the base-class __init__) -- confirm
    # the two values are expected to agree.
    self.params = FormatXTCD9114.get_params(image_file)
    self._set_pppg_args()
    self._set_psf()
    self._set_2d_img_info()
    self.detector_distance = env_distance(self.params.detector_address[0], self._ds.env(), self.params.cspad.detz_offset)
def event(self, evt, env):
    """Compute and publish a radial average for one L1Accept transition.

    The event is skipped when no detector distance is available or when
    the detector device type is not supported.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_radial_average, self).event(evt, env)
    if evt.get("skip_event"):
        return

    # This module only applies to detectors for which a distance is
    # available.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        self.nfail += 1
        self.logger.warning("event(): no distance, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return

    # See r17537 of mod_average.py.
    device = cspad_tbx.address_split(self.address)[2]
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
    elif device == 'marccd':
        pixel_size = 0.079346
        saturated_value = 2**16 - 1
    else:
        # Fix: an unrecognized device previously fell through and left
        # pixel_size/saturated_value unbound, raising NameError below.
        # Skip the shot instead, mirroring the no-distance path.
        self.nfail += 1
        self.logger.warning("event(): unsupported device %s, shot skipped" % device)
        evt.put(skip_event_flag(), "skip_event")
        return

    d = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=pixel_size * self.beam_center[0],
        beam_center_y=pixel_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=distance,
        pixel_size=pixel_size,
        saturated_value=saturated_value,
        timestamp=self.timestamp,
        wavelength=self.wavelength,
        xtal_target=self.m_xtal_target)

    from xfel.command_line.radial_average import run
    args = [
        "file_path=XTC stream",
        "xfel_target=%s" % self.m_xtal_target,
        "verbose=False"
    ]

    # Compact timestamp (YYYYMMDDhhmmssmmm) used for file naming.
    t = self.timestamp
    s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]

    if self._dirname is not None:
        dest_path = os.path.join(self._dirname, self._basename + s + ".txt")
        args.append("output_file=%s" % dest_path)

    self.logger.info("Calculating radial average for image %s" % s)
    xvals, results = run(args, d)

    # Publish the radial average so downstream modules can pick it up.
    evt.put(xvals, "cctbx.xfel.radial_average.xvals")
    evt.put(results, "cctbx.xfel.radial_average.results")
def event(self, evt, env):
    """Append one event to the HDF5 file.

    Creates a HDF5 group for the event, unless the event carries a
    "skip_event" object with value @c True.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_hdf5, self).event(evt, env)
    if evt.get('skip_event'):
        return

    # If no detector distance is available set it to NaN, since
    # Python's None is not permitted in HDF5.
    measured = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    distance = float('nan') if measured is None else measured

    px = cspad_tbx.pixel_size
    saturation = cspad_tbx.cspad_saturated_value
    cspad_tbx.hdf5pack(
        hdf5_file=self._file,
        active_areas=self.active_areas,
        address=self.address,
        attenuation=self.sifoil,
        beam_center_x=px * self.beam_center[0],
        beam_center_y=px * self.beam_center[1],
        ccd_image_saturation=saturation,
        data=self.cspad_img,
        distance=distance,
        pixel_size=px,
        pulse_length=self.pulse_length,
        saturated_value=saturation,
        timestamp=self.timestamp,
        wavelength=self.wavelength,
        xtal_target=repr(None))
def event(self, evt, env):
    """Export the detector image for one L1Accept transition as a bitmap.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_dump_bitmap, self).event(evt, env)
    if evt.get('skip_event'):
        return

    # Where the sample-detector distance is not available, set it to
    # zero.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        distance = 0

    # See r17537 of mod_average.py.
    # NOTE(review): devices other than Cspad/marccd leave pixel_size and
    # saturated_value unbound -- confirm the supported device list.
    device = cspad_tbx.address_split(self.address)[2]
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
    elif device == 'marccd':
        pixel_size = 0.079346
        saturated_value = 2**16 - 1

    from iotbx.detectors import FlexImage_d as FlexImage
    vendortype = device
    # Fix: removed the dead local 'saturation = 65535'; the FlexImage is
    # constructed with the device-specific saturated_value instead.
    flex_img = FlexImage(rawdata=self.cspad_img,
                         binning=self._binning,
                         vendortype=vendortype,
                         brightness=self._brightness,
                         saturation=saturated_value)
    flex_img.setWindow(0, 0, 1)
    flex_img.adjust(color_scheme=self._color_scheme)
    flex_img.prep_string()

    import Image
    # XXX is size//self._binning safe here?
    pil_img = Image.fromstring('RGB',
                               (flex_img.size2() // self._binning,
                                flex_img.size1() // self._binning),
                               flex_img.export_string)

    # The output path should not contain any funny characters which may
    # not work in all environments.  This constructs a sequence number a
    # la evt_seqno() from the dictionary's timestamp.
    t = self.timestamp
    s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
    path = os.path.join(self._dirname, self._basename + s + '.' + self._ext)

    self._logger.info("Exporting %s" % path)
    # Fix: context manager guarantees the stream is closed even if
    # save() raises.
    with open(path, 'wb') as tmp_stream:
        pil_img.save(tmp_stream, format=self._format)
def event(self, evt, env):
    """Package the detector image into an image dict and publish it on
    the event under self.m_out_key.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_image_dict, self).event(evt, env)
    if evt.get("skip_event"):
        return

    # This module only applies to detectors for which a distance is
    # available.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        self.nfail += 1
        self.logger.warning("event(): no distance, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return

    device = cspad_tbx.address_split(self.address)[2]
    self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))

    # See r17537 of mod_average.py.
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
    elif device == 'Rayonix':
        pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
        saturated_value = rayonix_tbx.rayonix_saturated_value
    elif device == 'marccd':
        pixel_size = evt.get("marccd_pixel_size")
        saturated_value = evt.get("marccd_saturated_value")
        if distance == 0:
            distance = evt.get("marccd_distance")
    else:
        # Fix: an unrecognized device previously fell through and left
        # pixel_size/saturated_value unbound, raising NameError below.
        # Skip the shot instead, mirroring the no-distance path.
        self.nfail += 1
        self.logger.warning("event(): unsupported device %s, shot skipped" % device)
        evt.put(skip_event_flag(), "skip_event")
        return

    d = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=pixel_size * self.beam_center[0],
        beam_center_y=pixel_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=distance,
        pixel_size=pixel_size,
        saturated_value=saturated_value,
        timestamp=self.timestamp,
        wavelength=self.wavelength)
    evt.put(d, self.m_out_key)

    # Diagnostic message emitted only when all the processing is done.
    if env.subprocess() >= 0:
        self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                         (env.subprocess(), self.nshots, self.timestamp))
    else:
        self.logger.info("Accepted #%05d @ %s" % (self.nshots, self.timestamp))
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.  It
    outputs the detector image associated with the event @p evt to the
    file system (pickle or TIFF, per self._format).

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_dump, self).event(evt, env)
    if evt.get('skip_event'):
        return
    if self.cspad_img is None:
        print("No image to save for %s" % self.timestamp)
        return

    # Where the sample-detector distance is not available, set it to
    # zero.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        distance = 0

    # See r17537 of mod_average.py.
    # NOTE(review): devices other than Cspad/Rayonix/marccd leave
    # pixel_size/saturated_value/output_filename unbound -- confirm the
    # supported device list.
    device = cspad_tbx.address_split(self.address)[2]
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
        output_filename = self._basename
    elif device == 'Rayonix':
        pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
        saturated_value = rayonix_tbx.rayonix_saturated_value
        output_filename = self._basename
    elif device == 'marccd':
        if distance == 0:
            distance = evt.get('marccd_distance')
        pixel_size = 0.079346
        saturated_value = 2**16 - 1
        output_filename = self._basename + evt.get(str, 'mccd_name') + "_"

    d = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=pixel_size * self.beam_center[0],
        beam_center_y=pixel_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=distance,
        pixel_size=pixel_size,
        saturated_value=saturated_value,
        timestamp=self.timestamp,
        wavelength=self.wavelength)

    if self._format == "pickle":
        cspad_tbx.dwritef(d, self._dirname, output_filename)
    elif self._format == "tiff":
        cspad_tbx.write_tiff(d, self._dirname, output_filename)
    # Fix: dropped the trailing dead store 'output_filename = None'; the
    # local goes out of scope here anyway.
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.  It
    outputs the detector image associated with the event @p evt to the
    file system (pickle or TIFF, per self._format).

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_dump, self).event(evt, env)
    if evt.get('skip_event'):
        return
    if self.cspad_img is None:
        # Fix: converted the Python-2-only print statement to a print()
        # call, consistent with the parallel implementation in this file.
        print("No image to save for %s" % self.timestamp)
        return

    # Where the sample-detector distance is not available, set it to
    # zero.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        distance = 0

    # See r17537 of mod_average.py.
    # NOTE(review): devices other than Cspad/Rayonix/marccd leave
    # pixel_size/saturated_value/output_filename unbound -- confirm the
    # supported device list.
    device = cspad_tbx.address_split(self.address)[2]
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
        output_filename = self._basename
    elif device == 'Rayonix':
        pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
        saturated_value = rayonix_tbx.rayonix_saturated_value
        output_filename = self._basename
    elif device == 'marccd':
        if distance == 0:
            distance = evt.get('marccd_distance')
        pixel_size = 0.079346
        saturated_value = 2**16 - 1
        output_filename = self._basename + evt.get(str, 'mccd_name') + "_"

    d = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=pixel_size * self.beam_center[0],
        beam_center_y=pixel_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=distance,
        pixel_size=pixel_size,
        saturated_value=saturated_value,
        timestamp=self.timestamp,
        wavelength=self.wavelength)

    if self._format == "pickle":
        cspad_tbx.dwritef(d, self._dirname, output_filename)
    elif self._format == "tiff":
        cspad_tbx.write_tiff(d, self._dirname, output_filename)
    # Fix: dropped the trailing dead store 'output_filename = None'; the
    # local goes out of scope here anyway.
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    Applies the configured per-shot corrections to self.cspad_img (skew
    rejection, inactive/hot/gain/noise masking, sigma scaling, photon
    counting, background subtraction) and then folds the corrected image
    into the running max/mean/std accumulators.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(average_mixin, self).event(evt, env)
    if evt.get('skip_event'):
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDs2' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self._nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), 'skip_event')
            return
    else:
        distance = float('nan')

    if ("skew" in self.flags):
        # Take out inactive pixels
        if self.roi is not None:
            pixels = self.cspad_img[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
            dark_mask = self.dark_mask[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
            pixels = pixels.as_1d().select(dark_mask.as_1d())
        else:
            pixels = self.cspad_img.as_1d().select(self.dark_mask.as_1d()).as_double()
        stats = scitbx.math.basic_statistics(pixels.as_double())
        #stats.show()
        self.logger.info("skew: %.3f" % stats.skew)
        self.logger.info("kurtosis: %.3f" % stats.kurtosis)
        if 0:
            # Disabled debugging aid.  NOTE(review): contains a Python 2
            # print statement; must be ported if ever re-enabled.
            from matplotlib import pyplot
            hist_min, hist_max = flex.min(flex_cspad_img.as_double()), flex.max(flex_cspad_img.as_double())
            print hist_min, hist_max
            n_slots = 100
            n, bins, patches = pyplot.hist(flex_cspad_img.as_1d().as_numpy_array(), bins=n_slots, range=(hist_min, hist_max))
            pyplot.show()

        # XXX This skew threshold probably needs fine-tuning
        skew_threshold = 0.35
        if stats.skew < skew_threshold:
            self._nfail += 1
            self.logger.warning("event(): skew < %f, shot skipped" % skew_threshold)
            evt.put(skip_event_flag(), 'skip_event')
            return
        #self.cspad_img *= stats.skew

    if ("inactive" in self.flags):
        # Zero pixels whose dark standard deviation is non-positive.
        self.cspad_img.set_selected(self.dark_stddev <= 0, 0)

    if ("noelastic" in self.flags):
        ELASTIC_THRESHOLD = self.elastic_threshold
        self.cspad_img.set_selected(self.cspad_img > ELASTIC_THRESHOLD, 0)

    if self.hot_threshold is not None:
        # Zero pixels that were already bright in the dark image.
        HOT_THRESHOLD = self.hot_threshold
        self.cspad_img.set_selected(self.dark_img > HOT_THRESHOLD, 0)

    if self.gain_map is not None and self.gain_threshold is not None:
        # XXX comparing each pixel to a moving average would probably be better
        # since the gain should vary approximately smoothly over different areas
        # of the detector
        GAIN_THRESHOLD = self.gain_threshold
        #self.logger.debug(
        #  "rejecting: %i" % (self.gain_map > GAIN_THRESHOLD).count(True))
        self.cspad_img.set_selected(self.gain_map > GAIN_THRESHOLD, 0)

    if ("nonoise" in self.flags):
        NOISE_THRESHOLD = self.noise_threshold
        self.cspad_img.set_selected(self.cspad_img < NOISE_THRESHOLD, 0)

    if ("sigma_scaling" in self.flags):
        self.do_sigma_scaling()

    if ("symnoise" in self.flags):
        # Zero pixels within +/- SYMNOISE_THRESHOLD of zero.
        SYMNOISE_THRESHOLD = self.symnoise_threshold
        self.cspad_img.set_selected((-SYMNOISE_THRESHOLD < self.cspad_img) & (self.cspad_img < SYMNOISE_THRESHOLD), 0)

    if ("output" in self.flags):
        # Append the corrected image to a pickle stream on disk.
        import pickle, os
        if (not os.path.isdir(self.pickle_dirname)):
            os.makedirs(self.pickle_dirname)
        flexdata = flex.int(self.cspad_img.astype(numpy.int32))
        d = cspad_tbx.dpack(
            address=self.address,
            data=flexdata,
            timestamp=cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
        )
        G = open(os.path.join(".", self.pickle_dirname) + "/" + self.pickle_basename, "ab")
        pickle.dump(d, G, pickle.HIGHEST_PROTOCOL)
        G.close()

    if self.photon_threshold is not None and self.two_photon_threshold is not None:
        self.do_photon_counting()

    if self.background_path is not None:
        self.cspad_img -= self.background_img

    # t and self._sum_time are a two-long arrays of seconds and
    # milliseconds which hold time with respect to the base time.
    t = [t1 - t2 for (t1, t2) in zip(cspad_tbx.evt_time(evt), self._metadata['time_base'])]
    if self._nmemb == 0:
        # The peers metadata item is a bit field where a bit is set if
        # the partial sum from the corresponding worker process is
        # pending.  If this is the first frame a worker process sees,
        # set its corresponding bit in the bit field since it will
        # contribute a partial sum.
        if env.subprocess() >= 0:
            self._lock.acquire()
            if 'peers' in self._metadata.keys():
                self._metadata['peers'] |= (1 << env.subprocess())
            else:
                self._metadata['peers'] = (1 << env.subprocess())
            self._lock.release()

        # First member: initialise all accumulators from this image.
        self._sum_distance = distance
        self._sum_time = (t[0], t[1])
        self._sum_wavelength = self.wavelength
        if self._have_max:
            self._max_img = self.cspad_img.deep_copy()
        if self._have_mean:
            self._sum_img = self.cspad_img.deep_copy()
        if self._have_std:
            self._ssq_img = flex.pow2(self.cspad_img)
    else:
        # Subsequent members: accumulate.
        self._sum_distance += distance
        self._sum_time = (self._sum_time[0] + t[0], self._sum_time[1] + t[1])
        self._sum_wavelength += self.wavelength
        if self._have_max:
            # Element-wise running maximum.
            sel = (self.cspad_img > self._max_img).as_1d()
            self._max_img.as_1d().set_selected(sel, self.cspad_img.as_1d().select(sel))
        if self._have_mean:
            self._sum_img += self.cspad_img
        if self._have_std:
            self._ssq_img += flex.pow2(self.cspad_img)

    self._nmemb += 1
def run(self):
    """ Process all images assigned to this thread.

    Validates the input parameters, sets up psana and MPI, then iterates
    over this rank's share of the events in the run, writing each shot
    to disk as a pickle or CBF file.
    """
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Validate the minimal set of required parameters for each output
    # format before doing any work.
    if params.input.experiment is None or \
       params.input.run_num is None or \
       params.input.address is None:
        raise Usage(self.usage)

    if params.format.file_format == "cbf":
        if params.format.cbf.detz_offset is None:
            raise Usage(self.usage)
    elif params.format.file_format == "pickle":
        if params.input.cfg is None:
            raise Usage(self.usage)
    else:
        raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    # Environment variable redirect for CBFLib temporary CBF_TMP_XYZ file output
    if params.format.file_format == "cbf":
        if params.output.tmp_output_dir is None:
            tmp_dir = os.path.join(params.output.output_dir, '.tmp')
        else:
            tmp_dir = os.path.join(params.output.tmp_output_dir, '.tmp')
        if not os.path.exists(tmp_dir):
            with show_mail_on_error():
                try:
                    os.makedirs(tmp_dir)
                    # Can fail if running multiprocessed - that's OK if the folder was created
                except OSError as e:  # In Python 2, a FileExistsError is just an OSError
                    if e.errno != errno.EEXIST:  # If this OSError is not a FileExistsError
                        raise
        os.environ['CBF_TMP_DIR'] = tmp_dir

    # Save the parameters
    self.params = params
    self.options = options

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    # set up psana
    if params.input.cfg is not None:
        psana.setConfigFile(params.input.cfg)
    if params.input.calib_dir is not None:
        psana.setOption('psana.calib-dir', params.input.calib_dir)

    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment, params.input.run_num)
    if params.input.xtc_dir is not None:
        dataset_name = "exp=%s:run=%s:idx:dir=%s" % (params.input.experiment, params.input.run_num, params.input.xtc_dir)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
        src = psana.Source('DetInfo(%s)' % params.input.address)
        psana_det = psana.Detector(params.input.address, ds.env())

    # set this to sys.maxsize to analyze all events
    if params.dispatch.max_events is None:
        max_events = sys.maxsize
    else:
        max_events = params.dispatch.max_events

    for run in ds.runs():
        if params.format.file_format == "cbf":
            if params.format.cbf.mode == "cspad":
                # load a header only cspad cbf from the slac metrology
                base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, params.input.address)
                if base_dxtbx is None:
                    raise Sorry("Couldn't load calibration file for run %d" % run.run())
            elif params.format.cbf.mode == "rayonix":
                # load a header only rayonix cbf from the input parameters
                detector_size = rayonix_tbx.get_rayonix_detector_dimensions(ds.env())
                base_dxtbx = rayonix_tbx.get_dxtbx_from_params(params.format.cbf.rayonix, detector_size)

        # list of all events
        times = run.times()
        if params.dispatch.selected_events:
            times = [t for t in times if cspad_tbx.evt_timestamp((t.seconds(), t.nanoseconds() / 1e6)) in params.input.timestamp]
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns each
        # process events such that they get every Nth event where N is the
        # number of processes
        mytimes = [times[i] for i in range(nevents) if (i + rank) % size == 0]

        for i in range(len(mytimes)):
            evt = run.event(mytimes[i])
            id = evt.get(psana.EventId)
            print("Event #", i, " has id:", id)

            timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))  # human readable format
            if timestamp is None:
                print("No timestamp, skipping shot")
                continue
            if evt.get("skip_event") or "skip_event" in [key.key() for key in evt.keys()]:
                print("Skipping event", timestamp)
                continue

            # Compact timestamp (YYYYMMDDhhmmssmmm) for output file names.
            t = timestamp
            s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
            print("Processing shot", s)

            if params.format.file_format == "pickle":
                if evt.get("skip_event"):
                    print("Skipping event", id)
                    continue
                # the data needs to have already been processed and put into the event by psana
                data = evt.get(params.format.pickle.out_key)
                if data is None:
                    print("No data")
                    continue
                # set output paths according to the templates
                path = os.path.join(params.output.output_dir, "shot-" + s + ".pickle")
                print("Saving", path)
                easy_pickle.dump(path, data)
            elif params.format.file_format == "cbf":
                if params.format.cbf.mode == "cspad":
                    # get numpy array, 32x185x388
                    data = cspad_cbf_tbx.get_psana_corrected_data(
                        psana_det,
                        evt,
                        use_default=False,
                        dark=True,
                        common_mode=None,
                        apply_gain_mask=params.format.cbf.cspad.gain_mask_value is not None,
                        gain_mask_value=params.format.cbf.cspad.gain_mask_value,
                        per_pixel_gain=False)
                    distance = cspad_tbx.env_distance(params.input.address, run.env(), params.format.cbf.detz_offset)
                elif params.format.cbf.mode == "rayonix":
                    data = rayonix_tbx.get_data_from_psana_event(evt, params.input.address)
                    distance = params.format.cbf.detz_offset
                if distance is None:
                    print("No distance, skipping shot")
                    continue

                if self.params.format.cbf.override_energy is None:
                    wavelength = cspad_tbx.evt_wavelength(evt)
                    if wavelength is None:
                        print("No wavelength, skipping shot")
                        continue
                else:
                    # Convert the override energy (eV) to a wavelength.
                    wavelength = 12398.4187 / self.params.format.cbf.override_energy

                # stitch together the header, data and metadata into the final dxtbx format object
                if params.format.cbf.mode == "cspad":
                    image = cspad_cbf_tbx.format_object_from_data(base_dxtbx, data, distance, wavelength, timestamp, params.input.address, round_to_int=False)
                elif params.format.cbf.mode == "rayonix":
                    image = rayonix_tbx.format_object_from_data(base_dxtbx, data, distance, wavelength, timestamp, params.input.address)
                path = os.path.join(params.output.output_dir, "shot-" + s + ".cbf")
                print("Saving", path)

                # write the file
                import pycbf
                image._cbf_handle.write_widefile(path.encode(), pycbf.CBF,
                                                 pycbf.MIME_HEADERS | pycbf.MSG_DIGEST | pycbf.PAD_4K, 0)

        run.end()
    ds.end()
def process_event(self, run, timestamp):
    """Process a single event from a run.

    Consults the debug log to decide whether the event should be
    skipped, loads and corrects the detector data, builds the dxtbx
    image object, configures per-event output paths and masks, and runs
    spotfinding.

    Fix: converted Python-2-only syntax (print statements and
    'except Exception, e') to Python 3, consistent with the rest of the
    file.

    @param run psana run object
    @param timestamp psana timestamp object
    """
    ts = cspad_tbx.evt_timestamp((timestamp.seconds(), timestamp.nanoseconds() / 1e6))
    if ts is None:
        print("No timestamp, skipping shot")
        return

    # Optional whitelist of timestamps to process.
    if len(self.params_cache.debug.event_timestamp) > 0 and ts not in self.params_cache.debug.event_timestamp:
        return

    # Consult the record of previously seen events to decide whether
    # this event should be skipped.
    if self.params_cache.debug.skip_processed_events or self.params_cache.debug.skip_unprocessed_events or self.params_cache.debug.skip_bad_events:
        if ts in self.known_events:
            if self.known_events[ts] not in ["stop", "done", "fail"]:
                if self.params_cache.debug.skip_bad_events:
                    print("Skipping event %s: possibly caused an unknown exception previously" % ts)
                    return
            elif self.params_cache.debug.skip_processed_events:
                print("Skipping event %s: processed successfully previously" % ts)
                return
        else:
            if self.params_cache.debug.skip_unprocessed_events:
                print("Skipping event %s: not processed previously" % ts)
                return

    self.debug_start(ts)

    evt = run.event(timestamp)
    if evt.get("skip_event") or "skip_event" in [key.key() for key in evt.keys()]:
        print("Skipping event", ts)
        self.debug_write("psana_skip", "skip")
        return

    print("Accepted", ts)
    # Work on a private copy of the parameters; per-event output paths
    # are written into it below.
    self.params = copy.deepcopy(self.params_cache)

    # the data needs to have already been processed and put into the event by psana
    if self.params.format.file_format == 'cbf':
        # get numpy array, 32x185x388
        data = cspad_cbf_tbx.get_psana_corrected_data(
            self.psana_det,
            evt,
            use_default=False,
            dark=True,
            common_mode=self.common_mode,
            apply_gain_mask=self.params.format.cbf.gain_mask_value is not None,
            gain_mask_value=self.params.format.cbf.gain_mask_value,
            per_pixel_gain=False)
        if data is None:
            print("No data")
            self.debug_write("no_data", "skip")
            return

        if self.params.format.cbf.override_distance is None:
            distance = cspad_tbx.env_distance(self.params.input.address, run.env(), self.params.format.cbf.detz_offset)
            if distance is None:
                print("No distance, skipping shot")
                self.debug_write("no_distance", "skip")
                return
        else:
            distance = self.params.format.cbf.override_distance

        if self.params.format.cbf.override_energy is None:
            wavelength = cspad_tbx.evt_wavelength(evt)
            if wavelength is None:
                print("No wavelength, skipping shot")
                self.debug_write("no_wavelength", "skip")
                return
        else:
            # Convert the override energy (eV) to a wavelength.
            wavelength = 12398.4187 / self.params.format.cbf.override_energy

    if self.params.format.file_format == 'pickle':
        image_dict = evt.get(self.params.format.pickle.out_key)
        data = image_dict['DATA']

    # Compact timestamp (YYYYMMDDhhmmssmmm) used in output file names.
    timestamp = t = ts
    s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
    print("Processing shot", s)

    if self.params.format.file_format == 'cbf':
        # stitch together the header, data and metadata into the final dxtbx format object
        cspad_img = cspad_cbf_tbx.format_object_from_data(self.base_dxtbx, data, distance, wavelength, timestamp, self.params.input.address)
        if self.params.input.reference_geometry is not None:
            from dxtbx.model import Detector
            # copy.deep_copy(self.reference_detctor) seems unsafe based on tests. Use from_dict(to_dict()) instead.
            cspad_img._detector_instance = Detector.from_dict(self.reference_detector.to_dict())
            cspad_img.sync_detector_to_cbf()
    elif self.params.format.file_format == 'pickle':
        from dxtbx.format.FormatPYunspecifiedStill import FormatPYunspecifiedStillInMemory
        cspad_img = FormatPYunspecifiedStillInMemory(image_dict)
        cspad_img.timestamp = s

    if self.params.dispatch.dump_all:
        self.save_image(cspad_img, self.params, os.path.join(self.params.output.output_dir, "shot-" + s))

    self.cache_ranges(cspad_img, self.params)

    imgset = MemImageSet([cspad_img])

    if self.params.dispatch.estimate_gain_only:
        from dials.command_line.estimate_gain import estimate_gain
        estimate_gain(imgset)
        return

    if not self.params.dispatch.find_spots:
        self.debug_write("data_loaded", "done")
        return

    datablock = DataBlockFactory.from_imageset(imgset)[0]

    # before calling DIALS for processing, set output paths according to the templates
    if self.indexed_filename_template is not None and "%s" in self.indexed_filename_template:
        self.params.output.indexed_filename = os.path.join(self.params.output.output_dir, self.indexed_filename_template % ("idx-" + s))
    if "%s" in self.refined_experiments_filename_template:
        self.params.output.refined_experiments_filename = os.path.join(self.params.output.output_dir, self.refined_experiments_filename_template % ("idx-" + s))
    if "%s" in self.integrated_filename_template:
        self.params.output.integrated_filename = os.path.join(self.params.output.output_dir, self.integrated_filename_template % ("idx-" + s))
    if "%s" in self.reindexedstrong_filename_template:
        self.params.output.reindexedstrong_filename = os.path.join(self.params.output.output_dir, self.reindexedstrong_filename_template % ("idx-" + s))

    # Load a dials mask from the trusted range and psana mask
    from dials.util.masking import MaskGenerator
    generator = MaskGenerator(self.params.border_mask)
    mask = generator.generate(imgset)
    if self.params.format.file_format == "cbf":
        mask = tuple([a & b for a, b in zip(mask, self.dials_mask)])
    if self.spotfinder_mask is None:
        self.params.spotfinder.lookup.mask = mask
    else:
        self.params.spotfinder.lookup.mask = tuple([a & b for a, b in zip(mask, self.spotfinder_mask)])
    if self.integration_mask is None:
        self.params.integration.lookup.mask = mask
    else:
        self.params.integration.lookup.mask = tuple([a & b for a, b in zip(mask, self.integration_mask)])

    self.debug_write("spotfind_start")
    try:
        observed = self.find_spots(datablock)
    except Exception as e:
        import traceback
        traceback.print_exc()
        print(str(e), "event", timestamp)
        self.debug_write("spotfinding_exception", "fail")
        return
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    XXX Since the viewer is now running in a parallel process, the
    averaging here is now the bottleneck.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    from pyana.event import Event
    # NOTE(review): this increments self.n_shots, but the update cadence
    # below reads self.nshots -- confirm both attributes exist and which
    # one is intended.
    self.n_shots += 1
    super(mod_view, self).event(evt, env)
    if evt.status() != Event.Normal or evt.get('skip_event'):  # XXX transition
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self.nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
    else:
        distance = float('nan')

    # Stop the event loop if the viewer process has exited.
    if not self._proc.is_alive():
        evt.setStatus(Event.Stop)

    # Early return if the next update to the viewer is more than
    # self.ncollate shots away.  XXX Since the common_mode.event()
    # function does quite a bit of processing, the savings are
    # probably not so big.
    next_update = (self.nupdate - 1) - (self.nshots - 1) % self.nupdate
    if (self.ncollate > 0 and next_update >= self.ncollate):
        return

    if self.sigma_scaling:
        self.do_sigma_scaling()
    if self.photon_counting:
        self.do_photon_counting()

    # Trim the disabled section from the Sc1 detector image.  XXX This
    # is a bit of a kludge, really.
    # if (self.address == "CxiSc1-0|Cspad2x2-0"):
    #   self.cspad_img = self.cspad_img[185:2 * 185, :]

    # Update the sum of the valid images, starting a new collation if
    # appropriate.  This guarantees self.nvalid > 0.
    if (self.nvalid == 0 or self.ncollate > 0 and self.nvalid >= self.ncollate):
        self.img_sum = self.cspad_img
        self.nvalid = 1
    else:
        self.img_sum += self.cspad_img
        self.nvalid += 1

    # Update the viewer to display the current average image, and
    # start a new collation, if appropriate.
    if (next_update == 0):
        from time import localtime, strftime
        time_str = strftime("%H:%M:%S", localtime(evt.getTime().seconds()))
        title = "r%04d@%s: average of %d last images on %s" \
            % (evt.run(), time_str, self.nvalid, self.address)

        # See also mod_average.py.
        # NOTE(review): devices other than Cspad/marccd leave
        # beam_center/pixel_size/saturated_value unbound -- confirm the
        # supported device list.
        device = cspad_tbx.address_split(self.address)[2]
        if device == 'Cspad':
            beam_center = self.beam_center
            pixel_size = cspad_tbx.pixel_size
            saturated_value = cspad_tbx.cspad_saturated_value
        elif device == 'marccd':
            beam_center = tuple(t // 2 for t in self.img_sum.focus())
            pixel_size = 0.079346
            saturated_value = 2**16 - 1

        # Wait for the viewer process to empty the queue before feeding
        # it a new image, and ensure not to hang if the viewer process
        # exits.  Because of multithreading/multiprocessing semantics,
        # self._queue.empty() is unreliable.
        fmt = _Format(BEAM_CENTER=beam_center,
                      DATA=self.img_sum / self.nvalid,
                      DETECTOR_ADDRESS=self.address,
                      DISTANCE=distance,
                      PIXEL_SIZE=pixel_size,
                      SATURATED_VALUE=saturated_value,
                      TIME_TUPLE=cspad_tbx.evt_time(evt),
                      WAVELENGTH=self.wavelength)
        while not self._queue.empty():
            if not self._proc.is_alive():
                evt.setStatus(Event.Stop)
                return
        # Retry the put until it succeeds; break out on success.
        while True:
            try:
                self._queue.put((fmt, title), timeout=1)
                break
            except Exception:
                pass

        if (self.ncollate > 0):
            self.nvalid = 0
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.
    XXX Since the viewer is now running in a parallel process, the
    averaging here is now the bottleneck.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    from pyana.event import Event

    # NOTE(review): this increments self.n_shots, but the update
    # schedule below reads self.nshots -- confirm the two attribute
    # names are both defined by the base class and intentionally
    # distinct.
    self.n_shots += 1
    super(mod_view, self).event(evt, env)
    if evt.status() != Event.Normal or evt.get('skip_event'):  # XXX transition
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self.nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), "skip_event")
            return
    else:
        distance = float('nan')

    # If the viewer process has exited, stop the analysis.
    if not self._proc.is_alive():
        evt.setStatus(Event.Stop)

    # Early return if the next update to the viewer is more than
    # self.ncollate shots away.  XXX Since the common_mode.event()
    # function does quite a bit of processing, the savings are
    # probably not so big.
    next_update = (self.nupdate - 1) - (self.nshots - 1) % self.nupdate
    if (self.ncollate > 0 and next_update >= self.ncollate):
        return

    if self.sigma_scaling:
        self.do_sigma_scaling()
    if self.photon_counting:
        self.do_photon_counting()

    # Trim the disabled section from the Sc1 detector image.  XXX This
    # is a bit of a kludge, really.
    # if (self.address == "CxiSc1-0|Cspad2x2-0"):
    #   self.cspad_img = self.cspad_img[185:2 * 185, :]

    # Update the sum of the valid images, starting a new collation if
    # appropriate.  This guarantees self.nvalid > 0.
    if (self.nvalid == 0 or self.ncollate > 0 and self.nvalid >= self.ncollate):
        self.img_sum = self.cspad_img
        self.nvalid = 1
    else:
        self.img_sum += self.cspad_img
        self.nvalid += 1

    # Update the viewer to display the current average image, and
    # start a new collation, if appropriate.
    if (next_update == 0):
        from time import localtime, strftime
        time_str = strftime("%H:%M:%S", localtime(evt.getTime().seconds()))
        title = "r%04d@%s: average of %d last images on %s" \
            % (evt.run(), time_str, self.nvalid, self.address)

        # See also mod_average.py.  NOTE(review): a device other than
        # 'Cspad' or 'marccd' leaves beam_center, pixel_size and
        # saturated_value unbound (NameError below) -- confirm those are
        # the only devices this module is configured for.
        device = cspad_tbx.address_split(self.address)[2]
        if device == 'Cspad':
            beam_center = self.beam_center
            pixel_size = cspad_tbx.pixel_size
            saturated_value = cspad_tbx.cspad_saturated_value
        elif device == 'marccd':
            beam_center = tuple(t // 2 for t in self.img_sum.focus())
            pixel_size = 0.079346
            saturated_value = 2**16 - 1

        # Wait for the viewer process to empty the queue before feeding
        # it a new image, and ensure not to hang if the viewer process
        # exits.  Because of multithreading/multiprocessing semantics,
        # self._queue.empty() is unreliable.
        fmt = _Format(BEAM_CENTER=beam_center,
                      DATA=self.img_sum / self.nvalid,
                      DETECTOR_ADDRESS=self.address,
                      DISTANCE=distance,
                      PIXEL_SIZE=pixel_size,
                      SATURATED_VALUE=saturated_value,
                      TIME_TUPLE=cspad_tbx.evt_time(evt),
                      WAVELENGTH=self.wavelength)
        while not self._queue.empty():
            if not self._proc.is_alive():
                evt.setStatus(Event.Stop)
                return
        while True:
            try:
                # Retry with a timeout so a crashed consumer cannot
                # block the producer forever on a full queue.
                self._queue.put((fmt, title), timeout=1)
                break
            except Exception:
                pass

        if (self.ncollate > 0):
            self.nvalid = 0
def event(self, evt, env): """The event() function is called for every L1Accept transition. It outputs the detector image associated with the event @p evt to the file system. @param evt Event data object, a configure object @param env Environment object """ super(mod_dump_bitmap, self).event(evt, env) if (evt.get('skip_event')): return # Where the sample-detector distance is not available, set it to # zero. distance = cspad_tbx.env_distance(self.address, env, self._detz_offset) if distance is None: distance = 0 # See r17537 of mod_average.py. device = cspad_tbx.address_split(self.address)[2] if device == 'Cspad': pixel_size = cspad_tbx.pixel_size saturated_value = cspad_tbx.cspad_saturated_value elif device == 'marccd': pixel_size = 0.079346 saturated_value = 2**16 - 1 from iotbx.detectors import FlexImage_d as FlexImage vendortype = device saturation = 65535 flex_img = FlexImage( rawdata=self.cspad_img, binning=self._binning, vendortype=vendortype, brightness=self._brightness, saturation=saturated_value) flex_img.setWindow(0, 0, 1) flex_img.adjust(color_scheme=self._color_scheme) flex_img.prep_string() import Image # XXX is size//self._binning safe here? pil_img = Image.fromstring( 'RGB', (flex_img.size2()//self._binning, flex_img.size1()//self._binning), flex_img.export_string) # The output path should not contain any funny characters which may # not work in all environments. This constructs a sequence number a # la evt_seqno() from the dictionary's timestamp. t = self.timestamp s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23] path = os.path.join( self._dirname, self._basename + s + '.' + self._ext) self._logger.info("Exporting %s" %path) tmp_stream = open(path, 'wb') pil_img.save(tmp_stream, format=self._format) tmp_stream.close()
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.
    It applies the configured per-shot filters and corrections to
    self.cspad_img and folds the result into the running max / mean /
    sum-of-squares accumulators.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(average_mixin, self).event(evt, env)
    if evt.get('skip_event'):
        return

    # Get the distance for the detectors that should have it, and set
    # it to NaN for those that should not.
    if self.detector == 'CxiDs1' or \
       self.detector == 'CxiDs2' or \
       self.detector == 'CxiDsd' or \
       self.detector == 'XppGon':
        distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
        if distance is None:
            self._nfail += 1
            self.logger.warning("event(): no distance, shot skipped")
            evt.put(skip_event_flag(), 'skip_event')
            return
    else:
        distance = float('nan')

    if ("skew" in self.flags):
        # Take out inactive pixels
        if self.roi is not None:
            pixels = self.cspad_img[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
            dark_mask = self.dark_mask[self.roi[2]:self.roi[3], self.roi[0]:self.roi[1]]
            pixels = pixels.as_1d().select(dark_mask.as_1d())
        else:
            pixels = self.cspad_img.as_1d().select(self.dark_mask.as_1d()).as_double()
        stats = scitbx.math.basic_statistics(pixels.as_double())
        #stats.show()
        self.logger.info("skew: %.3f" % stats.skew)
        self.logger.info("kurtosis: %.3f" % stats.kurtosis)
        # Disabled diagnostic plotting.  NOTE(review): this dead block
        # references flex_cspad_img, which is not defined in this scope,
        # and uses a Python 2 print statement -- it would fail if ever
        # re-enabled.
        if 0:
            from matplotlib import pyplot
            hist_min, hist_max = flex.min(flex_cspad_img.as_double()), flex.max(flex_cspad_img.as_double())
            print hist_min, hist_max
            n_slots = 100
            n, bins, patches = pyplot.hist(flex_cspad_img.as_1d().as_numpy_array(), bins=n_slots, range=(hist_min, hist_max))
            pyplot.show()

        # XXX This skew threshold probably needs fine-tuning
        skew_threshold = 0.35
        if stats.skew < skew_threshold:
            self._nfail += 1
            self.logger.warning("event(): skew < %f, shot skipped" % skew_threshold)
            evt.put(skip_event_flag(), 'skip_event')
            return
        #self.cspad_img *= stats.skew

    if ("inactive" in self.flags):
        # Zero pixels whose dark standard deviation is non-positive.
        self.cspad_img.set_selected(self.dark_stddev <= 0, 0)

    if ("noelastic" in self.flags):
        # Zero pixels above the elastic-scatter threshold.
        ELASTIC_THRESHOLD = self.elastic_threshold
        self.cspad_img.set_selected(self.cspad_img > ELASTIC_THRESHOLD, 0)

    if self.hot_threshold is not None:
        # Zero pixels that are hot in the dark image.
        HOT_THRESHOLD = self.hot_threshold
        self.cspad_img.set_selected(self.dark_img > HOT_THRESHOLD, 0)

    if self.gain_map is not None and self.gain_threshold is not None:
        # XXX comparing each pixel to a moving average would probably be better
        # since the gain should vary approximately smoothly over different areas
        # of the detector
        GAIN_THRESHOLD = self.gain_threshold
        #self.logger.debug(
        #"rejecting: %i" %(self.gain_map > GAIN_THRESHOLD).count(True))
        self.cspad_img.set_selected(self.gain_map > GAIN_THRESHOLD, 0)

    if ("nonoise" in self.flags):
        # Zero pixels below the noise floor.
        NOISE_THRESHOLD = self.noise_threshold
        self.cspad_img.set_selected(self.cspad_img < NOISE_THRESHOLD, 0)

    if ("sigma_scaling" in self.flags):
        self.do_sigma_scaling()

    if ("symnoise" in self.flags):
        # Zero pixels within a symmetric band around zero.
        SYMNOISE_THRESHOLD = self.symnoise_threshold
        self.cspad_img.set_selected((-SYMNOISE_THRESHOLD < self.cspad_img) & (
            self.cspad_img < SYMNOISE_THRESHOLD), 0)

    if ("output" in self.flags):
        # Dump the (corrected) per-shot image to a pickle file,
        # appending to a single stream per basename.
        try:
            import cPickle as pickle
        except ImportError:
            import pickle
        import os
        if (not os.path.isdir(self.pickle_dirname)):
            os.makedirs(self.pickle_dirname)
        flexdata = flex.int(self.cspad_img.astype(numpy.int32))
        d = cspad_tbx.dpack(
            address=self.address,
            data=flexdata,
            timestamp=cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))
        )
        # NOTE(review): the file handle is reopened in append mode for
        # every event and not closed on exception -- confirm this is
        # acceptable for the expected event rate.
        G = open(os.path.join(".", self.pickle_dirname) + "/" + self.pickle_basename, "ab")
        pickle.dump(d, G, pickle.HIGHEST_PROTOCOL)
        G.close()

    if self.photon_threshold is not None and self.two_photon_threshold is not None:
        self.do_photon_counting()

    if self.background_path is not None:
        self.cspad_img -= self.background_img

    # t and self._sum_time are two-long arrays of seconds and
    # milliseconds which hold time with respect to the base time.
    t = [t1 - t2 for (t1, t2) in zip(cspad_tbx.evt_time(evt), self._metadata['time_base'])]
    if self._nmemb == 0:
        # The peers metadata item is a bit field where a bit is set if
        # the partial sum from the corresponding worker process is
        # pending.  If this is the first frame a worker process sees,
        # set its corresponding bit in the bit field since it will
        # contribute a partial sum.
        if env.subprocess() >= 0:
            self._lock.acquire()
            if 'peers' in self._metadata.keys():
                self._metadata['peers'] |= (1 << env.subprocess())
            else:
                self._metadata['peers'] = (1 << env.subprocess())
            self._lock.release()

        # First member: initialise the accumulators.
        self._sum_distance = distance
        self._sum_time = (t[0], t[1])
        self._sum_wavelength = self.wavelength

        if self._have_max:
            self._max_img = self.cspad_img.deep_copy()
        if self._have_mean:
            self._sum_img = self.cspad_img.deep_copy()
        if self._have_std:
            self._ssq_img = flex.pow2(self.cspad_img)
    else:
        # Subsequent members: fold this shot into the accumulators.
        self._sum_distance += distance
        self._sum_time = (self._sum_time[0] + t[0], self._sum_time[1] + t[1])
        self._sum_wavelength += self.wavelength

        if self._have_max:
            # Element-wise running maximum.
            sel = (self.cspad_img > self._max_img).as_1d()
            self._max_img.as_1d().set_selected(
                sel, self.cspad_img.as_1d().select(sel))
        if self._have_mean:
            self._sum_img += self.cspad_img
        if self._have_std:
            self._ssq_img += flex.pow2(self.cspad_img)

    self._nmemb += 1
def _detector(self, index=None):
    """Build the dxtbx Detector hierarchy for a CSPAD from the SLAC
    metrology of the run containing image @p index, applying a parallax
    correction when a beam model is available.

    @param index Image index (defaults to 0)
    @return dxtbx Detector object (D -> 4 quads -> 8 sensors -> 2 ASICs)
    """
    if index is None:
        index = 0
    run = self.get_run_from_index(index)
    det = self._get_psana_detector(run)
    geom = det.pyda.geoaccess(run.run())
    cob = read_slac_metrology(geometry=geom, include_asic_offset=True)
    distance = env_distance(
        self.params.detector_address[0], run.env(), self.params.cspad.detz_offset
    )

    def basis(key):
        # Origin and fast/slow axis vectors of the hierarchy element
        # identified by *key*, derived from its 4x4 homogeneous
        # transform in the metrology dictionary.
        origin = col((cob[key] * col((0, 0, 0, 1)))[0:3])
        fast = col((cob[key] * col((1, 0, 0, 1)))[0:3]) - origin
        slow = col((cob[key] * col((0, 1, 0, 1)))[0:3]) - origin
        return origin, fast, slow

    d = Detector()
    pg0 = d.hierarchy()

    # Detector-level frame (D0): axes from the metrology, origin pushed
    # back along -z by the detector distance.
    det_num = 0
    origin, fast, slow = basis((0,))
    origin += col((0.0, 0.0, -distance))
    pg0.set_local_frame(fast.elems, slow.elems, origin.elems)
    pg0.set_name("D%d" % (det_num))

    for quad_num in range(4):
        # Quadrant frame (Qx).
        pg1 = pg0.add_group()
        origin, fast, slow = basis((0, quad_num))
        pg1.set_local_frame(fast.elems, slow.elems, origin.elems)
        pg1.set_name("D%dQ%d" % (det_num, quad_num))

        for sensor_num in range(8):
            # Sensor frame (Sy).
            pg2 = pg1.add_group()
            origin, fast, slow = basis((0, quad_num, sensor_num))
            pg2.set_local_frame(fast.elems, slow.elems, origin.elems)
            pg2.set_name("D%dQ%dS%d" % (det_num, quad_num, sensor_num))

            for asic_num in range(2):
                # ASIC panel (Az).
                val = "ARRAY_D0Q%dS%dA%d" % (quad_num, sensor_num, asic_num)
                p = pg2.add_panel()
                origin, fast, slow = basis((0, quad_num, sensor_num, asic_num))
                p.set_local_frame(fast.elems, slow.elems, origin.elems)
                p.set_pixel_size((cspad_cbf_tbx.pixel_size, cspad_cbf_tbx.pixel_size))
                p.set_image_size(cspad_cbf_tbx.asic_dimension)
                p.set_trusted_range(
                    (
                        cspad_tbx.cspad_min_trusted_value,
                        cspad_tbx.cspad_saturated_value,
                    )
                )
                p.set_name(val)

    try:
        beam = self._beam(index)
    except Exception:
        print(
            "No beam object initialized. Returning CSPAD detector without parallax corrections"
        )
        return d

    # take into consideration here the thickness of the sensor also the
    # wavelength of the radiation (which we have in the same file...)
    wavelength = beam.get_wavelength()
    thickness = 0.5  # mm, see Hart et al. 2012
    table = attenuation_coefficient.get_table("Si")
    # mu_at_angstrom returns cm^-1
    mu = table.mu_at_angstrom(wavelength) / 10.0  # mu: mm^-1
    t0 = thickness
    for panel in d:
        panel.set_px_mm_strategy(ParallaxCorrectedPxMmStrategy(mu, t0))
    return d
def __init__(self,
             dataset_name=None,
             detector_address=None,
             data_type='idx',
             mask_path=None,
             mask_angles=None,
             mask_widths=None,
             backimg_path=None,
             backmsk_path=None,
             geom_path=None,
             det_dist=None,
             det_pix=0.075,
             beam_l=None,
             mask_thr=None,
             nQ=None,
             nPhi=None,
             dQ=1,
             dPhi=1,
             cent0=None,
             r_max=None,
             dr=None,
             dx=None,
             dy=None,
             r_0=None,
             q_bound=None,
             peak=None,
             dpeak=None):
    """The fluctuation scattering class stores processing parameters,
    initiates mask and background data and retrieves 2D images from
    events.  Processing options of the 2D images include:

    * Transform from cartesian to polar coordinates
    * Beam center refinement
    * Dynamic masking
    * Normalization % SAXS calculation
    * Particle sizing
    * Computation of in-frame 2-point angular auto-correlations using FFTs

    @param dataset_name     Experiment name and run number
    @param detector_address Address to back or front detector
    @param data_type        Data file format (idx|idx_ffb|smd|smd_ffb|xtc|h5)
    @param mask_path        Full path to static image mask
    @param mask_angles      Center of angular slices (deg) to mask out
                            (jet streaks etc), [Ang1 Ang2 ...]
    @param mask_widths      Width of angular slices (deg) to mask out,
                            [delta1 delta2 ...]
    @param backimg_path     Full path to background image
    @param backmsk_path     Full path to background mask
    @param geom_path        Full path to geometry file (for h5 format)
    @param det_dist         Override of detector distance (in mm)
    @param det_pix          Pixel size (in mm)
    @param beam_l           Override of beam wavelength (in Angstrom)
    @param mask_thr         Threshold for dynamic masking
    @param nQ               Number of Q-bins to consider (in pixels)
    @param nPhi             Number of Phi-bins to consider (in pixels)
    @param dQ               Stepsize in Q (in pixels)
    @param dPhi             Stepsize in Phi (in pixels)
    @param cent0            Initial beam center coordinates [xc, yc]
    @param r_max            Maximum radial value for beam center
                            refinement (in pixels)
    @param dr               Stepsize in r (in pixels)
    @param dx               Gridsize for beam center refinement in x,
                            i.e. xc +/- dx (in pixels)
    @param dy               Gridsize for beam center refinement in y,
                            i.e. yc +/- dy (in pixels)
    @param r_0              Starting value for particle radius
                            refinement [in Ang]
    @param q_bound          Upper and lower boundaries of q for particle
                            radius refinement [in Ang^-1]
    @param peak             Q-values for peak maxima [q_peak1 q_peak2 ...]
    @param dpeak            Delta Q used for peak integration of peak
                            maxima [delta_q1 delta_q2 ...]
    """
    # Initialize parameters and configuration files once
    self.data_type = data_type
    self.dataset_name = dataset_name
    self.detector_address = detector_address

    if (self.data_type == 'idx') or (self.data_type == 'idx_ffb') or (
            self.data_type == 'smd') or (self.data_type == 'smd_ffb') or (
            self.data_type == 'xtc'):
        self.ds = DataSource(self.dataset_name)
        self.src = Detector(self.detector_address, self.ds.env())
        if mask_path is None:
            # Create a binary mask of ones, default mask only works for xtc/ffb
            evt = next(self.ds.events())
            self.mask_address = self.src.mask(evt, calib=True, status=True)
            self.msk = self.src.image(evt, self.mask_address)
            self.mask = np.copy(self.msk)
        else:
            self.msk = np.loadtxt(mask_path)
            self.mask = np.copy(self.msk)

    # Geometry parameters read as one value per row; used by the h5
    # branch below.  NOTE(review): the original (line-collapsed) source
    # is ambiguous about whether this block was nested inside the
    # xtc/idx branch -- confirm against version control.
    if geom_path is not None:
        geom = np.genfromtxt(geom_path, skiprows=1)
        self.gap = geom[0]
        self.shift = geom[1]
        self.orient = geom[2]
        self.comm = geom[3]
        self.param1 = geom[4]
        self.param2 = geom[5]
        self.param3 = geom[6]
        self.param4 = geom[7]

    if (self.data_type == 'h5'):
        if mask_path is None:
            # Create a binary mask of ones
            ## Add default binary mask here
            ## Default pnCCD dimensions
            dim1 = 1024
            dim2 = 1024
            self.mask = np.ones((dim1, dim2))
        else:
            self.mask = np.loadtxt(mask_path)
        # Apply geometry
        self.msk = pnccd_tbx.get_geometry(img=self.mask,
                                          gap=self.gap,
                                          shift=self.shift,
                                          orient=self.orient)

    if self.detector_address == 'pnccdFront':
        evt = next(self.ds.events())
        gain = self.src.gain(evt)
        self.gain = self.src.image(evt, gain)

    self.cart = 0
    self.flat = 0

    if backimg_path is None:
        self.backimg = None
    else:
        self.backimg = np.loadtxt(backimg_path).astype(np.float64)
        # Check if background image in cartesian coordinates exists
        if (self.backimg.shape == self.msk.shape):
            self.cart = 1  # Remove bg before transform to polar coordinates

    if backmsk_path is None:
        self.backmsk = None
    else:
        self.backmsk = np.loadtxt(backmsk_path).astype(np.float64)

    # Check if flat-field image exists
    if (self.backmsk is None) and (self.backimg is not None) and (self.cart == 0):
        self.pcflat = pnccd_tbx.dynamic_flatfield(self.backimg)
        self.flat = 1

    if det_dist is None:
        # Get detector distance from events
        for run in self.ds.runs():
            self.det_dist = cspad_tbx.env_distance(self.detector_address, run.env(), 577)
    else:
        self.det_dist = det_dist

    self.det_pix = det_pix

    if beam_l is None:
        # Get wavelength from event, note it can change slightly between
        # events.  So in the future use average.
        self.beam_l = cspad_tbx.evt_wavelength(next(self.ds.events()))
    else:
        self.beam_l = beam_l

    if mask_thr is None:
        # No dynamic masking
        self.thr = None
    else:
        self.thr = mask_thr

    if nQ is None:
        # Use image dimensions as a guide, leave room for offset beamC
        if self.msk.shape[0] > self.msk.shape[1]:  # nQ determined by smallest dimension
            self.nQ = int(self.msk.shape[1] / 2) - 20
        else:
            self.nQ = int(self.msk.shape[0] / 2) - 20
    else:
        self.nQ = nQ

    if (self.nQ % 10):  # Ensure even number, speeds things up massively for FFT
        self.nQ = np.floor(self.nQ / 10) * 10
    if (self.nQ % dQ):  # Ensure clean divisor
        self.nQ = np.floor(self.nQ / dQ) * dQ

    if nPhi is None:  # Estimate based on 2*pi*nQ
        self.nPhi = np.ceil(2 * np.pi * self.nQ)
    else:
        self.nPhi = nPhi

    if (self.nPhi % 10):  # Ensure even number, speeds things up massively for FFT
        self.nPhi = np.ceil(self.nPhi / 10) * 10
    if (self.nPhi % dPhi):  # Ensure clean divisor
        self.nPhi = np.ceil(self.nPhi / dPhi) * dPhi

    self.dQ = dQ
    self.dPhi = dPhi

    self.mask_angles = mask_angles
    self.mask_widths = mask_widths
    # Compute slices that should be masked in static mask.
    # NOTE(review): under Python 2 integer semantics, or with plain
    # lists rather than numpy arrays, these divisions do not behave as
    # intended -- confirm mask_angles/mask_widths are numpy float arrays.
    if (self.mask_angles is not None) and (self.mask_widths is not None):
        self.mask_angles = (self.mask_angles / 360) * self.nPhi
        self.mask_widths = (self.mask_widths / 360) * self.nPhi

    # Check if nQ > smallest dimension/2 then we extend the image with zero values
    if self.nQ > min(self.msk.shape[0] / 2, self.msk.shape[1] / 2):
        self.msk = pnccd_tbx.extend_image(img=self.msk)
        self.mask = np.copy(self.msk)

    if (cent0 is None) or (sum(cent0) == 0):
        # Use center of gravity to estimate starting beamC
        self.cent0 = [
            int(round(self.msk.shape[1] / 2)),
            int(round(self.msk.shape[0] / 2))
        ]
    else:
        self.cent0 = cent0
    self.cent = self.cent0  # Default center

    if r_max is None:  # Default, Use half of nQ
        # NOTE(review): under Python 2, 3 / 4 evaluates to 0, making
        # r_max 0 -- confirm this module runs under Python 3 or fix to
        # 3.0 / 4.
        self.r_max = int(self.nQ * (3 / 4))
    else:
        self.r_max = r_max

    if (self.r_max % dr):  # Ensure clean divisor
        self.r_max = np.floor(self.r_max / dr) * dr

    self.dr = dr
    self.dx = dx
    self.dy = dy

    if r_0 is None:
        self.radius = 0
        self.score = 0
    self.r_0 = r_0

    if q_bound is None or sum(q_bound) == 0:
        self.q_bound = [None, None]
    else:
        # NOTE(review): self.q_bound is referenced on the right-hand
        # side before it has ever been assigned, which raises
        # AttributeError; this almost certainly should read
        # [None, q_bound] (or use the parameter directly) -- confirm
        # and fix.
        self.q_bound = [None, self.q_bound]

    # Compute q-spacing
    self.q = np.arange(0, self.nQ, self.dQ)
    self.q = self.q * self.det_pix / self.det_dist * 4 * np.pi / self.beam_l / 2

    # Compute Phi (Not accounting for curvature)
    self.phi = np.linspace(0, 2 * np.pi, self.nPhi / self.dPhi, endpoint=False)

    # Compute indices for Peak maxima
    if (peak is not None) and (dpeak is not None):
        self.peak = peak
        self.dpeak = dpeak
        self.ind1 = (self.q >= (self.peak[0] - self.dpeak[0])) & (
            self.q <= (self.peak[0] + self.dpeak[0]))
        self.ind2 = (self.q >= (self.peak[1] - self.dpeak[1])) & (
            self.q <= (self.peak[1] + self.dpeak[1]))
    else:
        self.peak = None
        self.dpeak = None
def process_event(self, run, timestamp):
    """ Process a single event from a run: apply skip/duplicate
    bookkeeping, build the dxtbx image object for the configured file
    format, set per-shot output paths and masks, and run spotfinding.

    @param run psana run object
    @param timestamp psana timestamp object
    """
    ts = cspad_tbx.evt_timestamp((timestamp.seconds(), timestamp.nanoseconds() / 1e6))
    if ts is None:
        print "No timestamp, skipping shot"
        return

    # Optional whitelist of timestamps to process.
    if len(self.params_cache.debug.event_timestamp) > 0 and ts not in self.params_cache.debug.event_timestamp:
        return

    # Consult the debug journal to skip events according to their
    # previously recorded outcome.
    if self.params_cache.debug.skip_processed_events or self.params_cache.debug.skip_unprocessed_events or self.params_cache.debug.skip_bad_events:
        if ts in self.known_events:
            if self.known_events[ts] not in ["stop", "done", "fail"]:
                # No terminal state recorded: the previous attempt
                # presumably crashed on this event.
                if self.params_cache.debug.skip_bad_events:
                    print "Skipping event %s: possibly caused an unknown exception previously" % ts
                    return
            elif self.params_cache.debug.skip_processed_events:
                print "Skipping event %s: processed successfully previously" % ts
                return
        else:
            if self.params_cache.debug.skip_unprocessed_events:
                print "Skipping event %s: not processed previously" % ts
                return

    self.debug_start(ts)

    evt = run.event(timestamp)
    if evt.get("skip_event") or "skip_event" in [key.key() for key in evt.keys()]:
        print "Skipping event", ts
        self.debug_write("psana_skip", "skip")
        return

    print "Accepted", ts
    # Work on a private copy so per-shot path/mask mutations below do
    # not leak into the cached parameters.
    self.params = copy.deepcopy(self.params_cache)

    # the data needs to have already been processed and put into the event by psana
    if self.params.format.file_format == 'cbf':
        # get numpy array, 32x185x388
        data = cspad_cbf_tbx.get_psana_corrected_data(self.psana_det, evt, use_default=False, dark=True,
                                                      common_mode=self.common_mode,
                                                      apply_gain_mask=self.params.format.cbf.gain_mask_value is not None,
                                                      gain_mask_value=self.params.format.cbf.gain_mask_value,
                                                      per_pixel_gain=False)
        if data is None:
            print "No data"
            self.debug_write("no_data", "skip")
            return

        if self.params.format.cbf.override_distance is None:
            distance = cspad_tbx.env_distance(self.params.input.address, run.env(), self.params.format.cbf.detz_offset)
            if distance is None:
                print "No distance, skipping shot"
                self.debug_write("no_distance", "skip")
                return
        else:
            distance = self.params.format.cbf.override_distance

        if self.params.format.cbf.override_energy is None:
            wavelength = cspad_tbx.evt_wavelength(evt)
            if wavelength is None:
                print "No wavelength, skipping shot"
                self.debug_write("no_wavelength", "skip")
                return
        else:
            # eV-to-Angstrom conversion factor.
            wavelength = 12398.4187 / self.params.format.cbf.override_energy

    if self.params.format.file_format == 'pickle':
        image_dict = evt.get(self.params.format.pickle.out_key)
        data = image_dict['DATA']

    # Rebind the parameter to the human-readable timestamp string and
    # derive the compact sequence number used in file names.
    timestamp = t = ts
    s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
    print "Processing shot", s

    if self.params.format.file_format == 'cbf':
        # stitch together the header, data and metadata into the final dxtbx format object
        cspad_img = cspad_cbf_tbx.format_object_from_data(self.base_dxtbx, data, distance, wavelength, timestamp, self.params.input.address)

        if self.params.input.reference_geometry is not None:
            from dxtbx.model import Detector
            # copy.deep_copy(self.reference_detctor) seems unsafe based on tests. Use from_dict(to_dict()) instead.
            cspad_img._detector_instance = Detector.from_dict(self.reference_detector.to_dict())
            cspad_img.sync_detector_to_cbf()

    elif self.params.format.file_format == 'pickle':
        from dxtbx.format.FormatPYunspecifiedStill import FormatPYunspecifiedStillInMemory
        cspad_img = FormatPYunspecifiedStillInMemory(image_dict)
        cspad_img.timestamp = s

    if self.params.dispatch.dump_all:
        self.save_image(cspad_img, self.params, os.path.join(self.params.output.output_dir, "shot-" + s))

    self.cache_ranges(cspad_img, self.params)

    imgset = MemImageSet([cspad_img])
    if self.params.dispatch.estimate_gain_only:
        from dials.command_line.estimate_gain import estimate_gain
        estimate_gain(imgset)
        return

    if not self.params.dispatch.find_spots:
        self.debug_write("data_loaded", "done")
        return

    datablock = DataBlockFactory.from_imageset(imgset)[0]

    # before calling DIALS for processing, set output paths according to the templates
    if self.indexed_filename_template is not None and "%s" in self.indexed_filename_template:
        self.params.output.indexed_filename = os.path.join(self.params.output.output_dir, self.indexed_filename_template % ("idx-" + s))
    if "%s" in self.refined_experiments_filename_template:
        self.params.output.refined_experiments_filename = os.path.join(self.params.output.output_dir, self.refined_experiments_filename_template % ("idx-" + s))
    if "%s" in self.integrated_filename_template:
        self.params.output.integrated_filename = os.path.join(self.params.output.output_dir, self.integrated_filename_template % ("idx-" + s))
    if "%s" in self.reindexedstrong_filename_template:
        self.params.output.reindexedstrong_filename = os.path.join(self.params.output.output_dir, self.reindexedstrong_filename_template % ("idx-" + s))

    # Load a dials mask from the trusted range and psana mask
    from dials.util.masking import MaskGenerator
    generator = MaskGenerator(self.params.border_mask)
    mask = generator.generate(imgset)
    if self.params.format.file_format == "cbf":
        mask = tuple([a & b for a, b in zip(mask, self.dials_mask)])
    if self.spotfinder_mask is None:
        self.params.spotfinder.lookup.mask = mask
    else:
        self.params.spotfinder.lookup.mask = tuple([a & b for a, b in zip(mask, self.spotfinder_mask)])
    if self.integration_mask is None:
        self.params.integration.lookup.mask = mask
    else:
        self.params.integration.lookup.mask = tuple([a & b for a, b in zip(mask, self.integration_mask)])

    self.debug_write("spotfind_start")
    try:
        observed = self.find_spots(datablock)
    except Exception, e:
        import traceback; traceback.print_exc()
        print str(e), "event", timestamp
        self.debug_write("spotfinding_exception", "fail")
        return
def run(self):
    """ Process all images assigned to this thread.

    Parses command-line parameters, splits the run's events across MPI
    ranks, and for each assigned event either dumps the pre-processed
    pickle data or builds and writes a CBF image.
    """
    params, options = self.parser.parse_args(show_diff_phil=True)

    if params.input.experiment is None or params.input.run_num is None or params.input.address is None:
        raise Usage(self.usage)

    # Each file format has its own mandatory parameter.
    if params.format.file_format == "cbf":
        if params.format.cbf.detz_offset is None:
            raise Usage(self.usage)
    elif params.format.file_format == "pickle":
        if params.format.pickle.cfg is None:
            raise Usage(self.usage)
    else:
        raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    # Save the parameters
    self.params = params
    self.options = options

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    # set up psana
    if params.format.file_format == "pickle":
        psana.setConfigFile(params.format.pickle.cfg)

    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment, params.input.run_num)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
        src = psana.Source("DetInfo(%s)" % params.input.address)
        psana_det = psana.Detector(params.input.address, ds.env())

    # set this to sys.maxint to analyze all events
    if params.dispatch.max_events is None:
        max_events = sys.maxint
    else:
        max_events = params.dispatch.max_events

    for run in ds.runs():
        if params.format.file_format == "cbf":
            # load a header only cspad cbf from the slac metrology
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, params.input.address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" % run.run())

            if params.format.cbf.gain_mask_value is not None:
                gain_mask = psana_det.gain_mask(gain=params.format.cbf.gain_mask_value)

        # list of all events
        times = run.times()
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns each
        # process events such that they get every Nth event where N is the
        # number of processes
        mytimes = [times[i] for i in xrange(nevents) if (i + rank) % size == 0]

        for i in xrange(len(mytimes)):
            evt = run.event(mytimes[i])
            id = evt.get(psana.EventId)
            print "Event #", i, " has id:", id

            timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))  # human readable format
            if timestamp is None:
                print "No timestamp, skipping shot"
                continue
            # Compact sequence number derived from the timestamp, used
            # in output file names.
            t = timestamp
            s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
            print "Processing shot", s

            if params.format.file_format == "pickle":
                if evt.get("skip_event"):
                    print "Skipping event", id
                    continue
                # the data needs to have already been processed and put into the event by psana
                data = evt.get(params.format.pickle.out_key)
                if data is None:
                    print "No data"
                    continue

                # set output paths according to the templates
                path = os.path.join(params.output.output_dir, "shot-" + s + ".pickle")
                print "Saving", path
                easy_pickle.dump(path, data)
            elif params.format.file_format == "cbf":
                # get numpy array, 32x185x388
                data = psana_det.calib(evt)  # applies psana's complex run-dependent calibrations
                if params.format.cbf.gain_mask_value is not None:
                    # apply gain mask
                    data *= gain_mask

                distance = cspad_tbx.env_distance(params.input.address, run.env(), params.format.cbf.detz_offset)
                if distance is None:
                    print "No distance, skipping shot"
                    continue

                # NOTE(review): this branch reads
                # self.params.format.cbf.override_energy while the rest
                # of the loop uses the local params; harmless since
                # self.params is assigned from params above, but confirm
                # the inconsistency is intentional.
                if self.params.format.cbf.override_energy is None:
                    wavelength = cspad_tbx.evt_wavelength(evt)
                    if wavelength is None:
                        print "No wavelength, skipping shot"
                        continue
                else:
                    # eV-to-Angstrom conversion factor.
                    wavelength = 12398.4187 / self.params.format.cbf.override_energy

                # stitch together the header, data and metadata into the final dxtbx format object
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx, data, distance, wavelength, timestamp, params.input.address
                )
                path = os.path.join(params.output.output_dir, "shot-" + s + ".cbf")
                print "Saving", path

                # write the file
                import pycbf
                cspad_img._cbf_handle.write_widefile(
                    path, pycbf.CBF, pycbf.MIME_HEADERS | pycbf.MSG_DIGEST | pycbf.PAD_4K, 0
                )
        run.end()
    ds.end()
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.
  XXX more?  Previously, common-mode correction was applied only
  after initial threshold filtering.  Since the common_mode class
  applies the (lengthy) common-mode correction immediately after
  reading the image from the stream, this optimisation is currently
  not (elegantly) doable.

  Performs hitfinding on the current image (threshold over the whole
  image, over an ROI, or via spotfinder peak counting), then dispatches
  the image according to self.m_dispatch (index/view/spots/write_dict).

  @param evt Event data object, a configure object
  @param env Environment object
  """

  super(mod_hitfind, self).event(evt, env)
  if (evt.get("skip_event")):
    return

  # This module only applies to detectors for which a distance is
  # available.
  distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
  if distance is None:
    self.nfail += 1
    self.logger.warning("event(): no distance, shot skipped")
    evt.put(skip_event_flag(), "skip_event")
    return

  device = cspad_tbx.address_split(self.address)[2]

  # ***** HITFINDING ***** XXX For hitfinding it may be interesting
  # to look at the fraction of subzero pixels in the dark-corrected
  # image.
  if (self.m_threshold is not None):
    # If a threshold value is given it can be applied in one of three ways:
    #    1. Apply it over the whole image
    if (self.m_roi is None and self.m_distl_min_peaks is None):
      vmax = flex.max(self.cspad_img)
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          # Tell downstream modules to skip this event if the threshold was not met.
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return
    #    2. Apply threshold over a rectangular region of interest.
    elif (self.m_roi is not None):
      vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                     self.m_roi[0]:self.m_roi[1]])
      if (vmax < self.m_threshold):
        if not self.m_negate_hits:
          evt.put(skip_event_flag(), "skip_event")
          return
      elif self.m_negate_hits:
        evt.put(skip_event_flag(), "skip_event")
        return
    #    3. Determine the spotfinder spots within the central ASICS, and accept the
    #       image as a hit if there are m_distl_min_peaks exceeding m_threshold.
    #       As a further requirement, the peaks must exceed 2.5 * the 90-percentile
    #       pixel value of the central ASICS.  This filter was added to avoid
    #       high-background false positives.
    elif (self.m_distl_min_peaks is not None):
      if device == 'marccd':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
      elif device == 'Rayonix':
        self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
        self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

      peak_heights,outvalue = self.distl_filter(
        self.address,
        self.cspad_img.iround(), # XXX correct?
        distance,
        self.timestamp,
        self.wavelength)
      if ('permissive' in self.m_distl_flags):
        number_of_accepted_peaks = (peak_heights > self.m_threshold).count(True)
      else:
        # strict mode: peaks must also pass the distl outlier test (outvalue==0)
        number_of_accepted_peaks = ((peak_heights > self.m_threshold).__and__(outvalue==0)).count(True)

      sec,ms = cspad_tbx.evt_time(evt)
      evt_time = sec + ms/1000
      self.stats_logger.info("BRAGG %.3f %d" %(evt_time, number_of_accepted_peaks))

      skip_event = False
      if number_of_accepted_peaks < self.m_distl_min_peaks:
        self.logger.info("Subprocess %02d: Spotfinder NO HIT image #%05d @ %s; %d spots > %d" %(
          env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))
        if not self.m_negate_hits:
          skip_event = True
      else:
        self.logger.info("Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d" %(
          env.subprocess(), self.nshots, self.timestamp, number_of_accepted_peaks, self.m_threshold))
        if self.m_negate_hits:
          skip_event = True

      if skip_event:
        if self.m_db_logging:
          # log misses to the database
          self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time,
                            number_of_accepted_peaks, distance,
                            self.sifoil, self.wavelength, False,
                            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                            self.m_db_tags))
        evt.put(skip_event_flag(), "skip_event")
        return
      # the indexer will log this hit when it is run. Bug: if the spotfinder is run by itself, this
      # hit will not be logged in the db.
      evt.put(number_of_accepted_peaks, 'sfspots')

  self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                   (env.subprocess(), self.nshots, self.timestamp))

  # See r17537 of mod_average.py.
  if device == 'Cspad':
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
  elif device == 'marccd':
    pixel_size = evt.get("marccd_pixel_size")
    saturated_value = evt.get("marccd_saturated_value")
  elif device == 'Rayonix':
    pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
    saturated_value = rayonix_tbx.rayonix_saturated_value

  d = cspad_tbx.dpack(
    active_areas=self.active_areas,
    address=self.address,
    beam_center_x=pixel_size * self.beam_center[0],
    beam_center_y=pixel_size * self.beam_center[1],
    data=self.cspad_img.iround(), # XXX ouch!
    distance=distance,
    pixel_size=pixel_size,
    saturated_value=saturated_value,
    timestamp=self.timestamp,
    wavelength=self.wavelength,
    xtal_target=self.m_xtal_target)

  if (self.m_dispatch == "index"):
    import sys
    from xfel.cxi.integrate_image_api import integrate_one_image
    info = integrate_one_image(d,
                               integration_dirname  = self.m_integration_dirname,
                               integration_basename = self.m_integration_basename)
    # integrate_one_image may redirect stdout/stderr; restore them
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__

    indexed = info is not None
    if indexed and self.m_progress_logging:
      # integration pickle dictionary is available here as info.last_saved_best
      if info.last_saved_best["identified_isoform"] is not None:
        #print info.last_saved_best.keys()
        from cxi_xdr_xes.cftbx.cspad_ana import db
        dbobj = db.dbconnect(self.m_db_host, self.m_db_name, self.m_db_user, self.m_db_password)
        cursor = dbobj.cursor()
        if info.last_saved_best["identified_isoform"] in self.isoforms:
          PM, indices, miller_id = self.isoforms[info.last_saved_best["identified_isoform"]]
        else:
          from xfel.xpp.progress_support import progress_manager
          PM = progress_manager(info.last_saved_best,self.m_db_experiment_tag, self.m_trial_id,
                                self.m_rungroup_id, evt.run())
          indices, miller_id = PM.get_HKL(cursor)
          # cache these as they don't change for a given isoform
          self.isoforms[info.last_saved_best["identified_isoform"]] = PM, indices, miller_id
        if self.m_sql_buffer_size > 1:
          self.queue_progress_entry(PM.scale_frame_detail(self.timestamp,cursor,do_inserts=False))
        else:
          PM.scale_frame_detail(self.timestamp,cursor,do_inserts=True)
          dbobj.commit()
        cursor.close()
        dbobj.close()

    if self.m_db_logging:
      sec,ms = cspad_tbx.evt_time(evt)
      evt_time = sec + ms/1000
      sfspots = evt.get('sfspots')
      if sfspots is None:
        if indexed:
          n_spots = len(info.spotfinder_results.images[info.frames[0]]['spots_total'])
        else:
          n_spots = 0
      else:
        n_spots = sfspots

      if indexed:
        mosaic_bloc_rotation = info.last_saved_best.get('ML_half_mosaicity_deg', [0])[0]
        mosaic_block_size = info.last_saved_best.get('ML_domain_size_ang', [0])[0]
        ewald_proximal_volume = info.last_saved_best.get('ewald_proximal_volume', [0])[0]

        obs = info.last_saved_best['observations'][0]
        cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = obs.unit_cell().parameters()
        pointgroup = info.last_saved_best['pointgroup']
        resolution = obs.d_min()
      else:
        # FIX: was "spacegroup = resolution = 0", which left pointgroup
        # undefined and raised NameError in the queue_entry call below
        # whenever a non-indexed event was logged.
        mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
          cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

      self.queue_entry((self.trial, evt.run(), "%.3f"%evt_time, n_spots, distance,
                        self.sifoil, self.wavelength, indexed, mosaic_bloc_rotation,
                        mosaic_block_size, ewald_proximal_volume, pointgroup, cell_a,
                        cell_b, cell_c, cell_alpha, cell_beta, cell_gamma, resolution,
                        self.m_db_tags))

    if (not indexed):
      evt.put(skip_event_flag(), "skip_event")
      return

  elif (self.m_dispatch == "nop"):
    pass

  elif (self.m_dispatch == "view"): #interactive image viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      # FIX: format string was "...=%" (missing conversion), which raises
      # ValueError; the parallel "spots" branch below uses "%s" correctly.
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d
    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil
    display_spots.wrapper_of_callback().display(horizons_phil.indexing.data)

  elif (self.m_dispatch == "spots"): #interactive spotfinder viewer
    args = ["indexing.data=dummy"]
    detector_format_version = detector_format_function(
      self.address, evt.GetTime())
    if detector_format_version is not None:
      args += ["distl.detector_format_version=%s" % detector_format_version]
    from xfel.phil_preferences import load_cxi_phil
    horizons_phil = load_cxi_phil(self.m_xtal_target, args)
    horizons_phil.indexing.data = d
    from xfel.cxi import display_spots
    display_spots.parameters.horizons_phil = horizons_phil

    from rstbx.new_horizons.index import pre_indexing_validation,pack_names
    pre_indexing_validation(horizons_phil)
    imagefile_arguments = pack_names(horizons_phil)
    horizons_phil.persist.show()
    from spotfinder.applications import signal_strength
    info = signal_strength.run_signal_strength_core(horizons_phil,imagefile_arguments)

    work = display_spots.wrapper_of_callback(info)
    work.display_with_callback(horizons_phil.indexing.data)

  elif (self.m_dispatch == "write_dict"):
    self.logger.warning(
      "event(): deprecated dispatch 'write_dict', use mod_dump instead")
    if (self.m_out_dirname is not None or
        self.m_out_basename is not None):
      cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

  # Diagnostic message emitted only when all the processing is done.
  if (env.subprocess() >= 0):
    self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))
  else:
    self.logger.info("Accepted #%05d @ %s" %
                     (self.nshots, self.timestamp))
def event(self, evt, env):
  """The event() function is called for every L1Accept transition.

  Computes the radial average of the current image (via
  xfel.command_line.radial_average.run) and stores the bin positions
  and averaged intensities back into the event, plus optionally the
  intensity at two reference two-theta values.

  @param evt Event data object, a configure object
  @param env Environment object
  """

  super(mod_radial_average, self).event(evt, env)
  if (evt.get("skip_event")):
    return

  # This module only applies to detectors for which a distance is
  # available.
  distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
  if distance is None:
    self.nfail += 1
    self.logger.warning("event(): no distance, shot skipped")
    evt.put(skip_event_flag(), "skip_event")
    return

  # See r17537 of mod_average.py.
  # NOTE(review): an unrecognized device leaves pixel_size/saturated_value
  # unbound and the dpack call below would raise NameError — confirm only
  # Cspad/marccd/Rayonix addresses reach this module.
  device = cspad_tbx.address_split(self.address)[2]
  if device == 'Cspad':
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
  elif device == 'marccd':
    # hard-coded marccd pixel size (mm) and 16-bit saturation level
    pixel_size = 0.079346
    saturated_value = 2**16 - 1
  elif device == 'Rayonix':
    pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
    saturated_value = rayonix_tbx.rayonix_saturated_value

  # Pack the image and its metadata into the standard image dictionary.
  d = cspad_tbx.dpack(
    active_areas=self.active_areas,
    address=self.address,
    beam_center_x=pixel_size * self.beam_center[0],
    beam_center_y=pixel_size * self.beam_center[1],
    data=self.cspad_img.iround(), # XXX ouch!
    distance=distance,
    pixel_size=pixel_size,
    saturated_value=saturated_value,
    timestamp=self.timestamp,
    wavelength=self.wavelength,
    xtal_target=self.m_xtal_target)

  from xfel.command_line.radial_average import run
  args = [
    "file_path=XTC stream",
    "xfel_target=%s" % self.m_xtal_target,
    "verbose=False"
  ]

  # Compact timestamp (YYYYMMDDhhmmssmmm) used in log lines and file names.
  t = self.timestamp
  s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]

  if self._dirname is not None:
    dest_path = os.path.join(self._dirname, self._basename + s + ".txt")
    args.append("output_file=%s" % dest_path)

  self.logger.info("Calculating radial average for image %s" % s)
  xvals, results = run(args, d)

  evt.put(xvals, "cctbx.xfel.radial_average.xvals")
  evt.put(results, "cctbx.xfel.radial_average.results")

  def get_closest_idx(data, val):
    # Index of the element of data nearest to val.
    from scitbx.array_family import flex
    deltas = flex.abs(data - val)
    return flex.first_index(deltas, flex.min(deltas))

  # Optionally report the averaged intensity at two reference two-theta
  # positions (used downstream as low/high-angle scattering monitors).
  if self._two_theta_low is not None:
    i_low = results[get_closest_idx(xvals, self._two_theta_low)]
    evt.put(i_low, "cctbx.xfel.radial_average.two_theta_low")

  if self._two_theta_high is not None:
    i_high = results[get_closest_idx(xvals, self._two_theta_high)]
    evt.put(i_high, "cctbx.xfel.radial_average.two_theta_high")
def __init__(self,
             dataset_name      = None,
             detector_address  = None,
             data_type         = 'xtc',
             mask_path         = None,
             mask_angles       = None,
             mask_widths       = None,
             backimg_path      = None,
             backmsk_path      = None,
             param_path        = None,
             det_dist          = None,
             det_pix           = 0.075,
             beam_l            = None,
             mask_thr          = None,
             nQ                = None,
             nPhi              = None,
             dQ                = 1,
             dPhi              = 1,
             cent0             = None,
             r_max             = None,
             dr                = 10,
             dx                = 5,
             dy                = 5,
             r_0               = None,
             q_bound           = None):
  """The fluctuation scattering class stores processing parameters,
     initiates mask and background data and retrieves 2D images
     from events. Processing options of the 2D images include:

     * Transform from cartesian to polar coordinates
     * Beam center refinement
     * Dynamic masking
     * Normalization % SAXS calculation
     * Particle sizing
     * Computation of in-frame 2-point angular auto-correlations using FFTs

     @param dataset_name      Experiment name and run number
     @param detector_address  Adress to back or front detector
     @param data_type         Type of data file format (xtc, ffb, h5)
     @param mask_path         Full path to static image mask
     @param mask_angles       Center of angluar slices (deg) that should be masked out (due to jet streaks etc), [Ang1 Ang2 ...]
     @param mask_widths       Width of angular slices (deg) that should be masked out (due to jet streaks etc), [delta1 delta2 ...]
     @param backimg_path      Full path to background image
     @param backmsk_path      Full path to background mask
     @param param_path        Full path to file with pre-computed parameters (i.e beam center,particle nr,particle size)
     @param det_dist          Override of detecor distance (in mm)
     @param det_pix           Pixel size (in mm)
     @param beam_l            Override of beam wavelength (in Angstrom)
     @param mask_thr          Threshold for dynamic masking
     @param nQ                Number of Q-bins to consider (in pixels)
     @param nPhi              Number of Phi-bins to consider (in pixels)
     @param dQ                Stepsize in Q (in pixels)
     @param dPhi              Stepsize in Phi (in pixels)
     @param cent0             Initial beam center coordinates [xc,yc]
     @param r_max             Maximum radial value to use for beamcenter refinement (in pixels)
     @param dr                Stepsize in r (in pixels)
     @param dx                Gridsize for beam center refinement in x, i.e xc+/-dx (in pixels)
     @param dy                Gridsize for beam center refinement in y, i.e yc+/-dy (in pixles)
     @param r_0               Starting value for particle radius refinement [in Ang]
     @param q_bound           Upper and Lower boundaries of q for Particle radius refinement [in Ang^-1]
  """

  # Initialize parameters and configuration files once
  self.data_type = data_type
  self.dataset_name = dataset_name
  self.detector_address = detector_address

  if (self.data_type == 'xtc') or (self.data_type == 'ffb') :
     self.ds = DataSource(self.dataset_name)
     self.src = Detector(self.detector_address, self.ds.env())

  if mask_path is None : # Create a binary mask of ones, default mask only works for xtc/ffb
     # grab the first event of the first run to obtain the mask geometry
     for run in self.ds.runs():
         times = run.times()
         evt = run.event(times[0])
         break
     mask_address = self.src.mask(run.run(),calib=True,status=True,edges=True,central=True,unbond=True,unbondnbrs=True)
     self.msk = self.src.image(evt,mask_address)
  else :
     self.msk = np.loadtxt(mask_path).astype(np.float64)

  if backimg_path is None :
     self.backimg = None
  else :
     self.backimg = np.loadtxt(backimg_path).astype(np.float64)

  if backmsk_path is None :
     self.backmsk = None
  else :
     self.backmsk = np.loadtxt(backmsk_path).astype(np.float64)

  # NOTE(review): a non-None param_path is not loaded here — confirm whether
  # pre-computed parameters are read elsewhere.
  if param_path is None :
     self.param = []

  if det_dist is None : # Get detector distance from events
     for run in self.ds.runs():
         self.det_dist = cspad_tbx.env_distance(self.detector_address, run.env(), 577)
  else :
     self.det_dist = det_dist

  self.det_pix = det_pix

  if beam_l is None : # Get wavelength from event, note it change slightly between events
     for run in self.ds.runs():
         times = run.times()
         evt = run.event(times[0])
         break
     self.beam_l = cspad_tbx.evt_wavelength(evt)
  else :
     self.beam_l = beam_l

  if mask_thr is None : # No dynamic masking
     self.thr = None
  else :
     self.thr = mask_thr

  if nQ is None : # Use image dimensions as a guide, leave room for offset beamC
     if self.msk.shape[0] > self.msk.shape[1] :
        self.nQ = int(self.msk.shape[1]/2)-20
     else :
        self.nQ = int(self.msk.shape[0]/2)-20
  else :
     self.nQ = nQ

  if (self.nQ % 10):  # Ascert even number, speeds things up massively for FFT
     self.nQ = np.floor(self.nQ/10)*10
  if (self.nQ % dQ):  # Ascert clean divisor
     self.nQ = np.floor(self.nQ/dQ)*dQ

  if nPhi is None : # Estimate based on 2*pi*nQ
     self.nPhi = np.ceil(2*np.pi*self.nQ)
  else :
     self.nPhi = nPhi

  if (self.nPhi % 10): # Ascert even number, speeds things up massively for FFT
     self.nPhi = np.ceil(self.nPhi/10)*10
  if (self.nPhi % dPhi): # Ascert clean divisor
     self.nPhi = np.ceil(self.nPhi/dPhi)*dPhi

  self.dQ = dQ
  self.dPhi = dPhi

  self.mask_angles = mask_angles
  self.mask_widths = mask_widths
  # Compute slices that should be masked in static mask (deg -> Phi bins)
  if (self.mask_angles is not None) and (self.mask_widths is not None) :
     self.mask_angles = (self.mask_angles/360) * self.nPhi
     self.mask_widths = (self.mask_widths/360) * self.nPhi

  if (cent0 is None) or (sum(cent0) == 0): # Use center of gravity to estimate starting beamC
     self.cent0 = [int(round(self.msk.shape[1]/2)) , int(round(self.msk.shape[0]/2))]
  else :
     self.cent0 = cent0
  self.cent = self.cent0 # Default center

  if r_max is None : # Default, Use half of nQ
     self.r_max = int(self.nQ/2)
  else :
     self.r_max = r_max
  if (self.r_max % dr): # Ascert clean divisor
     self.r_max = np.floor(self.r_max/dr)*dr

  self.dr = dr
  self.dx = dx
  self.dy = dy

  if r_0 is None :
     self.radius = 0
     self.score = 0
  self.r_0 = r_0

  if q_bound is None or sum(q_bound)==0 :
     self.q_bound = [None,None]
  else :
     # FIX: was "self.q_bound = [None,self.q_bound]", which read the
     # attribute before it was ever assigned and raised AttributeError
     # for any non-trivial q_bound; store the parameter instead.
     self.q_bound = q_bound

  # Compute q-spacing (small-angle approximation: q = 4*pi*sin(theta)/lambda)
  self.q = np.arange(0, self.nQ, self.dQ)
  self.q = self.q*self.det_pix/self.det_dist*4*np.pi/self.beam_l/2

  # Compute Phi (Not accounting for curvature)
  # int() cast: np.linspace requires an integer sample count
  self.phi = np.linspace(0, 2*np.pi, int(self.nPhi/self.dPhi), endpoint=False)
def _detector(self, index=None):
  """Construct a hierarchical dxtbx Detector model for the CSPAD.

  Reads the SLAC metrology for the requested event's run and builds the
  detector hierarchy D0 -> 4 quadrants -> 8 sensors -> 2 ASICs, then
  applies a parallax correction if a beam model is available.

  @param index Event index used to look up the run geometry (defaults to 0)
  @return dxtbx.model.Detector instance
  """
  import psana
  from xfel.cftbx.detector.cspad_cbf_tbx import read_slac_metrology
  from dxtbx.model import Detector
  from scitbx.matrix import col
  from dxtbx.model import ParallaxCorrectedPxMmStrategy
  from xfel.cxi.cspad_ana.cspad_tbx import env_distance
  if index is None:
    index = 0
  self._env = self._ds.env()  # XXX should be run specific
  assert len(self.params.detector_address) == 1
  self._det = psana.Detector(self.params.detector_address[0], self._env)
  # geometry access object for the run containing the requested event
  geom = self._det.pyda.geoaccess(self._get_event(index).run())
  # cob: change-of-basis matrices keyed by hierarchy tuple, e.g.
  # (0,), (0, quad), (0, quad, sensor), (0, quad, sensor, asic)
  cob = read_slac_metrology(geometry=geom, include_asic_offset=True)
  distance = env_distance(self.params.detector_address[0], self._env,
                          self.params.cspad.detz_offset)
  d = Detector()
  pg0 = d.hierarchy()
  # first deal with D0
  det_num = 0
  # Derive origin/fast/slow axes by transforming basis points through the
  # change-of-basis matrix (homogeneous coordinates).
  origin = col((cob[(0, )] * col((0, 0, 0, 1)))[0:3])
  fast = col((cob[(0, )] * col((1, 0, 0, 1)))[0:3]) - origin
  slow = col((cob[(0, )] * col((0, 1, 0, 1)))[0:3]) - origin
  # shift the whole detector back by the detector distance (along -z)
  origin += col((0., 0., -distance))
  pg0.set_local_frame(fast.elems, slow.elems, origin.elems)
  pg0.set_name('D%d' % (det_num))
  for quad_num in xrange(4):
    # Now deal with Qx
    pg1 = pg0.add_group()
    origin = col((cob[(0, quad_num)] * col((0, 0, 0, 1)))[0:3])
    fast = col((cob[(0, quad_num)] * col((1, 0, 0, 1)))[0:3]) - origin
    slow = col((cob[(0, quad_num)] * col((0, 1, 0, 1)))[0:3]) - origin
    pg1.set_local_frame(fast.elems, slow.elems, origin.elems)
    pg1.set_name('D%dQ%d' % (det_num, quad_num))
    for sensor_num in xrange(8):
      # Now deal with Sy
      pg2 = pg1.add_group()
      origin = col((cob[(0, quad_num, sensor_num)] * col(
        (0, 0, 0, 1)))[0:3])
      fast = col((cob[(0, quad_num, sensor_num)] * col(
        (1, 0, 0, 1)))[0:3]) - origin
      slow = col((cob[(0, quad_num, sensor_num)] * col(
        (0, 1, 0, 1)))[0:3]) - origin
      pg2.set_local_frame(fast.elems, slow.elems, origin.elems)
      pg2.set_name('D%dQ%dS%d' % (det_num, quad_num, sensor_num))
      # Now deal with Az
      for asic_num in xrange(2):
        val = 'ARRAY_D0Q%dS%dA%d' % (quad_num, sensor_num, asic_num)
        p = pg2.add_panel()
        origin = col(
          (cob[(0, quad_num, sensor_num, asic_num)] * col(
            (0, 0, 0, 1)))[0:3])
        fast = col((cob[(0, quad_num, sensor_num, asic_num)] * col(
          (1, 0, 0, 1)))[0:3]) - origin
        slow = col((cob[(0, quad_num, sensor_num, asic_num)] * col(
          (0, 1, 0, 1)))[0:3]) - origin
        p.set_local_frame(fast.elems, slow.elems, origin.elems)
        p.set_pixel_size(
          (cspad_cbf_tbx.pixel_size, cspad_cbf_tbx.pixel_size))
        p.set_image_size(cspad_cbf_tbx.asic_dimension)
        p.set_trusted_range((cspad_tbx.cspad_min_trusted_value,
                             cspad_tbx.cspad_saturated_value))
        p.set_name(val)

  try:
    beam = self._beam(index)
  except Exception:
    print(
      'No beam object initialized. Returning CSPAD detector without parallax corrections'
    )
    return d

  # take into consideration here the thickness of the sensor also the
  # wavelength of the radiation (which we have in the same file...)
  wavelength = beam.get_wavelength()
  thickness = 0.5  # mm, see Hart et al. 2012
  from cctbx.eltbx import attenuation_coefficient
  table = attenuation_coefficient.get_table("Si")
  # mu_at_angstrom returns cm^-1
  mu = table.mu_at_angstrom(wavelength) / 10.0  # mu: mm^-1
  t0 = thickness
  for panel in d:
    panel.set_px_mm_strategy(ParallaxCorrectedPxMmStrategy(mu, t0))
  return d
def process_event(self, run, timestamp):
  """
  Process a single event from a run: apply debug-log based skipping,
  read/calibrate the image, build a dxtbx format object and run
  spotfinding on it.

  @param run psana run object
  @param timestamp psana timestamp object
  """
  ts = cspad_tbx.evt_timestamp((timestamp.seconds(),timestamp.nanoseconds()/1e6))
  if ts is None:
    print "No timestamp, skipping shot"
    return

  # Honour an explicit whitelist of timestamps, if configured.
  if len(self.params_cache.debug.event_timestamp) > 0 and ts not in self.params_cache.debug.event_timestamp:
    return

  # Skip events according to their status in a previous run's debug log:
  # "unknown" (possibly crashed), processed, or never attempted.
  if self.params_cache.debug.skip_processed_events or self.params_cache.debug.skip_unprocessed_events or self.params_cache.debug.skip_bad_events:
    if ts in self.known_events:
      if self.known_events[ts] == "unknown":
        if self.params_cache.debug.skip_bad_events and self.known_events[ts] == "unknown":
          print "Skipping event %s: possibly caused an unknown exception previously"%ts
          return
      elif self.params_cache.debug.skip_processed_events:
        print "Skipping event %s: processed successfully previously"%ts
        return
    else:
      if self.params_cache.debug.skip_unprocessed_events:
        print "Skipping event %s: not processed previously"%ts
        return

  print "Accepted", ts
  # Record acceptance; outcome tags (e.g. ",no_data") are appended below.
  self.debug_file_handle.write("%s,%s"%(socket.gethostname(), ts))

  # Work on a per-event copy so per-event path substitutions below do not
  # pollute the cached parameters.
  self.params = copy.deepcopy(self.params_cache)

  evt = run.event(timestamp)
  id = evt.get(psana.EventId)
  if evt.get("skip_event"):
    print "Skipping event",id
    self.debug_file_handle.write(",psana_skip\n")
    return

  # the data needs to have already been processed and put into the event by psana
  if self.params.format.file_format == 'cbf':
    # get numpy array, 32x185x388
    data = self.psana_det.calib(evt) # applies psana's complex run-dependent calibrations
    if data is None:
      print "No data"
      self.debug_file_handle.write(",no_data\n")
      return

    if self.params.format.cbf.gain_mask_value is not None:
      # apply gain mask
      data *= self.gain_mask

    distance = cspad_tbx.env_distance(self.params.input.address, run.env(), self.params.format.cbf.detz_offset)
    if distance is None:
      print "No distance, skipping shot"
      self.debug_file_handle.write(",no_distance\n")
      return

    if self.params.format.cbf.override_energy is None:
      wavelength = cspad_tbx.evt_wavelength(evt)
      if wavelength is None:
        print "No wavelength, skipping shot"
        self.debug_file_handle.write(",no_wavelength\n")
        return
    else:
      # convert override energy (eV) to wavelength (Angstrom)
      wavelength = 12398.4187/self.params.format.cbf.override_energy

  if self.params.format.file_format == 'pickle':
    image_dict = evt.get(self.params.format.pickle.out_key)
    data = image_dict['DATA']

  # Compact timestamp (YYYYMMDDhhmmssmmm) used for output file names.
  timestamp = t = ts
  s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
  print "Processing shot", s

  if self.params.format.file_format == 'cbf':
    # stitch together the header, data and metadata into the final dxtbx format object
    cspad_img = cspad_cbf_tbx.format_object_from_data(self.base_dxtbx, data, distance, wavelength, timestamp, self.params.input.address)
  elif self.params.format.file_format == 'pickle':
    from dxtbx.format.FormatPYunspecifiedStill import FormatPYunspecifiedStillInMemory
    cspad_img = FormatPYunspecifiedStillInMemory(image_dict)
  cspad_img.timestamp = s

  if self.params.dispatch.dump_all:
    self.save_image(cspad_img, self.params, os.path.join(self.params.output.output_dir, "shot-" + s))

  self.cache_ranges(cspad_img, self.params)

  imgset = MemImageSet([cspad_img])
  datablock = DataBlockFactory.from_imageset(imgset)[0]

  # before calling DIALS for processing, set output paths according to the templates
  if self.indexed_filename_template is not None and "%s" in self.indexed_filename_template:
    self.params.output.indexed_filename = os.path.join(self.params.output.output_dir, self.indexed_filename_template%("idx-" + s))
  if "%s" in self.refined_experiments_filename_template:
    self.params.output.refined_experiments_filename = os.path.join(self.params.output.output_dir, self.refined_experiments_filename_template%("idx-" + s))
  if "%s" in self.integrated_filename_template:
    self.params.output.integrated_filename = os.path.join(self.params.output.output_dir, self.integrated_filename_template%("idx-" + s))

  # if border is requested, generate a border only mask
  if self.params.border_mask.border > 0:
    from dials.command_line.generate_mask import MaskGenerator
    generator = MaskGenerator(self.params.border_mask)
    mask = generator.generate(imgset)
    self.params.spotfinder.lookup.mask = mask

  try:
    observed = self.find_spots(datablock)
  except Exception, e:
    import traceback; traceback.print_exc()
    print str(e), "event", timestamp
    self.debug_file_handle.write(",spotfinding_exception\n")
    return
def __init__(self,
             dataset_name      = None,
             detector_address  = None,
             data_type         = 'idx',
             mask_path         = None,
             mask_angles       = None,
             mask_widths       = None,
             backimg_path      = None,
             backmsk_path      = None,
             geom_path         = None,
             det_dist          = None,
             det_pix           = 0.075,
             beam_l            = None,
             mask_thr          = None,
             nQ                = None,
             nPhi              = None,
             dQ                = 1,
             dPhi              = 1,
             cent0             = None,
             r_max             = None,
             dr                = None,
             dx                = None,
             dy                = None,
             r_0               = None,
             q_bound           = None,
             peak              = None,
             dpeak             = None):
  """The fluctuation scattering class stores processing parameters,
     initiates mask and background data and retrieves 2D images
     from events. Processing options of the 2D images include:

     * Transform from cartesian to polar coordinates
     * Beam center refinement
     * Dynamic masking
     * Normalization % SAXS calculation
     * Particle sizing
     * Computation of in-frame 2-point angular auto-correlations using FFTs

     @param dataset_name      Experiment name and run number
     @param detector_address  Adress to back or front detector
     @param data_type         Type of data file format (h5 or xtc in the formats: idx|idx_ffb|smd|smd_ffb|h5)
     @param mask_path         Full path to static image mask
     @param mask_angles       Center of angluar slices (deg) that should be masked out (due to jet streaks etc), [Ang1 Ang2 ...]
     @param mask_widths       Width of angular slices (deg) that should be masked out (due to jet streaks etc), [delta1 delta2 ...]
     @param backimg_path      Full path to background image
     @param backmsk_path      Full path to background mask
     @param geom_path         Full path to geometry file (for h5 format)
     @param det_dist          Override of detecor distance (in mm)
     @param det_pix           Pixel size (in mm)
     @param beam_l            Override of beam wavelength (in Angstrom)
     @param mask_thr          Threshold for dynamic masking
     @param nQ                Number of Q-bins to consider (in pixels)
     @param nPhi              Number of Phi-bins to consider (in pixels)
     @param dQ                Stepsize in Q (in pixels)
     @param dPhi              Stepsize in Phi (in pixels)
     @param cent0             Initial beam center coordinates [xc,yc]
     @param r_max             Maximum radial value to use for beamcenter refinement (in pixels)
     @param dr                Stepsize in r (in pixels)
     @param dx                Gridsize for beam center refinement in x, i.e xc+/-dx (in pixels)
     @param dy                Gridsize for beam center refinement in y, i.e yc+/-dy (in pixles)
     @param r_0               Starting value for particle radius refinement [in Ang]
     @param q_bound           Upper and Lower boundaries of q for Particle radius refinement [in Ang^-1]
     @param peak              Q-values for peak maxima [q_peak1 q_peak2 ...]
     @param dpeak             Delta Q used for peak integration of peak maxima [delta_q1 delta_q2 ...]
  """

  # Initialize parameters and configuration files once
  self.data_type = data_type
  self.dataset_name = dataset_name
  self.detector_address = detector_address

  if (self.data_type == 'idx') or (self.data_type == 'idx_ffb') or (self.data_type == 'smd') or (self.data_type == 'smd_ffb') or (self.data_type == 'xtc') :
     self.ds = DataSource(self.dataset_name)
     self.src = Detector(self.detector_address, self.ds.env())
     if mask_path is None : # Create a binary mask of ones, default mask only works for xtc/ffb
        evt = self.ds.events().next()
        self.mask_address = self.src.mask(evt,calib=True,status=True)
        self.msk = self.src.image(evt,self.mask_address)
        self.mask = np.copy(self.msk)
     else:
        self.msk = np.loadtxt(mask_path)
        self.mask = np.copy(self.msk)

  if geom_path is not None :
     # Geometry file: one value per line after a header row
     geom = np.genfromtxt(geom_path,skiprows=1)
     self.gap = geom[0]
     self.shift = geom[1]
     self.orient = geom[2]
     self.comm = geom[3]
     self.param1 = geom[4]
     self.param2 = geom[5]
     self.param3 = geom[6]
     self.param4 = geom[7]

  if (self.data_type == 'h5'):
     if mask_path is None : # Create a binary mask of ones
        ## Add default binary mask here
        ## Default pnCCD dimensions
        dim1 = 1024
        dim2 = 1024
        self.mask = np.ones((dim1,dim2))
     else:
        self.mask = np.loadtxt(mask_path)
     # Apply geometry (requires self.gap/shift/orient from geom_path above)
     self.msk = pnccd_tbx.get_geometry(img    = self.mask,
                                       gap    = self.gap,
                                       shift  = self.shift,
                                       orient = self.orient)

  if self.detector_address == 'pnccdFront' :
     evt = self.ds.events().next()
     gain = self.src.gain(evt)
     self.gain = self.src.image(evt,gain)

  self.cart = 0
  self.flat = 0

  if backimg_path is None :
     self.backimg = None
  else :
     self.backimg = np.loadtxt(backimg_path).astype(np.float64)
     # Check if background image in cartesian coordinates exists
     if (self.backimg.shape == self.msk.shape):
        self.cart = 1 # Remove bg before transform to polar coordinates

  if backmsk_path is None :
     self.backmsk = None
  else :
     self.backmsk = np.loadtxt(backmsk_path).astype(np.float64)

  # Check if flat-field image exists
  if (self.backmsk is None) and (self.backimg is not None) and (self.cart==0):
     self.pcflat = pnccd_tbx.dynamic_flatfield(self.backimg)
     self.flat = 1

  if det_dist is None : # Get detector distance from events
     for run in self.ds.runs():
         self.det_dist = cspad_tbx.env_distance(self.detector_address, run.env(), 577)
  else :
     self.det_dist = det_dist

  self.det_pix = det_pix

  if beam_l is None : # Get wavelength from event, note it can change slightly between events. So in the future use average.
     self.beam_l = cspad_tbx.evt_wavelength(self.ds.events().next())
  else :
     self.beam_l = beam_l

  if mask_thr is None : # No dynamic masking
     self.thr = None
  else :
     self.thr = mask_thr

  if nQ is None : # Use image dimensions as a guide, leave room for offset beamC
     if self.msk.shape[0] > self.msk.shape[1] : # nQ determined by smallest dimension
        self.nQ = int(self.msk.shape[1]/2)-20
     else :
        self.nQ = int(self.msk.shape[0]/2)-20
  else :
     self.nQ = nQ

  if (self.nQ % 10):  # Ascert even number, speeds things up massively for FFT
     self.nQ = np.floor(self.nQ/10)*10
  if (self.nQ % dQ):  # Ascert clean divisor
     self.nQ = np.floor(self.nQ/dQ)*dQ

  if nPhi is None : # Estimate based on 2*pi*nQ
     self.nPhi = np.ceil(2*np.pi*self.nQ)
  else :
     self.nPhi = nPhi

  if (self.nPhi % 10): # Ascert even number, speeds things up massively for FFT
     self.nPhi = np.ceil(self.nPhi/10)*10
  if (self.nPhi % dPhi): # Ascert clean divisor
     self.nPhi = np.ceil(self.nPhi/dPhi)*dPhi

  self.dQ = dQ
  self.dPhi = dPhi

  self.mask_angles = mask_angles
  self.mask_widths = mask_widths
  # Compute slices that should be masked in static mask (deg -> Phi bins)
  if (self.mask_angles is not None) and (self.mask_widths is not None) :
     self.mask_angles = (self.mask_angles/360) * self.nPhi
     self.mask_widths = (self.mask_widths/360) * self.nPhi

  # Check if nQ > smallest dimension/2 then we extend the image with zero values
  if self.nQ > min(self.msk.shape[0]/2,self.msk.shape[1]/2) :
     self.msk = pnccd_tbx.extend_image(img = self.msk)
     self.mask = np.copy(self.msk)

  if (cent0 is None) or (sum(cent0) == 0): # Use center of gravity to estimate starting beamC
     self.cent0 = [int(round(self.msk.shape[1]/2)) , int(round(self.msk.shape[0]/2))]
  else :
     self.cent0 = cent0
  self.cent = self.cent0 # Default center

  if r_max is None : # Default, Use 3/4 of nQ
     # FIX: was int(self.nQ*(3/4)) — under Python 2 integer division
     # 3/4 == 0, so the default r_max was always 0.
     self.r_max = int(self.nQ*0.75)
  else :
     self.r_max = r_max
  if (self.r_max % dr): # Ascert clean divisor
     self.r_max = np.floor(self.r_max/dr)*dr

  self.dr = dr
  self.dx = dx
  self.dy = dy

  if r_0 is None :
     self.radius = 0
     self.score = 0
  self.r_0 = r_0

  if q_bound is None or sum(q_bound)==0 :
     self.q_bound = [None,None]
  else :
     # FIX: was "self.q_bound = [None,self.q_bound]", which read the
     # attribute before it was ever assigned and raised AttributeError
     # for any non-trivial q_bound; store the parameter instead.
     self.q_bound = q_bound

  # Compute q-spacing (small-angle approximation: q = 4*pi*sin(theta)/lambda)
  self.q = np.arange(0, self.nQ, self.dQ)
  self.q = self.q*self.det_pix/self.det_dist*4*np.pi/self.beam_l/2

  # Compute Phi (Not accounting for curvature)
  # int() cast: np.linspace requires an integer sample count
  self.phi = np.linspace(0, 2*np.pi, int(self.nPhi/self.dPhi), endpoint=False)

  # Compute indices for Peak maxima
  if (peak is not None) and (dpeak is not None) :
     self.peak = peak
     self.dpeak = dpeak
     self.ind1 = (self.q >= (self.peak[0] - self.dpeak[0])) & (self.q <= (self.peak[0] + self.dpeak[0]) )
     self.ind2 = (self.q >= (self.peak[1] - self.dpeak[1])) & (self.q <= (self.peak[1] + self.dpeak[1]) )
  else:
     self.peak = None
     self.dpeak = None
def average(argv=None):
  """MPI-parallel per-pixel averaging of detector images over a psana run.

  Accumulates the per-pixel sum, sum of squares and maximum over all events
  of a run (work divided round-robin across MPI ranks), reduces the partial
  results to rank 0, and writes three output images — mean, standard
  deviation and maximum projection — either as image pickles (-p) or as
  CSPAD CBF files.

  @param argv Command-line arguments; defaults to sys.argv[1:]
  """
  if argv == None:
    argv = sys.argv[1:]
  try:
    from mpi4py import MPI
  except ImportError:
    raise Sorry("MPI not found")
  command_line = (libtbx.option_parser.option_parser(
    usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name)
                .option(None, "--as_pickle", "-p",
                        action="store_true",
                        default=False,
                        dest="as_pickle",
                        help="Write results as image pickle files instead of cbf files")
                .option(None, "--config", "-c",
                        type="string",
                        default=None,
                        dest="config",
                        metavar="PATH",
                        help="psana config file")
                .option(None, "--experiment", "-x",
                        type="string",
                        default=None,
                        dest="experiment",
                        help="experiment name (eg cxi84914)")
                .option(None, "--run", "-r",
                        type="int",
                        default=None,
                        dest="run",
                        help="run number")
                .option(None, "--address", "-a",
                        type="string",
                        default="CxiDs2.0:Cspad.0",
                        dest="address",
                        help="detector address name (eg CxiDs2.0:Cspad.0)")
                .option(None, "--detz_offset", "-d",
                        type="float",
                        default=None,
                        dest="detz_offset",
                        help="offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)")
                .option(None, "--outputdir", "-o",
                        type="string",
                        default=".",
                        dest="outputdir",
                        metavar="PATH",
                        help="Optional path to output directory for output files")
                .option(None, "--averagebase", "-A",
                        type="string",
                        default="{experiment!l}_avg-r{run:04d}",
                        dest="averagepath",
                        metavar="PATH",
                        help="Path to output average image without extension. String substitution allowed")
                .option(None, "--stddevbase", "-S",
                        type="string",
                        default="{experiment!l}_stddev-r{run:04d}",
                        dest="stddevpath",
                        metavar="PATH",
                        help="Path to output standard deviation image without extension. String substitution allowed")
                .option(None, "--maxbase", "-M",
                        type="string",
                        default="{experiment!l}_max-r{run:04d}",
                        dest="maxpath",
                        metavar="PATH",
                        help="Path to output maximum projection image without extension. String substitution allowed")
                .option(None, "--numevents", "-n",
                        type="int",
                        default=None,
                        dest="numevents",
                        help="Maximum number of events to process. Default: all")
                .option(None, "--skipevents", "-s",
                        type="int",
                        default=0,
                        dest="skipevents",
                        help="Number of events in the beginning of the run to skip. Default: 0")
                .option(None, "--verbose", "-v",
                        action="store_true",
                        default=False,
                        dest="verbose",
                        help="Print more information about progress")
                .option(None, "--pickle-optical-metrology", "-m",
                        action="store_true",
                        default=False,
                        dest="pickle_optical_metrology",
                        help="If writing pickle files, use the optical metrology in the experiment's calib directory")
                .option(None, "--bin_size", "-b",
                        type="int",
                        default=None,
                        dest="bin_size",
                        help="Rayonix detector bin size")
                .option(None, "--override_beam_x", "-X",
                        type="float",
                        default=None,
                        dest="override_beam_x",
                        help="Rayonix detector beam center x coordinate")
                .option(None, "--override_beam_y", "-Y",
                        type="float",
                        default=None,
                        dest="override_beam_y",
                        help="Rayonix detector beam center y coordinate")
                .option(None, "--calib_dir", "-C",
                        type="string",
                        default=None,
                        dest="calib_dir",
                        metavar="PATH",
                        help="calibration directory")
                .option(None, "--xtc_dir", "-D",
                        type="string",
                        default=None,
                        dest="xtc_dir",
                        metavar="PATH",
                        help="xtc stream directory")
                .option(None, "--use_ffb", "-f",
                        action="store_true",
                        default=False,
                        dest="use_ffb",
                        help="Use the fast feedback filesystem at LCLS. Only for the active experiment!")
                ).process(args=argv)
  # Reject stray positional args and require all mandatory options.
  if len(command_line.args) > 0 or \
     command_line.options.as_pickle is None or \
     command_line.options.experiment is None or \
     command_line.options.run is None or \
     command_line.options.address is None or \
     command_line.options.detz_offset is None or \
     command_line.options.averagepath is None or \
     command_line.options.stddevpath is None or \
     command_line.options.maxpath is None or \
     command_line.options.pickle_optical_metrology is None:
    command_line.parser.show_help()
    return

  # set this to sys.maxint to analyze all events
  if command_line.options.numevents is None:
    maxevents = sys.maxint
  else:
    maxevents = command_line.options.numevents

  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  size = comm.Get_size()

  # Build the psana dataset specification; :idx enables indexed (random)
  # event access via run.times()/run.event().
  if command_line.options.config is not None:
    psana.setConfigFile(command_line.options.config)
  dataset_name = "exp=%s:run=%d:idx"%(command_line.options.experiment, command_line.options.run)
  if command_line.options.xtc_dir is not None:
    if command_line.options.use_ffb:
      raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
    dataset_name += ":dir=%s"%command_line.options.xtc_dir
  elif command_line.options.use_ffb:
    # as ffb is only at SLAC, ok to hardcode /reg/d here
    dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc"%(command_line.options.experiment[0:3],command_line.options.experiment)
  ds = psana.DataSource(dataset_name)
  address = command_line.options.address
  src = psana.Source('DetInfo(%s)'%address)
  if not command_line.options.as_pickle:
    # Calibrated data path (CBF output) uses the psana Detector interface.
    psana_det = psana.Detector(address, ds.env())

  # Per-rank event counter, kept as a 1-element array so it can be MPI-reduced.
  nevent = np.array([0.])

  for run in ds.runs():
    runnumber = run.run()
    # list of all events
    if command_line.options.skipevents > 0:
      print "Skipping first %d events"%command_line.options.skipevents

    times = run.times()[command_line.options.skipevents:]
    nevents = min(len(times),maxevents)
    # chop the list into pieces, depending on rank.  This assigns each process
    # events such that they get every Nth event where N is the number of processes
    mytimes = [times[i] for i in xrange(nevents) if (i+rank)%size == 0]
    for i in xrange(len(mytimes)):
      if i%10==0: print 'Rank',rank,'processing event',rank*len(mytimes)+i,', ',i,'of',len(mytimes)
      evt = run.event(mytimes[i])
      #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
      # Fetch the image data through whichever interface matches the
      # detector / output mode; skip the event if nothing is available.
      if 'Rayonix' in command_line.options.address:
        data = evt.get(Camera.FrameV1,src)
        if data is None:
          print "No data"
          continue
        data=data.data16().astype(np.float64)
      elif command_line.options.as_pickle:
        data = evt.get(psana.ndarray_float64_3, src, 'image0')
      else:
        # get numpy array, 32x185x388
        data = psana_det.calib(evt) # applies psana's complex run-dependent calibrations
      if data is None:
        print "No data"
        continue

      # Running sums of distance/wavelength/timestamp are lazily created on
      # the first usable event via the "'name' in locals()" idiom, so only
      # events contributing image data contribute to the averages.
      d = cspad_tbx.env_distance(address, run.env(), command_line.options.detz_offset)
      if d is None:
        print "No distance, skipping shot"
        continue
      if 'distance' in locals():
        distance += d
      else:
        distance = np.array([float(d)])

      w = cspad_tbx.evt_wavelength(evt)
      if w is None:
        print "No wavelength, skipping shot"
        continue
      if 'wavelength' in locals():
        wavelength += w
      else:
        wavelength = np.array([w])

      t = cspad_tbx.evt_time(evt)
      if t is None:
        print "No timestamp, skipping shot"
        continue
      if 'timestamp' in locals():
        timestamp += t[0] + (t[1]/1000)
      else:
        timestamp = np.array([t[0] + (t[1]/1000)])

      # Per-pixel accumulators.  NOTE(review): 'sum' shadows the builtin
      # within this function.
      if 'sum' in locals():
        sum+=data
      else:
        sum=np.array(data, copy=True)
      if 'sumsq' in locals():
        sumsq+=data*data
      else:
        sumsq=data*data
      if 'maximum' in locals():
        maximum=np.maximum(maximum,data)
      else:
        maximum=np.array(data, copy=True)

      nevent += 1

  #sum the images across mpi cores
  if size > 1:
    print "Synchronizing rank", rank
  totevent = np.zeros(nevent.shape)
  comm.Reduce(nevent,totevent)

  if rank == 0 and totevent[0] == 0:
    raise Sorry("No events found in the run")

  # Element-wise reductions to rank 0: sums are added, maxima use MPI.MAX.
  sumall = np.zeros(sum.shape).astype(sum.dtype)
  comm.Reduce(sum,sumall)

  sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
  comm.Reduce(sumsq,sumsqall)

  maxall = np.zeros(maximum.shape).astype(maximum.dtype)
  comm.Reduce(maximum,maxall, op=MPI.MAX)

  waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
  comm.Reduce(wavelength,waveall)

  distall = np.zeros(distance.shape).astype(distance.dtype)
  comm.Reduce(distance,distall)

  timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
  comm.Reduce(timestamp,timeall)

  # Only rank 0 computes the final statistics and writes output.
  if rank==0:
    if size > 1:
      print "Synchronized"

    # Accumulating floating-point numbers introduces errors,
    # which may cause negative variances.  Since a two-pass
    # approach is unacceptable, the standard deviation is
    # clamped at zero.
    mean = sumall / float(totevent[0])
    variance = (sumsqall / float(totevent[0])) - (mean**2)
    variance[variance < 0] = 0
    stddev = np.sqrt(variance)

    wavelength = waveall[0] / totevent[0]
    distance = distall[0] / totevent[0]
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
    timestamp = timeall[0] / totevent[0]
    # Split the mean epoch time into (seconds, milliseconds).
    # NOTE(review): 'timestamp % int(timestamp)' raises ZeroDivisionError if
    # the integer part is 0 — cannot happen for real epoch times, but worth
    # confirming for synthetic data.
    timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
    timestamp = cspad_tbx.evt_timestamp(timestamp)

    if command_line.options.as_pickle:
      extension = ".pickle"
    else:
      extension = ".cbf"

    dest_paths = [cspad_tbx.pathsubst(command_line.options.averagepath + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.stddevpath + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.maxpath + extension, evt, ds.env())]
    dest_paths = [os.path.join(command_line.options.outputdir, path) for path in dest_paths]
    if 'Rayonix' in command_line.options.address:
      # Rayonix output: always image pickles, geometry from bin size.
      from xfel.cxi.cspad_ana import rayonix_tbx
      pixel_size = rayonix_tbx.get_rayonix_pixel_size(command_line.options.bin_size)
      beam_center = [command_line.options.override_beam_x,command_line.options.override_beam_y]
      detector_dimensions = rayonix_tbx.get_rayonix_detector_dimensions(command_line.options.bin_size)
      active_areas = flex.int([0,0,detector_dimensions[0],detector_dimensions[1]])
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]
      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path
        d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=flex.double(data),
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=rayonix_tbx.rayonix_saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)
        easy_pickle.dump(path, d)
    elif command_line.options.as_pickle:
      # CSPAD pickle output: assemble the 32 ASIC panels into a single 2D
      # image using either XPP active areas or optical metrology sections.
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]

      xpp = 'xpp' in address.lower()
      if xpp:
        evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
        timestamp = cspad_tbx.evt_timestamp(evt_time) # human readable format
        from xfel.detector_formats import detector_format_version, reverse_timestamp
        from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
        version_lookup = detector_format_version(old_style_address, reverse_timestamp(timestamp)[0])
        assert version_lookup is not None
        active_areas = xpp_active_areas[version_lookup]['active_areas']
        beam_center = [1765 // 2, 1765 // 2]
      else:
        if command_line.options.calib_dir is not None:
          metro_path = command_line.options.calib_dir
        elif command_line.options.pickle_optical_metrology:
          from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
          metro_path = get_calib_file_path(run.env(), address, run)
        else:
          metro_path = libtbx.env.find_in_repositories("xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
        sections = parse_calib.calib2sections(metro_path)
        beam_center, active_areas = cspad_tbx.cbcaa(
          cspad_tbx.getConfig(address, ds.env()), sections)

      # Minimal adapter presenting a (quad index, data) pair through the
      # quad()/data() interface the assembly helpers expect.
      class fake_quad(object):
        def __init__(self, q, d):
          self.q = q
          self.d = d

        def quad(self):
          return self.q

        def data(self):
          return self.d

      if xpp:
        # Slice the 32-panel stack into 4 quads of 8 panels each.
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        maxall = flex.double(maxall.astype(np.float64))
      else:
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        maxall = flex.double(maxall.astype(np.float64))

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path

        d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=data,
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)

        easy_pickle.dump(path, d)
    else:
      # load a header only cspad cbf from the slac metrology
      from xfel.cftbx.detector import cspad_cbf_tbx
      import pycbf
      base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, address)
      if base_dxtbx is None:
        raise Sorry("Couldn't load calibration file for run %d"%run.run())

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path

        cspad_img = cspad_cbf_tbx.format_object_from_data(base_dxtbx, data, distance, wavelength, timestamp, address)
        cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
          pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
def average(argv=None):
  """MPI-parallel per-pixel averaging of detector images over a psana run.

  Accumulates the per-pixel sum, sum of squares, maximum and (optionally,
  with --min) minimum over all events of a run, with events dealt
  round-robin to MPI ranks, reduces the partial results to rank 0, and
  writes the mean, standard deviation, maximum-projection and optional
  minimum-projection images as image pickles (-p) or CSPAD CBF files.
  Supports optional background subtraction (-B), uncorrected data (-R) and
  a gain mask factor (-g) for mixed-gain CSPADs.

  @param argv Command-line arguments; defaults to sys.argv[1:]
  """
  if argv == None:
    argv = sys.argv[1:]

  try:
    from mpi4py import MPI
  except ImportError:
    raise Sorry("MPI not found")

  command_line = (libtbx.option_parser.option_parser(usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f] [-g gain_mask_value] [--min] [--minpath minpath]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name).option(
      None, "--as_pickle", "-p",
      action="store_true",
      default=False,
      dest="as_pickle",
      help="Write results as image pickle files instead of cbf files"
  ).option(
      None, "--raw_data", "-R",
      action="store_true",
      default=False,
      dest="raw_data",
      help=
      "Disable psana corrections such as dark pedestal subtraction or common mode (cbf only)"
  ).option(
      None, "--background_pickle", "-B",
      default=None,
      dest="background_pickle",
      help=""
  ).option(
      None, "--config", "-c",
      type="string",
      default=None,
      dest="config",
      metavar="PATH",
      help="psana config file"
  ).option(
      None, "--experiment", "-x",
      type="string",
      default=None,
      dest="experiment",
      help="experiment name (eg cxi84914)"
  ).option(
      None, "--run", "-r",
      type="int",
      default=None,
      dest="run",
      help="run number"
  ).option(
      None, "--address", "-a",
      type="string",
      default="CxiDs2.0:Cspad.0",
      dest="address",
      help="detector address name (eg CxiDs2.0:Cspad.0)"
  ).option(
      None, "--detz_offset", "-d",
      type="float",
      default=None,
      dest="detz_offset",
      help=
      "offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)"
  ).option(
      None, "--outputdir", "-o",
      type="string",
      default=".",
      dest="outputdir",
      metavar="PATH",
      help="Optional path to output directory for output files"
  ).option(
      None, "--averagebase", "-A",
      type="string",
      default="{experiment!l}_avg-r{run:04d}",
      dest="averagepath",
      metavar="PATH",
      help=
      "Path to output average image without extension. String substitution allowed"
  ).option(
      None, "--stddevbase", "-S",
      type="string",
      default="{experiment!l}_stddev-r{run:04d}",
      dest="stddevpath",
      metavar="PATH",
      help=
      "Path to output standard deviation image without extension. String substitution allowed"
  ).option(
      None, "--maxbase", "-M",
      type="string",
      default="{experiment!l}_max-r{run:04d}",
      dest="maxpath",
      metavar="PATH",
      help=
      "Path to output maximum projection image without extension. String substitution allowed"
  ).option(
      None, "--numevents", "-n",
      type="int",
      default=None,
      dest="numevents",
      help="Maximum number of events to process. Default: all"
  ).option(
      None, "--skipevents", "-s",
      type="int",
      default=0,
      dest="skipevents",
      help="Number of events in the beginning of the run to skip. Default: 0"
  ).option(
      None, "--verbose", "-v",
      action="store_true",
      default=False,
      dest="verbose",
      help="Print more information about progress"
  ).option(
      None, "--pickle-optical-metrology", "-m",
      action="store_true",
      default=False,
      dest="pickle_optical_metrology",
      help=
      "If writing pickle files, use the optical metrology in the experiment's calib directory"
  ).option(
      None, "--bin_size", "-b",
      type="int",
      default=None,
      dest="bin_size",
      help="Rayonix detector bin size"
  ).option(
      None, "--override_beam_x", "-X",
      type="float",
      default=None,
      dest="override_beam_x",
      help="Rayonix detector beam center x coordinate"
  ).option(
      None, "--override_beam_y", "-Y",
      type="float",
      default=None,
      dest="override_beam_y",
      help="Rayonix detector beam center y coordinate"
  ).option(
      None, "--calib_dir", "-C",
      type="string",
      default=None,
      dest="calib_dir",
      metavar="PATH",
      help="calibration directory"
  ).option(
      None, "--pickle_calib_dir", "-P",
      type="string",
      default=None,
      dest="pickle_calib_dir",
      metavar="PATH",
      help=
      "pickle calibration directory specification. Replaces --calib_dir functionality."
  ).option(
      None, "--xtc_dir", "-D",
      type="string",
      default=None,
      dest="xtc_dir",
      metavar="PATH",
      help="xtc stream directory"
  ).option(
      None, "--use_ffb", "-f",
      action="store_true",
      default=False,
      dest="use_ffb",
      help=
      "Use the fast feedback filesystem at LCLS. Only for the active experiment!"
  ).option(
      None, "--gain_mask_value", "-g",
      type="float",
      default=None,
      dest="gain_mask_value",
      help=
      "Ratio between low and high gain pixels, if CSPAD in mixed-gain mode. Only used in CBF averaging mode."
  ).option(
      None, "--min", None,
      action="store_true",
      default=False,
      dest="do_minimum_projection",
      help="Output a minimum projection"
  ).option(
      None, "--minpath", None,
      type="string",
      default="{experiment!l}_min-r{run:04d}",
      dest="minpath",
      metavar="PATH",
      help=
      "Path to output minimum image without extension. String substitution allowed"
  )).process(args=argv)
  # Reject stray positional args and require all mandatory options.
  if len(command_line.args) > 0 or \
     command_line.options.as_pickle is None or \
     command_line.options.experiment is None or \
     command_line.options.run is None or \
     command_line.options.address is None or \
     command_line.options.detz_offset is None or \
     command_line.options.averagepath is None or \
     command_line.options.stddevpath is None or \
     command_line.options.maxpath is None or \
     command_line.options.pickle_optical_metrology is None:
    command_line.parser.show_help()
    return

  # set this to sys.maxsize to analyze all events
  if command_line.options.numevents is None:
    maxevents = sys.maxsize
  else:
    maxevents = command_line.options.numevents

  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  size = comm.Get_size()

  # Build the psana dataset specification; :smd streams events in order
  # (small-data mode), so events are filtered by index below.
  if command_line.options.config is not None:
    psana.setConfigFile(command_line.options.config)
  dataset_name = "exp=%s:run=%d:smd" % (command_line.options.experiment,
                                        command_line.options.run)
  if command_line.options.xtc_dir is not None:
    if command_line.options.use_ffb:
      raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
    dataset_name += ":dir=%s" % command_line.options.xtc_dir
  elif command_line.options.use_ffb:
    # as ffb is only at SLAC, ok to hardcode /reg/d here
    dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
        command_line.options.experiment[0:3],
        command_line.options.experiment)
  if command_line.options.calib_dir is not None:
    psana.setOption('psana.calib-dir', command_line.options.calib_dir)
  ds = psana.DataSource(dataset_name)
  address = command_line.options.address
  src = psana.Source('DetInfo(%s)' % address)
  # Per-rank event counter, kept as a 1-element array so it can be MPI-reduced.
  nevent = np.array([0.])

  if command_line.options.background_pickle is not None:
    background = easy_pickle.load(
        command_line.options.background_pickle)['DATA'].as_numpy_array()

  for run in ds.runs():
    runnumber = run.run()

    if not command_line.options.as_pickle:
      # Calibrated data path (CBF output) uses the psana Detector interface.
      psana_det = psana.Detector(address, ds.env())

    # list of all events
    if command_line.options.skipevents > 0:
      print("Skipping first %d events" % command_line.options.skipevents)
    elif "Rayonix" in command_line.options.address:
      print("Skipping first image in the Rayonix detector")  # Shuttering issue
      command_line.options.skipevents = 1

    for i, evt in enumerate(run.events()):
      # Round-robin distribution of events over MPI ranks by index.
      if i % size != rank: continue
      if i < command_line.options.skipevents: continue
      if i >= maxevents: break
      if i % 10 == 0: print('Rank', rank, 'processing event', i)
      #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
      # Fetch the image data through whichever interface matches the
      # detector / output mode; skip the event if nothing is available.
      if 'Rayonix' in command_line.options.address or 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
        data = evt.get(psana.Camera.FrameV1, src)
        if data is None:
          print("No data")
          continue
        data = data.data16().astype(np.float64)
      elif command_line.options.as_pickle:
        data = evt.get(psana.ndarray_float64_3, src, 'image0')
      else:
        # get numpy array, 32x185x388
        from xfel.cftbx.detector.cspad_cbf_tbx import get_psana_corrected_data
        if command_line.options.raw_data:
          data = get_psana_corrected_data(psana_det, evt, use_default=False,
                                          dark=False, common_mode=None,
                                          apply_gain_mask=False,
                                          per_pixel_gain=False)
        else:
          if command_line.options.gain_mask_value is None:
            data = get_psana_corrected_data(psana_det, evt, use_default=True)
          else:
            data = get_psana_corrected_data(
                psana_det, evt, use_default=False, dark=True,
                common_mode=None, apply_gain_mask=True,
                gain_mask_value=command_line.options.gain_mask_value,
                per_pixel_gain=False)

      if data is None:
        print("No data")
        continue

      if command_line.options.background_pickle is not None:
        data -= background

      # Running sums of distance/wavelength/timestamp are lazily created on
      # the first usable event via the "'name' in locals()" idiom.
      # Spectrometer/diagnostic cameras have no meaningful distance or
      # wavelength, so fixed placeholders are used.
      if 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
        distance = np.array([0.0])
        wavelength = np.array([1.0])
      else:
        d = cspad_tbx.env_distance(address, run.env(),
                                   command_line.options.detz_offset)
        if d is None:
          print("No distance, using distance",
                command_line.options.detz_offset)
          assert command_line.options.detz_offset is not None
          if 'distance' not in locals():
            distance = np.array([command_line.options.detz_offset])
          else:
            distance += command_line.options.detz_offset
        else:
          if 'distance' in locals():
            distance += d
          else:
            distance = np.array([float(d)])

      w = cspad_tbx.evt_wavelength(evt)
      if w is None:
        print("No wavelength")
        if 'wavelength' not in locals():
          wavelength = np.array([1.0])
      else:
        if 'wavelength' in locals():
          wavelength += w
        else:
          wavelength = np.array([w])

      t = cspad_tbx.evt_time(evt)
      if t is None:
        print("No timestamp, skipping shot")
        continue
      if 'timestamp' in locals():
        timestamp += t[0] + (t[1] / 1000)
      else:
        timestamp = np.array([t[0] + (t[1] / 1000)])

      # Per-pixel accumulators.  NOTE(review): 'sum' shadows the builtin
      # within this function.
      if 'sum' in locals():
        sum += data
      else:
        sum = np.array(data, copy=True)
      if 'sumsq' in locals():
        sumsq += data * data
      else:
        sumsq = data * data
      if 'maximum' in locals():
        maximum = np.maximum(maximum, data)
      else:
        maximum = np.array(data, copy=True)

      if command_line.options.do_minimum_projection:
        if 'minimum' in locals():
          minimum = np.minimum(minimum, data)
        else:
          minimum = np.array(data, copy=True)

      nevent += 1

  #sum the images across mpi cores
  if size > 1:
    print("Synchronizing rank", rank)
  totevent = np.zeros(nevent.shape)
  comm.Reduce(nevent, totevent)

  if rank == 0 and totevent[0] == 0:
    raise Sorry("No events found in the run")

  # Element-wise reductions to rank 0: sums are added, extrema use
  # MPI.MAX / MPI.MIN.
  sumall = np.zeros(sum.shape).astype(sum.dtype)
  comm.Reduce(sum, sumall)

  sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
  comm.Reduce(sumsq, sumsqall)

  maxall = np.zeros(maximum.shape).astype(maximum.dtype)
  comm.Reduce(maximum, maxall, op=MPI.MAX)

  if command_line.options.do_minimum_projection:
    minall = np.zeros(maximum.shape).astype(minimum.dtype)
    comm.Reduce(minimum, minall, op=MPI.MIN)

  waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
  comm.Reduce(wavelength, waveall)

  distall = np.zeros(distance.shape).astype(distance.dtype)
  comm.Reduce(distance, distall)

  timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
  comm.Reduce(timestamp, timeall)

  # Only rank 0 computes the final statistics and writes output.
  if rank == 0:
    if size > 1:
      print("Synchronized")

    # Accumulating floating-point numbers introduces errors,
    # which may cause negative variances.  Since a two-pass
    # approach is unacceptable, the standard deviation is
    # clamped at zero.
    mean = sumall / float(totevent[0])
    variance = (sumsqall / float(totevent[0])) - (mean**2)
    variance[variance < 0] = 0
    stddev = np.sqrt(variance)

    wavelength = waveall[0] / totevent[0]
    distance = distall[0] / totevent[0]
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
    timestamp = timeall[0] / totevent[0]
    # Split the mean epoch time into (seconds, milliseconds).
    # NOTE(review): 'timestamp % int(timestamp)' raises ZeroDivisionError if
    # the integer part is 0 — cannot happen for real epoch times, but worth
    # confirming for synthetic data.
    timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
    timestamp = cspad_tbx.evt_timestamp(timestamp)

    if command_line.options.as_pickle:
      extension = ".pickle"
    else:
      extension = ".cbf"

    dest_paths = [
        cspad_tbx.pathsubst(command_line.options.averagepath + extension,
                            evt, ds.env()),
        cspad_tbx.pathsubst(command_line.options.stddevpath + extension,
                            evt, ds.env()),
        cspad_tbx.pathsubst(command_line.options.maxpath + extension,
                            evt, ds.env())
    ]
    if command_line.options.do_minimum_projection:
      dest_paths.append(
          cspad_tbx.pathsubst(command_line.options.minpath + extension,
                              evt, ds.env()))

    dest_paths = [
        os.path.join(command_line.options.outputdir, path)
        for path in dest_paths
    ]
    if 'Rayonix' in command_line.options.address:
      # Rayonix output: always image pickles, geometry from the mean image
      # shape and the bin-size-dependent pixel size.
      all_data = [mean, stddev, maxall]
      if command_line.options.do_minimum_projection:
        all_data.append(minall)
      from xfel.cxi.cspad_ana import rayonix_tbx
      pixel_size = rayonix_tbx.get_rayonix_pixel_size(
          command_line.options.bin_size)
      beam_center = [
          command_line.options.override_beam_x,
          command_line.options.override_beam_y
      ]
      active_areas = flex.int([0, 0, mean.shape[1], mean.shape[0]])
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]
      for data, path in zip(all_data, dest_paths):
        print("Saving", path)
        d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=flex.double(data),
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=rayonix_tbx.rayonix_saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)
        easy_pickle.dump(path, d)
    elif 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
      # Spectrometer/diagnostic output: minimal pickles with a fixed
      # 0.1 mm pixel size and no beam center / active areas.
      all_data = [mean, stddev, maxall]
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]
      if command_line.options.do_minimum_projection:
        all_data.append(minall)
      for data, path in zip(all_data, dest_paths):
        d = cspad_tbx.dpack(address=old_style_address,
                            data=flex.double(data),
                            distance=distance,
                            pixel_size=0.1,
                            timestamp=timestamp,
                            wavelength=wavelength)
        print("Saving", path)
        easy_pickle.dump(path, d)
    elif command_line.options.as_pickle:
      # CSPAD pickle output: assemble the 32 ASIC panels into a single 2D
      # image using either XPP active areas or optical metrology sections.
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[
          1] + "|" + split_address[2] + "-" + split_address[3]

      xpp = 'xpp' in address.lower()
      if xpp:
        evt_time = cspad_tbx.evt_time(evt)  # tuple of seconds, milliseconds
        timestamp = cspad_tbx.evt_timestamp(evt_time)  # human readable format
        from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
        from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
        version_lookup = detector_format_version(
            old_style_address, reverse_timestamp(timestamp)[0])
        assert version_lookup is not None
        active_areas = xpp_active_areas[version_lookup]['active_areas']
        beam_center = [1765 // 2, 1765 // 2]
      else:
        if command_line.options.pickle_calib_dir is not None:
          metro_path = command_line.options.pickle_calib_dir
        elif command_line.options.pickle_optical_metrology:
          from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
          metro_path = get_calib_file_path(run.env(), address, run)
        else:
          metro_path = libtbx.env.find_in_repositories(
              "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
        sections = parse_calib.calib2sections(metro_path)
        beam_center, active_areas = cspad_tbx.cbcaa(
            cspad_tbx.getConfig(address, ds.env()), sections)

      # Minimal adapter presenting a (quad index, data) pair through the
      # quad()/data() interface the assembly helpers expect.
      class fake_quad(object):
        def __init__(self, q, d):
          self.q = q
          self.d = d

        def quad(self):
          return self.q

        def data(self):
          return self.d

      if xpp:
        # Slice the 32-panel stack into 4 quads of 8 panels each.
        quads = [
            fake_quad(i, mean[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        mean = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                   active_areas, quads=quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [
            fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        stddev = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                     active_areas, quads=quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [
            fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        maxall = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                     active_areas, quads=quads)
        maxall = flex.double(maxall.astype(np.float64))

        if command_line.options.do_minimum_projection:
          quads = [
              fake_quad(i, minall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          minall = cspad_tbx.image_xpp(old_style_address, None, ds.env(),
                                       active_areas, quads=quads)
          minall = flex.double(minall.astype(np.float64))
      else:
        quads = [
            fake_quad(i, mean[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        mean = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                       quads=quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [
            fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        stddev = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                         quads=quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [
            fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
        ]
        maxall = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                         quads=quads)
        maxall = flex.double(maxall.astype(np.float64))

        if command_line.options.do_minimum_projection:
          quads = [
              fake_quad(i, minall[i * 8:(i + 1) * 8, :, :]) for i in range(4)
          ]
          minall = cspad_tbx.CsPadDetector(address, evt, ds.env(), sections,
                                           quads=quads)
          minall = flex.double(minall.astype(np.float64))

      all_data = [mean, stddev, maxall]
      if command_line.options.do_minimum_projection:
        all_data.append(minall)

      for data, path in zip(all_data, dest_paths):
        print("Saving", path)

        d = cspad_tbx.dpack(active_areas=active_areas,
                            address=old_style_address,
                            beam_center_x=pixel_size * beam_center[0],
                            beam_center_y=pixel_size * beam_center[1],
                            data=data,
                            distance=distance,
                            pixel_size=pixel_size,
                            saturated_value=saturated_value,
                            timestamp=timestamp,
                            wavelength=wavelength)

        easy_pickle.dump(path, d)
    else:
      # load a header only cspad cbf from the slac metrology
      from xfel.cftbx.detector import cspad_cbf_tbx
      import pycbf
      base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, address)
      if base_dxtbx is None:
        raise Sorry("Couldn't load calibration file for run %d" % run.run())

      all_data = [mean, stddev, maxall]
      if command_line.options.do_minimum_projection:
        all_data.append(minall)

      for data, path in zip(all_data, dest_paths):
        print("Saving", path)
        cspad_img = cspad_cbf_tbx.format_object_from_data(
            base_dxtbx, data, distance, wavelength, timestamp, address,
            round_to_int=False)
        cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
          pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.
    XXX more?

    Previously, common-mode correction was applied only after initial
    threshold filtering.  Since the common_mode class applies the
    (lengthy) common-mode correction immediately after reading the
    image from the stream, this optimisation is currently not
    (elegantly) doable.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_hitfind, self).event(evt, env)
    if (evt.get("skip_event")):
        return

    # This module only applies to detectors for which a distance is
    # available.
    distance = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if distance is None:
        self.nfail += 1
        self.logger.warning("event(): no distance, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return

    device = cspad_tbx.address_split(self.address)[2]

    # ***** HITFINDING ***** XXX For hitfinding it may be interesting
    # to look at the fraction of subzero pixels in the dark-corrected
    # image.
    if (self.m_threshold is not None):
        # If a threshold value is given it can be applied in one of three ways:
        # 1. Apply it over the whole image
        if (self.m_roi is None and self.m_distl_min_peaks is None):
            vmax = flex.max(self.cspad_img)
            if (vmax < self.m_threshold):
                if not self.m_negate_hits:
                    # Tell downstream modules to skip this event if the threshold was not met.
                    evt.put(skip_event_flag(), "skip_event")
                    return
            elif self.m_negate_hits:
                evt.put(skip_event_flag(), "skip_event")
                return

        # 2. Apply threshold over a rectangular region of interest.
        elif (self.m_roi is not None):
            vmax = flex.max(self.cspad_img[self.m_roi[2]:self.m_roi[3],
                                           self.m_roi[0]:self.m_roi[1]])
            if (vmax < self.m_threshold):
                if not self.m_negate_hits:
                    evt.put(skip_event_flag(), "skip_event")
                    return
            elif self.m_negate_hits:
                evt.put(skip_event_flag(), "skip_event")
                return

        # 3. Determine the spotfinder spots within the central ASICS, and accept the
        # image as a hit if there are m_distl_min_peaks exceeding m_threshold.
        # As a further requirement, the peaks must exceed 2.5 * the 90-percentile
        # pixel value of the central ASICS. This filter was added to avoid high-background
        # false positives.
        elif (self.m_distl_min_peaks is not None):
            if device == 'marccd':
                self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
                self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]
            elif device == 'Rayonix':
                self.hitfinder_d['BEAM_CENTER_X'] = self.beam_center[0]
                self.hitfinder_d['BEAM_CENTER_Y'] = self.beam_center[1]

            peak_heights, outvalue = self.distl_filter(
                self.address,
                self.cspad_img.iround(),  # XXX correct?
                distance,
                self.timestamp,
                self.wavelength)
            if ('permissive' in self.m_distl_flags):
                number_of_accepted_peaks = (peak_heights > self.m_threshold).count(True)
            else:
                # strict mode additionally requires the spotfinder outlier flag
                # (outvalue) to be zero for a peak to count
                number_of_accepted_peaks = ((
                    peak_heights > self.m_threshold).__and__(
                        outvalue == 0)).count(True)

            sec, ms = cspad_tbx.evt_time(evt)
            evt_time = sec + ms / 1000
            self.stats_logger.info("BRAGG %.3f %d" % (evt_time, number_of_accepted_peaks))

            skip_event = False
            if number_of_accepted_peaks < self.m_distl_min_peaks:
                self.logger.info(
                    "Subprocess %02d: Spotfinder NO HIT image #%05d @ %s; %d spots > %d" %
                    (env.subprocess(), self.nshots, self.timestamp,
                     number_of_accepted_peaks, self.m_threshold))
                if not self.m_negate_hits:
                    skip_event = True
            else:
                self.logger.info(
                    "Subprocess %02d: Spotfinder YES HIT image #%05d @ %s; %d spots > %d" %
                    (env.subprocess(), self.nshots, self.timestamp,
                     number_of_accepted_peaks, self.m_threshold))
                if self.m_negate_hits:
                    skip_event = True
            if skip_event:
                if self.m_db_logging:
                    # log misses to the database
                    self.queue_entry(
                        (self.trial, evt.run(), "%.3f" % evt_time,
                         number_of_accepted_peaks, distance, self.sifoil,
                         self.wavelength, False,
                         0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, self.m_db_tags))
                evt.put(skip_event_flag(), "skip_event")
                return
            # the indexer will log this hit when it is ran. Bug: if the spotfinder
            # is ran by itself, this hit will not be logged in the db.
            evt.put(number_of_accepted_peaks, 'sfspots')

    self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))

    # See r17537 of mod_average.py.
    # NOTE(review): pixel_size/saturated_value stay unbound for devices other
    # than Cspad/marccd/Rayonix and dpack below would raise NameError then —
    # presumably those are the only devices this module is configured for.
    if device == 'Cspad':
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
    elif device == 'marccd':
        pixel_size = evt.get("marccd_pixel_size")
        saturated_value = evt.get("marccd_saturated_value")
    elif device == 'Rayonix':
        pixel_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
        saturated_value = rayonix_tbx.rayonix_saturated_value

    d = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=pixel_size * self.beam_center[0],
        beam_center_y=pixel_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=distance,
        pixel_size=pixel_size,
        saturated_value=saturated_value,
        timestamp=self.timestamp,
        wavelength=self.wavelength,
        xtal_target=self.m_xtal_target)

    if (self.m_dispatch == "index"):
        import sys
        from xfel.cxi.integrate_image_api import integrate_one_image
        info = integrate_one_image(
            d,
            integration_dirname=self.m_integration_dirname,
            integration_basename=self.m_integration_basename)
        # integrate_one_image redirects stdout/stderr; restore them here
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__

        indexed = info is not None and hasattr(info, 'spotfinder_results')
        if self.m_progress_logging:
            if self.m_db_version == 'v1':
                if indexed:
                    # integration pickle dictionary is available here as info.last_saved_best
                    if info.last_saved_best["identified_isoform"] is not None:
                        #print info.last_saved_best.keys()
                        from cxi_xdr_xes.cftbx.cspad_ana import db
                        dbobj = db.dbconnect(self.m_db_host, self.m_db_name,
                                             self.m_db_user, self.m_db_password)
                        cursor = dbobj.cursor()
                        if info.last_saved_best["identified_isoform"] in self.isoforms:
                            PM, indices, miller_id = self.isoforms[
                                info.last_saved_best["identified_isoform"]]
                        else:
                            from xfel.xpp.progress_support import progress_manager
                            PM = progress_manager(info.last_saved_best,
                                                  self.m_db_experiment_tag,
                                                  self.m_trial_id,
                                                  self.m_rungroup_id, evt.run())
                            indices, miller_id = PM.get_HKL(cursor)
                            # cache these as they don't change for a given isoform
                            self.isoforms[info.last_saved_best[
                                "identified_isoform"]] = PM, indices, miller_id
                        if self.m_sql_buffer_size > 1:
                            self.queue_progress_entry(
                                PM.scale_frame_detail(self.timestamp, cursor,
                                                      do_inserts=False))
                        else:
                            PM.scale_frame_detail(self.timestamp, cursor,
                                                  do_inserts=True)
                            dbobj.commit()
                            cursor.close()
                            dbobj.close()
            elif self.m_db_version == 'v2':
                key_low = 'cctbx.xfel.radial_average.two_theta_low'
                key_high = 'cctbx.xfel.radial_average.two_theta_high'
                tt_low = evt.get(key_low)
                tt_high = evt.get(key_high)

                from xfel.ui.db.dxtbx_db import log_frame
                if indexed:
                    n_spots = len(info.spotfinder_results.images[
                        info.frames[0]]['spots_total'])
                else:
                    sfspots = evt.get('sfspots')
                    if sfspots is None:
                        if info is None or not isinstance(info, int):
                            n_spots = 0
                        else:
                            n_spots = info
                    else:
                        n_spots = sfspots

                if indexed:
                    known_setting = info.horizons_phil.known_setting
                    indexed_setting = info.organizer.info[
                        'best_integration']['counter']
                    if known_setting is None or known_setting == indexed_setting:
                        from xfel.command_line.frame_unpickler import construct_reflection_table_and_experiment_list
                        c = construct_reflection_table_and_experiment_list(
                            info.last_saved_best, None, pixel_size,
                            proceed_without_image=True)
                        c.assemble_experiments()
                        c.assemble_reflections()
                        log_frame(c.experiment_list, c.reflections,
                                  self.db_params, evt.run(), n_spots,
                                  self.timestamp, tt_low, tt_high)
                    else:
                        print(
                            "Not logging %s, wrong bravais setting (expecting %d, got %d)"
                            % (self.timestamp, known_setting, indexed_setting))
                else:
                    log_frame(None, None, self.db_params, evt.run(), n_spots,
                              self.timestamp, tt_low, tt_high)

        if self.m_db_logging:
            sec, ms = cspad_tbx.evt_time(evt)
            evt_time = sec + ms / 1000
            sfspots = evt.get('sfspots')
            if sfspots is None:
                if indexed:
                    n_spots = len(info.spotfinder_results.images[
                        info.frames[0]]['spots_total'])
                else:
                    n_spots = 0
            else:
                n_spots = sfspots

            if indexed:
                mosaic_bloc_rotation = info.last_saved_best.get(
                    'ML_half_mosaicity_deg', [0])[0]
                mosaic_block_size = info.last_saved_best.get(
                    'ML_domain_size_ang', [0])[0]
                ewald_proximal_volume = info.last_saved_best.get(
                    'ewald_proximal_volume', [0])[0]

                obs = info.last_saved_best['observations'][0]
                cell_a, cell_b, cell_c, cell_alpha, cell_beta, cell_gamma = obs.unit_cell(
                ).parameters()
                pointgroup = info.last_saved_best['pointgroup']
                resolution = obs.d_min()
            else:
                # FIX: the original zeroed 'spacegroup', which is never read;
                # queue_entry below reads 'pointgroup', so the not-indexed path
                # raised NameError.  Zero pointgroup instead.
                mosaic_bloc_rotation = mosaic_block_size = ewald_proximal_volume = cell_a = cell_b = cell_c = \
                    cell_alpha = cell_beta = cell_gamma = pointgroup = resolution = 0

            self.queue_entry(
                (self.trial, evt.run(), "%.3f" % evt_time, n_spots, distance,
                 self.sifoil, self.wavelength, indexed, mosaic_bloc_rotation,
                 mosaic_block_size, ewald_proximal_volume, pointgroup, cell_a,
                 cell_b, cell_c, cell_alpha, cell_beta, cell_gamma, resolution,
                 self.m_db_tags))

        if (not indexed):
            evt.put(skip_event_flag(), "skip_event")
            return

    elif (self.m_dispatch == "nop"):
        pass

    elif (self.m_dispatch == "view"):  #interactive image viewer
        args = ["indexing.data=dummy"]
        detector_format_version = detector_format_function(
            self.address, evt.GetTime())
        if detector_format_version is not None:
            # FIX: format string was "...=%" (incomplete conversion), which
            # raises ValueError; use %s as in the "spots" branch below.
            args += [
                "distl.detector_format_version=%s" % detector_format_version
            ]
        from xfel.phil_preferences import load_cxi_phil
        horizons_phil = load_cxi_phil(self.m_xtal_target, args)
        horizons_phil.indexing.data = d
        from xfel.cxi import display_spots
        display_spots.parameters.horizons_phil = horizons_phil
        display_spots.wrapper_of_callback().display(
            horizons_phil.indexing.data)

    elif (self.m_dispatch == "spots"):  #interactive spotfinder viewer
        args = ["indexing.data=dummy"]
        detector_format_version = detector_format_function(
            self.address, evt.GetTime())
        if detector_format_version is not None:
            args += [
                "distl.detector_format_version=%s" % detector_format_version
            ]
        from xfel.phil_preferences import load_cxi_phil
        horizons_phil = load_cxi_phil(self.m_xtal_target, args)
        horizons_phil.indexing.data = d
        from xfel.cxi import display_spots
        display_spots.parameters.horizons_phil = horizons_phil
        from rstbx.new_horizons.index import pre_indexing_validation, pack_names
        pre_indexing_validation(horizons_phil)
        imagefile_arguments = pack_names(horizons_phil)
        horizons_phil.persist.show()
        from spotfinder.applications import signal_strength
        info = signal_strength.run_signal_strength_core(
            horizons_phil, imagefile_arguments)
        work = display_spots.wrapper_of_callback(info)
        work.display_with_callback(horizons_phil.indexing.data)

    elif (self.m_dispatch == "write_dict"):
        self.logger.warning(
            "event(): deprecated dispatch 'write_dict', use mod_dump instead")
        if (self.m_out_dirname is not None or
                self.m_out_basename is not None):
            cspad_tbx.dwritef(d, self.m_out_dirname, self.m_out_basename)

    # Diagnostic message emitted only when all the processing is done.
    if (env.subprocess() >= 0):
        self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                         (env.subprocess(), self.nshots, self.timestamp))
    else:
        self.logger.info("Accepted #%05d @ %s" %
                         (self.nshots, self.timestamp))
def run(self):
    """ Process all images assigned to this thread.

    Parses command-line parameters, splits the run's events across MPI
    ranks round-robin, and writes each shot either as a pickle (data
    already placed in the event by a psana cfg pipeline) or as a CBF
    (dark-corrected / gain-masked CSPAD data stitched onto a SLAC
    metrology header).  Raises Usage on incomplete parameters and Sorry
    on a missing output directory or calibration.
    """
    params, options = self.parser.parse_args(show_diff_phil=True)

    # experiment, run number and detector address are all mandatory
    if params.input.experiment is None or \
       params.input.run_num is None or \
       params.input.address is None:
        raise Usage(self.usage)

    # each output format has its own mandatory parameter
    if params.format.file_format == "cbf":
        if params.format.cbf.detz_offset is None:
            raise Usage(self.usage)
    elif params.format.file_format == "pickle":
        if params.format.pickle.cfg is None:
            raise Usage(self.usage)
    else:
        raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    # Save the paramters
    self.params = params
    self.options = options

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    # set up psana
    if params.format.file_format == "pickle":
        psana.setConfigFile(params.format.pickle.cfg)

    # ":idx" selects psana's random-access (indexed) mode, required for
    # run.event(time) below
    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                          params.input.run_num)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
        src = psana.Source('DetInfo(%s)' % params.input.address)
        psana_det = psana.Detector(params.input.address, ds.env())

    # set this to sys.maxint to analyze all events
    if params.dispatch.max_events is None:
        max_events = sys.maxint
    else:
        max_events = params.dispatch.max_events

    for run in ds.runs():
        if params.format.file_format == "cbf":
            # load a header only cspad cbf from the slac metrology
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                run, params.input.address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" %
                            run.run())

        # list of all events
        times = run.times()
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns each
        # process events such that they get every Nth event where N is the
        # number of processes
        mytimes = [
            times[i] for i in xrange(nevents) if (i + rank) % size == 0
        ]

        for i in xrange(len(mytimes)):
            evt = run.event(mytimes[i])
            id = evt.get(psana.EventId)
            print "Event #", i, " has id:", id

            timestamp = cspad_tbx.evt_timestamp(
                cspad_tbx.evt_time(evt))  # human readable format
            if timestamp is None:
                print "No timestamp, skipping shot"
                continue
            # compact timestamp YYYYMMDDhhmmssmmm used in the output filename
            t = timestamp
            s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[
                17:19] + t[20:23]
            print "Processing shot", s

            if params.format.file_format == "pickle":
                if evt.get("skip_event"):
                    print "Skipping event", id
                    continue
                # the data needs to have already been processed and put into the event by psana
                data = evt.get(params.format.pickle.out_key)
                if data is None:
                    print "No data"
                    continue

                # set output paths according to the templates
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".pickle")
                print "Saving", path
                easy_pickle.dump(path, data)

            elif params.format.file_format == "cbf":
                # get numpy array, 32x185x388
                data = cspad_cbf_tbx.get_psana_corrected_data(
                    psana_det,
                    evt,
                    use_default=False,
                    dark=True,
                    common_mode=None,
                    apply_gain_mask=params.format.cbf.gain_mask_value
                    is not None,
                    gain_mask_value=params.format.cbf.gain_mask_value,
                    per_pixel_gain=False)

                distance = cspad_tbx.env_distance(
                    params.input.address, run.env(),
                    params.format.cbf.detz_offset)
                if distance is None:
                    print "No distance, skipping shot"
                    continue

                if self.params.format.cbf.override_energy is None:
                    wavelength = cspad_tbx.evt_wavelength(evt)
                    if wavelength is None:
                        print "No wavelength, skipping shot"
                        continue
                else:
                    # hc in eV*Angstrom; convert the override energy (eV) to
                    # a wavelength in Angstrom
                    wavelength = 12398.4187 / self.params.format.cbf.override_energy

                # stitch together the header, data and metadata into the final dxtbx format object
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx, data, distance, wavelength, timestamp,
                    params.input.address)
                path = os.path.join(params.output.output_dir,
                                    "shot-" + s + ".cbf")
                print "Saving", path

                # write the file
                import pycbf
                cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

        run.end()
    ds.end()
def event(self, evt, env):
    """The event() function is called for every L1Accept transition.

    Packs the current corrected image into a cspad_tbx image dictionary
    and stores it in the event under self.m_out_key for downstream
    modules.  Shots without a detector distance are flagged to be
    skipped.

    @param evt Event data object, a configure object
    @param env Environment object
    """
    super(mod_image_dict, self).event(evt, env)

    # Guard clauses: honour an upstream skip flag and require an image.
    if (evt.get("skip_event")):
        return
    if self.cspad_img is None:
        return

    # This module only applies to detectors for which a distance is
    # available.
    det_dist = cspad_tbx.env_distance(self.address, env, self._detz_offset)
    if det_dist is None:
        self.nfail += 1
        self.logger.warning("event(): no distance, shot skipped")
        evt.put(skip_event_flag(), "skip_event")
        return

    detector_kind = cspad_tbx.address_split(self.address)[2]

    self.logger.info("Subprocess %02d: process image #%05d @ %s" %
                     (env.subprocess(), self.nshots, self.timestamp))

    # See r17537 of mod_average.py.
    if detector_kind == 'Cspad':
        px_size = cspad_tbx.pixel_size
        sat_val = cspad_tbx.cspad_saturated_value
    elif detector_kind == 'Rayonix':
        px_size = rayonix_tbx.get_rayonix_pixel_size(self.bin_size)
        sat_val = rayonix_tbx.rayonix_saturated_value
    elif detector_kind == 'marccd':
        px_size = evt.get("marccd_pixel_size")
        sat_val = evt.get("marccd_saturated_value")
        # marccd shots may carry the distance in the event itself
        if det_dist == 0:
            det_dist = evt.get("marccd_distance")

    image_dict = cspad_tbx.dpack(
        active_areas=self.active_areas,
        address=self.address,
        beam_center_x=px_size * self.beam_center[0],
        beam_center_y=px_size * self.beam_center[1],
        data=self.cspad_img.iround(),  # XXX ouch!
        distance=det_dist,
        pixel_size=px_size,
        saturated_value=sat_val,
        timestamp=self.timestamp,
        wavelength=self.wavelength)
    evt.put(image_dict, self.m_out_key)

    # Diagnostic message emitted only when all the processing is done.
    if (env.subprocess() >= 0):
        self.logger.info("Subprocess %02d: accepted #%05d @ %s" %
                         (env.subprocess(), self.nshots, self.timestamp))
    else:
        self.logger.info("Accepted #%05d @ %s" %
                         (self.nshots, self.timestamp))