def test_plugin(self):
    """Run the full lifecycle of a bare (dummy) Plugin and log its output."""
    dummy = Plugin()
    dummy.setup()
    dummy.process()
    dummy.teardown()
    logger.debug(dummy.output)
def process(self):
    """Azimuthally integrate every input image into a per-frame ``.dat`` file.

    Missing files are logged and skipped; each produced file path is
    appended to ``self.output_files``.
    """
    Plugin.process(self)
    logger.debug("Integrate.process")
    for image_path in self.input_files:
        if not os.path.exists(image_path):
            self.log_error("image file: %s does not exist, skipping" % image_path,
                           do_raise=False)
            continue
        stem = os.path.splitext(os.path.basename(image_path))[0]
        outfile = os.path.join(self.dest_dir, stem + ".dat")
        image = fabio.open(image_path)
        # With a configured wavelength the monitor is derived from the header;
        # otherwise no normalisation is applied.
        monitor = 1.0
        if self.wavelength is not None:
            monitor = self.getMon(image.header, self.wavelength) / self.norm
        self.ai.integrate1d(image.data,
                            npt=self.npt,
                            method=self.method,
                            safe=False,
                            filename=outfile,
                            normalization_factor=monitor,
                            unit=self.unit,
                            dummy=self.dummy,
                            delta_dummy=self.delta_dummy,
                            polarization_factor=self.polarization_factor,
                            correctSolidAngle=self.do_SA)
        self.output_files.append(outfile)
def test_plugin(self):
    """Exercise setup/process/teardown on a standalone dummy Plugin."""
    instance = Plugin()
    instance.setup()
    instance.process()
    instance.teardown()
    logger.debug(instance.output)
def process(self):
    """Integrate each input image, normalising by the matching monitor value.

    ``monitor_values`` defaults to all-ones when absent; frames with a
    missing file or a falsy monitor are logged and skipped.
    """
    Plugin.process(self)
    logger.debug("Integrate.process")
    if self.monitor_values is None:
        # No monitors supplied: normalise every frame by 1.
        self.monitor_values = [1] * len(self.input_files)
    for monitor, image_path in zip(self.monitor_values, self.input_files):
        if not os.path.exists(image_path):
            self.log_error("image file: %s does not exist, skipping" % image_path,
                           do_raise=False)
            continue
        if not monitor:
            self.log_error("Monitor value is %s: skipping image %s" % (monitor, image_path),
                           do_raise=False)
            continue
        stem = os.path.splitext(os.path.basename(image_path))[0]
        outfile = os.path.join(self.dest_dir, stem + ".dat")
        frame = fabio.open(image_path).data
        self.ai.integrate1d(frame,
                            npt=self.npt,
                            method=self.method,
                            safe=False,
                            filename=outfile,
                            normalization_factor=monitor,
                            unit=self.unit)
        self.output_files.append(outfile)
def process(self):
    """Load the frame stack, derive its geometry, correlate and save."""
    Plugin.process(self)
    self.dataset = self.read_data()
    stack_shape = self.dataset.shape
    # First axis is the frame index; the rest is the detector image shape.
    self.nframes = stack_shape[0]
    self.shape = stack_shape[1:]
    self.qmask = self.make_qmask()
    correlator_class = self.get_correlator()
    engine = correlator_class(self.shape, self.nframes, qmask=self.qmask)
    self.save_results(engine.correlate(self.dataset[...]))
def process(self):
    """Integrate the input frame with pyFAI and stack the result columns.

    ``dataInput.saxsWaxs`` starting with "s" selects ``ai.saxs``; anything
    else falls back to ``ai.xrpd``. The non-None result arrays are stacked
    column-wise into ``self.npaOut``.
    """
    Plugin.process(self)
    logger.debug("PluginPyFAIv1_0.process")
    data = EDUtilsArray.getArray(self.dataInput.input)
    kwargs = {"nbPt": self.nbPt,
              "filename": self.strOutputFile,
              "mask": self.mask,
              "dummy": self.dummy,
              "delta_dummy": self.delta_dummy}
    if self.dataInput.saxsWaxs and self.dataInput.saxsWaxs.value.lower().startswith("s"):
        # BUG FIX: integrate the freshly extracted array; the original passed
        # self.data while leaving the local `data` unused.
        out = self.ai.saxs(data, **kwargs)
    else:
        out = self.ai.xrpd(data, **kwargs)
    # BUG FIX: np.hstack requires a real sequence; a generator expression is
    # rejected by modern numpy.
    self.npaOut = np.hstack([i.reshape(-1, 1) for i in out if i is not None])
def process(self):
    """Integrate all input frames through the reader/saver worker queues.

    File names are pushed to ``queue_in``; decoded frames come back on
    ``queue_out``. Intensities (and sigmas when an error model is set)
    are accumulated per frame index and saved at the end.
    """
    Plugin.process(self)
    logger.debug("IntegrateManyFrames.process")
    for idx, fname in enumerate(self.input_files):
        self.queue_in.put((idx, fname))

    res = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32)  # numpy array for storing data
    sigma = None
    if self.error_model:
        sigma = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32)  # numpy array for storing data

    for i in self.input_files:
        logger.debug("process %s", i)
        idx, data = self.queue_out.get()
        if data is None:
            self.log_error("Failed reading file: %s" % self.input_files[idx],
                           do_raise=False)
            # BUG FIX: task_done() must be called for failed items as well,
            # otherwise queue_out.join() below blocks forever.
            self.queue_out.task_done()
            continue
        if self.save_raw:
            self.queue_saver.put((idx, data))
        out = self.ai.integrate1d(data, self.npt,
                                  method=self.method,
                                  unit=self.unit,
                                  safe=False,
                                  dummy=self.dummy,
                                  delta_dummy=self.delta_dummy,
                                  error_model=self.error_model,
                                  mask=self.mask,
                                  polarization_factor=self.polarization_factor,
                                  normalization_factor=self.norm,
                                  correctSolidAngle=self.do_SA)
        res[idx, :] = out.intensity
        if self.error_model:
            sigma[idx, :] = out.sigma
        self.queue_out.task_done()

    self.queue_in.join()
    self.queue_out.join()
    if self.queue_saver is not None:
        self.queue_saver.join()
    self.save_result(out, res, sigma)
    if self.input.get("delete_incoming"):
        for fname in self.input_files:
            try:
                os.unlink(fname)
            except IOError as err:
                self.log_warning(err)
def process(self):
    """Integrate the input with pyFAI and stack the result columns.

    ``dataInput.saxsWaxs`` starting with "s" selects ``ai.saxs``; anything
    else falls back to ``ai.xrpd``.
    """
    Plugin.process(self)
    logger.debug("PluginPyFAIv1_0.process")
    # TODO: read the actual data
    data = 0  # EDUtilsArray.getArray(self.dataInput.input)
    # NOTE(review): the calls below use self.data, not the local placeholder
    # above — presumably self.data is set elsewhere; confirm once the TODO
    # is resolved.
    if self.dataInput.saxsWaxs and self.dataInput.saxsWaxs.value.lower().startswith("s"):
        out = self.ai.saxs(self.data,
                           nbPt=self.nbPt,
                           filename=self.strOutputFile,
                           mask=self.mask,
                           dummy=self.dummy,
                           delta_dummy=self.delta_dummy)
    else:
        out = self.ai.xrpd(self.data,
                           nbPt=self.nbPt,
                           filename=self.strOutputFile,
                           mask=self.mask,
                           dummy=self.dummy,
                           delta_dummy=self.delta_dummy)
    # BUG FIX: numpy.hstack requires a real sequence; a generator expression
    # is rejected by modern numpy.
    self.npaOut = numpy.hstack([i.reshape(-1, 1) for i in out if i is not None])
def process(self):
    """Integrate all input frames with the configured pyFAI method.

    File names are pushed to ``queue_in``; decoded frames come back on
    ``queue_out``. The integration entry point (integrate1d/integrate2d/
    sigma_clip/medfilt1d/...) is looked up by name and driven with a
    parameter dict built once before the loop.
    """
    Plugin.process(self)
    logger.debug("IntegrateManyFrames.process")
    for idx, fname in enumerate(self.input_files):
        self.queue_in.put((idx, fname))

    nfiles = len(self.input_files)
    if self.integration_method == "integrate2d":
        res = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
    else:
        res = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data
    sigma = None
    if self.error_model:
        if self.integration_method == "integrate2d":
            sigma = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
        else:
            sigma = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data

    method = self.ai.__getattribute__(self.integration_method)
    common_param = {"method": self.method,
                    "unit": self.unit,
                    "dummy": self.dummy,
                    "delta_dummy": self.delta_dummy,
                    "mask": self.mask,
                    "polarization_factor": self.polarization_factor,
                    "normalization_factor": self.norm,
                    "correctSolidAngle": self.do_SA}
    if self.integration_method in ("integrate1d", "integrate_radial"):
        common_param["npt"] = self.npt
        common_param["error_model"] = self.error_model
        common_param["safe"] = False
    else:
        common_param["npt_rad"] = self.npt
        common_param["npt_azim"] = self.npt_azim
    if self.integration_method == "sigma_clip":
        # BUG FIX: the original line ended with a stray comma, turning the
        # scalar threshold into a 1-tuple.
        common_param["thres"] = self.sigma_clip_thresold
        common_param["max_iter"] = self.sigma_clip_max_iter
    if self.integration_method == "medfilt1d":
        common_param["percentile"] = self.medfilt1d_percentile

    for i in self.input_files:
        logger.debug("process %s", i)
        idx, data = self.queue_out.get()
        if data is None:
            self.log_error("Failed reading file: %s" % self.input_files[idx],
                           do_raise=False)
            # BUG FIX: task_done() must be called for failed items as well,
            # otherwise queue_out.join() below blocks forever.
            self.queue_out.task_done()
            continue
        if self.save_raw:
            self.queue_saver.put((idx, data))
        out = method(data, **common_param)
        res[idx] = out.intensity
        if self.error_model:
            sigma[idx] = out.sigma
        self.queue_out.task_done()

    self.queue_in.join()
    self.queue_out.join()
    if self.queue_saver is not None:
        self.queue_saver.join()
    self.save_result(out, res, sigma)
    if self.input.get("delete_incoming"):
        for fname in self.input_files:
            try:
                os.unlink(fname)
            except IOError as err:
                self.log_warning(err)
def process(self):
    """Read the sample NeXus file, then build the subtracted output file."""
    Plugin.process(self)
    logger.debug("SubtractBuffer.process")
    sample_juice = self.read_nexus(self.sample_file)
    self.sample_juice = sample_juice
    self.create_nexus()
def process(self):
    """Cube the input value ``x`` (default 0) into ``output["result"]``."""
    Plugin.process(self)
    if self.input is None:
        logger.warning("input is None")
        # BUG FIX: the original fell through to self.input.get() and raised
        # AttributeError; treat a missing input as the default x = 0.
        x = 0
    else:
        x = self.input.get("x", 0)
    self.output["result"] = x * x * x
def process(self):
    """Cube the input value ``x`` (default 0) into ``output["result"]``."""
    Plugin.process(self)
    if self.input is None:
        logger.warning("input is None")
        # BUG FIX: the original fell through to self.input.get() and raised
        # AttributeError; treat a missing input as the default x = 0.
        x = 0
    else:
        x = self.input.get("x", 0)
    self.output["result"] = x * x * x
def process(self):
    """Integrate all input frames with the configured pyFAI method.

    File names are pushed to ``queue_in``; decoded frames come back on
    ``queue_out``. The integration entry point (integrate1d/integrate2d/
    sigma_clip/medfilt1d/...) is looked up by name and driven with a
    parameter dict built once before the loop.
    """
    Plugin.process(self)
    logger.debug("IntegrateManyFrames.process")
    for idx, fname in enumerate(self.input_files):
        self.queue_in.put((idx, fname))

    nfiles = len(self.input_files)
    if self.integration_method == "integrate2d":
        res = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
    else:
        res = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data
    sigma = None
    if self.error_model:
        if self.integration_method == "integrate2d":
            sigma = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
        else:
            sigma = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data

    method = self.ai.__getattribute__(self.integration_method)
    common_param = {"method": self.method,
                    "unit": self.unit,
                    "dummy": self.dummy,
                    "delta_dummy": self.delta_dummy,
                    "mask": self.mask,
                    "polarization_factor": self.polarization_factor,
                    "normalization_factor": self.norm,
                    "correctSolidAngle": self.do_SA}
    if self.integration_method in ("integrate1d", "integrate_radial"):
        common_param["npt"] = self.npt
        common_param["error_model"] = self.error_model
        common_param["safe"] = False
    else:
        common_param["npt_rad"] = self.npt
        common_param["npt_azim"] = self.npt_azim
    if self.integration_method == "sigma_clip":
        # BUG FIX: the original line ended with a stray comma, turning the
        # scalar threshold into a 1-tuple.
        common_param["thres"] = self.sigma_clip_thresold
        common_param["max_iter"] = self.sigma_clip_max_iter
    if self.integration_method == "medfilt1d":
        common_param["percentile"] = self.medfilt1d_percentile

    for i in self.input_files:
        logger.debug("process %s", i)
        idx, data = self.queue_out.get()
        if data is None:
            self.log_error("Failed reading file: %s" % self.input_files[idx],
                           do_raise=False)
            # BUG FIX: task_done() must be called for failed items as well,
            # otherwise queue_out.join() below blocks forever.
            self.queue_out.task_done()
            continue
        if self.save_raw:
            self.queue_saver.put((idx, data))
        out = method(data, **common_param)
        res[idx] = out.intensity
        if self.error_model:
            sigma[idx] = out.sigma
        self.queue_out.task_done()

    self.queue_in.join()
    self.queue_out.join()
    if self.queue_saver is not None:
        self.queue_saver.join()
    self.save_result(out, res, sigma)
    if self.input.get("delete_incoming"):
        for fname in self.input_files:
            try:
                os.unlink(fname)
            except IOError as err:
                self.log_warning(err)
def process(self):
    """Integrate all input frames, decoding CBF frames with ByteOffset.

    The first input file fixes the frame shape; ``.cbf`` files are read
    raw and decompressed on the GPU, anything else goes through fabio.
    Intensities (and sigmas for error-model / sigma_clip runs) are stacked
    per frame index and saved at the end.
    """
    Plugin.process(self)
    logger.debug("IntegrateManyFrames.process")
    nfiles = len(self.input_files)
    if self.integration_method == "integrate2d":
        res = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
    else:
        res = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data
    sigma = None
    if self.error_model or self.integration_method == "sigma_clip":
        if self.integration_method == "integrate2d":
            sigma = numpy.zeros((nfiles, self.npt_azim, self.npt), dtype=numpy.float32)  # numpy array for storing data
        else:
            sigma = numpy.zeros((nfiles, self.npt), dtype=numpy.float32)  # numpy array for storing data

    method = self.ai.__getattribute__(self.integration_method)
    common_param = {"method": self.method,
                    "unit": self.unit,
                    "dummy": self.dummy,
                    "delta_dummy": self.delta_dummy,
                    "mask": self.mask,
                    "polarization_factor": self.polarization_factor,
                    "normalization_factor": self.norm,
                    "correctSolidAngle": self.do_SA}
    if self.integration_method in ("integrate1d", "integrate_radial"):
        common_param["npt"] = self.npt
        common_param["error_model"] = self.error_model
        common_param["safe"] = False
    else:
        common_param["npt_rad"] = self.npt
        common_param["npt_azim"] = self.npt_azim
    if self.integration_method == "sigma_clip":
        # BUG FIX: the original line ended with a stray comma, turning the
        # scalar threshold into a 1-tuple.
        common_param["thres"] = self.sigma_clip_thresold
        common_param["max_iter"] = self.sigma_clip_max_iter
    if self.integration_method == "medfilt1d":
        common_param["percentile"] = self.medfilt1d_percentile

    # prepare some tools: the first file defines the frame size/shape used
    # to configure the GPU ByteOffset decompressor.
    cbf = fabio.open(self.input_files[0])
    bo = ByteOffset(os.path.getsize(self.input_files[0]), cbf.data.size,
                    devicetype="gpu")
    shape = cbf.data.shape
    for idx, fname in enumerate(self.input_files):
        logger.debug("process %s: %s", idx, fname)
        if fname.endswith("cbf"):
            raw = cbf.read(fname, only_raw=True)
            data = bo(raw, as_float=False).get().reshape(shape)
        else:
            data = fabio.open(fname).data
        if data is None:
            self.log_error("Failed reading file: %s" % self.input_files[idx],
                           do_raise=False)
            continue
        if self.save_raw:
            self.raw_ds[idx] = data
        out = method(data, **common_param)
        res[idx] = out.intensity
        if self.error_model or self.integration_method == "sigma_clip":
            sigma[idx] = out.sigma

    self.save_result(out, res, sigma)
    if self.input.get("delete_incoming"):
        for fname in self.input_files:
            try:
                os.unlink(fname)
            except IOError as err:
                self.log_warning(err)