def __init__(self): """ """ Plugin.__init__(self) self.queue_in = Queue() self.queue_out = Queue() self.queue_saver = None self.quit_event = Event() self.pool = [] self.raw_saver = None self.ai = None # this is the azimuthal integrator to use self.npt = 2000 self.npt_azim = 256 self.input_files = [] self.method = "full_ocl_csr" self.unit = "q_nm^-1" self.output_file = None self.mask = None self.wavelength = None self.polarization_factor = None self.do_SA = False self.dummy = None self.delta_dummy = None self.norm = 1 self.error_model = None # "poisson" self.save_raw = None self.raw_nxs = None self.raw_ds = None self.raw_compression = None self.integration_method = "integrate1d" self.sigma_clip_thresold = 3 self.sigma_clip_max_iter = 5 self.medfilt1d_percentile = (10, 90)
def teardown(self):
    if self.input_ds:
        self.input_ds.file.close()
    if self.output_ds:
        self.output_ds.file.close()
    self.distortion = None
    Plugin.teardown(self)
def process(self): Plugin.process(self) logger.debug("Integrate.process") if self.monitor_values is None: self.monitor_values = [1] * len(self.input_files) for monitor, fname in zip(self.monitor_values, self.input_files): if not os.path.exists(fname): self.log_error("image file: %s does not exist, skipping" % fname, do_raise=False) continue if not monitor: self.log_error("Monitor value is %s: skipping image %s" % (monitor, fname), do_raise=False) continue basename = os.path.splitext(os.path.basename(fname))[0] destination = os.path.join(self.dest_dir, basename + ".dat") data = fabio.open(fname).data self.ai.integrate1d(data, npt=self.npt, method=self.method, safe=False, filename=destination, normalization_factor=monitor, unit=self.unit) self.output_files.append(destination)
def __init__(self):
    Plugin.__init__(self)
    self.images = None
    self.output_format = "edf"
    self.output_file = "toto.edf"
    self.filter = "mean"  # TODO: average_percentil_20-80
    self.cutoff = None
def setup(self, kwargs=None): logger.debug("IntegrateMultiframe.setup") Plugin.setup(self, kwargs) self.ispyb = Ispyb._fromdict(self.input.get("ispyb", {})) self.sample = Sample._fromdict(self.input.get("sample", {})) self.input_file = self.input.get("input_file") if self.input_file is None: self.log_error("No input file provided", do_raise=True) self.output_file = self.input.get("output_file") if self.output_file is None: lst = list(os.path.splitext(self.input_file)) lst.insert(1, "-integrate") self.output_file = "".join(lst) self.log_warning( f"No output file provided, using: {self.output_file}") self.nb_frames = len(self.input.get("frame_ids", [])) self.npt = self.input.get("npt", self.npt) self.unit = pyFAI.units.to_unit(self.input.get("unit", self.unit)) self.poni = self.input.get("poni_file") if self.poni is None: self.log_error("No poni-file provided! aborting", do_raise=True) self.mask = self.input.get("mask_file") self.energy = self.input.get("energy") if self.energy is None: self.log_error("No energy provided! aborting", do_raise=True) else: self.energy = numpy.float32( self.energy ) #It is important to fix the datatype of the energy self.monitor_values = numpy.array(self.input.get("monitor_values", 1), dtype=numpy.float64) self.normalization_factor = float( self.input.get("normalization_factor", 1))
def process(self): Plugin.process(self) logger.debug("Integrate.process") for fname in self.input_files: if not os.path.exists(fname): self.log_error("image file: %s does not exist, skipping" % fname, do_raise=False) continue basename = os.path.splitext(os.path.basename(fname))[0] destination = os.path.join(self.dest_dir, basename + ".dat") fimg = fabio.open(fname) if self.wavelength is not None: monitor = self.getMon(fimg.header, self.wavelength) / self.norm else: monitor = 1.0 self.ai.integrate1d(fimg.data, npt=self.npt, method=self.method, safe=False, filename=destination, normalization_factor=monitor, unit=self.unit, dummy=self.dummy, delta_dummy=self.delta_dummy, polarization_factor=self.polarization_factor, correctSolidAngle=self.do_SA) self.output_files.append(destination)
def teardown(self): self.output["c216_filename"] = self.hdf5_filename if self.group: self.output["c216_path"] = self.group.name self.group.parent["end_time"] = numpy.string_(get_isotime()) if self.hdf5: self.hdf5.close() Plugin.teardown(self)
def teardown(self):
    if self.images_ds:
        self.images_ds.file.close()
    for ds in self.output_ds.values():
        ds.file.close()
    self.ai = None
    self.output["files"] = self.output_hdf5
    Plugin.teardown(self)
def teardown(self): self.output["c216_filename"] = self.hdf5_filename if self.group: self.output["c216_path"] = self.group.name # self.group.parent["end_time"] = str(get_isotime()) if self.nxs: self.nxs.close() Plugin.teardown(self)
def setup(self, kwargs): Plugin.setup(self, kwargs) logger.debug("PluginPyFAIv1_0.setup") ai = pyFAI.AzimuthalIntegrator() if sdi.geometryFit2D is not None: xsGeometry = sdi.geometryFit2D detector = self.getDetector(xsGeometry.detector) d = {"direct": EDUtilsUnit.getSIValue(xsGeometry.distance) * 1000, #fit2D takes the distance in mm "centerX": xsGeometry.beamCentreInPixelsX.value , "centerY":xsGeometry.beamCentreInPixelsY.value , "tilt": xsGeometry.angleOfTilt.value, "tiltPlanRotation": xsGeometry.tiltRotation.value} d.update(detector.getFit2D()) ai.setFit2D(**d) elif sdi.geometryPyFAI is not None: xsGeometry = sdi.geometryPyFAI detector = self.getDetector(xsGeometry.detector) d = {"dist": EDUtilsUnit.getSIValue(xsGeometry.sampleDetectorDistance), "poni1": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence1), "poni2": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence2), "rot1": EDUtilsUnit.getSIValue(xsGeometry.rotation1), "rot2": EDUtilsUnit.getSIValue(xsGeometry.rotation2), "rot3": EDUtilsUnit.getSIValue(xsGeometry.rotation3)} d.update(detector.getPyFAI()) ai.setPyFAI(**d) else: strError = "Geometry definition in %s, not recognized as a valid geometry%s %s" % (sdi, os.linesep, sdi.marshal()) self.ERROR(strError) raise RuntimeError(strError) ######################################################################## # Choose the azimuthal integrator ######################################################################## with self.__class__._sem: if tuple(ai.param) in self.__class__._dictGeo: self.ai = self.__class__._dictGeo[tuple(ai.param)] else: self.__class__._dictGeo[tuple(ai.param)] = ai self.ai = ai self.data = EDUtilsArray.getArray(self.dataInput.input).astype(float) if sdi.dark is not None: self.data -= EDUtilsArray.getArray(sdi.dark) if sdi.flat is not None: self.data /= EDUtilsArray.getArray(sdi.flat) if sdi.mask is not None: self.mask = EDUtilsArray.getArray(sdi.mask) if sdi.wavelength is not None: self.ai.wavelength = EDUtilsUnit.getSIValue(sdi.wavelength) if sdi.output is not None: self.strOutputFile = sdi.output.path.value if sdi.dummy is not None: self.dummy = sdi.dummy.value if sdi.deltaDummy is not None: self.delta_dummy = sdi.deltaDummy.value if sdi.nbPt: self.nbPt = sdi.nbPt.value
def teardown(self):
    if self.images_ds:
        self.images_ds.file.close()
    for ds in self.output_ds.values():
        ds.file.close()
    self.ai = None
    self.polarization = None
    self.output["files"] = self.output_hdf5
    Plugin.teardown(self)
def teardown(self): Plugin.teardown(self) logger.debug("IntegrateManyFrames.teardown") # Create some output data self.output["output_file"] = self.output_file if self.save_raw: self.raw_nxs.close() self.output["save_raw"] = self.save_raw self.raw_nxs = None self.raw_ds = None
def process(self):
    Plugin.process(self)
    self.dataset = self.read_data()
    self.nframes = self.dataset.shape[0]
    self.shape = self.dataset.shape[1:]
    self.qmask = self.make_qmask()
    Correlator = self.get_correlator()
    correlator = Correlator(self.shape, self.nframes, qmask=self.qmask)
    results = correlator.correlate(self.dataset[...])
    self.save_results(results)
def setup(self, kwargs=None): """Perform the setup of the job. mainly parsing of the kwargs. :param kwargs: dict with parmaters. :return: None """ logger.debug("IntegrateManyFrames.setup") Plugin.setup(self, kwargs) self.input_files = self.input.get("input_files") if not self.input_files: self.log_error("InputError: input_files not in input.") if "output_file" not in self.input: self.log_error("InputWarning: output_file not in input, save in input directory", do_raise=False) self.output_file = os.path.join(os.path.dirname(self.input_files[0]), "output.h5") else: self.output_file = os.path.abspath(self.input["output_file"]) if not self.output_file.endswith(".h5"): self.output_file = self.output_file + ".h5" poni_file = self.input.get("poni_file") if not poni_file: self.log_error("InputError: poni_file not in input.") ai = pyFAI.load(poni_file) stored = self._ais.get(poni_file, ai) if stored is ai: self.ai = stored else: self.ai = copy.deepcopy(stored) self.npt = int(self.input.get("npt", self.npt)) self.unit = self.input.get("unit", self.unit) self.wavelength = self.input.get("wavelength", self.wavelength) if os.path.exists(self.input.get("mask", "")): self.mask = fabio.open(self.input["mask"]).data self.dummy = self.input.get("dummy", self.dummy) self.delta_dummy = self.input.get("delta_dummy", self.delta_dummy) if self.input.get("do_polarziation"): self.polarization_factor = self.input.get("polarization_factor", self.polarization_factor) self.do_SA = self.input.get("do_SA", self.do_SA) self.norm = self.input.get("norm", self.norm) self.method = self.input.get("method", self.method) self.save_raw = self.input.get("save_raw", self.save_raw) self.raw_compression = self.input.get("raw_compression", self.raw_compression) if self.save_raw: dataset = self.prepare_raw_hdf5(self.raw_compression) self.queue_saver = Queue() self.raw_saver = RawSaver(self.queue_saver, self.quit_event, dataset) self.raw_saver.start() # create the pool of workers self.pool = Reader.build_pool((self.queue_in, self.queue_out, self.quit_event), self.pool_size)
def teardown(self): Plugin.teardown(self) logger.debug("SubtractBuffer.teardown") # export the output file location self.output["output_file"] = self.output_file if self.nxs is not None: self.nxs.close() if self.ai is not None: self.ai = None self.sample_juice = None self.buffer_juices = []
def __init__(self): """ """ Plugin.__init__(self) self.ai = None # this is the azimuthal integrator to use self.dest_dir = None self.ntp = 1000 self.input_files = [] self.monitor_values = None self.method = "full_ocl_csr" self.unit = "q_nm^-1" self.output_files = []
def __init__(self):
    Plugin.__init__(self)
    self.cycle = None
    self.c216 = None
    self.hdf5 = None
    self.hdf5_filename = None
    self.entry = None
    self.instrument = None
    self.group = None
    self.tfg_grp = None
    self.mcs_grp = None
    self.input2 = {}
def teardown(self):
    if self.result_filename:
        self.output["result_file"] = self.result_filename
    try:
        self.dataset.file.close()
    except Exception as err:
        self.log_warning("%s Unable to close dataset file: %s" % (type(err), err))
    self.qmask = None
    self.ai = None
    self.correlator_name = None
    Plugin.teardown(self)
def teardown(self): Plugin.teardown(self) logger.debug("IntegrateMultiframe.teardown") # export the output file location self.output["output_file"] = self.output_file if self.nxs is not None: self.nxs.close() if self.ai is not None: self.ai = None # clean cache if self._input_frames is not None: self._input_frames = None self.monitor_values = None
def __init__(self): """ """ Plugin.__init__(self) self.shared = None self.strOutputFile = None self.ai = None #this is the azimuthal integrator to use self.data = None self.mask = None self.nbPt = None self.dummy = None self.delta_dummy = None self.npaOut = None
def __init__(self):
    Plugin.__init__(self)
    self.buffer_files = []
    self.sample_file = None
    self.nxs = None
    self.output_file = None
    self.ai = None
    self.npt = None
    self.poni = None
    self.mask = None
    self.energy = None
    self.sample_juice = None
    self.buffer_juices = []
def __init__(self):
    Plugin.__init__(self)
    self.start_time = get_isotime()
    self.nxs = None  # output data file
    self.dataset = None
    self.shape = None  # shape of every image
    self.nframes = None  # number of input frames
    self.qmask = None  # contains the numpy array with the qmask
    self.unit = "q_nm^-1"
    self.ai = None
    self.correlator_name = None
    self.result_filename = None
    self.timestep = None
def setup(self, kwargs=None): logger.debug("SubtractBuffer.setup") Plugin.setup(self, kwargs) self.sample_file = self.input.get("sample_file") if self.sample_file is None: self.log_error("No sample file provided", do_raise=True) self.output_file = self.input.get("output_file") if self.output_file is None: lst = list(os.path.splitext(self.sample_file)) lst.insert(1, "-sub") self.output_file = "".join(lst) self.log_warning(f"No output file provided, using: {self.output_file}") self.buffer_files = [os.path.abspath(fn) for fn in self.input.get("buffer_files", []) if os.path.exists(fn)]
def test_plugin(self):
    "Test a stand-alone (dummy) plugin"
    p = Plugin()
    p.setup()
    p.process()
    p.teardown()
    logger.debug(p.output)
def __init__(self):
    Plugin.__init__(self)
    self.cycle = None
    self.c216 = None
    self.nxs = None
    self.hdf5_filename = None
    self.entry = None
    self.instrument = None
    self.group = None
    self.tfg_grp = None
    self.mcs_grp = None
    self.input2 = {}
    if "TANGO_HOST" not in os.environ:
        raise RuntimeError("No TANGO_HOST defined")
def __init__(self):
    Plugin.__init__(self)
    self.sample = None
    self.ispyb = None
    self.input_file = None
    self._input_frames = None
    self.output_file = None
    self.nxs = None
    self.nb_frames = None
    self.ai = None
    self.npt = 1000
    self.unit = pyFAI.units.to_unit("q_nm^-1")
    # self.polarization_factor = 0.9 --> constant
    self.poni = self.mask = None
    self.energy = None
    # self.method = IntegrationMethod.select_method(1, "no", "csr", "opencl")[0] --> constant
    self.monitor_values = None
    self.normalization_factor = None
def setup(self, kwargs=None): """ see class documentation """ Plugin.setup(self, kwargs) if "HS32F10" in self.input: self.input2.update(preproc(**self.input)) else: self.input2.update(self.input) # for debugging self.input["input2"] = self.input2 self.c216 = self.input2.get("c216", "id02/c216/0") self.cycle = self.input2.get("cycle", 1) if "hdf5_filename" not in self.input2: self.log_error("hdf5_filename not in input") self.hdf5_filename = self.input2.get("hdf5_filename") self.entry = self.input2.get("entry", "entry") self.instrument = self.input2.get("instrument", "ESRF-ID02")
def __init__(self): """ """ Plugin.__init__(self) self.ai = None # this is the azimuthal integrator to use self.dest_dir = None self.json_data = None self.ntp = 3000 self.input_files = [] self.method = "full_ocl_csr" self.unit = "q_nm^-1" self.output_files = [] self.mask = "" self.wavelength = None self.dummy = -1 self.delta_dummy = 0 self.polarization_factor = None self.do_SA = False self.norm = 1e12
def setup(self, kwargs=None): """ see class documentation """ Plugin.setup(self, kwargs) if "HS32F10" in self.input: self.input2.update(preproc(**self.input)) else: self.input2.update(self.input) # for debugging self.input["input2"] = self.input2 self.c216 = self.input2.get("c216", "id02/c216/0") self.cycle = self.input2.get("cycle", 1) if "hdf5_filename" not in self.input2: self.log_error("hdf5_filename not in input") self.hdf5_filename = self.input2.get("hdf5_filename") self.entry = self.input2.get("entry", "entry") self.instrument = self.input2.get("instrument", "ID02")
def process(self): Plugin.process(self) logger.debug("IntegrateManyFrames.process") for idx, fname in enumerate(self.input_files): self.queue_in.put((idx, fname)) res = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data sigma = None if self.error_model: sigma = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data for i in self.input_files: logger.debug("process %s", i) idx, data = self.queue_out.get() if data is None: self.log_error("Failed reading file: %s" % self.input_files[idx], do_raise=False) continue if self.save_raw: self.queue_saver.put((idx, data)) out = self.ai.integrate1d(data, self.npt, method=self.method, unit=self.unit, safe=False, dummy=self.dummy, delta_dummy=self.delta_dummy, error_model=self.error_model, mask=self.mask, polarization_factor=self.polarization_factor, normalization_factor=self.norm, correctSolidAngle=self.do_SA) res[idx, :] = out.intensity if self.error_model: sigma[idx, :] = out.sigma self.queue_out.task_done() self.queue_in.join() self.queue_out.join() if self.queue_saver is not None: self.queue_saver.join() self.save_result(out, res, sigma) if self.input.get("delete_incoming"): for fname in self.input_files: try: os.unlink(fname) except IOError as err: self.log_warning(err)
def process(self): Plugin.process(self) logger.debug("PluginPyFAIv1_0.process") data = EDUtilsArray.getArray(self.dataInput.input) if self.dataInput.saxsWaxs and self.dataInput.saxsWaxs.value.lower().startswith("s"): out = self.ai.saxs(self.data, nbPt=self.nbPt, filename=self.strOutputFile, mask=self.mask, dummy=self.dummy, delta_dummy=self.delta_dummy) else: out = self.ai.xrpd(self.data, nbPt=self.nbPt, filename=self.strOutputFile, mask=self.mask, dummy=self.dummy, delta_dummy=self.delta_dummy) self.npaOut = np.hstack((i.reshape(-1, 1) for i in out if i is not None))
def setup(self, kwargs): Plugin.setup(self, kwargs) logger.debug("PluginPyFAIv1_0.setup") ai = pyFAI.AzimuthalIntegrator() #TODO: setup the integrator from the input # sdi = self.input.get("data") # if sdi.geometryFit2D is not None: # xsGeometry = sdi.geometryFit2D # detector = self.getDetector(xsGeometry.detector) # d = {"direct": EDUtilsUnit.getSIValue(xsGeometry.distance) * 1000, #fit2D takes the distance in mm # "centerX": xsGeometry.beamCentreInPixelsX.value , # "centerY":xsGeometry.beamCentreInPixelsY.value , # "tilt": xsGeometry.angleOfTilt.value, # "tiltPlanRotation": xsGeometry.tiltRotation.value} # d.update(detector.getFit2D()) # ai.setFit2D(**d) # elif sdi.geometryPyFAI is not None: # xsGeometry = sdi.geometryPyFAI # detector = self.getDetector(xsGeometry.detector) # d = {"dist": EDUtilsUnit.getSIValue(xsGeometry.sampleDetectorDistance), # "poni1": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence1), # "poni2": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence2), # "rot1": EDUtilsUnit.getSIValue(xsGeometry.rotation1), # "rot2": EDUtilsUnit.getSIValue(xsGeometry.rotation2), # "rot3": EDUtilsUnit.getSIValue(xsGeometry.rotation3)} # d.update(detector.getPyFAI()) # ai.setPyFAI(**d) # else: # strError = "Geometry definition in %s, not recognized as a valid geometry%s %s" % (sdi, os.linesep, sdi.marshal()) # self.ERROR(strError) # raise RuntimeError(strError) ######################################################################## # Choose the azimuthal integrator ######################################################################## with self.__class__._sem: if tuple(ai.param) in self.__class__._dictGeo: self.ai = self.__class__._dictGeo[tuple(ai.param)] else: self.__class__._dictGeo[tuple(ai.param)] = ai self.ai = ai
def __init__(self):
    Plugin.__init__(self)
    self.ai = None
    self.distortion = None
    self.workers = {}
    self.output_ds = {}  # output datasets
    self.dest = None  # output directory
    self.I1 = None  # beam stop diode values
    self.t = None  # time of the start of the frame; same shape as self.I1
    self.nframes = None
    self.to_save = ["raw", "ave"]  # by default, only the raw image and the averaged one are saved
    self.input_nxs = None
    self.metadata_nxs = None
    self.images_ds = None
    self.metadata_plugin = None
    self.metadata = {}
    self.npt1_rad = None
    self.npt2_rad = None
    self.npt2_azim = None
    self.dark = None
    self.dark_filename = None
    self.flat_filename = None
    self.flat = None
    self.mask_filename = None
    self.distortion_filename = None
    self.output_hdf5 = {}
    self.dist = 1.0
    self.absolute_solid_angle = None
    self.in_shape = None
    self.scaling_factor = 1.0
    self.correct_solid_angle = True
    self.correct_I1 = True
    self.dummy = None
    self.delta_dummy = None
    self.unit = "q_nm^-1"
    self.polarization = None
    self.cache_ai = None
    self.cache_dis = None
    self.variance_formula = None
    self.variance_function = lambda data, dark: None
def process(self): Plugin.process(self) logger.debug("PluginPyFAIv1_0.process") #TODO: read the actual data data = 0#EDUtilsArray.getArray(self.dataInput.input) if self.dataInput.saxsWaxs and self.dataInput.saxsWaxs.value.lower().startswith("s"): out = self.ai.saxs(self.data, nbPt=self.nbPt, filename=self.strOutputFile, mask=self.mask, dummy=self.dummy, delta_dummy=self.delta_dummy) else: out = self.ai.xrpd(self.data, nbPt=self.nbPt, filename=self.strOutputFile, mask=self.mask, dummy=self.dummy, delta_dummy=self.delta_dummy) self.npaOut = numpy.hstack((i.reshape(-1, 1) for i in out if i is not None))
def setup(self, kwargs): logger.debug("Integrate.setup") Plugin.setup(self, kwargs) if "output_dir" not in self.input: self.log_error("output_dir not in input") self.dest_dir = os.path.abspath(self.input["output_dir"]) ponifile = self.input.get("poni_file", "") if not os.path.exists(ponifile): self.log_error("Ponifile: %s does not exist" % ponifile, do_raise=True) ai = pyFAI.load(ponifile) stored = self._ais.get(str(ai), ai) if stored is ai: self.ai = stored else: self.ai = stored.__deepcopy__() self.npt = int(self.input.get("npt", self.npt)) self.unit = self.input.get("unit", self.unit)
def teardown(self): Plugin.teardown(self) logger.debug("IntegrateManyFrames.teardown") # Create some output data self.output["output_file"] = self.output_file if self.save_raw: self.raw_nxs.close() self.output["save_raw"] = self.save_raw self.raw_nxs = None self.raw_ds = None # now clean up threads, empty pool of workers self.quit_event.set() for _ in self.pool: self.queue_in.put(None) if self.queue_saver is not None: self.queue_saver.put(None) self.queue_saver = None self.raw_saver = None self.pool = None self.queue_in = None self.queue_out = None self.quit_event = None
def __init__(self):
    Plugin.__init__(self)
    self.ai = None
    self.distortion_cor = None
    self.distortion_norm = None
    self.workers = {}
    self.output_ds = {}  # output datasets
    self.dest = None  # output directory
    self.I1 = None  # beam stop diode values
    self.t = None  # time of the start of the frame; same shape as self.I1
    self.nframes = None
    self.to_save = ["raw", "ave"]  # by default, only the raw image and the averaged one are saved
    self.input_nxs = None
    self.metadata_nxs = None
    self.images_ds = None
    self.metadata_plugin = None
    self.metadata = {}
    self.npt1_rad = None
    self.npt2_rad = None
    self.npt2_azim = None
    self.dark = None
    self.dark_filename = None
    self.flat_filename = None
    self.flat = None
    self.mask_filename = None
    self.distortion_filename = None
    self.output_hdf5 = {}
    self.dist = 1.0
    self.absolute_solid_angle = None
    self.in_shape = None
    self.scaling_factor = 1.0
    self.correct_solid_angle = True
    self.correct_I1 = True
    self.dummy = None
    self.delta_dummy = None
    self.unit = "q_nm^-1"
    self.polarization = None
def setup(self, kwargs): logger.debug("Integrate.setup") Plugin.setup(self, kwargs) if "output_dir" not in self.input: self.log_error("output_dir not in input") # this needs to be added in the SPEC macro self.dest_dir = os.path.abspath(self.input["output_dir"]) if "json" not in self.input: self.log_error("json not in input") json_path = self.input.get("json", "") if not os.path.exists(json_path): self.log_error("Integration setup file (JSON): %s does not exist" % json_path, do_raise=True) self.json_data = json.load(open(json_path)) ai = make_ai(self.json_data) stored = self._ais.get(str(ai), ai) if stored is ai: self.ai = stored else: self.ai = stored.__deepcopy__() self.npt = int(self.json_data.get("npt", self.npt)) self.unit = self.json_data.get("unit", self.unit) self.wavelength = self.json_data.get("wavelength", self.wavelength) if os.path.exists(self.json_data["mask"]): self.mask = self.json_data.get("mask", self.mask) self.dummy = self.json_data.get("val_dummy", self.dummy) self.delta_dummy = self.json_data.get("delta_dummy", self.delta_dummy) if self.json_data["do_polarziation"]: self.polarization_factor = self.json_data.get( "polarization_factor", self.polarization_factor) self.do_SA = self.json_data.get("do_SA", self.do_SA) self.norm = self.json_data.get( "norm", self.norm) # need to be added in the spec macro
def process(self): Plugin.process(self) logger.debug("IntegrateManyFrames.process") for idx, fname in enumerate(self.input_files): self.queue_in.put((idx, fname)) if self.integration_method == "integrate2d": res = numpy.zeros((len(self.input_files), self.npt_azim, self.npt), dtype=numpy.float32) # numpy array for storing data else: res = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data sigma = None if self.error_model: if self.integration_method == "integrate2d": sigma = numpy.zeros((len(self.input_files), self.npt_azim, self.npt), dtype=numpy.float32) # numpy array for storing data else: sigma = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data method = self.ai.__getattribute__(self.integration_method) common_param = {"method": self.method, "unit": self.unit, "dummy": self.dummy, "delta_dummy": self.delta_dummy, "mask": self.mask, "polarization_factor": self.polarization_factor, "normalization_factor": self.norm, "correctSolidAngle": self.do_SA} if self.integration_method in ("integrate1d", "integrate_radial"): common_param["npt"] = self.npt common_param["error_model"] = self.error_model common_param["safe"] = False else: common_param["npt_rad"] = self.npt common_param["npt_azim"] = self.npt_azim if self.integration_method == "sigma_clip": common_param["thres"] = self.sigma_clip_thresold, common_param["max_iter"] = self.sigma_clip_max_iter if self.integration_method == "medfilt1d": common_param["percentile"] = self.medfilt1d_percentile for i in self.input_files: logger.debug("process %s", i) idx, data = self.queue_out.get() if data is None: self.log_error("Failed reading file: %s" % self.input_files[idx], do_raise=False) continue if self.save_raw: self.queue_saver.put((idx, data)) out = method(data, **common_param) res[idx] = out.intensity if self.error_model: sigma[idx] = out.sigma self.queue_out.task_done() self.queue_in.join() self.queue_out.join() if self.queue_saver is not None: self.queue_saver.join() self.save_result(out, res, sigma) if self.input.get("delete_incoming"): for fname in self.input_files: try: os.unlink(fname) except IOError as err: self.log_warning(err)
def setup(self, kwargs=None): """Perform the setup of the job. mainly parsing of the kwargs. :param kwargs: dict with parmaters. :return: None """ logger.debug("IntegrateManyFrames.setup") Plugin.setup(self, kwargs) self.input_files = self.input.get("input_files") if not self.input_files: self.log_error("InputError: input_files not in input.") if not isinstance(self.input_files, list): self.input_files = glob.glob(self.input_files) self.input_files.sort() if "output_file" not in self.input: self.log_error("InputWarning: output_file not in input, save in input directory", do_raise=False) self.output_file = os.path.join(os.path.dirname(self.input_files[0]), "output.h5") else: self.output_file = os.path.abspath(self.input["output_file"]) if not self.output_file.endswith(".h5"): self.output_file = self.output_file + ".h5" poni_file = self.input.get("poni_file") if not poni_file: self.log_error("InputError: poni_file not in input.") self.ai = pyFAI.load(poni_file) # stored = self._ais.get(poni_file, ai) # if stored is ai: # self.ai = stored # else: # self.ai = copy.deepcopy(stored) self.npt = int(self.input.get("npt", self.npt)) self.npt_azim = self.input.get("npt_azim", self.npt_azim) self.unit = self.input.get("unit", self.unit) self.wavelength = self.input.get("wavelength", self.wavelength) if os.path.exists(self.input.get("mask", "")): self.mask = fabio.open(self.input["mask"]).data self.dummy = self.input.get("dummy", self.dummy) self.delta_dummy = self.input.get("delta_dummy", self.delta_dummy) if self.input.get("do_polarziation"): self.polarization_factor = self.input.get("polarization_factor", self.polarization_factor) self.do_SA = self.input.get("do_SA", self.do_SA) self.norm = self.input.get("norm", self.norm) self.save_raw = self.input.get("save_raw", self.save_raw) self.integration_method = self.input.get("integration_method", self.integration_method) self.sigma_clip_thresold = self.input.get("sigma_clip_thresold", self.sigma_clip_thresold) self.sigma_clip_max_iter = self.input.get("sigma_clip_max_iter", self.sigma_clip_max_iter) self.medfilt1d_percentile = self.input.get("medfilt1d_percentile", self.medfilt1d_percentile) method = self.input.get("method", self.method) if "1" in self.integration_method: integration_dim = 1 else: integration_dim = 2 if isinstance(method, (str, unicode)): self.method = IntegrationMethod.select_old_method(integration_dim, method) else: self.method = IntegrationMethod.select_one_available(method, dim=integration_dim, degradable=True) print(self.method) self.raw_compression = self.input.get("raw_compression", self.raw_compression) if self.save_raw: self.prepare_raw_hdf5(self.raw_compression)
def __init__(self):
    Plugin.__init__(self)
    self.input_ds = None
    self.output_ds = None
    self.distortion = None
def teardown(self):
    Plugin.teardown(self)
def setup(self, kwargs=None): """ see class documentation """ Plugin.setup(self, kwargs)
def __init__(self):
    Plugin.__init__(self)
    self.input_metadata = None
    self.plugins = {"metadata": Metadata()}
def process(self): Plugin.process(self) logger.debug("IntegrateManyFrames.process") if self.integration_method == "integrate2d": res = numpy.zeros((len(self.input_files), self.npt_azim, self.npt), dtype=numpy.float32) # numpy array for storing data else: res = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data sigma = None if self.error_model or self.integration_method == "sigma_clip": if self.integration_method == "integrate2d": sigma = numpy.zeros((len(self.input_files), self.npt_azim, self.npt), dtype=numpy.float32) # numpy array for storing data else: sigma = numpy.zeros((len(self.input_files), self.npt), dtype=numpy.float32) # numpy array for storing data method = self.ai.__getattribute__(self.integration_method) common_param = {"method": self.method, "unit": self.unit, "dummy": self.dummy, "delta_dummy": self.delta_dummy, "mask": self.mask, "polarization_factor": self.polarization_factor, "normalization_factor": self.norm, "correctSolidAngle": self.do_SA} if self.integration_method in ("integrate1d", "integrate_radial"): common_param["npt"] = self.npt common_param["error_model"] = self.error_model common_param["safe"] = False else: common_param["npt_rad"] = self.npt common_param["npt_azim"] = self.npt_azim if self.integration_method == "sigma_clip": common_param["thres"] = self.sigma_clip_thresold, common_param["max_iter"] = self.sigma_clip_max_iter if self.integration_method == "medfilt1d": common_param["percentile"] = self.medfilt1d_percentile # prepare some tools cbf = fabio.open(self.input_files[0]) bo = ByteOffset(os.path.getsize(self.input_files[0]), cbf.data.size, devicetype="gpu") shape = cbf.data.shape for idx, fname in enumerate(self.input_files): logger.debug("process %s: %s", idx, fname) if fname.endswith("cbf"): raw = cbf.read(fname, only_raw=True) data = bo(raw, as_float=False).get().reshape(shape) else: data = fabio.open(fname).data if data is None: self.log_error("Failed reading file: %s" % self.input_files[idx], do_raise=False) continue if self.save_raw: self.raw_ds[idx] = data out = method(data, **common_param) res[idx] = out.intensity if self.error_model or self.integration_method == "sigma_clip": sigma[idx] = out.sigma self.save_result(out, res, sigma) if self.input.get("delete_incoming"): for fname in self.input_files: try: os.unlink(fname) except IOError as err: self.log_warning(err)
def setup(self, kwargs=None): """ see class documentation """ Plugin.setup(self, kwargs) if "output_dir" not in self.input: self.log_error("output_dir not in input") self.dest = os.path.abspath(self.input["output_dir"]) if "unit" in self.input: self.unit = self.input.get("unit") if "metadata_job" in self.input: job_id = int(self.input.get("metadata_job")) status = Job.synchronize_job(job_id, self.TIMEOUT) abort_time = time.time() + self.TIMEOUT while status == Job.STATE_UNINITIALIZED: # Wait for job to start time.sleep(1) status = Job.synchronize_job(job_id, self.TIMEOUT) if time.time() > abort_time: self.log_error("Timeout while waiting metadata plugin to finish") break if status == Job.STATE_SUCCESS: self.metadata_plugin = Job.getJobFromId(job_id) else: self.log_error("Metadata plugin ended in %s: aborting myself" % status) if not os.path.isdir(self.dest): os.makedirs(self.dest) c216_filename = os.path.abspath(self.input.get("c216_filename", "")) if (os.path.dirname(c216_filename) != self.dest) and (os.path.basename(c216_filename) not in os.listdir(self.dest)): self.output_hdf5["metadata"] = os.path.join(self.dest, os.path.basename(c216_filename)) m = threading.Thread(target=shutil.copy, name="copy metadata", args=(c216_filename, self.dest)) m.start() if "to_save" in self.input: to_save = self.input["to_save"][:] if type(to_save) in StringTypes: # fix a bug from spec ... self.to_save = [i.strip('[\\] ",') for i in to_save.split()] self.log_warning("processing planned: " + " ".join(self.to_save)) else: self.to_save = to_save if "image_file" not in self.input: self.log_error("image_file not in input") self.image_file = self.input["image_file"] if not os.path.exists(self.image_file): if not self.image_file.startswith("/"): # prepend the dirname of the c216 image_file = os.path.join(os.path.dirname(c216_filename), self.image_file) if os.path.exists(image_file): self.image_file = image_file else: self.log_error("image_file %s does not exist" % self.image_file) self.dark_filename = self.input.get("dark_filename") if "raw" in self.to_save: if os.path.dirname(self.image_file) != self.dest: t = threading.Thread(target=shutil.copy, name="copy raw", args=(self.image_file, self.dest)) t.start() self.output_hdf5["raw"] = os.path.join(self.dest, os.path.basename(self.image_file)) if type(self.dark_filename) in StringTypes and os.path.exists(self.dark_filename): if os.path.dirname(self.dark_filename) != self.dest: d = threading.Thread(target=shutil.copy, name="copy dark", args=(self.dark_filename, self.dest)) d.start() self.output_hdf5["dark"] = os.path.join(self.dest, os.path.basename(self.dark_filename)) self.scaling_factor = float(self.input.get("scaling_factor", 1.0)) self.correct_solid_angle = bool(self.input.get("correct_solid_angle", True)) self.correct_I1 = bool(self.input.get("correct_I1", True)) self.I1, self.t = self.load_I1_t(c216_filename)
def process(self):
    Plugin.process(self)
    if self.input is None:
        logger.warning("input is None")
    x = self.input.get("x", 0)
    self.output["result"] = x * x * x
def teardown(self): Plugin.teardown(self) logger.debug("PluginPyFAIv1_0.teardown")
def postProcess(self):
    Plugin.postProcess(self)
    logger.debug("PluginPyFAIv1_0.postProcess")