def test_plugin(self):
    """Test a stand alone (dummy-) plugin.

    Runs the full plugin life-cycle (setup -> process -> teardown) and
    logs the resulting output.
    """
    # NOTE(review): an identical test_plugin definition appears twice in this
    # file; the later one shadows this one — confirm which is intended.
    p = Plugin()
    p.setup()
    p.process()
    p.teardown()
    logger.debug(p.output)
def test_plugin(self):
    """Exercise the complete life-cycle of a bare (dummy) plugin."""
    plugin = Plugin()
    plugin.setup()
    plugin.process()
    plugin.teardown()
    logger.debug(plugin.output)
def setup(self, kwargs=None):
    """Parse the job parameters out of ``self.input``.

    Reads ISPyB/sample metadata, the input/output file names, the number of
    frames and the integration parameters (npt, unit, poni-file, mask,
    energy, monitor values, normalization factor).  Missing mandatory keys
    are reported via ``log_error`` with ``do_raise=True``.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    """
    logger.debug("IntegrateMultiframe.setup")
    Plugin.setup(self, kwargs)
    self.ispyb = Ispyb._fromdict(self.input.get("ispyb", {}))
    self.sample = Sample._fromdict(self.input.get("sample", {}))
    self.input_file = self.input.get("input_file")
    if self.input_file is None:
        self.log_error("No input file provided", do_raise=True)
    self.output_file = self.input.get("output_file")
    if self.output_file is None:
        # derive "<stem>-integrate<ext>" from the input file name
        lst = list(os.path.splitext(self.input_file))
        lst.insert(1, "-integrate")
        self.output_file = "".join(lst)
        self.log_warning(
            f"No output file provided, using: {self.output_file}")
    self.nb_frames = len(self.input.get("frame_ids", []))
    self.npt = self.input.get("npt", self.npt)
    self.unit = pyFAI.units.to_unit(self.input.get("unit", self.unit))
    self.poni = self.input.get("poni_file")
    if self.poni is None:
        self.log_error("No poni-file provided! aborting", do_raise=True)
    self.mask = self.input.get("mask_file")
    self.energy = self.input.get("energy")
    if self.energy is None:
        self.log_error("No energy provided! aborting", do_raise=True)
    else:
        self.energy = numpy.float32(self.energy)  # It is important to fix the datatype of the energy
    self.monitor_values = numpy.array(self.input.get("monitor_values", 1), dtype=numpy.float64)
    self.normalization_factor = float(self.input.get("normalization_factor", 1))
def setup(self, kwargs):
    """Configure the azimuthal integrator from the plugin's data input.

    Builds a pyFAI ``AzimuthalIntegrator`` from either a Fit2D or a pyFAI
    geometry description, re-uses a cached integrator with identical
    parameters when available, then loads the image and applies
    dark/flat/mask/wavelength/output/dummy settings.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    :raises RuntimeError: when neither geometry flavour is provided
    """
    Plugin.setup(self, kwargs)
    logger.debug("PluginPyFAIv1_0.setup")
    # BUGFIX: `sdi` was used throughout this method without ever being
    # assigned (guaranteed NameError at runtime).  Bind it to the data-model
    # input; presumably the EDNA-style data input, consistent with the
    # `self.dataInput.input` access below — TODO confirm against callers.
    sdi = self.dataInput
    ai = pyFAI.AzimuthalIntegrator()
    if sdi.geometryFit2D is not None:
        xsGeometry = sdi.geometryFit2D
        detector = self.getDetector(xsGeometry.detector)
        d = {"direct": EDUtilsUnit.getSIValue(xsGeometry.distance) * 1000,  # fit2D takes the distance in mm
             "centerX": xsGeometry.beamCentreInPixelsX.value,
             "centerY": xsGeometry.beamCentreInPixelsY.value,
             "tilt": xsGeometry.angleOfTilt.value,
             "tiltPlanRotation": xsGeometry.tiltRotation.value}
        d.update(detector.getFit2D())
        ai.setFit2D(**d)
    elif sdi.geometryPyFAI is not None:
        xsGeometry = sdi.geometryPyFAI
        detector = self.getDetector(xsGeometry.detector)
        d = {"dist": EDUtilsUnit.getSIValue(xsGeometry.sampleDetectorDistance),
             "poni1": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence1),
             "poni2": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence2),
             "rot1": EDUtilsUnit.getSIValue(xsGeometry.rotation1),
             "rot2": EDUtilsUnit.getSIValue(xsGeometry.rotation2),
             "rot3": EDUtilsUnit.getSIValue(xsGeometry.rotation3)}
        d.update(detector.getPyFAI())
        ai.setPyFAI(**d)
    else:
        strError = "Geometry definition in %s, not recognized as a valid geometry%s %s" % (sdi, os.linesep, sdi.marshal())
        self.ERROR(strError)
        raise RuntimeError(strError)
    ########################################################################
    # Choose the azimuthal integrator
    ########################################################################
    with self.__class__._sem:
        # cache integrators per geometry-parameter tuple under a class lock
        if tuple(ai.param) in self.__class__._dictGeo:
            self.ai = self.__class__._dictGeo[tuple(ai.param)]
        else:
            self.__class__._dictGeo[tuple(ai.param)] = ai
            self.ai = ai
    self.data = EDUtilsArray.getArray(self.dataInput.input).astype(float)
    if sdi.dark is not None:
        self.data -= EDUtilsArray.getArray(sdi.dark)
    if sdi.flat is not None:
        self.data /= EDUtilsArray.getArray(sdi.flat)
    if sdi.mask is not None:
        self.mask = EDUtilsArray.getArray(sdi.mask)
    if sdi.wavelength is not None:
        self.ai.wavelength = EDUtilsUnit.getSIValue(sdi.wavelength)
    if sdi.output is not None:
        self.strOutputFile = sdi.output.path.value
    if sdi.dummy is not None:
        self.dummy = sdi.dummy.value
    if sdi.deltaDummy is not None:
        self.delta_dummy = sdi.deltaDummy.value
    if sdi.nbPt:
        self.nbPt = sdi.nbPt.value
def setup(self, kwargs=None):
    """Perform the setup of the job, mainly parsing of the kwargs.

    :param kwargs: dict with parameters.
    :return: None
    """
    logger.debug("IntegrateManyFrames.setup")
    Plugin.setup(self, kwargs)
    self.input_files = self.input.get("input_files")
    if not self.input_files:
        self.log_error("InputError: input_files not in input.")
    if "output_file" not in self.input:
        # warning only (do_raise=False): fall back to the first input's directory
        self.log_error("InputWarning: output_file not in input, save in input directory", do_raise=False)
        self.output_file = os.path.join(os.path.dirname(self.input_files[0]), "output.h5")
    else:
        self.output_file = os.path.abspath(self.input["output_file"])
    if not self.output_file.endswith(".h5"):
        self.output_file = self.output_file + ".h5"
    poni_file = self.input.get("poni_file")
    if not poni_file:
        self.log_error("InputError: poni_file not in input.")
    ai = pyFAI.load(poni_file)
    # integrators are cached per poni-file; re-use the stored instance,
    # otherwise work on a deep copy so jobs do not share mutable state
    stored = self._ais.get(poni_file, ai)
    if stored is ai:
        self.ai = stored
    else:
        self.ai = copy.deepcopy(stored)
    self.npt = int(self.input.get("npt", self.npt))
    self.unit = self.input.get("unit", self.unit)
    self.wavelength = self.input.get("wavelength", self.wavelength)
    if os.path.exists(self.input.get("mask", "")):
        self.mask = fabio.open(self.input["mask"]).data
    self.dummy = self.input.get("dummy", self.dummy)
    self.delta_dummy = self.input.get("delta_dummy", self.delta_dummy)
    # NOTE(review): key is misspelled ("do_polarziation") — kept because
    # callers presumably send this exact spelling; confirm before fixing
    if self.input.get("do_polarziation"):
        self.polarization_factor = self.input.get("polarization_factor", self.polarization_factor)
    self.do_SA = self.input.get("do_SA", self.do_SA)
    self.norm = self.input.get("norm", self.norm)
    self.method = self.input.get("method", self.method)
    self.save_raw = self.input.get("save_raw", self.save_raw)
    self.raw_compression = self.input.get("raw_compression", self.raw_compression)
    if self.save_raw:
        # raw frames are written by a dedicated saver thread fed via a queue
        dataset = self.prepare_raw_hdf5(self.raw_compression)
        self.queue_saver = Queue()
        self.raw_saver = RawSaver(self.queue_saver, self.quit_event, dataset)
        self.raw_saver.start()
    # create the pool of workers
    self.pool = Reader.build_pool((self.queue_in, self.queue_out, self.quit_event), self.pool_size)
def setup(self, kwargs=None):
    """Collect the sample, output and buffer file names from the input."""
    logger.debug("SubtractBuffer.setup")
    Plugin.setup(self, kwargs)
    self.sample_file = self.input.get("sample_file")
    if self.sample_file is None:
        self.log_error("No sample file provided", do_raise=True)
    self.output_file = self.input.get("output_file")
    if self.output_file is None:
        # default to "<stem>-sub<ext>" next to the sample file
        stem, ext = os.path.splitext(self.sample_file)
        self.output_file = stem + "-sub" + ext
        self.log_warning(f"No output file provided, using: {self.output_file}")
    buffers = []
    for fn in self.input.get("buffer_files", []):
        if os.path.exists(fn):
            buffers.append(os.path.abspath(fn))
    self.buffer_files = buffers
def setup(self, kwargs=None):
    """Normalize the raw input through preproc and extract C216/HDF5 parameters."""
    Plugin.setup(self, kwargs)
    source = preproc(**self.input) if "HS32F10" in self.input else self.input
    self.input2.update(source)
    self.input["input2"] = self.input2  # keep a copy on the input for debugging
    self.c216 = self.input2.get("c216", "id02/c216/0")
    self.cycle = self.input2.get("cycle", 1)
    if "hdf5_filename" not in self.input2:
        self.log_error("hdf5_filename not in input")
    self.hdf5_filename = self.input2.get("hdf5_filename")
    self.entry = self.input2.get("entry", "entry")
    self.instrument = self.input2.get("instrument", "ESRF-ID02")
def setup(self, kwargs=None):
    """Preprocess the input when needed and read out C216/HDF5 settings."""
    Plugin.setup(self, kwargs)
    if "HS32F10" in self.input:
        merged = preproc(**self.input)
    else:
        merged = self.input
    self.input2.update(merged)
    # expose the merged dict on the input for debugging purposes
    self.input["input2"] = self.input2
    self.c216 = self.input2.get("c216", "id02/c216/0")
    self.cycle = self.input2.get("cycle", 1)
    if "hdf5_filename" not in self.input2:
        self.log_error("hdf5_filename not in input")
    self.hdf5_filename = self.input2.get("hdf5_filename")
    self.entry = self.input2.get("entry", "entry")
    self.instrument = self.input2.get("instrument", "ID02")
def setup(self, kwargs):
    """Create (or re-use) the azimuthal integrator for this plugin.

    The integrator is currently built with default parameters; reading the
    geometry from the input is still to be done (see TODO).  Integrators are
    cached per geometry-parameter tuple under a class-level lock so that
    jobs sharing a geometry share one instance.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    """
    Plugin.setup(self, kwargs)
    logger.debug("PluginPyFAIv1_0.setup")
    ai = pyFAI.AzimuthalIntegrator()
    # TODO: setup the integrator from the input — geometry description
    # (Fit2D- or pyFAI-flavoured) expected under self.input.get("data").
    # The previous commented-out EDNA implementation was removed as dead code.
    ########################################################################
    # Choose the azimuthal integrator
    ########################################################################
    with self.__class__._sem:
        key = tuple(ai.param)  # hoisted: computed once instead of three times
        if key in self.__class__._dictGeo:
            self.ai = self.__class__._dictGeo[key]
        else:
            self.__class__._dictGeo[key] = ai
            self.ai = ai
def setup(self, kwargs):
    """Parse the kwargs: output directory, poni-file and integration options.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    """
    import copy  # stdlib; local import keeps this block self-contained
    logger.debug("Integrate.setup")
    Plugin.setup(self, kwargs)
    if "output_dir" not in self.input:
        self.log_error("output_dir not in input")
    self.dest_dir = os.path.abspath(self.input["output_dir"])
    ponifile = self.input.get("poni_file", "")
    if not os.path.exists(ponifile):
        self.log_error("Ponifile: %s does not exist" % ponifile, do_raise=True)
    ai = pyFAI.load(ponifile)
    # cache integrators keyed by their string representation; re-use the
    # stored one, otherwise deep-copy so jobs do not share mutable state.
    stored = self._ais.get(str(ai), ai)
    if stored is ai:
        self.ai = stored
    else:
        # idiom fix: use copy.deepcopy() instead of calling the
        # __deepcopy__ dunder directly
        self.ai = copy.deepcopy(stored)
    self.npt = int(self.input.get("npt", self.npt))
    self.unit = self.input.get("unit", self.unit)
def setup(self, kwargs):
    """Parse the kwargs and load the JSON integration-setup file.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    """
    logger.debug("Integrate.setup")
    Plugin.setup(self, kwargs)
    if "output_dir" not in self.input:
        self.log_error("output_dir not in input")  # this needs to be added in the SPEC macro
    self.dest_dir = os.path.abspath(self.input["output_dir"])
    if "json" not in self.input:
        self.log_error("json not in input")
    json_path = self.input.get("json", "")
    if not os.path.exists(json_path):
        self.log_error("Integration setup file (JSON): %s does not exist" % json_path,
                       do_raise=True)
    # BUGFIX: json.load(open(...)) leaked the file handle; use a context manager
    with open(json_path) as fp:
        self.json_data = json.load(fp)
    ai = make_ai(self.json_data)
    # cache integrators keyed by their string representation
    stored = self._ais.get(str(ai), ai)
    if stored is ai:
        self.ai = stored
    else:
        self.ai = stored.__deepcopy__()
    self.npt = int(self.json_data.get("npt", self.npt))
    self.unit = self.json_data.get("unit", self.unit)
    self.wavelength = self.json_data.get("wavelength", self.wavelength)
    # BUGFIX: direct indexing raised KeyError when the JSON file had no
    # "mask" / "do_polarziation" entry; use .get like the sibling plugins do.
    if os.path.exists(self.json_data.get("mask", "")):
        self.mask = self.json_data.get("mask", self.mask)
    self.dummy = self.json_data.get("val_dummy", self.dummy)
    self.delta_dummy = self.json_data.get("delta_dummy", self.delta_dummy)
    if self.json_data.get("do_polarziation"):  # (sic) key spelling kept for compatibility
        self.polarization_factor = self.json_data.get(
            "polarization_factor", self.polarization_factor)
    self.do_SA = self.json_data.get("do_SA", self.do_SA)
    self.norm = self.json_data.get("norm", self.norm)  # need to be added in the spec macro
def setup(self, kwargs=None):
    """Delegate the setup entirely to the base Plugin class."""
    Plugin.setup(self, kwargs)
def setup(self, kwargs=None):
    """Parse the input: destination dir, metadata job, C216 file, images to save.

    Waits (up to TIMEOUT) for a companion metadata job, copies the C216
    metadata / raw / dark files into the destination directory on background
    threads, and reads the scaling options and I1/time arrays.
    """
    Plugin.setup(self, kwargs)
    if "output_dir" not in self.input:
        self.log_error("output_dir not in input")
    self.dest = os.path.abspath(self.input["output_dir"])
    if "unit" in self.input:
        self.unit = self.input.get("unit")
    if "metadata_job" in self.input:
        # synchronize with the metadata plugin job before going further
        job_id = int(self.input.get("metadata_job"))
        status = Job.synchronize_job(job_id, self.TIMEOUT)
        abort_time = time.time() + self.TIMEOUT
        while status == Job.STATE_UNINITIALIZED:
            # Wait for job to start
            time.sleep(1)
            status = Job.synchronize_job(job_id, self.TIMEOUT)
            if time.time() > abort_time:
                self.log_error("Timeout while waiting metadata plugin to finish")
                break
        if status == Job.STATE_SUCCESS:
            self.metadata_plugin = Job.getJobFromId(job_id)
        else:
            self.log_error("Metadata plugin ended in %s: aborting myself" % status)
    if not os.path.isdir(self.dest):
        os.makedirs(self.dest)
    c216_filename = os.path.abspath(self.input.get("c216_filename", ""))
    # copy the metadata file into dest (background thread) unless already there
    if (os.path.dirname(c216_filename) != self.dest) and (os.path.basename(c216_filename) not in os.listdir(self.dest)):
        self.output_hdf5["metadata"] = os.path.join(self.dest, os.path.basename(c216_filename))
        m = threading.Thread(target=shutil.copy, name="copy metadata", args=(c216_filename, self.dest))
        m.start()
    if "to_save" in self.input:
        to_save = self.input["to_save"][:]
        if type(to_save) in StringTypes:
            # fix a bug from spec ...
            self.to_save = [i.strip('[\\] ",') for i in to_save.split()]
            self.log_warning("processing planned: " + " ".join(self.to_save))
        else:
            self.to_save = to_save
    if "image_file" not in self.input:
        self.log_error("image_file not in input")
    self.image_file = self.input["image_file"]
    if not os.path.exists(self.image_file):
        if not self.image_file.startswith("/"):
            # prepend the dirname of the c216
            image_file = os.path.join(os.path.dirname(c216_filename), self.image_file)
            if os.path.exists(image_file):
                self.image_file = image_file
            else:
                self.log_error("image_file %s does not exist" % self.image_file)
    self.dark_filename = self.input.get("dark_filename")
    # NOTE(review): indentation of the following two sections was
    # reconstructed from a whitespace-mangled source — confirm nesting.
    if "raw" in self.to_save:
        if os.path.dirname(self.image_file) != self.dest:
            t = threading.Thread(target=shutil.copy, name="copy raw", args=(self.image_file, self.dest))
            t.start()
        self.output_hdf5["raw"] = os.path.join(self.dest, os.path.basename(self.image_file))
    if type(self.dark_filename) in StringTypes and os.path.exists(self.dark_filename):
        if os.path.dirname(self.dark_filename) != self.dest:
            d = threading.Thread(target=shutil.copy, name="copy dark", args=(self.dark_filename, self.dest))
            d.start()
        self.output_hdf5["dark"] = os.path.join(self.dest, os.path.basename(self.dark_filename))
    self.scaling_factor = float(self.input.get("scaling_factor", 1.0))
    self.correct_solid_angle = bool(self.input.get("correct_solid_angle", True))
    self.correct_I1 = bool(self.input.get("correct_I1", True))
    self.I1, self.t = self.load_I1_t(c216_filename)
def setup(self, kwargs):
    """Configure the azimuthal integrator from the plugin's data input.

    Builds a pyFAI ``AzimuthalIntegrator`` from either a Fit2D or a pyFAI
    geometry description, re-uses a cached integrator with identical
    parameters when available, then loads the image and applies
    dark/flat/mask/wavelength/output/dummy settings.

    :param kwargs: dict with the plugin parameters, forwarded to Plugin.setup
    :raises RuntimeError: when neither geometry flavour is provided
    """
    Plugin.setup(self, kwargs)
    logger.debug("PluginPyFAIv1_0.setup")
    # BUGFIX: `sdi` was used throughout this method without ever being
    # assigned (guaranteed NameError at runtime).  Bind it to the data-model
    # input; presumably the EDNA-style data input, consistent with the
    # `self.dataInput.input` access below — TODO confirm against callers.
    sdi = self.dataInput
    ai = pyFAI.AzimuthalIntegrator()
    if sdi.geometryFit2D is not None:
        xsGeometry = sdi.geometryFit2D
        detector = self.getDetector(xsGeometry.detector)
        d = {
            "direct": EDUtilsUnit.getSIValue(xsGeometry.distance) * 1000,  # fit2D takes the distance in mm
            "centerX": xsGeometry.beamCentreInPixelsX.value,
            "centerY": xsGeometry.beamCentreInPixelsY.value,
            "tilt": xsGeometry.angleOfTilt.value,
            "tiltPlanRotation": xsGeometry.tiltRotation.value
        }
        d.update(detector.getFit2D())
        ai.setFit2D(**d)
    elif sdi.geometryPyFAI is not None:
        xsGeometry = sdi.geometryPyFAI
        detector = self.getDetector(xsGeometry.detector)
        d = {
            "dist": EDUtilsUnit.getSIValue(xsGeometry.sampleDetectorDistance),
            "poni1": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence1),
            "poni2": EDUtilsUnit.getSIValue(xsGeometry.pointOfNormalIncidence2),
            "rot1": EDUtilsUnit.getSIValue(xsGeometry.rotation1),
            "rot2": EDUtilsUnit.getSIValue(xsGeometry.rotation2),
            "rot3": EDUtilsUnit.getSIValue(xsGeometry.rotation3)
        }
        d.update(detector.getPyFAI())
        ai.setPyFAI(**d)
    else:
        strError = "Geometry definition in %s, not recognized as a valid geometry%s %s" % (
            sdi, os.linesep, sdi.marshal())
        self.ERROR(strError)
        raise RuntimeError(strError)
    ########################################################################
    # Choose the azimuthal integrator
    ########################################################################
    with self.__class__._sem:
        # cache integrators per geometry-parameter tuple under a class lock
        if tuple(ai.param) in self.__class__._dictGeo:
            self.ai = self.__class__._dictGeo[tuple(ai.param)]
        else:
            self.__class__._dictGeo[tuple(ai.param)] = ai
            self.ai = ai
    self.data = EDUtilsArray.getArray(self.dataInput.input).astype(float)
    if sdi.dark is not None:
        self.data -= EDUtilsArray.getArray(sdi.dark)
    if sdi.flat is not None:
        self.data /= EDUtilsArray.getArray(sdi.flat)
    if sdi.mask is not None:
        self.mask = EDUtilsArray.getArray(sdi.mask)
    if sdi.wavelength is not None:
        self.ai.wavelength = EDUtilsUnit.getSIValue(sdi.wavelength)
    if sdi.output is not None:
        self.strOutputFile = sdi.output.path.value
    if sdi.dummy is not None:
        self.dummy = sdi.dummy.value
    if sdi.deltaDummy is not None:
        self.delta_dummy = sdi.deltaDummy.value
    if sdi.nbPt:
        self.nbPt = sdi.nbPt.value
def setup(self, kwargs=None):
    """Perform the setup of the job, mainly parsing of the kwargs.

    :param kwargs: dict with parameters.
    :return: None
    """
    logger.debug("IntegrateManyFrames.setup")
    Plugin.setup(self, kwargs)
    self.input_files = self.input.get("input_files")
    if not self.input_files:
        self.log_error("InputError: input_files not in input.")
    if not isinstance(self.input_files, list):
        # a single string is treated as a glob pattern
        self.input_files = glob.glob(self.input_files)
        self.input_files.sort()
    if "output_file" not in self.input:
        # warning only (do_raise=False): fall back to the first input's directory
        self.log_error(
            "InputWarning: output_file not in input, save in input directory",
            do_raise=False)
        self.output_file = os.path.join(
            os.path.dirname(self.input_files[0]), "output.h5")
    else:
        self.output_file = os.path.abspath(self.input["output_file"])
    if not self.output_file.endswith(".h5"):
        self.output_file = self.output_file + ".h5"
    poni_file = self.input.get("poni_file")
    if not poni_file:
        self.log_error("InputError: poni_file not in input.")
    ai = pyFAI.load(poni_file)
    # integrators are cached per poni-file; re-use the stored instance,
    # otherwise work on a deep copy so jobs do not share mutable state
    stored = self._ais.get(poni_file, ai)
    if stored is ai:
        self.ai = stored
    else:
        self.ai = copy.deepcopy(stored)
    self.npt = int(self.input.get("npt", self.npt))
    self.npt_azim = self.input.get("npt_azim", self.npt_azim)
    self.unit = self.input.get("unit", self.unit)
    self.wavelength = self.input.get("wavelength", self.wavelength)
    if os.path.exists(self.input.get("mask", "")):
        self.mask = fabio.open(self.input["mask"]).data
    self.dummy = self.input.get("dummy", self.dummy)
    self.delta_dummy = self.input.get("delta_dummy", self.delta_dummy)
    # NOTE(review): key is misspelled ("do_polarziation") — kept because
    # callers presumably send this exact spelling; confirm before fixing
    if self.input.get("do_polarziation"):
        self.polarization_factor = self.input.get("polarization_factor",
                                                  self.polarization_factor)
    self.do_SA = self.input.get("do_SA", self.do_SA)
    self.norm = self.input.get("norm", self.norm)
    self.method = self.input.get("method", self.method)
    self.save_raw = self.input.get("save_raw", self.save_raw)
    self.integration_method = self.input.get("integration_method", self.integration_method)
    self.sigma_clip_thresold = self.input.get("sigma_clip_thresold",
                                              self.sigma_clip_thresold)
    self.sigma_clip_max_iter = self.input.get("sigma_clip_max_iter",
                                              self.sigma_clip_max_iter)
    self.medfilt1d_percentile = self.input.get("medfilt1d_percentile",
                                               self.medfilt1d_percentile)
    self.raw_compression = self.input.get("raw_compression", self.raw_compression)
    if self.save_raw:
        # raw frames are written by a dedicated saver thread fed via a queue
        dataset = self.prepare_raw_hdf5(self.raw_compression)
        self.queue_saver = Queue()
        self.raw_saver = RawSaver(self.queue_saver, self.quit_event, dataset)
        self.raw_saver.start()
    # create the pool of workers
    self.pool = Reader.build_pool(
        (self.queue_in, self.queue_out, self.quit_event), self.pool_size)
def setup(self, kwargs=None):
    """Parse the input: destination dir, metadata job, C216 file, images to save.

    Waits (up to TIMEOUT) for a companion metadata job, copies the C216
    metadata / raw / dark files into the destination directory on background
    threads, and reads the scaling options and I1/time arrays.
    """
    Plugin.setup(self, kwargs)
    if "output_dir" not in self.input:
        self.log_error("output_dir not in input")
    self.dest = os.path.abspath(self.input["output_dir"])
    if "metadata_job" in self.input:
        # synchronize with the metadata plugin job before going further
        job_id = int(self.input.get("metadata_job"))
        status = Job.synchronize_job(job_id, self.TIMEOUT)
        abort_time = time.time() + self.TIMEOUT
        while status == Job.STATE_UNINITIALIZED:
            # Wait for job to start
            time.sleep(1)
            status = Job.synchronize_job(job_id, self.TIMEOUT)
            if time.time() > abort_time:
                self.log_error(
                    "Timeout while waiting metadata plugin to finish")
                break
        if status == Job.STATE_SUCCESS:
            self.metadata_plugin = Job.getJobFromId(job_id)
        else:
            self.log_error("Metadata plugin ended in %s: aborting myself" % status)
    if not os.path.isdir(self.dest):
        os.makedirs(self.dest)
    c216_filename = os.path.abspath(self.input.get("c216_filename", ""))
    # copy the metadata file into dest (background thread) unless already there
    if (os.path.dirname(c216_filename) != self.dest) and (
            os.path.basename(c216_filename) not in os.listdir(self.dest)):
        self.output_hdf5["metadata"] = os.path.join(
            self.dest, os.path.basename(c216_filename))
        m = threading.Thread(target=shutil.copy,
                             name="copy metadata",
                             args=(c216_filename, self.dest))
        m.start()
    if "to_save" in self.input:
        to_save = self.input["to_save"][:]
        if type(to_save) in StringTypes:
            # fix a bug from spec ...
            self.to_save = [i.strip('[\\] ",') for i in to_save.split()]
            self.log_warning("processing planned: " + " ".join(self.to_save))
        else:
            self.to_save = to_save
    if "image_file" not in self.input:
        self.log_error("image_file not in input")
    self.image_file = self.input["image_file"]
    if not os.path.exists(self.image_file):
        if not self.image_file.startswith("/"):
            # prepend the dirname of the c216
            image_file = os.path.join(os.path.dirname(c216_filename), self.image_file)
            if os.path.exists(image_file):
                self.image_file = image_file
            else:
                self.log_error("image_file %s does not exist" % self.image_file)
    self.dark_filename = self.input.get("dark_filename")
    # NOTE(review): indentation of the following two sections was
    # reconstructed from a whitespace-mangled source — confirm nesting.
    if "raw" in self.to_save:
        if os.path.dirname(self.image_file) != self.dest:
            t = threading.Thread(target=shutil.copy,
                                 name="copy raw",
                                 args=(self.image_file, self.dest))
            t.start()
        self.output_hdf5["raw"] = os.path.join(
            self.dest, os.path.basename(self.image_file))
    if type(self.dark_filename) in StringTypes and os.path.exists(
            self.dark_filename):
        if os.path.dirname(self.dark_filename) != self.dest:
            d = threading.Thread(target=shutil.copy,
                                 name="copy dark",
                                 args=(self.dark_filename, self.dest))
            d.start()
        self.output_hdf5["dark"] = os.path.join(
            self.dest, os.path.basename(self.dark_filename))
    self.scaling_factor = float(self.input.get("scaling_factor", 1.0))
    self.correct_solid_angle = bool(
        self.input.get("correct_solid_angle", True))
    self.correct_I1 = bool(self.input.get("correct_I1", True))
    self.I1, self.t = self.load_I1_t(c216_filename)
def setup(self, kwargs=None):
    """Forward the keyword arguments to the base Plugin setup; nothing else."""
    Plugin.setup(self, kwargs)
def setup(self, kwargs=None):
    """Perform the setup of the job, mainly parsing of the kwargs.

    :param kwargs: dict with parameters.
    :return: None
    """
    logger.debug("IntegrateManyFrames.setup")
    Plugin.setup(self, kwargs)
    self.input_files = self.input.get("input_files")
    if not self.input_files:
        self.log_error("InputError: input_files not in input.")
    if not isinstance(self.input_files, list):
        # a single string is treated as a glob pattern
        self.input_files = glob.glob(self.input_files)
        self.input_files.sort()
    if "output_file" not in self.input:
        # warning only (do_raise=False): fall back to the first input's directory
        self.log_error("InputWarning: output_file not in input, save in input directory",
                       do_raise=False)
        self.output_file = os.path.join(os.path.dirname(self.input_files[0]), "output.h5")
    else:
        self.output_file = os.path.abspath(self.input["output_file"])
    if not self.output_file.endswith(".h5"):
        self.output_file = self.output_file + ".h5"
    poni_file = self.input.get("poni_file")
    if not poni_file:
        self.log_error("InputError: poni_file not in input.")
    self.ai = pyFAI.load(poni_file)
    # (previous commented-out per-poni caching code removed as dead code)
    self.npt = int(self.input.get("npt", self.npt))
    self.npt_azim = self.input.get("npt_azim", self.npt_azim)
    self.unit = self.input.get("unit", self.unit)
    self.wavelength = self.input.get("wavelength", self.wavelength)
    if os.path.exists(self.input.get("mask", "")):
        self.mask = fabio.open(self.input["mask"]).data
    self.dummy = self.input.get("dummy", self.dummy)
    self.delta_dummy = self.input.get("delta_dummy", self.delta_dummy)
    if self.input.get("do_polarziation"):  # (sic) key spelling kept for compatibility
        self.polarization_factor = self.input.get("polarization_factor",
                                                  self.polarization_factor)
    self.do_SA = self.input.get("do_SA", self.do_SA)
    self.norm = self.input.get("norm", self.norm)
    self.save_raw = self.input.get("save_raw", self.save_raw)
    self.integration_method = self.input.get("integration_method", self.integration_method)
    self.sigma_clip_thresold = self.input.get("sigma_clip_thresold",
                                              self.sigma_clip_thresold)
    self.sigma_clip_max_iter = self.input.get("sigma_clip_max_iter",
                                              self.sigma_clip_max_iter)
    self.medfilt1d_percentile = self.input.get("medfilt1d_percentile",
                                               self.medfilt1d_percentile)
    method = self.input.get("method", self.method)
    integration_dim = 1 if "1" in self.integration_method else 2
    # BUGFIX: `unicode` does not exist on Python 3 (this codebase uses
    # f-strings elsewhere), so the old isinstance(method, (str, unicode))
    # would raise NameError; plain str is the only textual type here.
    if isinstance(method, str):
        self.method = IntegrationMethod.select_old_method(integration_dim, method)
    else:
        self.method = IntegrationMethod.select_one_available(method,
                                                             dim=integration_dim,
                                                             degradable=True)
    # BUGFIX: stray debugging print() replaced with a debug log record
    logger.debug("Integration method: %s", self.method)
    self.raw_compression = self.input.get("raw_compression", self.raw_compression)
    if self.save_raw:
        self.prepare_raw_hdf5(self.raw_compression)