def teardown(self):
    self.output["c216_filename"] = self.hdf5_filename
    if self.group:
        self.output["c216_path"] = self.group.name
        self.group.parent["end_time"] = numpy.string_(get_isotime())
    if self.hdf5:
        self.hdf5.close()
    Plugin.teardown(self)
def save_result(self, out, I, sigma=None):
    """Save the result of the work as an HDF5 file

    :param out: scattering result
    :param I: intensities as 2D array
    :param sigma: standard deviation of I as 2D array, if available
    """
    logger.debug("IntegrateManyFrames.save_result")
    isotime = numpy.string_(get_isotime())
    try:
        nxs = pyFAI.io.Nexus(self.output_file, "a")
    except IOError as error:
        self.log_warning("invalid HDF5 file %s: remove and re-create!\n%s" % (self.output_file, error))
        os.unlink(self.output_file)
        nxs = pyFAI.io.Nexus(self.output_file)
    entry = nxs.new_entry("entry", program_name="dahu",
                          title="ID15.IntegrateManyFrames")
    entry["program_name"].attrs["version"] = dahu_version
    entry["plugin_name"] = numpy.string_(".".join((os.path.splitext(os.path.basename(__file__))[0],
                                                   self.__class__.__name__)))
    entry["plugin_name"].attrs["version"] = version
    entry["input"] = numpy.string_(json.dumps(self.input))
    entry["input"].attrs["format"] = 'json'
    subentry = nxs.new_class(entry, "PyFAI", class_type="NXprocess")
    subentry["program"] = numpy.string_("PyFAI")
    subentry["version"] = numpy.string_(pyFAI.version)
    subentry["date"] = isotime
    subentry["processing_type"] = numpy.string_(self.integration_method)
    coll = nxs.new_class(subentry, "process_integrate1d", class_type="NXdata")
    metadata_grp = coll.require_group("parameters")
    for key, value in self.ai.getPyFAI().items():
        metadata_grp[key] = numpy.string_(value)
    scale, unit = str(out.unit).split("_", 1)
    coll[scale] = out.radial.astype("float32")
    coll[scale].attrs["interpretation"] = "scalar"
    coll[scale].attrs["unit"] = unit
    coll["I"] = I.astype("float32")
    coll["I"].attrs["interpretation"] = "spectrum"
    coll["I"].attrs["axes"] = ["t", scale]
    coll["I"].attrs["signal"] = "1"
    if sigma is not None:
        coll["sigma"] = sigma.astype("float32")
        coll["sigma"].attrs["interpretation"] = "spectrum"
        coll["sigma"].attrs["axes"] = ["t", scale]
    nxs.close()
def save_result(self, out, I, sigma=None):
    """Save the result of the work as an HDF5 file

    :param out: scattering result
    :param I: intensities as 2D array
    :param sigma: standard deviation of I as 2D array, if available
    """
    logger.debug("IntegrateManyFrames.save_result")
    isotime = numpy.string_(get_isotime())
    try:
        nxs = pyFAI.io.Nexus(self.output_file, "a")
    except IOError as error:
        self.log_warning("invalid HDF5 file %s: remove and re-create!\n%s" % (self.output_file, error))
        os.unlink(self.output_file)
        nxs = pyFAI.io.Nexus(self.output_file)
    entry = nxs.new_entry("entry", program_name="dahu",
                          title="ID15.IntegrateManyFrames")
    entry["program_name"].attrs["version"] = dahu_version
    entry["plugin_name"] = numpy.string_(".".join((os.path.splitext(os.path.basename(__file__))[0],
                                                   self.__class__.__name__)))
    entry["plugin_name"].attrs["version"] = version
    entry["input"] = numpy.string_(json.dumps(self.input))
    entry["input"].attrs["format"] = 'json'
    subentry = nxs.new_class(entry, "PyFAI", class_type="NXprocess")
    subentry["program"] = numpy.string_("PyFAI")
    subentry["version"] = numpy.string_(pyFAI.version)
    subentry["date"] = isotime
    subentry["processing_type"] = numpy.string_(self.integration_method)
    coll = nxs.new_class(subentry, "process_%s" % self.integration_method,
                         class_type="NXdata")
    metadata_grp = coll.require_group("parameters")
    for key, value in self.ai.getPyFAI().items():
        metadata_grp[key] = numpy.string_(value)
    scale, unit = str(out.unit).split("_", 1)
    coll[scale] = out.radial.astype("float32")
    coll[scale].attrs["interpretation"] = "scalar"
    coll[scale].attrs["unit"] = unit
    coll["I"] = I.astype("float32")
    coll["I"].attrs["interpretation"] = "spectrum"
    coll["I"].attrs["signal"] = "1"
    coll.attrs["signal"] = "I"
    coll.attrs["axes"] = [".", scale]
    if sigma is not None:
        coll["errors"] = sigma.astype("float32")
        coll["errors"].attrs["interpretation"] = "spectrum"
    nxs.close()
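# Hedged read-back sketch (not part of the plugin): open a file written by the
# second save_result variant above with plain h5py. The entry and process group
# names depend on runtime values, so "entry_0000" and "process_integrate1d" are
# illustrative assumptions, not guaranteed paths.
def read_result(filename):
    import h5py
    with h5py.File(filename, "r") as h5:
        nxdata = h5["entry_0000/PyFAI/process_integrate1d"]
        I = nxdata["I"][()]
        sigma = nxdata["errors"][()] if "errors" in nxdata else None
    return I, sigma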
def create_hdf5(self):
    """Create one HDF5 file per output

    Also initialize all workers
    """
    basename = os.path.splitext(os.path.basename(self.image_file))[0]
    if basename.endswith("_raw"):
        basename = basename[:-4]
    json_config = json.dumps(self.input)
    isotime = numpy.string_(get_isotime())
    detector_grp = self.input_nxs.find_detector(all=True)
    detector_name = "undefined"
    for grp in detector_grp:
        if "detector_information/name" in grp:
            detector_name = grp["detector_information/name"].value
    md_entry = self.metadata_nxs.get_entries()[0]
    instruments = self.metadata_nxs.get_class(md_entry, "NXinstrument")
    if instruments:
        collections = self.metadata_nxs.get_class(instruments[0], "NXcollection")
        to_copy = collections + detector_grp
    else:
        to_copy = detector_grp
    for ext in self.to_save:
        if ext == "raw":
            continue
        outfile = os.path.join(self.dest, "%s_%s.h5" % (basename, ext))
        self.output_hdf5[ext] = outfile
        try:
            nxs = pyFAI.io.Nexus(outfile, "a")
        except IOError as error:
            self.log_warning("invalid HDF5 file %s: remove and re-create!\n%s" % (outfile, error))
            os.unlink(outfile)
            nxs = pyFAI.io.Nexus(outfile)
        entry = nxs.new_entry("entry", program_name="dahu",
                              title=self.image_file + ":" + self.images_ds.name)
        entry["program_name"].attrs["version"] = dahu.version
        entry["plugin_name"] = numpy.string_(".".join((os.path.splitext(os.path.basename(__file__))[0],
                                                       self.__class__.__name__)))
        entry["plugin_name"].attrs["version"] = version
        entry["input"] = numpy.string_(json_config)
        entry["input"].attrs["format"] = 'json'
        entry["detector_name"] = numpy.string_(detector_name)
        subentry = nxs.new_class(entry, "PyFAI", class_type="NXprocess")
        subentry["program"] = numpy.string_("PyFAI")
        subentry["version"] = numpy.string_(pyFAI.version)
        subentry["date"] = isotime
        subentry["processing_type"] = numpy.string_(ext)
        coll = nxs.new_class(subentry, "process_" + ext, class_type="NXdata")
        metadata_grp = coll.require_group("parameters")
        for key, val in self.metadata.iteritems():
            if type(val) in [str, unicode]:
                metadata_grp[key] = numpy.string_(val)
            else:
                metadata_grp[key] = val
        # copy metadata from other files:
        for grp in to_copy:
            grp_name = posixpath.split(grp.name)[-1]
            if grp_name not in coll:
                toplevel = coll.require_group(grp_name)
                for k, v in grp.attrs.items():
                    toplevel.attrs[k] = v
            else:
                toplevel = coll[grp_name]

            def grpdeepcopy(name, obj):
                nxs.deep_copy(name, obj, toplevel=toplevel, excluded=["data"])

            grp.visititems(grpdeepcopy)
        shape = self.in_shape[:]
        if self.npt1_rad is None and "npt1_rad" in self.input:
            self.npt1_rad = int(self.input["npt1_rad"])
        else:
            qmax = self.ai.qArray(self.in_shape[-2:]).max()
            dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
            self.npt1_rad = int(qmax / dqmin)
        if ext == "azim":
            if "npt2_rad" in self.input:
                self.npt2_rad = int(self.input["npt2_rad"])
            else:
                qmax = self.ai.qArray(self.in_shape[-2:]).max()
                dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
                self.npt2_rad = int(qmax / dqmin)
            if "npt2_azim" in self.input:
                self.npt2_azim = int(self.input["npt2_azim"])
            else:
                chi = self.ai.chiArray(self.in_shape[-2:])
                self.npt2_azim = int(numpy.degrees(chi.max() - chi.min()))
            shape = (self.in_shape[0], self.npt2_azim, self.npt2_rad)
            ai = self.ai.__deepcopy__()
            worker = pyFAI.worker.Worker(ai, self.in_shape[-2:],
                                         (self.npt2_azim, self.npt2_rad), self.unit)
            if self.flat is not None:
                worker.ai.set_flatfield(self.flat)
            if self.dark is not None:
                worker.ai.set_darkcurrent(self.dark)
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist)
            else:
                worker.set_normalization_factor(1.0)
            worker.correct_solid_angle = self.correct_solid_angle
            self.log_warning("Normalization factor: %s" % worker.normalization_factor)
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            if self.input.get("do_polarization"):
                worker.polarization_factor = self.input.get("polarization_factor")
            self.workers[ext] = worker
        elif ext.startswith("ave"):
            if "_" in ext:
                unit = ext.split("_", 1)[1]
                npt1_rad = self.input.get("npt1_rad_" + unit, self.npt1_rad)
            else:
                unit = self.unit
                npt1_rad = self.npt1_rad
            shape = (self.in_shape[0], npt1_rad)
            worker = pyFAI.worker.Worker(self.ai, self.in_shape[-2:],
                                         (1, npt1_rad), unit=unit)
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist)
            else:
                worker.set_normalization_factor(1.0)
            worker.correct_solid_angle = self.correct_solid_angle
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            if self.input.get("do_polarization"):
                worker.polarization_factor = True
            self.workers[ext] = worker
        elif ext == "sub":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark,
                                                  dummy=self.dummy,
                                                  delta_dummy=self.delta_dummy)
            self.workers[ext] = worker
        elif ext == "flat":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark,
                                                  flat=self.flat,
                                                  dummy=self.dummy,
                                                  delta_dummy=self.delta_dummy)
            self.workers[ext] = worker
        elif ext == "solid":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark,
                                                  flat=self.flat,
                                                  solidangle=self.get_solid_angle(),
                                                  dummy=self.dummy,
                                                  delta_dummy=self.delta_dummy,
                                                  polarization=self.polarization)
            self.workers[ext] = worker
        elif ext == "dist":
            worker = pyFAI.worker.DistortionWorker(dark=self.dark,
                                                   flat=self.flat,
                                                   solidangle=self.get_solid_angle(),
                                                   dummy=self.dummy,
                                                   delta_dummy=self.delta_dummy,
                                                   polarization=self.polarization,
                                                   detector=self.ai.detector)
            self.workers[ext] = worker
        elif ext == "norm":
            worker = pyFAI.worker.DistortionWorker(dark=self.dark,
                                                   flat=self.flat,
                                                   solidangle=self.get_solid_angle(),
                                                   dummy=self.dummy,
                                                   delta_dummy=self.delta_dummy,
                                                   polarization=self.polarization,
                                                   detector=self.ai.detector)
            self.workers[ext] = worker
        else:
            self.log_warning("unknown treatment %s" % ext)
        output_ds = coll.create_dataset("data", shape, "float32",
                                        chunks=(1,) + shape[1:],
                                        maxshape=(None,) + shape[1:])
        if self.t is not None:
            coll["t"] = self.t
            coll["t"].attrs["axis"] = "1"
            coll["t"].attrs["interpretation"] = "scalar"
            coll["t"].attrs["unit"] = "s"
        # output_ds.attrs["NX_class"] = "NXdata" -> see group
        output_ds.attrs["signal"] = "1"
        if ext == "azim":
            output_ds.attrs["axes"] = ["t", "chi", "q"]
            output_ds.attrs["interpretation"] = "image"
        elif ext == "ave":
            output_ds.attrs["axes"] = ["t", "q"]
            output_ds.attrs["interpretation"] = "spectrum"
        elif ext in ("sub", "flat", "solid", "dist"):
            output_ds.attrs["axes"] = "t"
            output_ds.attrs["interpretation"] = "image"
        else:
            output_ds.attrs["interpretation"] = "image"
        self.output_ds[ext] = output_ds
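# A standalone sketch of the bin-count heuristic used by create_hdf5 above when
# "npt1_rad"/"npt2_rad" are absent from the input: the number of radial bins is
# qmax divided by twice the smallest pixel-to-pixel q-step, so one bin spans
# roughly two detector pixels. "ai" is a pyFAI AzimuthalIntegrator and "shape"
# the 2D detector shape; the function name is illustrative, not plugin API.
def guess_npt_rad(ai, shape):
    qmax = ai.qArray(shape).max()
    dqmin = ai.deltaQ(shape).min() * 2.0
    return int(qmax / dqmin)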
def create_hdf5(self):
    """Create an HDF5 file and data structure"""
    try:
        self.hdf5 = h5py.File(self.hdf5_filename, 'a')
    except IOError as error:
        os.unlink(self.hdf5_filename)
        self.log_warning("Unable to open %s: %s. Removing file and starting from scratch" % (self.hdf5_filename, error))
        self.hdf5 = h5py.File(self.hdf5_filename, mode="w")  # re-create from scratch
    if not self.entry.endswith("_"):
        self.entry += "_"
    # count the existing entries sharing this prefix to build a unique name
    entries = sum(1 for i in self.hdf5 if i.startswith(self.entry))
    self.entry = posixpath.join("", "%s%04d" % (self.entry, entries))
    self.instrument = posixpath.join(self.entry, self.instrument)
    self.group = self.hdf5.require_group(self.instrument)
    self.group.parent.attrs["NX_class"] = "NXentry"
    self.group.attrs["NX_class"] = "NXinstrument"
    # TimeFrameGenerator
    self.tfg_grp = self.hdf5.require_group(posixpath.join(self.instrument, "TFG"))
    self.tfg_grp.attrs["NX_class"] = "NXcollection"
    self.tfg_grp["device"] = numpy.string_(self.c216)
    # MultiCounterScaler
    self.mcs_grp = self.hdf5.require_group(posixpath.join(self.instrument, "MCS"))
    self.mcs_grp.attrs["NX_class"] = "NXcollection"
    self.mcs_grp["device"] = numpy.string_(self.c216)
    # Static metadata
    self.info_grp = self.hdf5.require_group(posixpath.join(self.instrument, "parameters"))
    self.info_grp.attrs["NX_class"] = "NXcollection"
    for field, value in self.input2.get("Info", {}).items():
        if field not in self.TO_SKIP and not isinstance(value, dict):
            try:
                value.encode("ascii")
            except UnicodeEncodeError:
                self.log_warning("Unicode Error in field %s: %s, skipping" % (field, value))
            except AttributeError as err:
                self.log_warning("Attribute Error %s\nin field %s: %s, forcing to string." % (err, field, value))
                self.info_grp[field] = numpy.string_(value)
            else:
                self.info_grp[field] = numpy.string_(value)
    start_time = self.input2.get("HMStartTime", get_isotime())
    # Factor
    HS32F = self.input2.get("HS32F")
    if HS32F is not None:
        self.mcs_grp["HS32F"] = HS32F
    # Zero
    HS32Z = self.input2.get("HS32Z")
    if HS32Z is not None:
        self.mcs_grp["HS32Z"] = HS32Z
    # Name
    HS32N = self.input2.get("HS32N")
    if HS32N is not None:
        self.mcs_grp["HS32N"] = numpy.array([str(i) for i in HS32N])
    # Mode
    HS32M = self.input2.get("HS32M")
    if HS32M is not None:
        self.mcs_grp["HS32M"] = HS32M
    if HS32N and HS32Z and HS32F:
        self.mcs_grp.require_group("interpreted")
    self.group.parent["title"] = numpy.string_("id02.metadata")
    self.group.parent["program"] = numpy.string_("Dahu")
    self.group.parent["start_time"] = numpy.string_(start_time)
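# Minimal sketch of the entry-naming scheme used above: successive runs get
# zero-padded, numbered entries under the same prefix ("entry_0000",
# "entry_0001", ...). "prefix" and "existing" are illustrative names, not part
# of the plugin API.
def next_entry_name(prefix, existing):
    if not prefix.endswith("_"):
        prefix += "_"
    count = sum(1 for name in existing if name.startswith(prefix))
    return "%s%04d" % (prefix, count)

# next_entry_name("entry", ["entry_0000", "entry_0001"]) -> "entry_0002"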
def create_hdf5(self):
    """Create one HDF5 file per output

    Also initialize all workers
    """
    basename = os.path.splitext(os.path.basename(self.image_file))[0]
    if basename.endswith("_raw"):
        basename = basename[:-4]
    json_config = json.dumps(self.input)
    isotime = numpy.string_(get_isotime())
    detector_grp = self.input_nxs.find_detector(all=True)
    detector_name = "undefined"
    for grp in detector_grp:
        if "detector_information/name" in grp:
            detector_name = grp["detector_information/name"].value
    md_entry = self.metadata_nxs.get_entries()[0]
    instruments = self.metadata_nxs.get_class(md_entry, "NXinstrument")
    if instruments:
        collections = self.metadata_nxs.get_class(instruments[0], "NXcollection")
        to_copy = collections + detector_grp
    else:
        to_copy = detector_grp
    for ext in self.to_save:
        if ext == "raw":
            continue
        outfile = os.path.join(self.dest, "%s_%s.h5" % (basename, ext))
        self.output_hdf5[ext] = outfile
        try:
            nxs = pyFAI.io.Nexus(outfile, "a")
        except IOError as error:
            self.log_warning("invalid HDF5 file %s: remove and re-create!\n%s" % (outfile, error))
            os.unlink(outfile)
            nxs = pyFAI.io.Nexus(outfile)
        entry = nxs.new_entry("entry", program_name="dahu",
                              title=self.image_file + ":" + self.images_ds.name)
        entry["program_name"].attrs["version"] = dahu.version
        entry["plugin_name"] = numpy.string_(".".join((os.path.splitext(os.path.basename(__file__))[0],
                                                       self.__class__.__name__)))
        entry["plugin_name"].attrs["version"] = version
        entry["input"] = numpy.string_(json_config)
        entry["detector_name"] = numpy.string_(detector_name)
        subentry = nxs.new_class(entry, "PyFAI", class_type="NXprocess")
        subentry["program"] = numpy.string_("PyFAI")
        subentry["version"] = numpy.string_(pyFAI.version)
        subentry["date"] = isotime
        subentry["processing_type"] = numpy.string_(ext)
        coll = nxs.new_class(subentry, "process_" + ext, class_type="NXdata")
        metadata_grp = coll.require_group("parameters")
        for key, val in self.metadata.iteritems():
            if type(val) in [str, unicode]:
                metadata_grp[key] = numpy.string_(val)
            else:
                metadata_grp[key] = val
        # copy metadata from other files:
        for grp in to_copy:
            grp_name = posixpath.split(grp.name)[-1]
            if grp_name not in coll:
                toplevel = coll.require_group(grp_name)
                for k, v in grp.attrs.items():
                    toplevel.attrs[k] = v
            else:
                toplevel = coll[grp_name]

            def grpdeepcopy(name, obj):
                nxs.deep_copy(name, obj, toplevel=toplevel, excluded=["data"])

            grp.visititems(grpdeepcopy)
        shape = self.in_shape[:]
        if ext == "azim":
            if "npt2_rad" in self.input:
                self.npt2_rad = int(self.input["npt2_rad"])
            else:
                qmax = self.ai.qArray(self.in_shape[-2:]).max()
                dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
                self.npt2_rad = int(qmax / dqmin)
            if "npt2_azim" in self.input:
                self.npt2_azim = int(self.input["npt2_azim"])
            else:
                chi = self.ai.chiArray(self.in_shape[-2:])
                self.npt2_azim = int(numpy.degrees(chi.max() - chi.min()))
            shape = (self.in_shape[0], self.npt2_azim, self.npt2_rad)
            ai = self.ai.__deepcopy__()
            worker = pyFAI.worker.Worker(ai, self.in_shape[-2:],
                                         (self.npt2_azim, self.npt2_rad), "q_nm^-1")
            if self.flat is not None:
                worker.ai.set_flatfield(self.flat)
            if self.dark is not None:
                worker.ai.set_darkcurrent(self.dark)
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist / self.scaling_factor)
            else:
                worker.set_normalization_factor(1.0 / self.scaling_factor)
            worker.correct_solid_angle = self.correct_solid_angle
            self.log_warning("Normalization factor: %s" % worker.normalization_factor)
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            self.workers[ext] = worker
        elif ext == "ave":
            if "npt1_rad" in self.input:
                self.npt1_rad = int(self.input["npt1_rad"])
            else:
                qmax = self.ai.qArray(self.in_shape[-2:]).max()
                dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
                self.npt1_rad = int(qmax / dqmin)
            shape = (self.in_shape[0], self.npt1_rad)
            worker = pyFAI.worker.Worker(self.ai, self.in_shape[-2:],
                                         (1, self.npt1_rad), "q_nm^-1")
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist / self.scaling_factor)
            else:
                worker.set_normalization_factor(1.0 / self.scaling_factor)
            worker.correct_solid_angle = self.correct_solid_angle
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            self.workers[ext] = worker
        elif ext == "sub":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark)
            self.workers[ext] = worker
        elif ext == "flat":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark, flat=self.flat)
            self.workers[ext] = worker
        elif ext == "solid":
            worker = pyFAI.worker.PixelwiseWorker(dark=self.dark, flat=self.flat,
                                                  solidangle=self.get_solid_angle())
            self.workers[ext] = worker
        elif ext == "dist":
            worker = pyFAI.worker.DistortionWorker(dark=self.dark, flat=self.flat,
                                                   solidangle=self.get_solid_angle(),
                                                   detector=self.ai.detector)
            self.workers[ext] = worker
        elif ext == "norm":
            worker = pyFAI.worker.DistortionWorker(dark=self.dark, flat=self.flat,
                                                   solidangle=self.get_solid_angle(),
                                                   detector=self.ai.detector)
            self.workers[ext] = worker
        else:
            self.log_warning("unknown treatment %s" % ext)
        output_ds = coll.create_dataset("data", shape, "float32",
                                        chunks=(1,) + shape[1:],
                                        maxshape=(None,) + shape[1:])
        if self.t is not None:
            coll["t"] = self.t
            coll["t"].attrs["axis"] = "1"
            coll["t"].attrs["interpretation"] = "scalar"
            coll["t"].attrs["unit"] = "s"
        # output_ds.attrs["NX_class"] = "NXdata" -> see group
        output_ds.attrs["signal"] = "1"
        if ext == "azim":
            output_ds.attrs["axes"] = ["t", "chi", "q"]
            output_ds.attrs["interpretation"] = "image"
        elif ext == "ave":
            output_ds.attrs["axes"] = ["t", "q"]
            output_ds.attrs["interpretation"] = "spectrum"
        elif ext in ("sub", "flat", "solid", "dist"):
            output_ds.attrs["axes"] = "t"
            output_ds.attrs["interpretation"] = "image"
        else:
            output_ds.attrs["interpretation"] = "image"
        self.output_ds[ext] = output_ds
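# Sketch of the normalization factor computed in the create_hdf5 variants
# above: the solid angle subtended by one pixel (pixel area over distance
# squared), optionally divided by a user-supplied scaling factor. Parameter
# names are illustrative; lengths are in metres.
def pixel_solid_angle(pixel1, pixel2, dist, scaling_factor=1.0):
    return pixel1 * pixel2 / (dist * dist) / scaling_factor

# e.g. 172 um Pilatus pixels at 1 m:
# pixel_solid_angle(172e-6, 172e-6, 1.0) ~ 2.96e-8 steradian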
def create_hdf5(self):
    """Create one HDF5 file per output

    Also initialize all workers
    """
    basename = os.path.splitext(os.path.basename(self.image_file))[0]
    if basename.endswith("_raw"):
        basename = basename[:-4]
    isotime = str(get_isotime())
    detector_grp = self.input_nxs.find_detector(all=True)
    detector_name = "undefined"
    for grp in detector_grp:
        if "detector_information/name" in grp:
            detector_name = grp["detector_information/name"][()]
    md_entry = self.metadata_nxs.get_entries()[0]
    instruments = self.metadata_nxs.get_class(md_entry, "NXinstrument")
    if instruments:
        collections = self.metadata_nxs.get_class(instruments[0], "NXcollection")
        to_copy = collections + detector_grp
    else:
        to_copy = detector_grp
    for ext in self.to_save:
        if ext == "raw":
            continue
        outfile = os.path.join(self.dest, "%s_%s.h5" % (basename, ext))
        self.output_hdf5[ext] = outfile
        try:
            nxs = Nexus(outfile, mode="a", creator="dahu")
        except IOError as error:
            self.log_warning("invalid HDF5 file %s: remove and re-create!\n%s" % (outfile, error))
            os.unlink(outfile)
            nxs = Nexus(outfile, mode="w", creator="dahu")
        entry = nxs.new_entry("entry",
                              program_name=self.input.get("plugin_name",
                                                          fully_qualified_name(self.__class__)),
                              title=self.image_file + ":" + self.images_ds.name)
        entry["program_name"].attrs["version"] = __version__
        # configuration
        config_grp = nxs.new_class(entry, "configuration", "NXnote")
        config_grp["type"] = "text/json"
        config_grp["data"] = json.dumps(self.input, indent=2, separators=(",\r\n", ": "))
        entry["detector_name"] = str(detector_name)
        nxprocess = nxs.new_class(entry, "PyFAI", class_type="NXprocess")
        nxprocess["program"] = str("PyFAI")
        nxprocess["version"] = str(pyFAI.version)
        nxprocess["date"] = isotime
        nxprocess["processing_type"] = str(ext)
        nxdata = nxs.new_class(nxprocess, "result_" + ext, class_type="NXdata")
        entry.attrs["default"] = nxdata.name
        metadata_grp = nxprocess.require_group("parameters")
        for key, val in self.metadata.items():
            if type(val) in StringTypes:
                metadata_grp[key] = str(val)
            else:
                metadata_grp[key] = val
        # copy metadata from other files:
        for grp in to_copy:
            grp_name = posixpath.split(grp.name)[-1]
            if grp_name not in nxdata:
                toplevel = nxprocess.require_group(grp_name)
                for k, v in grp.attrs.items():
                    toplevel.attrs[k] = v
            else:
                toplevel = nxprocess[grp_name]

            def grpdeepcopy(name, obj):
                nxs.deep_copy(name, obj, toplevel=toplevel, excluded=["data"])

            grp.visititems(grpdeepcopy)
        shape = self.in_shape[:]
        if self.npt1_rad is None and "npt1_rad" in self.input:
            self.npt1_rad = int(self.input["npt1_rad"])
        else:
            qmax = self.ai.qArray(self.in_shape[-2:]).max()
            dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
            self.npt1_rad = int(qmax / dqmin)
        if ext == "azim":
            if "npt2_rad" in self.input:
                self.npt2_rad = int(self.input["npt2_rad"])
            else:
                qmax = self.ai.qArray(self.in_shape[-2:]).max()
                dqmin = self.ai.deltaQ(self.in_shape[-2:]).min() * 2.0
                self.npt2_rad = int(qmax / dqmin)
            if "npt2_azim" in self.input:
                self.npt2_azim = int(self.input["npt2_azim"])
            else:
                chi = self.ai.chiArray(self.in_shape[-2:])
                self.npt2_azim = int(numpy.degrees(chi.max() - chi.min()))
            shape = (self.in_shape[0], self.npt2_azim, self.npt2_rad)
            ai = self.ai.__copy__()
            worker = Worker(ai, self.in_shape[-2:],
                            (self.npt2_azim, self.npt2_rad), self.unit)
            if self.flat is not None:
                worker.ai.set_flatfield(self.flat)
            if self.dark is not None:
                worker.ai.set_darkcurrent(self.dark)
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist)
            else:
                worker.set_normalization_factor(1.0)
            worker.correct_solid_angle = self.correct_solid_angle
            self.log_warning("Normalization factor: %s" % worker.normalization_factor)
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            if self.input.get("do_polarization"):
                worker.polarization_factor = self.input.get("polarization_factor")
            self.workers[ext] = worker
        elif ext.startswith("ave"):
            if "_" in ext:
                unit = ext.split("_", 1)[1]
                npt1_rad = self.input.get("npt1_rad_" + unit, self.npt1_rad)
                ai = self.ai.__copy__()
            else:
                unit = self.unit
                npt1_rad = self.npt1_rad
                ai = self.ai
            shape = (self.in_shape[0], npt1_rad)
            worker = Worker(ai, self.in_shape[-2:], (1, npt1_rad), unit=unit)
            worker.output = "numpy"
            if self.in_shape[0] < 5:
                worker.method = "splitbbox"
            else:
                worker.method = "ocl_csr_gpu"
            if self.correct_solid_angle:
                worker.set_normalization_factor(self.ai.pixel1 * self.ai.pixel2 / self.ai.dist / self.ai.dist)
            else:
                worker.set_normalization_factor(1.0)
            worker.correct_solid_angle = self.correct_solid_angle
            worker.dummy = self.dummy
            worker.delta_dummy = self.delta_dummy
            if self.input.get("do_polarization"):
                worker.polarization_factor = True
            self.workers[ext] = worker
        elif ext == "sub":
            worker = PixelwiseWorker(dark=self.dark,
                                     dummy=self.dummy,
                                     delta_dummy=self.delta_dummy)
            self.workers[ext] = worker
        elif ext == "flat":
            worker = PixelwiseWorker(dark=self.dark,
                                     flat=self.flat,
                                     dummy=self.dummy,
                                     delta_dummy=self.delta_dummy)
            self.workers[ext] = worker
        elif ext == "solid":
            worker = PixelwiseWorker(dark=self.dark,
                                     flat=self.flat,
                                     solidangle=self.get_solid_angle(),
                                     dummy=self.dummy,
                                     delta_dummy=self.delta_dummy,
                                     polarization=self.polarization)
            self.workers[ext] = worker
        elif ext == "dist":
            worker = DistortionWorker(dark=self.dark,
                                      flat=self.flat,
                                      solidangle=self.get_solid_angle(),
                                      dummy=self.dummy,
                                      delta_dummy=self.delta_dummy,
                                      polarization=self.polarization,
                                      detector=self.ai.detector)
            self.workers[ext] = worker
            if self.distortion is None:
                self.distortion = worker.distortion
                self.cache_dis = str(self.ai.detector)
                if self.cache_dis in self.cache:
                    self.distortion.lut = self.cache[self.cache_dis]
                else:
                    self.distortion.calc_LUT()
                    self.cache[self.cache_dis] = self.distortion.lut
            else:
                worker.distortion = self.distortion
        elif ext == "norm":
            worker = DistortionWorker(dark=self.dark,
                                      flat=self.flat,
                                      solidangle=self.get_solid_angle(),
                                      dummy=self.dummy,
                                      delta_dummy=self.delta_dummy,
                                      polarization=self.polarization,
                                      detector=self.ai.detector)
            self.workers[ext] = worker
            if self.distortion is None and worker.distortion is not None:
                self.distortion = worker.distortion
                self.cache_dis = str(self.ai.detector)
                if self.cache_dis in self.cache:
                    self.distortion.lut = self.cache[self.cache_dis]
                else:
                    self.distortion.calc_LUT()
                    self.cache[self.cache_dis] = self.distortion.lut
            else:
                worker.distortion = self.distortion
        else:
            self.log_warning("unknown treatment %s" % ext)
        if len(shape) >= 3:
            compression = dict(COMPRESSION)
        else:
            compression = {}
        output_ds = nxdata.create_dataset("data", shape, dtype=numpy.float32,
                                          chunks=(1,) + shape[1:],
                                          maxshape=(None,) + shape[1:],
                                          **compression)
        nxdata.attrs["signal"] = "data"
        # output_ds.attrs["signal"] = "1"
        entry.attrs["default"] = nxdata.name
        if self.variance_formula is not None:
            error_ds = nxdata.create_dataset("data_errors", shape, dtype=numpy.float32,
                                             chunks=(1,) + shape[1:],
                                             maxshape=(None,) + shape[1:],
                                             **compression)
            # nxdata.attrs["uncertainties"] = "errors"
            self.output_ds[ext + "_err"] = error_ds
        if self.t is not None:
            nxdata["t"] = self.t
            nxdata["t"].attrs["interpretation"] = "scalar"
            nxdata["t"].attrs["unit"] = "s"
        if ext == "azim":
            nxdata.attrs["axes"] = [".", "chi", "q"]
            output_ds.attrs["interpretation"] = "image"
            if self.variance_formula is not None:
                error_ds.attrs["interpretation"] = "image"
        elif ext == "ave":
            nxdata.attrs["axes"] = [".", "q"]
            output_ds.attrs["interpretation"] = "spectrum"
            if self.variance_formula is not None:
                error_ds.attrs["interpretation"] = "spectrum"
        else:
            output_ds.attrs["interpretation"] = "image"
            if self.variance_formula is not None:
                error_ds.attrs["interpretation"] = "image"
        self.output_ds[ext] = output_ds
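# Minimal sketch of the distortion-LUT caching pattern used in the "dist" and
# "norm" branches above: the look-up table is expensive to build, so it is
# computed once per detector geometry and shared between workers, keyed by the
# detector's string representation. "cache" stands in for self.cache; the
# function name is illustrative, not plugin API.
def share_lut(distortion, detector, cache):
    key = str(detector)
    if key in cache:
        distortion.lut = cache[key]
    else:
        distortion.calc_LUT()
        cache[key] = distortion.lut
    return distortion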