# Methods of the EDNA plugin EDPluginControlFullFieldXASv1_0; they rely on the
# EDPluginHDF5 helper and the standard `time` module.

def makeHDF5NeXus(self):
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5NeXus")
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-fetch the entry group inside the lock: if a flush happened in
        # between, a previously obtained group object could already be closed.
        entry = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "title" not in entry:
            entry.create_dataset("title", data=self.TITLE)
        if "program" not in entry:
            entry.create_dataset("program", data="EDNA EDPluginControlFullFieldXASv1_0")
        if "start_time" not in entry:
            entry.create_dataset("start_time", data=EDPluginHDF5.getIsoTime(self.start_time))
        # Note: to overwrite an existing scalar dataset in place, assign through [()].
        if "end_time" in entry:
            entry["end_time"][()] = EDPluginHDF5.getIsoTime()
        else:
            entry.create_dataset("end_time", data=EDPluginHDF5.getIsoTime())
        if "duration" in entry:
            entry["duration"][()] = time.time() - self.start_time
        else:
            entry.create_dataset("duration", data=time.time() - self.start_time, dtype="float")
        if self.NXdata not in entry:
            nxdata = entry.create_group(self.NXdata)
            for k, v in self.NXdataAttr.items():
                nxdata.attrs[k] = v
        else:
            nxdata = entry[self.NXdata]
        # Hard-link the stack and energy datasets into the NXdata group.
        for ds in [self.DSstack, self.DSenergy]:
            if (ds in entry) and (ds not in nxdata):
                nxdata[ds] = entry[ds]
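
# A minimal, self-contained sketch of the two h5py idioms used in makeHDF5NeXus:
# overwriting an existing scalar dataset through the [()] selector, and exposing a
# dataset inside an NXdata group via a hard link. Plain h5py is assumed; the file
# name, group names and values below are illustrative, not the plugin's.
import time
import h5py

start_time = time.time()
with h5py.File("demo_nexus.h5", "a") as h5:
    entry = h5.require_group("entry")
    if "duration" not in entry:
        entry.create_dataset("duration", data=0.0, dtype="float")
    # In-place update of the existing scalar dataset: index with [()].
    entry["duration"][()] = time.time() - start_time
    if "energy" not in entry:
        entry.create_dataset("energy", data=7.112, dtype="float32")
    data_grp = entry.require_group("data")
    data_grp.attrs["NX_class"] = "NXdata"
    # Hard-link the energy dataset into the NXdata group, as makeHDF5NeXus does.
    if "energy" not in data_grp:
        data_grp["energy"] = entry["energy"]
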
def hdf5_offset(self, index, offset):
    """Write one 2-component offset into row `index` of the Offsets dataset."""
    with EDPluginHDF5.getFileLock(self.xsdHDF5File.path.value):
        grp = EDPluginHDF5.getHDF5File(self.xsdHDF5File.path.value)[self.xsdHDF5Internal.value]
        ds = grp["Offsets"]
        if self.MaxOffset and "MaxOffset" not in ds.attrs:
            ds.attrs["MaxOffset"] = self.MaxOffset
        ds[index, :] = offset
def makeHDF5OffsetStructure(self):
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5OffsetStructure")
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "Offsets" in h5Grp:
            dataset = h5Grp["Offsets"]
        else:
            dataset = h5Grp.create_dataset("Offsets", shape=(1 + max(self.index, self.reference), 2),
                                           dtype="float32", maxshape=(None, 2), chunks=(1, 2))
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1, 2))
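
# A minimal sketch of the grow-on-demand pattern shared by makeHDF5OffsetStructure
# above and the energy/maxInt variants below: create a chunked dataset with an
# unlimited first dimension, resize it when the frame index exceeds the current
# length, then write one row. Plain h5py is assumed; file name, index and values
# are illustrative.
import h5py

index = 4
offset = [1.5, -0.25]
with h5py.File("demo_offsets.h5", "a") as h5:
    grp = h5.require_group("entry")
    if "Offsets" in grp:
        ds = grp["Offsets"]
    else:
        ds = grp.create_dataset("Offsets", shape=(1, 2), dtype="float32",
                                maxshape=(None, 2), chunks=(1, 2))
    if index >= ds.shape[0]:
        ds.resize((index + 1, 2))   # grow only along the unlimited axis
    ds[index, :] = offset           # same write as hdf5_offset above
    if "MaxOffset" not in ds.attrs:
        ds.attrs["MaxOffset"] = 10.0   # illustrative value
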
def makeHDF5EnergyStructure(self):
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5EnergyStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value, self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        if self.DSenergy in h5Grp:
            dataset = h5Grp[self.DSenergy]
        else:
            dataset = h5Grp.create_dataset(self.DSenergy, shape=(1 + max(self.index, self.reference),),
                                           dtype="float32", maxshape=(None,), chunks=(1,))
            for key in EDPluginControlFullFieldXASv1_0.energyAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.energyAttr[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = self.energy
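
# The contents of EDPluginControlFullFieldXASv1_0.energyAttr are not shown in this
# listing; the sketch below assumes a NeXus-style attribute dictionary (the keys and
# values are illustrative) and shows how a class-level dict is applied with
# dataset.attrs.create at creation time, as makeHDF5EnergyStructure does.
import h5py

energyAttr = {"unit": "keV", "long_name": "Energy of the monochromated beam"}  # assumed values
with h5py.File("demo_energy.h5", "a") as h5:
    grp = h5.require_group("entry")
    if "energy" not in grp:
        ds = grp.create_dataset("energy", shape=(1,), dtype="float32",
                                maxshape=(None,), chunks=(1,))
        for key in energyAttr:
            ds.attrs.create(key, energyAttr[key])
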
def makeHDF5MaxIntStructure(self, _fMaxIntensity):
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5MaxIntStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value, self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-fetch the group inside the lock: if a flush happened in between,
        # the h5Grp obtained above could already be closed.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "maxInt" in h5Grp:
            dataset = h5Grp["maxInt"]
        else:
            dataset = h5Grp.create_dataset("maxInt", shape=(1 + max(self.index, self.reference),),
                                           dtype="float32", maxshape=(None,), chunks=(1,))
            for key in EDPluginControlFullFieldXASv1_0.maxIntAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.maxIntAttr[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = _fMaxIntensity
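
# EDPluginHDF5's internals are not part of these snippets; the code below is a
# hypothetical stand-in, inferred only from how the methods above use it: getFileLock
# returns a per-file lock usable as a context manager, and getHDF5File returns a
# shared, already-open h5py.File for that path. It is a sketch of the pattern, not
# the real EDNA implementation.
import threading
import h5py

_registry_lock = threading.Lock()
_locks = {}
_files = {}

def getFileLock(path):
    # One reentrant lock per HDF5 file, created lazily under a registry lock.
    with _registry_lock:
        return _locks.setdefault(path, threading.RLock())

def getHDF5File(path):
    # Keep a single open h5py.File handle per path so callers share it.
    with _registry_lock:
        if path not in _files:
            _files[path] = h5py.File(path, "a")
        return _files[path]

# Usage mirroring the methods above:
# with getFileLock("demo.h5"):
#     entry = getHDF5File("demo.h5").require_group("entry")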