def process(self, _edObject=None):
    """Run the controlled normalization (and optional alignment) pipeline.

    Ensures the HDF5 output structure exists, records offset (and, when
    available, energy) metadata, then executes the normalization plugin
    synchronously, followed by the alignment plugin when an alignment
    input was prepared earlier.

    :param _edObject: unused, kept for EDNA plugin signature compatibility
    """
    EDPluginControl.process(self)
    # Called only for its side effect of creating the HDF5 group
    # hierarchy; the returned group object was previously bound to an
    # unused local.
    EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                 self.internalHDF5Path.value)
    self.DEBUG("EDPluginControlFullFieldXASv1_0.process")
    self.makeHDF5OffsetStructure()
    if self.energy is not None:
        self.makeHDF5EnergyStructure()
    edPluginExecNormalize = self.loadPlugin(self.__strControlledNormalize)
    edPluginExecNormalize.connectSUCCESS(self.doSuccessExecNormalize)
    edPluginExecNormalize.connectFAILURE(self.doFailureExecNormalize)
    sdi = self.dataInput
    xsdInNorm = XSDataInputNormalize(data=sdi.data,
                                     flat=sdi.flat,
                                     dark=sdi.dark,
                                     dataScaleFactor=sdi.dataScaleFactor,
                                     darkScaleFactor=sdi.darkScaleFactor,
                                     flatScaleFactor=sdi.flatScaleFactor)
    # Write to an explicit file when one was configured, otherwise to a
    # shared in-memory image keyed by the frame index.
    if self.xsdNormalizedFilename is not None:
        xsdInNorm.output = XSDataImageExt(path=self.xsdNormalizedFilename.path)
    else:
        xsdInNorm.output = XSDataImageExt(
            shared=XSDataString("Normalized-%06i" % sdi.index.value))
    edPluginExecNormalize.dataInput = xsdInNorm
    edPluginExecNormalize.executeSynchronous()
    if self.xsdAlignStack is not None:
        edPluginAlign = self.loadPlugin(self.__strControlledAlign)
        edPluginAlign.dataInput = self.xsdAlignStack
        edPluginAlign.connectSUCCESS(self.doSuccessExecAlign)
        edPluginAlign.connectFAILURE(self.doFailureExecAlign)
        edPluginAlign.executeSynchronous()
def preProcess(self, _edObject=None):
    """Gather input images/arrays and prepare the target HDF5 structure.

    Populates ``listImageFilenames``, ``listImageDates``,
    ``listForcedPositions`` and ``listArray`` from ``dataInput``, then
    creates (or opens) the HDF5 group the stack will be written into.
    Flags failure when the input image list contains ``None`` entries,
    but keeps processing the remaining entries.

    :param _edObject: unused, kept for EDNA plugin signature compatibility
    """
    EDPluginHDF5.preProcess(self)
    self.DEBUG("EDPluginHDF5StackImagesv10.preProcess")
    for onefile in self.dataInput.inputImageFile:
        if onefile is None:
            self.ERROR("Please investigate why EDPluginHDF5StackImagesv10.dataInput.inputImageFile is a list containing None !!!!")
            self.setFailure()
            continue
        if onefile.path is not None:
            self.listImageFilenames.append(onefile.path.value)
        if onefile.date is not None:
            self.listImageDates.append(onefile.date.value)
        if onefile.number is not None:
            self.listForcedPositions.append(onefile.number.value)
        self.listArray.append(EDUtilsArray.getArray(onefile))
    for oneArray in self.dataInput.inputArray:
        self.listArray.append(EDUtilsArray.xsDataToArray(oneArray))
    # An explicit index list overrides any positions collected from the
    # per-image "number" fields above.  (Truthiness test instead of the
    # previous "!= []" comparison.)
    if self.dataInput.index:
        self.listForcedPositions = [i.value for i in self.dataInput.index]
    # Attribute access used consistently (the original mixed a
    # getDeleteInputImage() getter with direct attribute access here).
    if self.dataInput.deleteInputImage is not None:
        self.bDeleteImage = bool(self.dataInput.deleteInputImage.value)
    if self.listForcedPositions:
        EDAssert.equal(len(self.listForcedPositions),
                       max(len(self.listImageFilenames), len(self.listArray)),
                       "Forced position list has a good length")
    if self.listImageDates:
        EDAssert.equal(len(self.listImageDates), len(self.listImageFilenames),
                       "listImageDates has the same size as listImageFilenames")
    self.hdf5group = EDPluginHDF5.createStructure(self.strHDF5Filename,
                                                  self.strHDF5Path,
                                                  self.dictExtraAttributes)
def process(self, _edObject=None):
    """Execute normalization, then (optionally) stack alignment.

    Creates the HDF5 output hierarchy and metadata datasets, runs the
    controlled normalization plugin synchronously, and finally runs the
    alignment plugin if an alignment input stack was prepared.

    :param _edObject: unused, kept for EDNA plugin signature compatibility
    """
    EDPluginControl.process(self)
    # Side-effect call: builds the HDF5 group hierarchy.  The return
    # value was previously stored in a never-used local variable.
    EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                 self.internalHDF5Path.value)
    self.DEBUG("EDPluginControlFullFieldXASv1_0.process")
    self.makeHDF5OffsetStructure()
    if self.energy is not None:
        self.makeHDF5EnergyStructure()
    normPlugin = self.loadPlugin(self.__strControlledNormalize)
    normPlugin.connectSUCCESS(self.doSuccessExecNormalize)
    normPlugin.connectFAILURE(self.doFailureExecNormalize)
    inp = self.dataInput
    normInput = XSDataInputNormalize(data=inp.data,
                                     flat=inp.flat,
                                     dark=inp.dark,
                                     dataScaleFactor=inp.dataScaleFactor,
                                     darkScaleFactor=inp.darkScaleFactor,
                                     flatScaleFactor=inp.flatScaleFactor)
    # Output goes either to a configured file or to a shared image named
    # after the frame index.
    if self.xsdNormalizedFilename is not None:
        normInput.output = XSDataImageExt(path=self.xsdNormalizedFilename.path)
    else:
        normInput.output = XSDataImageExt(
            shared=XSDataString("Normalized-%06i" % inp.index.value))
    normPlugin.dataInput = normInput
    normPlugin.executeSynchronous()
    if self.xsdAlignStack is not None:
        alignPlugin = self.loadPlugin(self.__strControlledAlign)
        alignPlugin.dataInput = self.xsdAlignStack
        alignPlugin.connectSUCCESS(self.doSuccessExecAlign)
        alignPlugin.connectFAILURE(self.doFailureExecAlign)
        alignPlugin.executeSynchronous()
def makeHDF5EnergyStructure(self):
    """Write the current energy into the resizable 'energy' HDF5 dataset.

    The dataset is created on first use (1-D float32, unlimited max
    shape) and grown as needed so that slot ``self.index`` is writable.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5EnergyStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-acquire the group inside the lock: a flush in between may
        # have closed the group obtained above (same pattern as
        # makeHDF5MaxIntStructure, which this method previously lacked).
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if self.DSenergy in h5Grp:
            dataset = h5Grp[self.DSenergy]
        else:
            dataset = h5Grp.create_dataset(self.DSenergy,
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32",
                                           maxshape=(None,),
                                           chunks=(1,))
            for key in EDPluginControlFullFieldXASv1_0.energyAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.energyAttr[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = self.energy
def makeHDF5MaxIntStructure(self, _fMaxIntensity):
    """Record the maximum intensity for frame ``self.index`` in HDF5.

    Creates the resizable 1-D float32 'maxInt' dataset on first use and
    extends it whenever the current index lies beyond its length.

    :param _fMaxIntensity: maximum intensity value to store
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5MaxIntStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # The group is deliberately looked up again here: an intervening
        # flush could have invalidated the object obtained before the lock.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "maxInt" not in h5Grp:
            initialSize = 1 + max(self.index, self.reference)
            ds = h5Grp.create_dataset("maxInt",
                                      shape=(initialSize,),
                                      dtype="float32",
                                      maxshape=(None,),
                                      chunks=(1,))
            for key, value in EDPluginControlFullFieldXASv1_0.maxIntAttr.items():
                ds.attrs.create(key, value)
        else:
            ds = h5Grp["maxInt"]
        if ds.shape[0] <= self.index:
            ds.resize((self.index + 1,))
        ds[self.index] = _fMaxIntensity
def makeHDF5EnergyStructure(self):
    """Store ``self.energy`` at slot ``self.index`` of the 'energy' dataset.

    Creates the dataset lazily (1-D float32, unlimited max shape,
    chunked per element) and resizes it when the index is out of range.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5EnergyStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-fetch the group while holding the lock — a flush between
        # createStructure and here can close the original group object
        # (the sibling makeHDF5MaxIntStructure already guards this way).
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if self.DSenergy in h5Grp:
            dataset = h5Grp[self.DSenergy]
        else:
            dataset = h5Grp.create_dataset(self.DSenergy,
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32",
                                           maxshape=(None,),
                                           chunks=(1,))
            for key, value in EDPluginControlFullFieldXASv1_0.energyAttr.items():
                dataset.attrs.create(key, value)
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = self.energy
def makeHDF5MaxIntStructure(self, _fMaxIntensity):
    """Persist the frame's maximum intensity into the 'maxInt' dataset.

    The dataset is obtained EAFP-style; when absent it is created as a
    growable 1-D float32 dataset and tagged with the class attributes.

    :param _fMaxIntensity: maximum intensity value to store
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5MaxIntStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Intentionally re-resolve the group under the lock: a flush may
        # have closed the group handle obtained before acquiring it.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        try:
            # h5py groups raise KeyError for a missing dataset name.
            dataset = h5Grp["maxInt"]
        except KeyError:
            dataset = h5Grp.create_dataset("maxInt",
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32",
                                           maxshape=(None,),
                                           chunks=(1,))
            attrSource = EDPluginControlFullFieldXASv1_0.maxIntAttr
            for key in attrSource:
                dataset.attrs.create(key, attrSource[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = _fMaxIntensity