def preProcess(self, _edObject=None):
    """
    Gather input image files / arrays into the plugin's working lists and
    validate that forced positions and dates have consistent lengths.

    Fix: guard against None entries in dataInput.inputImageFile, exactly as
    the sibling EDPluginHDF5StackImagesv10.preProcess does; a None entry
    previously raised AttributeError on `.path`.
    """
    EDPluginHDF5.preProcess(self)
    self.DEBUG("EDPluginHDF5StackImagesv10test.preProcess")
    for onefile in self.dataInput.inputImageFile:
        if onefile is None:
            self.ERROR("Please investigate why EDPluginHDF5StackImagesv10test.dataInput.inputImageFile is a list containing None !!!!")
            self.setFailure()
            continue
        if onefile.path is not None:
            self.listImageFilenames.append(onefile.path.value)
        if onefile.date is not None:
            self.listImageDates.append(onefile.date.value)
        if onefile.number is not None:
            self.listForcedPositions.append(onefile.number.value)
        self.listArray.append(EDUtilsArray.getArray(onefile))
    for oneArray in self.dataInput.getInputArray():
        self.listArray.append(EDUtilsArray.xsDataToArray(oneArray))
    if self.dataInput.index != []:
        # Explicit indexes override positions collected from the files.
        self.listForcedPositions = [i.value for i in self.dataInput.index]
    if self.dataInput.getDeleteInputImage() is not None:
        self.bDeleteImage = bool(self.dataInput.deleteInputImage.value)
    if self.listForcedPositions != []:
        EDAssert.equal(len(self.listForcedPositions),
                       max(len(self.listImageFilenames), len(self.listArray)),
                       "Forced position list has a good length")
    if self.listImageDates != []:
        EDAssert.equal(len(self.listImageDates), len(self.listImageFilenames),
                       "listImageDates has the same size as listImageFilenames")
def process(self, _edObject=None):
    """Load every spectrum and write each one into the map at its grid cell."""
    EDPluginHDF5.process(self)
    self.DEBUG("EDPluginHDF5MapOfSpectrav10.process")
    mapShape = (self.meshScan["SlowMotorSteps"], self.meshScan["FastMotorSteps"])
    # Spectra read from disk are queued after any in-memory arrays.
    for oneFilename in self.listSpectrumFilenames:
        self.listArray.append(fabio.open(oneFilename).data)
    if self.listForcedPositions == []:
        # No explicit positions: process the spectra in arrival order.
        for spectrum in self.listArray:
            self.processOneSpectrum(spectrum, position=mapShape, maxSize=mapShape)
    else:
        for idx, forced in enumerate(self.listForcedPositions):
            # Convert motor positions to fractional grid indices.
            fSlowPosition = (forced["Slow"] - self.meshScan["SlowMotorStart"]) / \
                            (self.meshScan["SlowMotorStop"] - self.meshScan["SlowMotorStart"]) * \
                            (self.meshScan["SlowMotorSteps"])
            fFastPosition = (forced["Fast"] - self.meshScan["FastMotorStart"]) / \
                            (self.meshScan["FastMotorStop"] - self.meshScan["FastMotorStart"]) * \
                            (self.meshScan["FastMotorSteps"])
            self.processOneSpectrum(self.listArray[idx],
                                    (int(round(fSlowPosition)), int(round(fFastPosition))),
                                    maxSize=mapShape)
def makeHDF5NeXus(self):
    """Finalize the NeXus-style layout of the output HDF5 file.

    Under the per-file lock: creates title/program/start_time datasets if
    absent, updates end_time and duration in place, ensures the NXdata group
    exists with its attributes, and hard-links the stack and energy datasets
    into it.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5NeXus")
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        #Seems strange to redefine h5Grp but if there is a flush in between: h5Grp could be closed
        entry = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if not "title" in entry:
            entry.create_dataset("title", data=self.TITLE)
        if not "program" in entry:
            entry.create_dataset("program", data="EDNA EDPluginControlFullFieldXASv1_0")
        if not "start_time" in entry:
            entry.create_dataset("start_time", data=EDPluginHDF5.getIsoTime(self.start_time))
        ########################################################################
        # Huge hack: for scalar modification: use [()] to refer to the data !!!
        ########################################################################
        # end_time / duration are refreshed on every call, not only created once.
        if "end_time" in entry:
            entry["end_time"][()] = EDPluginHDF5.getIsoTime()
        else:
            entry.create_dataset("end_time", data=EDPluginHDF5.getIsoTime())
        if "duration" in entry:
            entry["duration"][()] = time.time() - self.start_time
        else:
            entry.create_dataset("duration", data=time.time() - self.start_time, dtype="float")
        if self.NXdata not in entry:
            nxdata = entry.create_group(self.NXdata)
            for k, v in self.NXdataAttr.items():
                nxdata.attrs[k] = v
        else:
            nxdata = entry[self.NXdata]
        # Link existing stack/energy datasets into NXdata (skip if already linked).
        for ds in [self.DSstack, self.DSenergy]:
            if (ds in entry) and (ds not in nxdata):
                nxdata[ds] = entry[ds]
def preProcess(self, _edObject=None):
    """Collect filenames, dates, forced positions and arrays from the input,
    validate their lengths, then create the target HDF5 group structure."""
    EDPluginHDF5.preProcess(self)
    self.DEBUG("EDPluginHDF5StackImagesv10.preProcess")
    for imageFile in self.dataInput.inputImageFile:
        if imageFile is None:
            self.ERROR("Please investigate why EDPluginHDF5StackImagesv10.dataInput.inputImageFile is a list containing None !!!!")
            self.setFailure()
            continue
        if imageFile.path is not None:
            self.listImageFilenames.append(imageFile.path.value)
        if imageFile.date is not None:
            self.listImageDates.append(imageFile.date.value)
        if imageFile.number is not None:
            self.listForcedPositions.append(imageFile.number.value)
        self.listArray.append(EDUtilsArray.getArray(imageFile))
    self.listArray.extend(EDUtilsArray.xsDataToArray(xsdArray)
                          for xsdArray in self.dataInput.inputArray)
    if self.dataInput.index != []:
        # Explicit indexes take precedence over per-file numbers.
        self.listForcedPositions = [xsdIndex.value for xsdIndex in self.dataInput.index]
    if self.dataInput.getDeleteInputImage() is not None:
        self.bDeleteImage = bool(self.dataInput.deleteInputImage.value)
    if self.listForcedPositions:
        EDAssert.equal(len(self.listForcedPositions),
                       max(len(self.listImageFilenames), len(self.listArray)),
                       "Forced position list has a good length")
    if self.listImageDates:
        EDAssert.equal(len(self.listImageDates), len(self.listImageFilenames),
                       "listImageDates has the same size as listImageFilenames")
    self.hdf5group = EDPluginHDF5.createStructure(self.strHDF5Filename,
                                                  self.strHDF5Path,
                                                  self.dictExtraAttributes)
def process(self, _edObject=None):
    """Write every queued image (from file or array) into the HDF5 stack."""
    EDPluginHDF5.process(self)
    self.DEBUG("EDPluginHDF5StackImagesv10.process")
    nbPositions = len(self.listForcedPositions)
    if nbPositions == 0:
        # No forced positions: append everything on top of the stack.
        for oneFilename in self.listImageFilenames:
            header, data = self.readImage(oneFilename)
            self.DEBUG("Writing image %s on top of the stack" % (oneFilename))
            self.processOneImage(data, position=None, filename=oneFilename, dictHeaders=header)
        for oneArray in self.listArray:
            self.DEBUG("Writing image from array on top of the stack")
            self.processOneImage(oneArray)
    elif len(self.listImageFilenames) == nbPositions:
        for position, filename in zip(self.listForcedPositions, self.listImageFilenames):
            self.DEBUG("Writing image %s at position %i" % (filename, position))
            header, data = self.readImage(filename)
            self.processOneImage(data, position=position, filename=filename, dictHeaders=header)
    elif len(self.listArray) == nbPositions:
        for position, array in zip(self.listForcedPositions, self.listArray):
            self.DEBUG("Writing image from array at position %i" % (position))
            self.processOneImage(array, position)
def hdf5_offset(self, index, offset):
    """Store `offset` at row `index` of the shared "Offsets" dataset,
    recording the configured MaxOffset attribute on first use."""
    hdf5_path = self.xsdHDF5File.path.value
    with EDPluginHDF5.getFileLock(hdf5_path):
        group = EDPluginHDF5.getHDF5File(hdf5_path)[self.xsdHDF5Internal.value]
        offsets = group["Offsets"]
        if self.MaxOffset and ("MaxOffset" not in offsets.attrs):
            offsets.attrs["MaxOffset"] = self.MaxOffset
        offsets[index, :] = offset
def hdf5_offset(self, index, offset):
    """Write one offset row into the "Offsets" dataset under the file lock."""
    with EDPluginHDF5.getFileLock(self.xsdHDF5File.path.value):
        h5file = EDPluginHDF5.getHDF5File(self.xsdHDF5File.path.value)
        dataset = h5file[self.xsdHDF5Internal.value]["Offsets"]
        if self.MaxOffset:
            # Only record the configured maximum once.
            if "MaxOffset" not in dataset.attrs:
                dataset.attrs["MaxOffset"] = self.MaxOffset
        dataset[index, :] = offset
def makeHDF5OffsetStructure(self):
    """Ensure the resizable "Offsets" dataset exists and can hold row self.index."""
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5OffsetStructure")
    filename = self.HDF5filename.path.value
    with EDPluginHDF5.getFileLock(filename):
        h5Grp = EDPluginHDF5.getHDF5File(filename)[self.internalHDF5Path.value]
        if "Offsets" not in h5Grp:
            initialRows = 1 + max(self.index, self.reference)
            dataset = h5Grp.create_dataset("Offsets", shape=(initialRows, 2),
                                           dtype="float32", maxshape=(None, 2),
                                           chunks=(1, 2))
        else:
            dataset = h5Grp["Offsets"]
        # Grow the first axis on demand so row self.index exists.
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1, 2))
def __init__(self):
    """Constructor: register the input data class and reset working state."""
    EDPluginHDF5.__init__(self)
    self.setXSDataInputClass(XSDataInputHDF5StackImages)
    self.strHDF5Filename = None
    # Parallel bookkeeping lists filled during preProcess.
    for attr in ("listImageFilenames", "listForcedPositions",
                 "listImageDates", "listArray"):
        setattr(self, attr, [])
    self.bDeleteImage = False
    self.hdf5group = None  # HDF5 group (directory) where the stack is written
def makeHDF5EnergyStructure(self):
    """Store the current energy at index self.index of the DSenergy dataset,
    creating/resizing the dataset as needed.

    Fix: re-fetch the group handle inside the file lock, exactly as
    makeHDF5MaxIntStructure does — a flush between createStructure and the
    locked section can close the handle returned by createStructure.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5EnergyStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-open inside the lock: the handle above may have been closed by a flush.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if self.DSenergy in h5Grp:
            dataset = h5Grp[self.DSenergy]
        else:
            dataset = h5Grp.create_dataset(self.DSenergy,
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32", maxshape=(None,),
                                           chunks=(1,))
            # Attach the NeXus attributes at creation time only.
            for key in EDPluginControlFullFieldXASv1_0.energyAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.energyAttr[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = self.energy
def makeHDF5MaxIntStructure(self, _fMaxIntensity):
    """Record the maximum intensity of frame self.index in the "maxInt" dataset."""
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5MaxIntStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        #Seems strange to redefine h5Grp but if there is a flush in between: h5Grp could be closed
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "maxInt" not in h5Grp:
            dataset = h5Grp.create_dataset("maxInt",
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32", maxshape=(None,),
                                           chunks=(1,))
            # Attributes are only attached when the dataset is first created.
            for key in EDPluginControlFullFieldXASv1_0.maxIntAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.maxIntAttr[key])
        else:
            dataset = h5Grp["maxInt"]
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = _fMaxIntensity
def __init__(self):
    """Constructor: register the input class and reset the working state."""
    EDPluginHDF5.__init__(self)
    self.setXSDataInputClass(XSDataInputHDF5MapSpectra)
    for attr in ("listSpectrumFilenames", "listForcedPositions",
                 "listSpectrumFileType", "listArray"):
        setattr(self, attr, [])
    # Mesh-scan geometry: step count and start/stop positions for both motors.
    self.meshScan = dict.fromkeys(("FastMotorSteps", "FastMotorStart",
                                   "FastMotorStop", "SlowMotorSteps",
                                   "SlowMotorStart", "SlowMotorStop"), 0)
    self.bDeleteSpectrum = False
def process(self, _edObject=None):
    """Run one full-field XAS frame: normalize the raw image, then
    optionally launch the stack alignment.

    Fix: the return value of createStructure was bound to a local `h5Grp`
    that was never used; the call is kept for its side effect only (it
    creates the HDF5 structure on disk).
    """
    EDPluginControl.process(self)
    # Side effect only: make sure the HDF5 structure exists.
    EDPluginHDF5.createStructure(self.HDF5filename.path.value, self.internalHDF5Path.value)
    self.DEBUG("EDPluginControlFullFieldXASv1_0.process")
    self.makeHDF5OffsetStructure()
    if self.energy is not None:
        self.makeHDF5EnergyStructure()
    edPluginExecNormalize = self.loadPlugin(self.__strControlledNormalize)
    edPluginExecNormalize.connectSUCCESS(self.doSuccessExecNormalize)
    edPluginExecNormalize.connectFAILURE(self.doFailureExecNormalize)
    sdi = self.dataInput
    xsdInNorm = XSDataInputNormalize(data=sdi.data, flat=sdi.flat, dark=sdi.dark,
                                     dataScaleFactor=sdi.dataScaleFactor,
                                     darkScaleFactor=sdi.darkScaleFactor,
                                     flatScaleFactor=sdi.flatScaleFactor)
    if self.xsdNormalizedFilename is not None:
        xsdInNorm.output = XSDataImageExt(path=self.xsdNormalizedFilename.path)
    else:
        # No explicit output file: publish the image through shared memory.
        xsdInNorm.output = XSDataImageExt(shared=XSDataString("Normalized-%06i" % sdi.index.value))
    edPluginExecNormalize.dataInput = xsdInNorm
    edPluginExecNormalize.executeSynchronous()
    if self.xsdAlignStack is not None:
        edPluginAlign = self.loadPlugin(self.__strControlledAlign)
        edPluginAlign.dataInput = self.xsdAlignStack
        edPluginAlign.connectSUCCESS(self.doSuccessExecAlign)
        edPluginAlign.connectFAILURE(self.doFailureExecAlign)
        edPluginAlign.executeSynchronous()
def makeHDF5OffsetStructure(self):
    """Create (or grow) the resizable "Offsets" dataset under the file lock."""
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5OffsetStructure")
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        group = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "Offsets" in group:
            offsets = group["Offsets"]
        else:
            offsets = group.create_dataset("Offsets",
                                           shape=(max(self.index, self.reference) + 1, 2),
                                           dtype="float32",
                                           maxshape=(None, 2),
                                           chunks=(1, 2))
        # Extend the first axis on demand so that row self.index exists.
        if offsets.shape[0] <= self.index:
            offsets.resize((self.index + 1, 2))
def process(self, _edObject=None):
    """Normalize the current frame and optionally align the stack.

    Fix: drop the unused local binding of createStructure's return value;
    the call itself is preserved because it creates the HDF5 structure.
    """
    EDPluginControl.process(self)
    # Called for its side effect (ensures the HDF5 structure exists).
    EDPluginHDF5.createStructure(self.HDF5filename.path.value, self.internalHDF5Path.value)
    self.DEBUG("EDPluginControlFullFieldXASv1_0.process")
    self.makeHDF5OffsetStructure()
    if self.energy is not None:
        self.makeHDF5EnergyStructure()
    edPluginExecNormalize = self.loadPlugin(self.__strControlledNormalize)
    edPluginExecNormalize.connectSUCCESS(self.doSuccessExecNormalize)
    edPluginExecNormalize.connectFAILURE(self.doFailureExecNormalize)
    sdi = self.dataInput
    xsdInNorm = XSDataInputNormalize(data=sdi.data, flat=sdi.flat, dark=sdi.dark,
                                     dataScaleFactor=sdi.dataScaleFactor,
                                     darkScaleFactor=sdi.darkScaleFactor,
                                     flatScaleFactor=sdi.flatScaleFactor)
    if self.xsdNormalizedFilename is not None:
        xsdInNorm.output = XSDataImageExt(path=self.xsdNormalizedFilename.path)
    else:
        # Without a target file the normalized image goes to shared memory.
        xsdInNorm.output = XSDataImageExt(shared=XSDataString("Normalized-%06i" % sdi.index.value))
    edPluginExecNormalize.dataInput = xsdInNorm
    edPluginExecNormalize.executeSynchronous()
    if self.xsdAlignStack is not None:
        edPluginAlign = self.loadPlugin(self.__strControlledAlign)
        edPluginAlign.dataInput = self.xsdAlignStack
        edPluginAlign.connectSUCCESS(self.doSuccessExecAlign)
        edPluginAlign.connectFAILURE(self.doFailureExecAlign)
        edPluginAlign.executeSynchronous()
def __init__(self):
    """Constructor: declare the expected input class and clear all state."""
    EDPluginHDF5.__init__(self)
    self.setXSDataInputClass(XSDataInputHDF5MapSpectra)
    self.listSpectrumFilenames = []
    self.listForcedPositions = []
    self.listSpectrumFileType = []
    self.listArray = []
    # Geometry of the mesh scan, filled from the input's MeshScan element.
    self.meshScan = {key: 0 for key in ("FastMotorSteps", "FastMotorStart",
                                        "FastMotorStop", "SlowMotorSteps",
                                        "SlowMotorStart", "SlowMotorStop")}
    self.bDeleteSpectrum = False
def postProcess(self, _edObject=None):
    """Publish the result, delete source spectra if requested, release memory."""
    EDPluginHDF5.postProcess(self)
    self.DEBUG("EDPluginHDF5MapOfSpectrav10.postProcess")
    xsDataResult = XSDataResultHDF5MapSpectra()
    if os.path.isfile(self.strHDF5Filename):
        hdf5File = XSDataFile()
        hdf5File.setPath(XSDataString(self.strHDF5Filename))
        xsDataResult.setHDF5File(hdf5File)
        xsDataResult.setInternalHDF5Path(XSDataString(self.strHDF5Path))
    self.setDataOutput(xsDataResult)
    # Delete input spectra if requested by the caller.
    if self.bDeleteSpectrum:
        for spectrumPath in self.listSpectrumFilenames:
            os.remove(spectrumPath)
    # Drop references that are no longer needed.
    self.listSpectrumFilenames = []
    self.listForcedPositions = []
    self.listSpectrumFileType = []
    self.listArray = []
def postProcess(self, _edObject=None):
    """Build the output XSData result, optionally remove the source images,
    and free the per-run lists."""
    EDPluginHDF5.postProcess(self)
    self.DEBUG("EDPluginHDF5StackImagesv10.postProcess")
    xsDataResult = XSDataResultHDF5StackImages()
    if os.path.isfile(self.strHDF5Filename):
        xsDataResult.setHDF5File(XSDataFile(path=XSDataString(value=self.strHDF5Filename)))
        xsDataResult.setInternalHDF5Path(XSDataString(self.strHDF5Path))
    self.setDataOutput(xsDataResult)
    if self.bDeleteImage:
        # Caller asked for the source images to be removed once stacked.
        for imagePath in self.listImageFilenames:
            os.remove(imagePath)
    # Release references held only for processing.
    self.listImageFilenames = []
    self.listForcedPositions = []
    self.listImageDates = []
    self.listArray = []
def postProcess(self, _edObject=None):
    """Emit the plugin result and clean up (image deletion + memory release)."""
    EDPluginHDF5.postProcess(self)
    self.DEBUG("EDPluginHDF5StackImagesv10test.postProcess")
    result = XSDataResultHDF5StackImages()
    if os.path.isfile(self.strHDF5Filename):
        outputFile = XSDataFile(path=XSDataString(value=self.strHDF5Filename))
        result.setHDF5File(outputFile)
        result.setInternalHDF5Path(XSDataString(self.strHDF5Path))
    self.setDataOutput(result)
    if self.bDeleteImage:
        # Remove the source images now that they live inside the HDF5 stack.
        for oneImage in self.listImageFilenames:
            os.remove(oneImage)
    # De-allocate memory that is no longer used.
    self.listImageFilenames = []
    self.listForcedPositions = []
    self.listImageDates = []
    self.listArray = []
def process(self, _edObject=None):
    """Populate the HDF5 map: one spectrum per (slow, fast) grid cell."""
    EDPluginHDF5.process(self)
    self.DEBUG("EDPluginHDF5MapOfSpectrav10.process")
    gridSize = (self.meshScan["SlowMotorSteps"], self.meshScan["FastMotorSteps"])
    # Append the data of every spectrum file after any in-memory arrays.
    self.listArray += [fabio.open(name).data for name in self.listSpectrumFilenames]
    if self.listForcedPositions == []:
        for data in self.listArray:
            self.processOneSpectrum(data, position=gridSize, maxSize=gridSize)
        return
    # Hoist the scan geometry out of the loop.
    slowStart = self.meshScan["SlowMotorStart"]
    slowSpan = self.meshScan["SlowMotorStop"] - slowStart
    slowSteps = self.meshScan["SlowMotorSteps"]
    fastStart = self.meshScan["FastMotorStart"]
    fastSpan = self.meshScan["FastMotorStop"] - fastStart
    fastSteps = self.meshScan["FastMotorSteps"]
    for i, forced in enumerate(self.listForcedPositions):
        slowIdx = int(round((forced["Slow"] - slowStart) / slowSpan * slowSteps))
        fastIdx = int(round((forced["Fast"] - fastStart) / fastSpan * fastSteps))
        self.processOneSpectrum(self.listArray[i], (slowIdx, fastIdx), maxSize=gridSize)
def makeHDF5EnergyStructure(self):
    """Write self.energy at slot self.index of the DSenergy dataset,
    creating and resizing the dataset as needed.

    Fix: the group handle is re-fetched inside the file lock, matching
    makeHDF5MaxIntStructure — a flush occurring between createStructure
    and the locked section can close the handle obtained outside the lock.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5EnergyStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-open inside the lock; the handle above may already be closed.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if self.DSenergy in h5Grp:
            dataset = h5Grp[self.DSenergy]
        else:
            dataset = h5Grp.create_dataset(self.DSenergy,
                                           shape=(1 + max(self.index, self.reference),),
                                           dtype="float32", maxshape=(None,),
                                           chunks=(1,))
            # NeXus attributes are attached only at creation time.
            for key in EDPluginControlFullFieldXASv1_0.energyAttr:
                dataset.attrs.create(key, EDPluginControlFullFieldXASv1_0.energyAttr[key])
        if self.index >= dataset.shape[0]:
            dataset.resize((self.index + 1,))
        dataset[self.index] = self.energy
def preProcess(self, _edObject=None):
    """Read input spectra (files or arrays), forced positions, mesh-scan
    geometry and options, then create the HDF5 group structure.

    Fixes:
    - EDUtilsArray.xsDataToArray was called with no argument; it now
      receives the spectrum's array.
    - The array branch logged `strFileName`, which is undefined when the
      first spectrum is an in-memory array.
    - `oneSpectrum.getFileType.getValue()` never called getFileType.
    - A bare `raise` after setFailure had no active exception to re-raise.
    """
    EDPluginHDF5.preProcess(self)
    self.DEBUG("EDPluginHDF5MapOfSpectrav10.preProcess")
    for oneSpectrum in self.getDataInput().getInputSpectrumFile():
        if oneSpectrum.getPath() is not None:
            strFileName = oneSpectrum.getPath().getValue()
            self.DEBUG("getInputSpectrumFile: %s" % strFileName)
            self.listSpectrumFilenames.append(strFileName)
        elif oneSpectrum.getArray() is not None:
            self.DEBUG("getInputArray")
            self.listArray.append(EDUtilsArray.xsDataToArray(oneSpectrum.getArray()))
        else:
            strError = "A spectrum should either contain an image file or an array."
            self.ERROR(strError)
            self.setFailure()
            raise RuntimeError(strError)
        if (oneSpectrum.getFastMotorPosition() is not None) and (oneSpectrum.getSlowMotorPosition() is not None):
            self.listForcedPositions.append({"Slow": oneSpectrum.getSlowMotorPosition().getValue(),
                                             "Fast": oneSpectrum.getFastMotorPosition().getValue()})
        if oneSpectrum.getFileType() is not None:
            self.listSpectrumFileType.append(oneSpectrum.getFileType().getValue())
        if oneSpectrum.getMeshScan() is not None:
            XSDMesh = oneSpectrum.getMeshScan()
            # self.DEBUG(XSDMesh.marshal())
            self.meshScan["FastMotorSteps"] = XSDMesh.getFastMotorSteps().getValue()
            self.meshScan["FastMotorStart"] = XSDMesh.getFastMotorStart().getValue()
            self.meshScan["FastMotorStop"] = XSDMesh.getFastMotorStop().getValue()
            self.meshScan["SlowMotorSteps"] = XSDMesh.getSlowMotorSteps().getValue()
            self.meshScan["SlowMotorStart"] = XSDMesh.getSlowMotorStart().getValue()
            self.meshScan["SlowMotorStop"] = XSDMesh.getSlowMotorStop().getValue()
            self.DEBUG("MeshScan= %s" % self.meshScan)
    if self.getDataInput().getDeleteInputSpectrum() is not None:
        if self.getDataInput().getDeleteInputSpectrum() in [True, "true", "True", 1, "1"]:
            self.bDeleteSpectrum = True
    if len(self.listForcedPositions) > 0:
        EDAssert.equal(len(self.listForcedPositions),
                       max(len(self.listSpectrumFilenames), len(self.listArray)),
                       _strComment="list of forced position has the right size")
    self.hdf5group = self.createStructure(self.strHDF5Filename,
                                          self.strHDF5Path,
                                          self.dictExtraAttributes)
def process(self, _edObject=None):
    """Flush the queued images (files or arrays) into the HDF5 stack."""
    EDPluginHDF5.process(self)
    self.DEBUG("EDPluginHDF5StackImagesv10test.process")
    positions = self.listForcedPositions
    if not positions:
        # Unpositioned inputs simply pile up on top of the stack.
        for imagePath in self.listImageFilenames:
            header, data = self.readImage(imagePath)
            self.DEBUG("Writing image %s on top of the stack" % (imagePath))
            self.processOneImage(data, position=None, filename=imagePath, dictHeaders=header)
        for arrayData in self.listArray:
            self.DEBUG("Writing image from array on top of the stack")
            self.processOneImage(arrayData)
    else:
        if len(self.listImageFilenames) == len(positions):
            for idx, imagePath in enumerate(self.listImageFilenames):
                self.DEBUG("Writing image %s at position %i" % (imagePath, positions[idx]))
                header, data = self.readImage(imagePath)
                self.processOneImage(data, position=positions[idx],
                                     filename=imagePath, dictHeaders=header)
        elif len(self.listArray) == len(positions):
            for idx, arrayData in enumerate(self.listArray):
                self.DEBUG("Writing image from array at position %i" % (positions[idx]))
                self.processOneImage(arrayData, positions[idx])
def makeHDF5NeXus(self):
    """Finalize the NeXus-style layout of the output HDF5 file.

    Under the per-file lock: creates title/program/start_time if absent,
    refreshes end_time and duration in place, ensures the NXdata group with
    its attributes, and hard-links the stack and energy datasets into it.
    """
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5NeXus")
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        #Seems strange to redefine h5Grp but if there is a flush in between: h5Grp could be closed
        entry = EDPluginHDF5.getHDF5File(
            self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if not "title" in entry:
            entry.create_dataset("title", data=self.TITLE)
        if not "program" in entry:
            entry.create_dataset(
                "program", data="EDNA EDPluginControlFullFieldXASv1_0")
        if not "start_time" in entry:
            entry.create_dataset("start_time", data=EDPluginHDF5.getIsoTime(
                self.start_time))
        ########################################################################
        # Huge hack: for scalar modification: use [()] to refer to the data !!!
        ########################################################################
        # end_time / duration are refreshed on every call, not only created once.
        if "end_time" in entry:
            entry["end_time"][()] = EDPluginHDF5.getIsoTime()
        else:
            entry.create_dataset("end_time", data=EDPluginHDF5.getIsoTime())
        if "duration" in entry:
            entry["duration"][()] = time.time() - self.start_time
        else:
            entry.create_dataset("duration", data=time.time() - self.start_time, dtype="float")
        if self.NXdata not in entry:
            nxdata = entry.create_group(self.NXdata)
            for k, v in self.NXdataAttr.items():
                nxdata.attrs[k] = v
        else:
            nxdata = entry[self.NXdata]
        # Link existing stack/energy datasets into NXdata (skip if already linked).
        for ds in [self.DSstack, self.DSenergy]:
            if (ds in entry) and (ds not in nxdata):
                nxdata[ds] = entry[ds]
def makeHDF5MaxIntStructure(self, _fMaxIntensity):
    """Store the frame's maximum intensity at slot self.index of "maxInt"."""
    self.DEBUG("EDPluginControlFullFieldXASv1_0.makeHDF5MaxIntStructure")
    h5Grp = EDPluginHDF5.createStructure(self.HDF5filename.path.value,
                                         self.internalHDF5Path.value)
    with EDPluginHDF5.getFileLock(self.HDF5filename.path.value):
        # Re-open the group inside the lock: a flush in between could have
        # closed the handle returned by createStructure.
        h5Grp = EDPluginHDF5.getHDF5File(self.HDF5filename.path.value)[self.internalHDF5Path.value]
        if "maxInt" in h5Grp:
            maxIntDs = h5Grp["maxInt"]
        else:
            initialLength = 1 + max(self.index, self.reference)
            maxIntDs = h5Grp.create_dataset("maxInt", shape=(initialLength,),
                                            dtype="float32", maxshape=(None,),
                                            chunks=(1,))
            # Attributes are only written when the dataset is created.
            for key in EDPluginControlFullFieldXASv1_0.maxIntAttr:
                maxIntDs.attrs.create(key, EDPluginControlFullFieldXASv1_0.maxIntAttr[key])
        if maxIntDs.shape[0] <= self.index:
            maxIntDs.resize((self.index + 1,))
        maxIntDs[self.index] = _fMaxIntensity
def preProcess(self, _edObject=None):
    """Collect input spectra (files or arrays), forced positions, mesh-scan
    geometry and options, then create the HDF5 group structure.

    Fixes:
    - EDUtilsArray.xsDataToArray was called without the array argument.
    - The array branch logged `strFileName`, which may be undefined there.
    - `oneSpectrum.getFileType.getValue()` never invoked getFileType.
    - A bare `raise` after setFailure had no active exception to re-raise.
    """
    EDPluginHDF5.preProcess(self)
    self.DEBUG("EDPluginHDF5MapOfSpectrav10.preProcess")
    for oneSpectrum in self.getDataInput().getInputSpectrumFile():
        if oneSpectrum.getPath() is not None:
            strFileName = oneSpectrum.getPath().getValue()
            self.DEBUG("getInputSpectrumFile: %s" % strFileName)
            self.listSpectrumFilenames.append(strFileName)
        elif oneSpectrum.getArray() is not None:
            self.DEBUG("getInputArray")
            self.listArray.append(EDUtilsArray.xsDataToArray(oneSpectrum.getArray()))
        else:
            strError = "A spectrum should either contain an image file or an array."
            self.ERROR(strError)
            self.setFailure()
            raise RuntimeError(strError)
        if (oneSpectrum.getFastMotorPosition() is not None) and (oneSpectrum.getSlowMotorPosition() is not None):
            self.listForcedPositions.append({
                "Slow": oneSpectrum.getSlowMotorPosition().getValue(),
                "Fast": oneSpectrum.getFastMotorPosition().getValue()
            })
        if oneSpectrum.getFileType() is not None:
            self.listSpectrumFileType.append(oneSpectrum.getFileType().getValue())
        if oneSpectrum.getMeshScan() is not None:
            XSDMesh = oneSpectrum.getMeshScan()
            # self.DEBUG(XSDMesh.marshal())
            self.meshScan["FastMotorSteps"] = XSDMesh.getFastMotorSteps().getValue()
            self.meshScan["FastMotorStart"] = XSDMesh.getFastMotorStart().getValue()
            self.meshScan["FastMotorStop"] = XSDMesh.getFastMotorStop().getValue()
            self.meshScan["SlowMotorSteps"] = XSDMesh.getSlowMotorSteps().getValue()
            self.meshScan["SlowMotorStart"] = XSDMesh.getSlowMotorStart().getValue()
            self.meshScan["SlowMotorStop"] = XSDMesh.getSlowMotorStop().getValue()
            self.DEBUG("MeshScan= %s" % self.meshScan)
    if self.getDataInput().getDeleteInputSpectrum() is not None:
        if self.getDataInput().getDeleteInputSpectrum() in [True, "true", "True", 1, "1"]:
            self.bDeleteSpectrum = True
    if len(self.listForcedPositions) > 0:
        EDAssert.equal(len(self.listForcedPositions),
                       max(len(self.listSpectrumFilenames), len(self.listArray)),
                       _strComment="list of forced position has the right size")
    self.hdf5group = self.createStructure(self.strHDF5Filename,
                                          self.strHDF5Path,
                                          self.dictExtraAttributes)