Example #1
    def saveResults(self):
        if self.dfDict is None:
            return
        filePath = self.pathList[self.selectMapidx]
        energy = self.out.energy
        saveDataChoice = self.saveResultBox.currentIndex()
        if saveDataChoice != 5:  # save a single result
            saveDataType = self.arrayList[saveDataChoice]
            dirName, csvName, h5Name = self.saveToFiles(energy, self.dfDict, filePath, saveDataType)
            if h5Name is None:
                MsgBox(f'Processed data was saved as csv file at: \n{dirName + csvName}')
            else:
                MsgBox(
                    f'Processed data was saved as: \n\ncsv file at: {dirName + csvName} and \n\nHDF5 file at: {dirName + h5Name}')
        else:  # save all results
            csvList = []
            h5List = []
            for saveDataType in self.arrayList:
                dirName, csvName, h5Name = self.saveToFiles(energy, self.dfDict, filePath, saveDataType)
                csvList.append(csvName)
                h5List.append(h5Name)

            allcsvName = ', '.join(csvList)
            if h5Name is None:
                MsgBox(f'Processed data was saved as csv files at: \n{dirName + allcsvName}')
            else:
                allh5Name = ', '.join(h5List)
                MsgBox(
                    f'Processed data was saved as: \n\ncsv files at: {dirName + allcsvName} and \n\nHDF5 files at: {dirName + allh5Name}')

        # save parameter
        xlsName = csvName[:-4] + '_param.xlsx'
        self.dfDict['param'].to_excel(dirName + xlsName)
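
Note: MsgBox, used throughout these examples, is not a Qt class; it appears to be a project-specific convenience wrapper around QMessageBox. A minimal sketch of such a wrapper, assuming PyQt5 and the (text, type) calling convention seen above (the name of the icon mapping and the defaults are assumptions, not the project's actual implementation):

from PyQt5.QtWidgets import QMessageBox

def MsgBox(text, type='info'):
    # Show a modal message dialog; 'type' picks the icon (assumed convention).
    # Requires an existing QApplication instance.
    icons = {'info': QMessageBox.Information,
             'warn': QMessageBox.Warning,
             'error': QMessageBox.Critical}
    box = QMessageBox()
    box.setIcon(icons.get(type, QMessageBox.Information))
    box.setText(str(text))
    box.exec_()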
Example #2
 def isBaseFitOK(self, anchors, kind, w_regions):
     """
     Check is there is enough anchor points to fit higher order baseline
     :param anchors: anchor points
     :param kind: fitting method
     :return: decide if there is enough anchor points for baseline fit
     """
     # parse anchor points
     self.parse_anchors(anchors)
     if self.preprocess_method == 'rubberband':
         # decide if num of anchor points is enough for 'quadratic' or 'cubic' fit
         if len(self.wav_anchor) < 2:
             MsgBox('Baseline fitting needs at least 2 anchor points.\n' +
                    'Please add more "anchor points" to correctly fit the baseline.', type='error')
             return False
         elif len(self.wav_anchor) < 3 and kind == 'quadratic':
             MsgBox('Quadratic baseline needs more than 2 anchor points.\n' +
                    'Please add more "anchor points" to correctly fit the baseline.', type='error')
             return False
         elif len(self.wav_anchor) < 4 and kind == 'cubic':
             MsgBox('Cubic baseline needs more than 3 anchor points.\n' +
                    'Please add more "anchor points" to correctly fit the baseline.', type='error')
             return False
         else:
             return True
     elif self.preprocess_method == 'kohler':
         try:  # read w_regions
             self.w_regions = eval(w_regions)
         except Exception:
             MsgBox('Fitting regions format is not correct.\n' +
                    'Please consult default values.', type='error')
         if self.w_regions is not None:
             return True
         else:
             return False
 def saveResults(self):
     if (hasattr(self, 'PCA') and self.PCA is not None) or (hasattr(self, 'NMF') and self.NMF is not None)\
             or (hasattr(self, 'MCR') and self.MCR is not None):
         name = self.method
         df_fac_components = pd.DataFrame(getattr(self, name).components_,
                                          columns=self.wavenumbers_select)
         df_data_fac = pd.DataFrame(getattr(self, self.data_fac_name),
                                    index=self.df_row_idx)
         df_fac_components.to_csv(name + '_components.csv')
         df_data_fac.to_csv(name + '_data.csv')
         np.savetxt(name + '_mapRowSplit.csv',
                    np.array(self.dataRowSplit),
                    fmt='%d',
                    delimiter=',')
         MsgBox(name + ' components successfully saved!')
     else:
         MsgBox('No factorization components available.')
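
In isBaseFitOK above, the user-supplied w_regions string is parsed with eval. If the input is expected to be a plain Python literal such as a list of (start, stop) pairs, ast.literal_eval is a safer way to do that step; a minimal sketch of that alternative (a hypothetical helper, not part of the original code):

import ast

def parse_w_regions(w_regions_str):
    # Parse a string like '[(900, 1300), (2800, 3000)]' into a list of tuples.
    # Returns None if the string is not a valid Python literal.
    try:
        return ast.literal_eval(w_regions_str)
    except (ValueError, SyntaxError):
        return None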
Example #4
 def saveBtnClicked(self):
     if hasattr(self, 'irMap') and (self.filePath != ''):
         h5Name = self.fileName[:-4] + '.h5'
         try:
             self.irMap.write_as_hdf5(self.filePath + h5Name)
             MsgBox(
                 f'Map to HDF5 conversion complete! \nFile Location: {self.filePath + h5Name}'
             )
             self.infoBox.setText(
                 f'HDF5 File Location: {self.filePath + h5Name}')
         except Exception as error:
             MsgBox(error.args[0], 'error')
             saveFilePath, saveFileName, canceled = uiSaveFile(
                 'Save HDF5 file', self.path, "HDF5 Files (*.h5)")
             if not canceled:
                 # make sure saveFileName ends with .h5
                 if not saveFileName.endswith('.h5'):
                     saveFileName = saveFileName.split('.')[0] + '.h5'
                 # save file
                 try:
                     self.irMap.write_as_hdf5(saveFilePath + saveFileName)
                     MsgBox(
                         f'Map to HDF5 conversion complete! \nFile Location: {saveFilePath + saveFileName}'
                     )
                     self.infoBox.setText(
                         f'HDF5 File Location: {saveFilePath + saveFileName}'
                     )
                 except Exception as error:
                     MsgBox(error.args[0] + '\nSave HDF5 file failed.',
                            'error')
                     self.infoBox.setText('Save HDF5 file failed.')
             else:
                 self.infoBox.setText(
                     'Save file canceled. No HDF5 file was saved.')
     else:
         MsgBox(
             f"IR map object doesn't exist or file path is incorrect. \nPlease open an Omnic map file first."
         )
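
uiSaveFile (and the similar uiGetFile/uiGetDir helpers) are not standard Qt calls; from their usage they appear to return a (directory, file name, canceled) triple. A rough sketch of what such a wrapper could look like with QFileDialog, assuming PyQt5 (the return convention is inferred from the examples, not confirmed by the project):

import os
from PyQt5.QtWidgets import QFileDialog

def uiSaveFile(caption, path='', fileFilter="All Files (*)"):
    # Open a save dialog; return (dirPath with trailing separator, fileName, canceled).
    fullPath, _ = QFileDialog.getSaveFileName(None, caption, path, fileFilter)
    if not fullPath:
        return None, None, True
    dirName, fileName = os.path.split(fullPath)
    return dirName + os.sep, fileName, False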
Example #5
    def updateSpecPlot(self):
        # get current map idx and selected spectrum idx
        specidx = self.getCurrentSpecid()
        if not self.isMapOpen():
            return
        elif self.specItemModel.rowCount() == 0:
            MsgBox('No spectrum is loaded.\nPlease click "Load spectra" to import data.')
            return
        elif specidx is None:
            return

        # get plotchoice
        plotChoice = self.normBox.currentIndex()

        # create Preprocessor object
        self.out = Preprocessor(self.wavenumberList[self.selectMapidx], self.dataSets[self.selectMapidx][specidx])
        baselineOK = self.out.rubber_band(**self.processArgs) and self.out.kohler(**self.processArgs)

        if not baselineOK:
            return

        # make results report
        if plotChoice != 0:
            self.getReport(self.out, plotChoice)

        # if not batch processing, show plots
        if not self.isBatchProcessOn:
            # clean up plots
            self.rawSpectra.clearAll()
            self.resultSpectra.clearAll()
            if plotChoice == 0:  # plot raw spectrum
                self.infoBox.setText('')  # clear txt
                self.rawSpectra.plotBase(self.out, plotType='raw')
            elif plotChoice == 1:  # plot raw, kohler
                self.rawSpectra.plotBase(self.out, plotType='kohler_base')
                self.resultSpectra.plotBase(self.out, plotType='kohler')
            elif plotChoice == 2:  # plot raw, rubberband
                self.rawSpectra.plotBase(self.out, plotType='rubber_base')
                self.resultSpectra.plotBase(self.out, plotType='rubberband')
            elif plotChoice == 3:  # plot raw, kohler 2nd derivative
                self.rawSpectra.plotBase(self.out, plotType='kohler_base')
                self.resultSpectra.plotBase(self.out, plotType='deriv2_kohler')
            elif plotChoice == 4:  # plot raw, rubberband 2nd derivative
                self.rawSpectra.plotBase(self.out, plotType='rubber_base')
                self.resultSpectra.plotBase(self.out, plotType='deriv2_rubberband')

            if plotChoice in [1, 3]:
                self.parameter.child('Preprocess method').setValue('Kohler_EMSC', blockSignal=self.updateMethod)
            elif plotChoice in [2, 4]:
                self.parameter.child('Preprocess method').setValue('Rubberband', blockSignal=self.updateMethod)
    def setHeader(self, field: str):

        self.headers = [
            self.headermodel.item(i).header
            for i in range(self.headermodel.rowCount())
        ]
        self.field = field
        wavenum_align = []
        self.imgShapes = []
        self.rc2indList = []
        self.ind2rcList = []
        self._dataSets = {'spectra': [], 'volume': []}

        # get wavenumbers, imgShapes
        for header in self.headers:
            dataEvent = next(header.events(fields=[field]))
            self.wavenumbers = dataEvent['wavenumbers']
            self.N_w = len(self.wavenumbers)
            wavenum_align.append(
                (round(self.wavenumbers[0]),
                 self.N_w))  # append (first wavenum value, wavenum length)
            self.imgShapes.append(dataEvent['imgShape'])
            self.rc2indList.append(dataEvent['rc_index'])
            self.ind2rcList.append(dataEvent['index_rc'])
            # load data
            data = None
            try:  # spectra datasets
                data = header.meta_array('spectra')
            except IndexError:
                msg.logMessage(
                    f'Header object contained no frames with field {field}.',
                    msg.ERROR)
            if data is not None:
                self._dataSets['spectra'].append(data)
            # file paths for the NMF ('volume') workflow
            volumeEvent = next(header.events(fields=['volume']))
            path = volumeEvent['path']  # read in file path
            self._dataSets['volume'].append(path)

        # init maps
        if len(self.imgShapes) > 0:
            self.showComponents((self.wavenumbers, None, None, None))

        if wavenum_align and (wavenum_align.count(wavenum_align[0]) !=
                              len(wavenum_align)):
            MsgBox(
                'Lengths of the wavenumber arrays of the displayed maps are not equal.\n'
                'Performing PCA or NMF on these maps will lead to errors.', 'warn')
 def saveCluster(self):
     if hasattr(self, 'cluster_map') and hasattr(self, 'mean_spectra'):
         filePath = self.pathList[self.selectMapidx]
         # get dirname and old filename
         dirName = os.path.dirname(filePath)
         oldFileName = os.path.basename(filePath)
         n_clusters = self.parameter['Clusters']
         for i in range(n_clusters):
             # save dataFrames to csv file
             csvName = oldFileName[:-3] + f'_cluster{i+1}.csv'
             newFilePath = os.path.join(dirName, csvName)
             self.dfGroups[i].to_csv(newFilePath)
         MsgBox(
             f'Cluster spectra groups were successfully saved at: {newFilePath}!'
         )
Example #8
 def loadRoi(self):
     filePath, fileName, canceled = uiGetFile('Open ROI state file',
                                              self.path,
                                              "Pickle Files (*.pkl)")
     if not canceled:
         with open(filePath + fileName, 'rb') as f:
             roiStates = pickle.load(f)
         self.roiBtn.setChecked(roiStates['roiBtn'])
         self.roi.setState(roiStates['roiState'])
         if roiStates['roiBtn']:
             self.roi.show()
         self.autoMaskBtn.setChecked(roiStates['maskBtn'])
         self.selectMaskBtn.setChecked(True)
         for k, v in roiStates['parameter'].items():
             self.parameter[k] = v
         MsgBox(f'ROI states were loaded from: \n{filePath + fileName}')
     else:
         return
Example #9
 def saveRoi(self):
     parameterDict = {
         name: self.parameter[name]
         for name in self.parameter.names.keys()
     }
     roiStates = {
         'roiBtn': self.roiBtn.isChecked(),
         'maskBtn': self.autoMaskBtn.isChecked(),
         'roiState': self.roi.getState(),
         'parameter': parameterDict
     }
     filePath, fileName, canceled = uiSaveFile('Save ROI state', self.path,
                                               "Pickle Files (*.pkl)")
     if not canceled:
         with open(filePath + fileName, 'wb') as f:
             pickle.dump(roiStates, f)
         MsgBox(
             f'ROI state file was saved! \nFile Location: {filePath + fileName}'
         )
 def computeEmbedding(self):
     # get current map idx
     if not self.isMapOpen():
         return
     msg.showMessage('Compute embedding.')
     # Select wavenumber region
     wavROIList = []
     for entry in self.parameter['Wavenumber Range'].split(','):
         try:
             wavROIList.append(val2ind(int(entry), self.wavenumbers))
         except Exception:  # skip entries that cannot be converted to an index
             continue
     if len(wavROIList) % 2 == 0:
         wavROIList = sorted(wavROIList)
         wavROIidx = []
         for i in range(len(wavROIList) // 2):
             wavROIidx += list(
                 range(wavROIList[2 * i], wavROIList[2 * i + 1] + 1))
     else:
         msg.logMessage('"Wavenumber Range" values must be in pairs',
                        msg.ERROR)
         MsgBox('Clustering computation aborted.', 'error')
         return
     self.wavenumbers_select = self.wavenumbers[wavROIidx]
     self.N_w = len(self.wavenumbers_select)
     # get current dataset
     if self.selectedPixels is None:
         n_spectra = len(self.data)
         self.dataset = np.zeros((n_spectra, self.N_w))
         for i in range(n_spectra):
             self.dataset[i, :] = self.data[i][wavROIidx]
     else:
         n_spectra = len(self.selectedPixels)
         self.dataset = np.zeros((n_spectra, self.N_w))
         for i in range(n_spectra):  # i: ith selected pixel
             row_col = tuple(self.selectedPixels[i])
             self.dataset[i, :] = self.data[self.rc2ind[row_col]][wavROIidx]
     # get parameters and compute embedding
     n_components = self.parameter['Components']
     if self.parameter['Embedding'] == 'UMAP':
         n_neighbors = self.parameter['Neighbors']
         metric = self.parameter['Metric']
         min_dist = np.clip(self.parameter['Min Dist'], 0, 1)
         self.umap = UMAP(n_neighbors=n_neighbors,
                          min_dist=min_dist,
                          n_components=n_components,
                          metric=metric,
                          random_state=0)
         self.embedding = self.umap.fit_transform(self.dataset)
     elif self.parameter['Embedding'] == 'PCA':
         # normalize and mean center
         if self.parameter['Normalization'] == 'L1':  # normalize
             data_norm = Normalizer(norm='l1').fit_transform(self.dataset)
         elif self.parameter['Normalization'] == 'L2':
             data_norm = Normalizer(norm='l2').fit_transform(self.dataset)
         else:
             data_norm = self.dataset
         # subtract mean
         data_centered = StandardScaler(
             with_std=False).fit_transform(data_norm)
         # Do PCA
         self.PCA = PCA(n_components=n_components)
         self.PCA.fit(data_centered)
         self.embedding = self.PCA.transform(data_centered)
     # save embedding to standardModelItem
     self.item.embedding = self.embedding
     # update cluster map
     self.computeCluster()
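
val2ind, used in computeEmbedding above and again in the factorization code in Example #13, is assumed to map a target wavenumber to the index of the nearest element in the wavenumbers array; a minimal sketch of that assumption:

import numpy as np

def val2ind(val, array):
    # Index of the element in 'array' closest to 'val' (assumed behaviour).
    return int(np.argmin(np.abs(np.asarray(array) - val)))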
Example #11
    def run(self):

        n_files = len(self.filePaths)
        for i, filePath in enumerate(self.filePaths):
            folderPath = os.path.dirname(filePath) + '/'
            fileName = os.path.basename(filePath)
            # set sample_id
            if self.sampleName == 'None':
                sample_info = ir_map.sample_info(sample_id=fileName[:-4])
            else:
                sample_info = ir_map.sample_info(sample_id=self.sampleName)
            # try open omnic map and show image
            try:
                irMap = read_and_convert(filePath, sample_info=sample_info)
            except Exception as error:
                self.sigText.emit(error.args[0] +
                                  f'\nFailed to open file: {fileName}.')
                MsgBox(error.args[0] + f'\nFailed to open file: {fileName}.')
                break
            else:
                # check whether to perform T->A conversion
                spec0 = irMap.imageCube[0, 0, :]
                maxSpecY = np.max(spec0)
                if (not self.T2AConvertStatus) and (maxSpecY >=
                                                    self.minYLimit):
                    userMsg = YesNoDialog(
                        f'max(Y) of the first spectrum is greater than {self.minYLimit}, '
                        'while the "Auto T->A" box is not checked. '
                        '\nPlease make sure data format is in absorbance.'
                        '\nDo you want to perform "Auto T->A" conversion?')
                    # get user choice
                    userMsg.addButton(QMessageBox.YesToAll)
                    userChoice = userMsg.choice()
                    if userChoice == QMessageBox.YesToAll:  # set 'auto T->A' on
                        self.sigT2A.emit(True)
                    if (userChoice
                            == QMessageBox.YesToAll) or (userChoice
                                                         == QMessageBox.Yes):
                        irMap.imageCube = -np.log10(irMap.imageCube / 100 +
                                                    self.epsilon)
                        irMap.data = -np.log10(irMap.data / 100 + self.epsilon)
                        self.sigText.emit(
                            f'User chooses to perform T->A conversion in {fileName}.'
                        )
                    else:
                        self.sigText.emit(
                            f'User chooses not to perform T->A conversion in {fileName}.'
                        )
                elif maxSpecY >= self.minYLimit:
                    irMap.imageCube = -np.log10(irMap.imageCube / 100 +
                                                self.epsilon)
                    irMap.data = -np.log10(irMap.data / 100 + self.epsilon)
                    self.sigText.emit(
                        f'T->A conversion is performed in {fileName}.')
                else:
                    self.sigText.emit(
                        f"{fileName}'s datatype is absorbance. \nT->A conversion is not performed."
                    )

                dataCube = np.moveaxis(np.flipud(irMap.imageCube), -1, 0)
                # set up required data/properties in self.imageview and show image
                row, col = irMap.imageCube.shape[0], irMap.imageCube.shape[1]
                wavenumbers = irMap.wavenumbers
                rc2ind = {tuple(x[1:]): x[0] for x in irMap.ind_rc_map}
                self.sigImage.emit((row, col, wavenumbers, rc2ind, dataCube))

                # save hdf5
                h5Name = fileName[:-4] + '.h5'
                try:
                    irMap.write_as_hdf5(folderPath + h5Name)
                    self.sigText.emit(
                        f'#{i + 1} out of {n_files} maps HDF5-conversion complete!'
                        f'\nHDF5 File Location: {folderPath + h5Name}')
                except Exception as error:
                    MsgBox(error.args[0], 'error')
                    break
            # QTimer.singleShot(0, lambda: self.infoBox.setText(f'Start processing #{i + 1} out of {n_files} files.'))
        # MsgBox('All file conversion complete!')
        self.quit()
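
The conversion applied in run() (and again in Example #12) is the standard transmittance-to-absorbance relation A = -log10(T/100); the small epsilon guards against taking the log of zero. The same step in isolation (the epsilon default here is only a placeholder, since the examples use self.epsilon, whose value is not shown):

import numpy as np

def t_to_a(transmittance_percent, epsilon=1e-8):
    # Convert percent transmittance to absorbance: A = -log10(T/100 + epsilon).
    return -np.log10(transmittance_percent / 100 + epsilon)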
Example #12
    def batchBtnClicked(self):
        folderPath, canceled = uiGetDir('Select a folder', self.path)
        if canceled:
            self.infoBox.setText('Open folder canceled.')
            return

        filePaths = glob(folderPath + '*.map')
        # if no map file was found
        if not filePaths:
            MsgBox('No .map file was found.\nPlease select another folder')
            self.infoBox.setText(
                'No .map file was found in the selected folder.')
            return

        #try to use thread
        # mapConverter = BatchMapConverter(self.T2AConvert.isChecked(), self.sampleName.text(),\
        #                                  self.epsilon, self.minYLimit, filePaths)
        # mapConverter.sigText.connect(lambda x:self.infoBox.setText(x))
        # mapConverter.sigImage.connect(lambda x:self.updateImage(*x))
        # mapConverter.sigT2A.connect(lambda x:self.T2AConvert.setChecked(x))
        # mapConverter.start()

        # TODO: convert this long loop to a worker thread
        n_files = len(filePaths)
        for i, filePath in enumerate(filePaths):
            fileName = os.path.basename(filePath)
            # set sample_id
            if self.sampleName.text() == 'None':
                sample_info = ir_map.sample_info(sample_id=fileName[:-4])
            else:
                sample_info = ir_map.sample_info(
                    sample_id=self.sampleName.text())
            # try open omnic map and show image
            try:
                irMap = read_and_convert(filePath, sample_info=sample_info)
            except Exception as error:
                self.infoBox.setText(error.args[0] +
                                     f'\nFailed to open file: {fileName}.')
                MsgBox(error.args[0] + f'\nFailed to open file: {fileName}.')
                break
            else:
                # check whether to perform T->A conversion
                spec0 = irMap.imageCube[0, 0, :]
                maxSpecY = np.max(spec0)
                if (not self.T2AConvert.isChecked()) and (maxSpecY >=
                                                          self.minYLimit):
                    userMsg = YesNoDialog(
                        f'max(Y) of the first spectrum is greater than {self.minYLimit}, '
                        'while the "Auto T->A" box is not checked. '
                        '\nPlease make sure data format is in absorbance.'
                        '\nDo you want to perform "Auto T->A" conversion?')
                    # get user choice
                    userMsg.addButton(QMessageBox.YesToAll)
                    userChoice = userMsg.choice()
                    if userChoice == QMessageBox.YesToAll:  # set 'auto T->A' on
                        self.T2AConvert.setChecked(True)
                    if (userChoice
                            == QMessageBox.YesToAll) or (userChoice
                                                         == QMessageBox.Yes):
                        irMap.imageCube = -np.log10(irMap.imageCube / 100 +
                                                    self.epsilon)
                        irMap.data = -np.log10(irMap.data / 100 + self.epsilon)
                        self.infoBox.setText(
                            f'User chooses to perform T->A conversion in {fileName}.'
                        )
                    else:
                        self.infoBox.setText(
                            f'User chooses not to perform T->A conversion in {fileName}.'
                        )
                elif maxSpecY >= self.minYLimit:
                    irMap.imageCube = -np.log10(irMap.imageCube / 100 +
                                                self.epsilon)
                    irMap.data = -np.log10(irMap.data / 100 + self.epsilon)
                    self.infoBox.setText(
                        f'T->A conversion is performed in {fileName}.')
                else:
                    self.infoBox.setText(
                        f"{fileName}'s datatype is absorbance. \nT->A conversion is not performed."
                    )

                dataCube = np.moveaxis(np.flipud(irMap.imageCube), -1, 0)
                # set up required data/properties in self.imageview and show image
                row, col = irMap.imageCube.shape[0], irMap.imageCube.shape[1]
                wavenumbers = irMap.wavenumbers
                rc2ind = {tuple(x[1:]): x[0] for x in irMap.ind_rc_map}
                self.updateImage(row, col, wavenumbers, rc2ind, dataCube)

                # save hdf5
                h5Name = fileName[:-4] + '.h5'
                try:
                    irMap.write_as_hdf5(folderPath + h5Name)
                    self.infoBox.setText(
                        f'#{i+1} out of {n_files} maps HDF5-conversion complete!'
                        f'\nHDF5 File Location: {folderPath + h5Name}')
                except Exception as error:
                    MsgBox(error.args[0], 'error')
                    break
            QApplication.processEvents()
        MsgBox('All file conversion complete!')
Example #13
    def batchProcess(self):
        # get current map idx
        if not self.isMapOpen():
            return
        elif self.specItemModel.rowCount() == 0:
            MsgBox('No spectrum is loaded.\nPlease click "Load spectra" to import data.')
            return
        # check if baseline fit OK
        if self.out is None:
            self.out = Preprocessor(self.wavenumberList[self.selectMapidx], self.dataSets[self.selectMapidx][0])

        # get plotchoice
        plotChoice = self.normBox.currentIndex()
        if plotChoice != 0:
            # calculate rubberband and kohler baseline
            baselineOK = self.out.rubber_band(**self.processArgs) and self.out.kohler(**self.processArgs)
        else:
            MsgBox('Plot type is "Raw spectrum".\nPlease change plot type to "Kohler" or "Rubberband".')
            return
        if not baselineOK:
            return

        # notice to user
        userMsg = YesNoDialog(f'Ready to batch process selected spectra.\nDo you want to continue?')
        userChoice = userMsg.choice()
        if userChoice == QMessageBox.No:  # user choose to stop
            return

        self.isBatchProcessOn = True

        # init resultSetsDict, paramsDict
        self.resultSetsDict = {}
        self.paramsDict = {}
        self.paramsDict['specID'] = []
        self.paramsDict['row_column'] = []
        ind2rc = self.ind2rcList[self.selectMapidx]
        energy = self.out.energy
        n_energy = len(energy)
        for item in self.arrayList:
            self.resultSetsDict[item] = np.empty((0, n_energy))
        for item in self.reportList:
            self.paramsDict[item] = []
        # batch process begins
        n_spectra = self.specItemModel.rowCount()
        for i in range(n_spectra):
            msg.showMessage(f'Processing {i + 1}/{n_spectra} spectra')
            # select each spec and collect results
            self.specSelectModel.select(self.specItemModel.index(i, 0), QItemSelectionModel.ClearAndSelect)
            # get spec idx
            currentSpecItem = self.specItemModel.item(i)
            self.paramsDict['specID'].append(currentSpecItem.idx)
            self.paramsDict['row_column'].append(ind2rc[currentSpecItem.idx])
            # append all results into a single array/list
            for item in self.arrayList:
                self.resultSetsDict[item] = np.append(self.resultSetsDict[item], self.resultDict[item].reshape(1, -1),
                                                      axis=0)
            for item in self.reportList:
                self.paramsDict[item].append(self.resultDict[item])

        # result collection completed. convert paramsDict to df
        self.dfDict = {}
        self.dfDict['param'] = pd.DataFrame(self.paramsDict).set_index('specID')
        for item in self.arrayList:
            # convert resultSetsDict to df
            self.dfDict[item] = pd.DataFrame(self.resultSetsDict[item], columns=energy.tolist(),
                                        index=self.paramsDict['specID'])

        # batch process completed
        self.isBatchProcessOn = False
        msg.showMessage(f'Batch processing is completed! Saving results to csv files.')
        #  save df to files
        self.saveResults()
    def calculate(self):

        N = self.parameter['Components']
        #set decompose method
        if self.parameter['Method'] == 'PCA':
            self.method = 'PCA'
            self.field = 'spectra'
        elif self.parameter['Method'] == 'NMF':
            self.method = 'NMF'
            self.field = 'volume'
        elif self.parameter['Method'] == 'MCR':
            self.method = 'MCR'
            self.field = 'spectra'

        if hasattr(self, '_dataSets'):
            wavROIList = []
            for entry in self.parameter['Wavenumber Range'].split(','):
                try:
                    wavROIList.append(val2ind(int(entry), self.wavenumbers))
                except Exception:  # skip entries that cannot be converted to an index
                    continue
            # Select wavenumber region
            if len(wavROIList) % 2 == 0:
                wavROIList = sorted(wavROIList)
                wavROIidx = []
                for i in range(len(wavROIList) // 2):
                    wavROIidx += list(
                        range(wavROIList[2 * i], wavROIList[2 * i + 1] + 1))
            else:
                msg.logMessage('"Wavenumber Range" values must be in pairs',
                               msg.ERROR)
                MsgBox('Factorization computation aborted.', 'error')
                return

            self.wavenumbers_select = self.wavenumbers[wavROIidx]
            # get map ROI selected region
            self.selectedPixelsList = [
                self.headermodel.item(i).selectedPixels
                for i in range(self.headermodel.rowCount())
            ]
            self.df_row_idx = []  # row index for dataframe data_fac

            msg.showMessage('Start computing', self.method + '. Image shape:',
                            str(self.imgShapes))
            self.dataRowSplit = [
                0
            ]  # remember the starting/end row positions of each dataset
            if self.field == 'spectra':  # PCA workflow
                self.N_w = len(self.wavenumbers_select)
                self._allData = np.empty((0, self.N_w))

                for i, data in enumerate(
                        self._dataSets['spectra']):  # i: map idx
                    if self.selectedPixelsList[i] is None:
                        n_spectra = len(data)
                        tmp = np.zeros((n_spectra, self.N_w))
                        for j in range(n_spectra):
                            tmp[j, :] = data[j][wavROIidx]
                            self.df_row_idx.append((self.ind2rcList[i][j], j))
                    else:
                        n_spectra = len(self.selectedPixelsList[i])
                        tmp = np.zeros((n_spectra, self.N_w))
                        for j in range(n_spectra):  # j: jth selected pixel
                            row_col = tuple(self.selectedPixelsList[i][j])
                            tmp[j, :] = data[self.rc2indList[i]
                                             [row_col]][wavROIidx]
                            self.df_row_idx.append(
                                (row_col, self.rc2indList[i][row_col]))

                    self.dataRowSplit.append(self.dataRowSplit[-1] + n_spectra)
                    self._allData = np.append(self._allData, tmp, axis=0)

                if len(self._allData) > 0:
                    if self.method == 'PCA':
                        self.data_fac_name = 'data_PCA'  # define pop up plots labels
                        # normalize and mean center
                        if self.parameter[
                                'Normalization'] == 'L1':  # normalize
                            data_norm = Normalizer(norm='l1').fit_transform(
                                self._allData)
                        elif self.parameter['Normalization'] == 'L2':
                            data_norm = Normalizer(norm='l2').fit_transform(
                                self._allData)
                        else:
                            data_norm = self._allData
                        #subtract mean
                        data_centered = StandardScaler(
                            with_std=False).fit_transform(data_norm)
                        # Do PCA
                        self.PCA = PCA(n_components=N)
                        self.PCA.fit(data_centered)
                        self.data_PCA = self.PCA.transform(data_centered)
                        # pop up plots
                        self.popup_plots()
                    elif self.method == 'MCR':
                        self.data_fac_name = 'data_MCR'  # define pop up plots labels
                        # Do ICA to find initial estimate of ST matrix
                        self.ICA = FastICA(n_components=N)
                        self.ICA.fit(self._allData)
                        # Do MCR
                        self.MCR = McrAR(max_iter=100,
                                         c_regr=self.parameter['C regressor'],
                                         st_regr='NNLS',
                                         tol_err_change=1e-6,
                                         tol_increase=0.5)
                        self.MCR.fit(self._allData, ST=self.ICA.components_)
                        self.MCR.components_ = self.MCR.ST_opt_
                        self.data_MCR = self.MCR.C_opt_
                        #test ICA
                        # self.MCR = self.ICA
                        # self.data_MCR = self.ICA.transform(self._allData)
                        # pop up plots
                        self.popup_plots()
                else:
                    msg.logMessage(
                        'The data matrix is empty. No PCA is performed.',
                        msg.ERROR)
                    MsgBox('The data matrix is empty. No PCA is performed.',
                           'error')
                    self.PCA, self.data_PCA = None, None
                    self.MCR, self.data_MCR = None, None
                # emit PCA and transformed data
                if self.method == 'PCA':
                    self.sigPCA.emit((self.wavenumbers_select, self.PCA,
                                      self.data_PCA, self.dataRowSplit))
                elif self.method == 'MCR':
                    self.sigPCA.emit((self.wavenumbers_select, self.MCR,
                                      self.data_MCR, self.dataRowSplit))

            elif self.field == 'volume':  # NMF workflow
                data_files = []
                wav_masks = []
                row_idx = np.array([], dtype='int')
                self.allDataRowSplit = [0]  # row split for complete datasets

                for i, file in enumerate(self._dataSets['volume']):
                    ir_data, fmt = read_map.read_all_formats(file)
                    n_spectra = ir_data.data.shape[0]
                    self.allDataRowSplit.append(self.allDataRowSplit[-1] +
                                                n_spectra)
                    data_files.append(ir_data)
                    ds = data_prep.data_prepper(ir_data)
                    wav_masks.append(ds.decent_bands)
                    # row selection
                    if self.selectedPixelsList[i] is None:
                        row_idx = np.append(
                            row_idx,
                            np.arange(self.allDataRowSplit[-2],
                                      self.allDataRowSplit[-1]))
                        for k, v in self.rc2indList[i].items():
                            self.df_row_idx.append((k, v))
                    else:
                        n_spectra = len(self.selectedPixelsList[i])
                        for j in range(n_spectra):
                            row_col = tuple(self.selectedPixelsList[i][j])
                            row_idx = np.append(
                                row_idx, self.allDataRowSplit[-2] +
                                self.rc2indList[i][row_col])
                            self.df_row_idx.append(
                                (row_col, self.rc2indList[i][row_col]))

                    self.dataRowSplit.append(
                        self.dataRowSplit[-1] +
                        n_spectra)  # row split for ROI selected rows

                # define pop up plots labels
                self.data_fac_name = 'data_NMF'

                if len(self.df_row_idx) > 0:
                    # aggregate datasets
                    ir_data_agg = aggregate_data(self._dataSets['volume'],
                                                 data_files, wav_masks)
                    # keep the selected wavenumber columns in ascending order
                    col_idx = sorted(
                        set(wavROIidx) & set(ir_data_agg.master_wmask))
                    self.wavenumbers_select = self.wavenumbers[col_idx]
                    ir_data_agg.data = ir_data_agg.data[:, col_idx]
                    ir_data_agg.data = ir_data_agg.data[row_idx, :]
                    # perform NMF
                    self.NMF = NMF(n_components=N)
                    self.data_NMF = self.NMF.fit_transform(ir_data_agg.data)
                    # pop up plots
                    self.popup_plots()
                else:
                    msg.logMessage(
                        'The data matrix is empty. No NMF is performed.',
                        msg.ERROR)
                    MsgBox('The data matrix is empty. No NMF is performed.',
                           'error')
                    self.NMF, self.data_NMF = None, None
                # emit NMF and transformed data : data_NMF
                self.sigPCA.emit((self.wavenumbers_select, self.NMF,
                                  self.data_NMF, self.dataRowSplit))
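
dataRowSplit, built in calculate() above, holds the cumulative row boundaries of each map inside the stacked data matrix, so per-map factor scores can be recovered by slicing with consecutive boundary pairs. A brief sketch, assuming the factor scores keep the same row order as _allData:

import numpy as np

def split_scores_by_map(data_fac, dataRowSplit):
    # dataRowSplit is [0, n0, n0+n1, ...]; consecutive pairs delimit each map's rows.
    data_fac = np.asarray(data_fac)
    return [data_fac[start:stop]
            for start, stop in zip(dataRowSplit[:-1], dataRowSplit[1:])]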