def process(self, In):
    """Export the received DataFrame to an .xls/.xlsx file and/or the clipboard.

    Parameters
    ----------
    In : pandas.DataFrame
        Data received on the node's input terminal.
    """
    df = In
    if self._ctrlWidget.saveAllowed():
        # Gather `to_excel` keyword arguments from the node's control widget.
        kwargs = self.ctrlWidget().prepareInputArguments()
        # getSaveFileName returns (fileName, selectedFilter); keep the name only.
        fileName = QtGui.QFileDialog.getSaveFileName(
            None, "Save As..", "export.xlsx",
            "Excel files (*.xls *.xlsx)")[0]
        if fileName:  # empty string means the dialog was cancelled
            with BusyCursor():
                df.to_excel(fileName, **kwargs)
    if self._ctrlWidget.toClipbord():
        with BusyCursor():
            # Excel-compatible clipboard format (tab-separated).
            df.to_clipboard(excel=True)
    return
def handleSave(self, useSelection=False, missing_value=u''):
    """Save the table model to a CSV/TSV file chosen via a file dialog.

    Parameters
    ----------
    useSelection : bool
        If True export only the currently selected rectangular region;
        otherwise export the whole model.
    missing_value : unicode
        Placeholder written for cells whose model data is falsy.

    NOTE: this block is Python 2 (`unicode`, `xrange`, `except E, err`).
    """
    missing_value = unicode(missing_value)
    model = self.model()
    if useSelection:
        selection = self.selectionModel().selection().indexes()
        if selection:
            # Assumes a single rectangular selection: first/last indexes
            # give the bounding corners.
            topLeft = selection[0]
            bottomRight = selection[-1]
            rows = xrange(topLeft.row(), bottomRight.row() + 1)
            columns = xrange(topLeft.column(), bottomRight.column() + 1)
        else:
            # Nothing selected -> nothing to save.
            return None
    else:
        rows = xrange(model.rowCount())
        columns = xrange(model.columnCount())
    fn = self.fileSaveAs()
    if fn:
        try:
            with BusyCursor(), open(unicode(fn), 'wb') as stream:
                # Pick the csv dialect from the chosen file extension.
                if fn.endswith('.tsv'):
                    dialect = 'excel-tab'
                else:
                    dialect = 'excel'
                writer = csv.writer(stream, dialect=dialect)
                # Write header
                if self.horizontalHeadersSet:
                    header_row = []
                    if self.verticalHeadersSet:
                        # Leading empty cell aligns the header with the
                        # vertical-header column below.
                        header_row.append(u'')
                    for c in columns:
                        header_row.append(
                            unicode(self.horizontalHeaderItem(c)))
                    writer.writerow(header_row)
                # Write data
                for row in rows:
                    rowdata = []
                    if self.verticalHeadersSet:
                        rowdata.append(
                            unicode(self.verticalHeaderItem(row)))
                    for column in columns:
                        index = model.index(row, column)
                        item = model.data(index)
                        if item:
                            rowdata.append(unicode(item))
                        else:
                            # Falsy cell (None, empty, 0) -> placeholder.
                            rowdata.append(missing_value)
                    writer.writerow(rowdata)
            QtGui.QMessageBox.information(
                None, 'Export table to file',
                'File `{0}` saved successfully'.format(fn))
        except Exception, err:
            # NOTE(review): the message formats the `Exception` class itself
            # (not type(err)); presumably intentional-but-crude error text.
            QtGui.QMessageBox.critical(
                None, 'Export table to file',
                'File `{2}` cannot be saved:\n{0}\n{1}'.format(
                    Exception, err, fn))
def process(self, df):
    """Build/refresh a pyqtgraph curve from a DataFrame's datetime+signal columns.

    Returns a dict with the PlotDataItem under 'Curve' and the signal as a
    time-indexed DataFrame under 'pd.Series'. On None input both are None.
    """
    if df is None:
        # Drop any previously created curve item.
        del self.item
        self.item = None
        return {'Curve': None, 'pd.Series': None}
    if self.item is None:
        self.item = PlotDataItem(clipToView=False)
    # Offer only appropriately-typed columns in the widget's combo boxes.
    colname = [col for col in df.columns if isNumpyNumeric(df[col].dtype)]
    self._ctrlWidget.param('Y:signal').setLimits(colname)
    colname = [col for col in df.columns if isNumpyDatetime(df[col].dtype)]
    self._ctrlWidget.param('X:datetime').setLimits(colname)
    with BusyCursor():
        kwargs = self.ctrlWidget().prepareInputArguments()
        #self.item = PlotDataItem(clipToView=False)
        t = df[kwargs['X:datetime']].values
        # part 1
        timeSeries = pd.DataFrame(data=df[kwargs['Y:signal']].values,
                                  index=t, columns=[kwargs['Y:signal']])
        # part 2
        # convert time: datetime64 -> seconds-resolution -> int epoch seconds,
        # then apply the user's timezone correction (hours) and local offset.
        b = t.astype(np.dtype('datetime64[s]'))
        timeStamps = b.astype(np.int64) - kwargs['tz correct'] * 60 * 60 + time.timezone
        # now create curve
        pen = fn.mkPen(color=kwargs['color'], width=kwargs['width'],
                       style=kwargs['style'])
        self.item.setData(timeStamps, df[kwargs['Y:signal']].values,
                          pen=pen, name=kwargs['Y:signal'])
        self.item.setSymbol(kwargs['symbol'])
        if kwargs['symbol'] is not None:
            # Symbols share the curve color; size comes from the widget.
            self.item.setSymbolPen(kwargs['color'])
            self.item.setSymbolBrush(kwargs['color'])
            self.item.setSymbolSize(kwargs['symbolSize'])
    return {'Curve': self.item, 'pd.Series': timeSeries}
def togglePoints(self, enable):
    """Toggle point symbols ('x' vs none) for all time-series items.

    NOTE(review): `symbol` is computed but never applied to `TSitem` —
    a call like `TSitem.setSymbol(symbol)` appears to be missing; confirm
    against the original project before relying on this method.
    """
    #print('togglePoints', enable)
    with BusyCursor():
        for TSitem in self.parent().TSitems().values():
            if enable:
                symbol = 'x'
            else:
                symbol = None
def process(self, In):
    """Run a Fourier analysis on the selected signal column of `In`.

    Updates the control widget's column selectors and time-range fields,
    then returns the fitted parameters and the fitted function f(t).
    """
    df = In
    if df is None:
        return
    self.CW().param('eq').setValue('')
    if self._df_id != id(df):
        #print 'df new'
        # A new DataFrame arrived: repopulate column selectors. Signals are
        # disconnected first so setLimits() does not retrigger process().
        self._df_id = id(df)
        self.CW().disconnect_valueChanged2upd(self.CW().param('datetime'))
        self.CW().disconnect_valueChanged2upd(self.CW().param('sig'))
        colname = [
            col for col in df.columns if isNumpyDatetime(df[col].dtype)
        ]
        self.CW().param('datetime').setLimits(colname)
        colname = [
            col for col in df.columns if isNumpyNumeric(df[col].dtype)
        ]
        self.CW().param('sig').setLimits(colname)
        self.CW().connect_valueChanged2upd(self.CW().param('datetime'))
        self.CW().connect_valueChanged2upd(self.CW().param('sig'))
    # ------------------------------------------------------
    # now update our range selectors
    kwargs = self.CW().prepareInputArguments()
    t_vals = df[kwargs['datetime']].values
    t_min = pd.to_datetime(str(min(t_vals)))
    t_max = pd.to_datetime(str(max(t_vals)))
    self.CW().disconnect_valueChanged2upd(self.CW().param('t0'))
    self.CW().disconnect_valueChanged2upd(self.CW().param('t1'))
    self.CW().param('t0').setValue(t_min.strftime('%Y-%m-%d %H:%M:%S'))
    self.CW().param('t0').setDefault(t_min.strftime('%Y-%m-%d %H:%M:%S'))
    self.CW().param('t1').setValue(t_max.strftime('%Y-%m-%d %H:%M:%S'))
    self.CW().param('t1').setDefault(t_max.strftime('%Y-%m-%d %H:%M:%S'))
    # Only let t0/t1 edits retrigger processing when range mode is active.
    if self.CW().p['ranges'] is True:
        self.CW().connect_valueChanged2upd(self.CW().param('t0'))
        self.CW().connect_valueChanged2upd(self.CW().param('t1'))
    # get params once again
    kwargs = self.CW().prepareInputArguments()
    # ------------------------------------------------------
    with BusyCursor():
        df_out, eq_str, function, self.fig = pandas_fourier_analysis(
            df, kwargs['sig'], date_name=kwargs['datetime'],
            ranges=kwargs['ranges'], N_MAX_POW=kwargs['N_MAX_POW'],
            generate_plot=True)
    self.CW().param('eq').setValue(eq_str)
    self._PLOT_REQUESTED = False
    return {'params': df_out, 'f(t)': function}
def process(self, df_gw, df_w, E):
    """Compute the time lag between river and groundwater hydrographs.

    Parameters: groundwater DataFrame, river DataFrame, and tidal
    efficiency E. Returns {'tlag': <result>}.
    """
    if df_gw is None or df_w is None:
        raise Exception(
            'Hydrograph data not found in terminals `df_gw` or `df_w`')
        # NOTE(review): unreachable — the raise above always fires.
        return {'tlag': None}
    if E in [None, nan]:
        raise Exception('Tidal efficiency is invalid: E={0}'.format(E))
        # NOTE(review): unreachable — the raise above always fires.
        return {'tlag': None}
    # Reset the displayed result while we recompute.
    self.CW().param('tlag_grp', 'tlag = ').setValue('?')
    # Non-datetime columns are offered as signal columns; datetime columns
    # as the corresponding time axes.
    colname = [
        col for col in df_gw.columns
        if not isNumpyDatetime(df_gw[col].dtype)
    ]
    self.CW().param('gw').setLimits(colname)
    colname = [
        col for col in df_gw.columns if isNumpyDatetime(df_gw[col].dtype)
    ]
    self.CW().param('gw_dtime').setLimits(colname)
    colname = [
        col for col in df_w.columns if not isNumpyDatetime(df_w[col].dtype)
    ]
    self.CW().param('river').setLimits(colname)
    colname = [
        col for col in df_w.columns if isNumpyDatetime(df_w[col].dtype)
    ]
    self.CW().param('river_dtime').setLimits(colname)
    if not self.CW().param('E_grp', 'manual_E').value():
        # E is supplied by the terminal, not typed by the user; block the
        # update signal so setValue() does not recursively call process().
        self.CW().disconnect_valueChanged2upd(self.CW().param(
            'E_grp', 'E'))
        self.CW().param('E_grp', 'E').setValue(
            E)  # maybe this will provoke process onceagain.
        self.CW().connect_valueChanged2upd(self.CW().param('E_grp', 'E'))
    kwargs = self.CW().prepareInputArguments()
    E = kwargs['E']
    # and i would have to block the signals here...
    with BusyCursor():
        if kwargs['method'] == '1) Erskine 1991':
            tlag = timelag_erskine1991_method(
                df_gw, kwargs['gw'], kwargs['gw_dtime'],
                df_w, kwargs['river'], kwargs['river_dtime'], E,
                tlag_tuple=(kwargs['t1'], kwargs['t2'], kwargs['t_step']),
                log=True)
        else:
            raise Exception('Method <%s> not yet implemented' %
                            kwargs['method'])
    self.CW().param('tlag_grp', 'tlag = ').setValue(str(tlag))
    return {'tlag': tlag}
def load_slides(self, indexes: [QModelIndex]) -> None:
    """Load the item behind every valid index, then announce completion.

    The workspace model is reset around the loads so attached views
    refresh once, and a SlideLoadedMessage is broadcast afterwards.
    """
    with BusyCursor():
        model = self.workspace_model
        model.beginResetModel()
        for model_index in indexes:
            if not model_index.isValid():
                continue
            model_index.model().getItem(model_index).load()
        model.endResetModel()
        self.hub.broadcast(SlideLoadedMessage(self))
def process(self, In):
    """Rebuild the widget's column list from the incoming table.

    Accepts either a numpy recarray (columns from dtype.names) or a
    pandas-like object (columns attribute). On None input all columns
    are removed and the field list cleared.
    """
    if isinstance(In, np.recarray):
        receivedColumns = In.dtype.names
    else:
        if In is not None:
            receivedColumns = In.columns
        else:
            # Nothing received: tear down the current column widgets.
            with BusyCursor():
                self.deleteAllColumns()
                self._ctrlWidget.setFields()
            return
    with BusyCursor():
        for colName in receivedColumns:
            self._ctrlWidget.addDfColumn(colName)
            # Each column's editable display name defaults to the column name.
            self._ctrlWidget.param(colName, 'name').setValue(colName)
        self._ctrlWidget.spw().setData(In)
        self._ctrlWidget.setFields()
    return
def import_mask(self, file_path: str) -> None:
    """Import a TIFF mask file into the workspace.

    Parameters
    ----------
    file_path : str
        Path to the mask file; only ``.tif``/``.tiff`` are supported,
        any other extension is silently ignored.
    """
    with BusyCursor():
        filename, file_extension = os.path.splitext(file_path)
        file_name = os.path.basename(file_path)
        file_extension = file_extension.lower()
        # Fix: initialize to None so unsupported extensions no longer
        # raise UnboundLocalError at the `mask is not None` check below.
        mask = None
        if file_extension == '.tiff' or file_extension == '.tif':
            mask = Mask(file_name, file_path, MaskType.TIFF, MaskLoader)
            mask.load()
        if mask is not None:
            # Reset the model around the mutation so views refresh.
            self.workspace_model.beginResetModel()
            self.workspace_model.workspace_data.add_slide(mask)
            self.workspace_model.endResetModel()
def process(self, In): gc.collect() # populate USE COLUMNS param, but only on item received, not when we click button if not self._ctrlWidget.calculateNAllowed( ) and not self._ctrlWidget.applyAllowed(): self._ctrlWidget.param('Apply to columns').clearChildren() with BusyCursor(): df = copy.deepcopy(In) # check out http://docs.scipy.org/doc/numpy-dev/neps/datetime-proposal.html colnames = [ col for col in df.columns if isNumpyDatetime(df[col].dtype) ] + [None] self._ctrlWidget.param('datetime').setLimits(colnames) self._ctrlWidget.param('datetime').setValue(colnames[0]) # populate (Apply to columns) param, but only on item received, not when we click button if not self._ctrlWidget.calculateNAllowed( ) and not self._ctrlWidget.applyAllowed(): colnames = [ col for col in df.columns if isNumpyNumeric(df[col].dtype) ] for col_name in colnames: # cycle through each column... self._ctrlWidget.param('Apply to columns').addChild({ 'name': col_name, 'type': 'bool', 'value': True }) kwargs = self.ctrlWidget().prepareInputArguments() if self._ctrlWidget.calculateNAllowed(): N = serfes.get_number_of_measurements_per_day( df, datetime=kwargs['datetime'], log=kwargs['log']) self._ctrlWidget.param('N').setValue(N) if self._ctrlWidget.applyAllowed(): if kwargs['N'] in [None, '']: QtGui.QMessageBox.warning( None, "Node: {0}".format(self.nodeName), 'First set number of measurements per day in parameter `N`' ) raise ValueError( 'First set number of measurements per day in parameter `N`' ) result = serfes.filter_wl_71h_serfes1991(df, **kwargs) return {'Out': result}
def import_slide(self, file_path: str) -> None:
    """Import a slide file (.mcd, .ome.tiff/.tif, .txt) into the workspace.

    Parameters
    ----------
    file_path : str
        Path to the slide file. Unsupported extensions — and plain
        ``.tif``/``.tiff`` files without an ``.ome`` suffix — are ignored.
    """
    with BusyCursor():
        filename, file_extension = os.path.splitext(file_path)
        file_name = os.path.basename(file_path)
        file_extension = file_extension.lower()
        # Fix: initialize to None so unrecognized extensions (and non-OME
        # tiffs) no longer raise UnboundLocalError at the check below.
        slide = None
        if file_extension == '.mcd':
            slide = Slide(file_name, file_path, SlideType.MCD, McdLoader)
        elif file_extension == '.tiff' or file_extension == '.tif':
            if filename.endswith('.ome'):
                slide = Slide(file_name, file_path, SlideType.OMETIFF,
                              OmeTiffLoader)
        elif file_extension == '.txt':
            slide = Slide(file_name, file_path, SlideType.TXT, TxtLoader)
        if slide is not None:
            # Reset the model around the mutation so views refresh,
            # then announce the successful import.
            self.workspace_model.beginResetModel()
            self.workspace_model.workspace_data.add_slide(slide)
            self.workspace_model.endResetModel()
            self.hub.broadcast(SlideImportedMessage(self))
def on_pushButton_viewPlot_clicked(self):
    """ open nice graphic representation of our data"""
    # Plots the selected numeric columns (against the first selected
    # datetime column when one exists) either on separate shared-x
    # subplots or on a single axes, depending on the checkbox.
    with BusyCursor():
        try:
            df = self.parent().getPandasDataModel().df
            columns = self.parent().getPandasHeaderModel().selectedColumns(
            )  #consider only the selected columns
            datetime_cols = [
                col for col in columns if isNumpyDatetime(df[col].dtype)
            ]
            numeric_cols = [
                col for col in columns if isNumpyNumeric(df[col].dtype)
            ]
            datetime_col = datetime_cols[0] if len(
                datetime_cols
            ) > 0 else None  #plot with x=datetime if possible
            if self.checkBox_separateSubplots.isChecked(
            ) and len(numeric_cols) > 1:
                ''' Do the plotting of each selected numerical column on an individual subplot '''
                f, axes = plt.subplots(len(numeric_cols), sharex=True)
                for ax, numeric_col in zip(axes, numeric_cols):
                    df.plot(x=datetime_col, y=numeric_col, ax=ax)
                    legend = ax.legend(shadow=True)
                # Fine-tune figure; make subplots close to each other and hide x ticks for all but bottom plot.
                #f.subplots_adjust(hspace=0)
                plt.setp([a.get_xticklabels() for a in f.axes[:-1]],
                         visible=False)
            else:
                ''' Plot all selected numerical columns together on a single subplot '''
                f, ax = plt.subplots(1)
                for numeric_col in numeric_cols:
                    df.plot(x=datetime_col, y=numeric_col, ax=ax)
                legend = ax.legend(shadow=True)
            f.show()
        except Exception as exp:
            # Forward any plotting failure to the parent's exception display.
            self._parent.setException(exp)
            return
def process(self, In):
    """Detect peaks in the selected signal column of the incoming DataFrame.

    Updates the 'Warnings' display with the number of peaks that failed
    the period check, and returns {'peaks': <DataFrame>}.
    """
    df = In
    # Reset the warning counter while recomputing.
    self._ctrlWidget.param('Period Check Params', 'Warnings').setValue('?')
    # Offer numeric columns as the signal and datetime columns as the axis.
    colname = [col for col in df.columns if isNumpyNumeric(df[col].dtype)]
    self._ctrlWidget.param('column').setLimits(colname)
    colname = [col for col in df.columns if isNumpyDatetime(df[col].dtype)]
    self._ctrlWidget.param('datetime').setLimits(colname)
    kwargs = self._ctrlWidget.prepareInputArguments()
    with BusyCursor():
        peaks = detectPeaks_ts(df, kwargs.pop('column'),
                               plot=self._plotRequired, **kwargs)
    # Count of peaks whose 'check' flag is False = period-check warnings.
    self._ctrlWidget.param('Period Check Params', 'Warnings').setValue(
        str(len(peaks[peaks['check'] == False])))
    return {'peaks': peaks}
def process(self, In):
    """Scatter-plot node: update column selectors on new data, plot on demand.

    In 'plot overheads' mode a temporary difference column (y - x) is
    added to the DataFrame for plotting and removed again afterwards.
    """
    df = In
    if df is not None:
        # when we recieve a new dataframe into terminal - update possible selection list
        if not self._ctrlWidget.plotAllowed():
            colname = [
                col for col in df.columns if isNumpyNumeric(df[col].dtype)
            ]
            self._ctrlWidget.param('y').setLimits(colname)
            self._ctrlWidget.param('x').setLimits(colname)
        if self._ctrlWidget.plotAllowed():
            kwargs = self.ctrlWidget().prepareInputArguments()
            with BusyCursor():
                if self._ctrlWidget.param('plot overheads').value() is True:
                    y_name = kwargs['y'][0]
                    x_name = kwargs['x'][0]
                    overhead_name = y_name+' - '+x_name
                    # Temporarily mutate the input df with the difference
                    # column; restored (deleted) after plotting below.
                    df[overhead_name] = df[y_name]-df[x_name]
                    kwargs['y'] = [overhead_name]
                plot_pandas.plot_pandas_scatter_special1(df, **kwargs)
                if self._ctrlWidget.param('plot overheads').value() is True:
                    del df[overhead_name]
def process(self, W_peaks, GW_peaks):
    """Match river (W) peaks against groundwater (GW) peaks.

    Returns {'matched': <DataFrame>} and updates the 'MATCHED/PEAKS'
    display with matched-count / total-peaks.
    """
    N_md = '?'
    df_w = W_peaks
    df_gw = GW_peaks
    # Offer the river table's datetime columns as the match column.
    colname = [
        col for col in df_w.columns if isNumpyDatetime(df_w[col].dtype)
    ]
    self._ctrlWidget.param('Closest Time',
                           'Match Column').setLimits(colname)
    kwargs = self._ctrlWidget.prepareInputArguments()
    with BusyCursor():
        mode = kwargs.pop('Match Option')
        if mode == 'Closest Time':
            matched_peaks = match_peaks(df_w, df_gw,
                                        kwargs.pop('Match Column'),
                                        **kwargs)
            N_md = matched_peaks['md_N'].count()
    # NOTE(review): if mode is not 'Closest Time', `matched_peaks` is
    # unbound and the return below raises NameError — presumably
    # 'Closest Time' is the only mode; confirm against the widget.
    self._ctrlWidget.param('MATCHED/PEAKS').setValue('{0}/{1}'.format(
        N_md, len(df_w)))
    return {'matched': matched_peaks}
def process(self, tides):
    """Generate a synthetic tide/groundwater signal from tidal components.

    `tides` is a DataFrame with amplitude (A), angular frequency (omega)
    and phase (phi) columns; the equation ('tide', 'ferris' or 'xia') and
    its parameters come from the control widget. Returns {'sig': df}.

    NOTE: Python 2 block (`xrange`).
    """
    if tides is None:
        return
    if self._df_id != id(tides):
        #print 'df new'
        # New component table arrived: repopulate selectors with update
        # signals blocked so setValue/setLimits do not re-enter process().
        self._df_id = id(tides)
        self.CW().param('tides_grp', 'n_sig').setValue(len(tides)-1)
        self.CW().disconnect_valueChanged2upd(self.CW().param('tides_grp', 'A'))
        self.CW().disconnect_valueChanged2upd(self.CW().param('tides_grp', 'omega'))
        self.CW().disconnect_valueChanged2upd(self.CW().param('tides_grp', 'phi'))
        colname = [col for col in tides.columns if isNumpyNumeric(tides[col].dtype)]
        self.CW().param('tides_grp', 'A').setLimits(colname)
        self.CW().param('tides_grp', 'omega').setLimits(colname)
        self.CW().param('tides_grp', 'phi').setLimits(colname)
        # Default guess: first three numeric columns are A, omega, phi.
        self.CW().param('tides_grp', 'A').setValue(colname[0])
        self.CW().param('tides_grp', 'omega').setValue(colname[1])
        self.CW().param('tides_grp', 'phi').setValue(colname[2])
        self.CW().connect_valueChanged2upd(self.CW().param('tides_grp', 'A'))
        self.CW().connect_valueChanged2upd(self.CW().param('tides_grp', 'omega'))
        self.CW().connect_valueChanged2upd(self.CW().param('tides_grp', 'phi'))
        self.CW().disconnect_valueChanged2upd(self.CW().param('W'))
        W = tides[self.CW().p['tides_grp', 'A']][0]  # 1st value from column `A`
        self.CW().param('W').setValue(W)
        self.CW().param('W').setDefault(W)
        self.CW().connect_valueChanged2upd(self.CW().param('W'))
    kwargs = self.CW().prepareInputArguments()
    # Build the per-component dict {index: {'A':..,'omega':..,'phi':..}}.
    kwargs['tides'] = {}
    for i in xrange(len(tides)):
        if not np.isnan(tides.iloc[i][kwargs['df_A']]) and np.isnan(tides.iloc[i][kwargs['df_omega']]):
            continue  #skipping 0-frequency amplitude
        kwargs['tides'][str(i)] = {}
        kwargs['tides'][str(i)]['A'] = tides.iloc[i][kwargs['df_A']]
        kwargs['tides'][str(i)]['omega'] = tides.iloc[i][kwargs['df_omega']]
        kwargs['tides'][str(i)]['phi'] = tides.iloc[i][kwargs['df_phi']]
        #print i, ': a={0}, omega={1}, phi={2}'.format(kwargs['tides'][str(i)]['A'], kwargs['tides'][str(i)]['omega'], kwargs['tides'][str(i)]['phi'] )
    with BusyCursor():
        # Dispatch on the selected analytical solution.
        if kwargs['eq'] == 'tide':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'])
        elif kwargs['eq'] == 'ferris':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'],
                               D=kwargs['ferris']['D'],
                               x=kwargs['ferris']['x'])
        elif kwargs['eq'] == 'xia':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'],
                               x=kwargs['xia']['x'],
                               alpha=kwargs['xia']['alpha'],
                               beta=kwargs['xia']['beta'],
                               theta=kwargs['xia']['theta'],
                               L=kwargs['xia']['L'],
                               K1=kwargs['xia']['K1'],
                               b1=kwargs['xia']['b1'],
                               K=kwargs['xia']['K'],
                               b=kwargs['xia']['b'],
                               K_cap=kwargs['xia']['K_cap'],
                               b_cap=kwargs['xia']['b_cap'])
        else:
            df = None
    return {'sig': df}
def copyAll(self):
    """Serialize the whole table (no selection filter) to the clipboard."""
    with BusyCursor():
        # serialize() with useSelection=False handles the clipboard itself.
        self.serialize(useSelection=False)
def copySel(self):
    """Serialize the current selection and place it on the clipboard."""
    with BusyCursor():
        payload = self.serialize(useSelection=True)
        # An empty/None payload (nothing selected) leaves the clipboard alone.
        if payload:
            QtGui.QApplication.clipboard().setText(payload)
def process(self, df, md_peaks):
    """Compute tidal efficiency E via one of three methods.

    Method 1 (STD) needs only `df`; methods 2/3 (cyclic amplitude / cyclic
    STD) additionally need matched-peaks data in `md_peaks`. Returns
    {'E': float, 'E_cyclic': DataFrame-or-None}.
    """
    E = None
    self.CW().param('E = ').setValue(str(E))
    self.CW().param('gw').setWritable(True)
    if df is not None:
        # Repopulate column selectors with update signals blocked.
        for name in ['river', 'gw', 'datetime']:
            self.CW().disconnect_valueChanged2upd(self.CW().param(name))
        colname = [
            col for col in df.columns if isNumpyNumeric(df[col].dtype)
        ]
        self.CW().param('river').setLimits(colname)
        self.CW().param('gw').setLimits(colname)
        colname = [
            col for col in df.columns if isNumpyDatetime(df[col].dtype)
        ]
        self.CW().param('datetime').setLimits(colname)
        for name in ['river', 'gw', 'datetime']:
            self.CW().connect_valueChanged2upd(self.CW().param(name))
        kwargs = self.ctrlWidget().prepareInputArguments()
        if kwargs['method'] == '1) STD':
            E = tidalEfficiency_method1(df, kwargs['river'], kwargs['gw'])
            E_c = None
        elif kwargs['method'] == '2) Cyclic amplitude' or kwargs[
                'method'] == '3) Cyclic STD':
            if md_peaks is None:
                msg = 'To use method `{0}` please provide "matched-peaks" data in terminal `md_peaks` (a valid data-set can be created with node `Match Peaks`)'.format(
                    kwargs['method'])
                QtGui.QMessageBox.warning(
                    None, "Node: {0}".format(self.nodeName), msg)
                raise ValueError(msg)
            # The gw column is dictated by the matched-peaks data now.
            self.CW().disconnect_valueChanged2upd(self.CW().param('gw'))
            self.CW().param('gw').setWritable(False)
            self.CW().param('gw').setLimits(['see matched peaks'])
            self.CW().connect_valueChanged2upd(self.CW().param('gw'))
            mPeaks_slice = md_peaks.loc[~md_peaks['md_N'].isin(
                [np.nan, None])]  # select only valid cycles
            if kwargs['method'] == '2) Cyclic amplitude':
                E, E_cyclic = tidalEfficiency_method2(
                    mPeaks_slice['tidal_range'],
                    mPeaks_slice['md_tidal_range'])
            elif kwargs['method'] == '3) Cyclic STD':
                with BusyCursor():
                    river_name = mPeaks_slice['name'][0]
                    well_name = mPeaks_slice['md_name'][0]
                    E, E_cyclic = tidalEfficiency_method3(
                        df, river_name, well_name, kwargs['datetime'],
                        mPeaks_slice['time_min'], mPeaks_slice['time_max'],
                        mPeaks_slice['md_time_min'],
                        mPeaks_slice['md_time_max'])
            # now do nice output table
            E_c = pd.DataFrame({
                'N': mPeaks_slice['N'],
                'md_N': mPeaks_slice['md_N'],
                'E_cyclic': E_cyclic,
            })
        else:
            raise Exception('Method <%s> is not yet implemented' %
                            kwargs['method'])
        self.CW().param('E = ').setValue('{0:.4f}'.format(E))
        # NOTE(review): nesting reconstructed — the return sits inside the
        # `df is not None` branch so E_c is always bound here.
        return {'E': E, 'E_cyclic': E_c}
def process(self, In):
    """Run the full peak-detection routine and publish warning/count stats.

    Resets all status displays to '?', derives the detection `order` from
    the sampling rate, then returns {'raw': df, 'peaks': df}.
    """
    df = In
    # Reset every status field before recomputing.
    self.CW().param('check_grp', 'MIN_grp', 'warn').setValue('?')
    self.CW().param('check_grp', 'MAX_grp', 'warn').setValue('?')
    self.CW().param('check_grp', 'ALL_grp', 'warn').setValue('?')
    self.CW().param('check_grp', 'warn_sum').setValue('?')
    self.CW().param('out_grp', 'raw_nmin').setValue('?')
    self.CW().param('out_grp', 'raw_nmax').setValue('?')
    self.CW().param('out_grp', 'raw_n_all').setValue('?')
    self.CW().param('out_grp', 'n_cycles').setValue('?')
    self.CW().param('Peak Detection Params', 'order').setValue('?')
    if df is None:
        return {'raw': None, 'peaks': None}
    colname = [col for col in df.columns if isNumpyNumeric(df[col].dtype)]
    self.CW().param('column').setLimits(colname)
    colname = [col for col in df.columns if isNumpyDatetime(df[col].dtype)]
    self.CW().param('datetime').setLimits(colname)
    kwargs = self.CW().prepareInputArguments()
    kwargs['split'] = True
    with BusyCursor():
        # Detection window (order) derived from tidal period T, margin
        # and the actual sampling of the datetime column.
        kwargs['order'] = prepare_order(
            kwargs['T'], kwargs['hMargin'],
            prepare_datetime(df, datetime=kwargs['datetime']))
        self.CW().param('Peak Detection Params',
                        'order').setValue(str(kwargs['order']))
        #peaks = detectPeaks_ts(df, kwargs.pop('column'), plot=self._plotRequired, **kwargs)
        extra, raw, peaks = full_peak_detection_routine(
            df, col=kwargs.pop('column'),
            date_col=kwargs.pop('datetime'),
            IDs2mask=kwargs.pop('IDs2mask'),
            valid_range=kwargs.pop('valid_range'),
            plot=self._plotRequired, **kwargs)
        # Publish warning counts per category and their total.
        n_warn_min = len(extra['warnings']['MIN'])
        n_warn_max = len(extra['warnings']['MAX'])
        n_warn_all = len(extra['warnings']['ALL'])
        self.CW().param('check_grp', 'MIN_grp', 'warn').setValue(n_warn_min)
        self.CW().param('check_grp', 'MAX_grp', 'warn').setValue(n_warn_max)
        self.CW().param('check_grp', 'ALL_grp', 'warn').setValue(n_warn_all)
        self.CW().param('check_grp', 'warn_sum').setValue(n_warn_min +
                                                          n_warn_max +
                                                          n_warn_all)
        self.CW().param('out_grp', 'raw_nmin').setValue(extra['raw_nmin'])
        self.CW().param('out_grp', 'raw_nmax').setValue(extra['raw_nmax'])
        if raw is not None:
            self.CW().param('out_grp', 'raw_n_all').setValue(len(raw.index))
        if peaks is not None:
            self.CW().param('out_grp', 'n_cycles').setValue(len(peaks.index))
    return {'raw': raw, 'peaks': peaks}
def process(self, display=True):
    """Read a CSV file with the widget-configured arguments.

    Returns the parsed table on the node's 'Out' terminal.
    """
    read_args = self.ctrlWidget().prepareInputArguments()
    with BusyCursor():
        frame = pd.read_csv(**read_args)
    return {'Out': frame}
def save_workspace(self, path: str) -> None:
    """Persist the current workspace to *path*, showing a busy cursor."""
    model = self.workspace_model
    with BusyCursor():
        model.save_workspace(path)
def process(self, tides):
    """Generate a synthetic signal from tidal components (widget v2).

    Supports equations 'tide', 'ferris', 'xia' and 'song'; component
    amplitudes/frequencies/phases come from the `tides` DataFrame, other
    parameters from the control widget. Returns {'sig': df}.

    NOTE: Python 2 block (`xrange`, py2 print comments).
    """
    #print datetime.datetime.now(), "\t>>> update called",
    df = None
    if id(tides) != self._tides_id or tides is None:
        logger.debug(
            'clearing genCurveNodeCtrlWidget_v2 (Tide Components Section) on_process()'
        )
        #print 'setting tides components'
        # New (or cleared) component table: rebuild the widget section.
        self._tides_id = id(tides)
        if hasattr(self, '_ctrlWidget'):
            self._ctrlWidget.clearTideComponents()
        if tides is None:
            #print '\t > returning None'
            return {'sig': None}
        self._ctrlWidget.on_tides_received(tides)
    kwargs = self._ctrlWidget.prepareInputArguments()
    # now prepare the amplitudes
    kwargs['tides'] = {}
    for i in xrange(len(tides)):
        if not np.isnan(tides.iloc[i][kwargs['df_A']]) and np.isnan(
                tides.iloc[i][kwargs['df_omega']]):
            continue  #skipping 0-frequency amplitude
        kwargs['tides'][str(i)] = {}
        kwargs['tides'][str(i)]['A'] = tides.iloc[i][kwargs['df_A']]
        kwargs['tides'][str(i)]['omega'] = tides.iloc[i][
            kwargs['df_omega']]
        kwargs['tides'][str(i)]['phi'] = tides.iloc[i][kwargs['df_phi']]
    # finally do the calculations
    with BusyCursor():
        #print '\t > doing the calculations'
        # Dispatch on the selected analytical solution.
        if kwargs['eq'] == 'tide':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'])
        elif kwargs['eq'] == 'ferris':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'],
                               D=kwargs['ferris']['D'], x=kwargs['x'])
        elif kwargs['eq'] == 'xia':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'], x=kwargs['x'],
                               alpha=kwargs['xia']['alpha'],
                               beta=kwargs['xia']['beta'],
                               theta=kwargs['xia']['theta'],
                               L=kwargs['xia']['L'],
                               K1=kwargs['xia']['K1'],
                               b1=kwargs['xia']['b1'],
                               K=kwargs['xia']['K'],
                               b=kwargs['xia']['b'],
                               K_cap=kwargs['xia']['K_cap'],
                               b_cap=kwargs['xia']['b_cap'])
        elif kwargs['eq'] == 'song':
            df = generate_tide(kwargs['t0'], kwargs['dt'], kwargs['tend'],
                               components=kwargs['tides'], W=kwargs['W'],
                               F=kwargs['F'], label=kwargs['label'],
                               equation=kwargs['eq'], x=kwargs['x'],
                               order=kwargs['song']['order'],
                               b=kwargs['song']['b'],
                               n_e=kwargs['song']['n_e'],
                               kf=kwargs['song']['K'],
                               b2msl=kwargs['song']['b2msl'])
        else:
            df = None
    return {'sig': df}
def _update_images(self):
    """Recompute threshold, watershed and overlay images and refresh the view.

    Reads 'image' and 'image_markers' from self.data; optionally restricts
    work to the current ROI. Results are stored back into self.data under
    'image_watershed', 'image_th' and 'image_overlay'.
    """
    with BusyCursor():
        try:
            image = self.data['image']
            image_markers = self.data['image_markers']
        except KeyError:
            # some data (like image) is not defined/loaded
            return
        try:
            roi, _ = self.data['roi'].getArraySlice(
                image, self.imageView.imageItem)
        except AttributeError:
            # self.data['roi'] is None
            roi = None
        # first check if some markers are outside the ROI position and remove them
        if roi:
            mask = np.ones_like(image_markers)
            mask[roi] = 0
            for cc in zip(*image_markers.nonzero()):
                label = image_markers[cc]
                if mask[cc]:
                    # marker is outside ROI
                    image_markers[cc] = 0
                    self.data['lb_act_map'].pop(label, None)
            # reduce image size to ROI
            image = image[roi]
            image_markers = image_markers[roi]
        th = float(self.box_threshold.value())
        # compute threshold overlay
        image_th = _compute_threshold_image(image, th)
        if image_markers.any():
            image_watershed = _compute_watershed(image_th, image_markers)
        else:
            # no markers are present
            image_watershed = np.zeros_like(image)
        # compute overlay
        # todo: soft-code alpha
        colors = []
        try:
            # Map each marker label to an accept/ignore color by action.
            label_to_action = self.data['lb_act_map']
            for i, action in label_to_action.items():
                if action == mACTION_ADD:
                    color = mACCEPT_COLORS[i % len(mACCEPT_COLORS)]
                else:
                    color = mIGNORE_COLORS[i % len(mIGNORE_COLORS)]
                colors.append(color)
        except KeyError:
            pass
        # add the color for threshold areas
        colors.append(THRESHOLD_COLOR)
        # to color the threshold(ed) area (i.e. the background) a different
        # color, we assign a dummy label (-1)
        tmp = np.zeros_like(image)
        tmp[image_th == BACKGROUND_LABEL] = -1
        # todo: soft-code alpha
        image_overlay = label2rgb(tmp + image_watershed, image, alpha=0.4,
                                  bg_label=BACKGROUND_LABEL, bg_color=None,
                                  colors=colors)
        # draw markers
        for cc in zip(*image_markers.nonzero()):
            label = image_markers[cc]
            action = label_to_action[label]
            color = mCROSS_COLORS[
                0] if action == mACTION_ADD else mCROSS_COLORS[1]
            # todo: soft-code size
            _draw_cross(image_overlay, cc, 20, color)
        if roi:
            # restore images to the proper size
            tmp = np.zeros(shape=self.imageView.image.shape[0:2],
                           dtype=image_watershed.dtype)
            tmp[roi] = image_watershed
            image_watershed = tmp
            tmp = np.stack([self.data['image'] / 255 for _ in range(3)],
                           axis=-1)
            tmp[roi] = image_overlay
            image_overlay = tmp
            tmp = np.ones_like(self.data['image']) * BACKGROUND_LABEL
            tmp[roi] = image_th
            image_th = tmp
        # save to data container
        self.data['image_watershed'] = image_watershed
        self.data['image_th'] = image_th
        self.data['image_overlay'] = image_overlay
    # update image view
    self._update_imageview()
def load_workspace(self, path: str) -> None:
    """Replace the current workspace with the one stored at *path*."""
    with BusyCursor():
        # Drop existing state first, then reload inside a model reset so
        # attached views refresh exactly once.
        self.clear()
        model = self.workspace_model
        model.beginResetModel()
        model.load_workspace(path)
        model.endResetModel()