def make_upload_log_internal(resp_root, osm_root, date, cid, user):
    """Build the upload-log DataTable from a diff-result XML root and the uploaded osm change root."""
    # Map temporary (old) id -> (new id, element tag, new version) from the diff result.
    placesids = {}
    for result in resp_root:
        new_version = result.attrib.get('new_version')
        placesids[result.attrib['old_id']] = (result.attrib['new_id'],
                                              result.tag, new_version)
    # Map temporary id -> (action, source id) from the uploaded change file.
    gisids = {}
    for action_elem in osm_root:
        action = action_elem.tag  # create, delete, update
        for element in action_elem:
            source_id = None
            # skip elements with no tags (typically vertices)
            tagged = False
            for tag in element.findall('tag'):
                tagged = True
                if tag.attrib['k'] == 'nps:source_system_key_value':
                    source_id = tag.attrib['v']
                    break
            if tagged:
                gisids[element.attrib['id']] = (action, source_id)
    data = DataTable()
    data.fieldnames = ['date_time', 'user_name', 'changeset', 'action',
                       'element', 'places_id', 'version', 'source_id']
    data.fieldtypes = ['DATE', 'TEXT', 'LONG', 'TEXT',
                       'TEXT', 'TEXT', 'LONG', 'TEXT']
    # Join the two maps on the temporary id to produce one log row per element.
    for tempid in gisids:
        action, source_id = gisids[tempid]
        new_id, tag, version = placesids[tempid]
        data.rows.append([date, user, cid, action, tag, new_id, version, source_id])
    return data
def make_upload_log_from_changeset_id(cid, server, logger):
    """Build a link-table DataTable for changeset `cid` by querying the server.

    Polls the server (up to 10 tries, 3 seconds apart) until the changeset's
    source ids are available, then parses the returned XML into a DataTable.

    Raises:
        UploadError: on server failure, timeout, or an invalid server response.
    """
    # logger may be None (or lack .info); AttributeError is deliberately ignored.
    try:
        logger.info("Requesting info on changeset {} from server.".format(cid))
    except AttributeError:
        pass
    # busy wait on server for changeset info
    elements = None
    countdown = 10  # times 3 seconds = 30 seconds before giving up.
    while countdown:
        elements = server.get_sourceids_for_changeset(cid)
        if elements:
            break
        else:
            # A 404 means "not ready yet" and is retried; anything else is fatal.
            if "404" in server.error:
                try:
                    logger.info("Changeset not found or not ready. Waiting...")
                except AttributeError:
                    pass
                time.sleep(3)
                countdown -= 1
            else:
                raise UploadError("Server failure requesting source ids for changeset. " + server.error)
    if not elements:
        raise UploadError("Changeset not found. It may not be ready yet, try again in a little while.")
    try:
        element_root = Et.fromstring(elements)
    except Et.ParseError as e:
        # NOTE(review): e.message is Python 2 only; under Python 3 this line
        # itself would raise AttributeError — confirm target interpreter.
        raise UploadError("Info returned from server is invalid ({0}).".format(e.message))
    if element_root.tag != "osm":
        raise UploadError("Info returned from server is invalid (no root osm element).")
    try:
        logger.info("Building link table.")
    except AttributeError:
        pass
    data = DataTable()
    data.fieldnames = ['date_time', 'user_name', 'changeset', 'action',
                       'element', 'places_id', 'version', 'source_id']
    data.fieldtypes = ['DATE', 'TEXT', 'LONG', 'TEXT',
                       'TEXT', 'TEXT', 'LONG', 'TEXT']
    try:
        for element in element_root:
            cid = int(element.attrib['changeset_id'])
            version = int(element.attrib['version'])
            try:
                date = datetime.datetime.strptime(element.attrib['timestamp'],
                                                  "%Y-%m-%dT%H:%M:%S.%fZ")
            except ValueError:
                # on rare occasions, there are no partial seconds, and the format is truncated
                date = datetime.datetime.strptime(element.attrib['timestamp'],
                                                  "%Y-%m-%dT%H:%M:%SZ")
            row = [date, element.attrib['user'], cid, element.attrib['action'],
                   element.attrib['element'], element.attrib['places_id'],
                   version, element.attrib['gis_id']]
            data.rows.append(row)
    except (IndexError, AttributeError, KeyError) as e:
        raise UploadError("Element info returned from server is invalid ({0}).".format(e.message))
    try:
        logger.info("Created link table.")
    except AttributeError:
        pass
    return data
def displayResult(self):
    """Return the data-table rows with the graph's polarity value appended."""
    table = DataTable().displayDataTable()
    polarity = Graph().displayGraph()
    table.append(polarity)
    return table
def get_non_extended_th_table(self, f):
    """Return the theory table associated with f; when f carries extra
    x-range rows, return a trimmed copy with those rows removed."""
    if not f.with_extra_x:
        return self.tables[f.file_name_short]
    source = self.tables[f.file_name_short]
    trimmed = DataTable(axarr=[])
    last = source.num_rows - f.nextramax
    trimmed.data = source.data[f.nextramin:last, :]
    trimmed.num_rows, trimmed.num_columns = trimmed.data.shape
    return trimmed
def getAllScans(): dataFilename = "shelve/codeScanTable" #persistent dictionary that will hold the CodeScan records dataTable = DataTable(dataFilename) #establish connection to the persistent dataTable scan_list = list() scan_list = dataTable.getAllRows() print scan_list dataTable.closeTable() #close the file connection return scan_list
def getAllScans(): dataFilename = "shelve/codeScanTable" #persistent dictionary that will hold the CodeScan records dataTable = DataTable( dataFilename) #establish connection to the persistent dataTable scan_list = list() scan_list = dataTable.getAllRows() print scan_list dataTable.closeTable() #close the file connection return scan_list
def index():
    """Landing page: show a SQL-driven table for authenticated sessions,
    otherwise the login form; POST checks the passphrase."""
    if request.method == 'GET':
        if 'Authenticated' in session.keys() and session['Authenticated'] == True:
            requested_sql = request.args.get('sql')
            if requested_sql:
                sql = urllib.unquote(requested_sql)
            else:
                # Default demo query when none was supplied.
                sql = "SELECT customer_id, first_name FROM customer LIMIT 5;"
            x = DataTable(db_uri)
            x.makeTable(sql, css_id="first_test_table", width=8)
            return render_template('base.html', table1=x, sql=sql)
        return render_template('login.html')
    if request.method == 'POST':
        if request.form.get('passphrase') == 'lockpick':
            session['Authenticated'] = True
            session.permanent = True
            return redirect(url_for('index'))
        return render_template('login.html', error=True)
def __init__(self, master=None):
    """Build a demo table: one seed line of data plus a two-column DataTable widget."""
    seed = TableData()
    seed.insertLine(TableLine())
    seed.data[0].data = ['asd', 'dsa']
    # NOTE(review): `seed` is never attached to self or the widget — confirm intent.
    self.datatable = DataTable(master)
    self.datatable.headers = ['nome', 'idade']
def do_open(self, line):
    """Open file(s)

    [description]

    Arguments:
        - line {str} -- FILENAMES (pattern expansion characters -- \*, ? -- allowed
    """
    # In CL mode `line` is a glob pattern (or space-separated names);
    # in GUI mode the caller already passes a list of file names.
    if CmdBase.mode != CmdMode.GUI:
        f_names = glob.glob(line)
        if not f_names:
            f_names = line.split()  # allow to provide multiple file names separated by a space
    else:
        f_names = line
    newtables = []
    if (line == "" or len(f_names) == 0):
        message = "No valid file names provided"
        if CmdBase.mode != CmdMode.GUI:
            print(message)
            return
        return (message, None, None)
    # All files must share one extension: it selects the file-type reader below.
    f_ext = [os.path.splitext(x)[1].split('.')[-1] for x in f_names]
    if (f_ext.count(f_ext[0]) != len(f_ext)):
        message = "File extensions of files must be equal!"
        if CmdBase.mode != CmdMode.GUI:
            print(message)
            print(f_names)
            return
        return (message, None, None)
    if (f_ext[0] in self.parent_application.filetypes):
        ft = self.parent_application.filetypes[f_ext[0]]
        for f in f_names:
            if not os.path.isfile(f):
                print("File \"%s\" does not exists" % f)
                continue  # next file name
            df = ft.read_file(f, self, self.parent_application.axarr)
            # Skip files whose short name already exists in the current dataset.
            unique = True
            for file in self.files:
                if df.file_name_short == file.file_name_short:  # check if file already exists in current ds
                    unique = False
            if unique:
                self.files.append(df)
                self.current_file = df
                newtables.append(df)
                for th_name in self.theories:  # add a theory table
                    self.theories[th_name].tables[
                        df.file_name_short] = DataTable(
                            self.parent_application.axarr,
                            "TH_" + df.file_name_short)
        if CmdBase.mode == CmdMode.GUI:
            return (True, newtables, f_ext[0])
    else:
        message = "File type \"%s\" does not exists" % f_ext[0]
        if CmdBase.mode != CmdMode.GUI:
            print(message)
            return
        return (message, None, None)
def __init__(self,
             file_name="",
             file_type=None,
             parent_dataset=None,
             axarr=None):
    """
    **Constructor**

    [description]

    Keyword Arguments:
        - file_name {str} -- Full path
        - file_type {[type]} -- [description] (default: {None})
        - parent_dataset {[type]} -- [description] (default: {None})
        - axarr {[type]} -- [description] (default: {None})
    """
    self.file_full_path = os.path.abspath(file_name)
    # Short name is the base name without its extension; used as a dict key elsewhere.
    tmpname = os.path.basename(self.file_full_path)
    self.file_name_short = os.path.splitext(tmpname)[0]
    self.file_type = file_type
    self.parent_dataset = parent_dataset
    self.axarr = axarr
    # plot attributes
    self.marker = None
    self.color = None
    self.filled = None
    self.size = None
    # Shift variables (one slot per plotted series)
    self.isshifted = [False] * DataTable.MAX_NUM_SERIES
    self.xshift = [0] * DataTable.MAX_NUM_SERIES
    self.yshift = [0] * DataTable.MAX_NUM_SERIES
    self.header_lines = []
    self.file_parameters = {}
    self.active = True
    self.data_table = DataTable(axarr, self.file_name_short)
    # extra theory xrange
    self.with_extra_x = False
    self.theory_xmin = "None"
    self.theory_xmax = "None"
    self.theory_logspace = True
    self.th_num_pts = 10  # number of points
    self.nextramin = 0
    self.nextramax = 0
def plot_theory_stuff(self):
    """Plot the theory modes (tau, J) through the current view transform."""
    if not self.view_modes:
        return
    nmodes = self.parameters["nmodes"].value
    scratch = DataTable(self.axarr)
    scratch.num_columns = 2
    scratch.num_rows = nmodes
    scratch.data = np.zeros((nmodes, 2))
    # Relaxation times distributed logarithmically between the bounds.
    scratch.data[:, 0] = np.logspace(self.parameters["logtmin"].value,
                                     self.parameters["logtmax"].value, nmodes)
    for i in range(nmodes):
        if self.stop_theory_flag:
            break
        scratch.data[i, 1] = np.power(10, self.parameters["logJ%02d" % i].value)
    view = self.parent_dataset.parent_application.current_view
    try:
        x, y, success = view.view_proc(scratch, None)
    except TypeError as e:
        print(e)
        return
    self.graphicmodes.set_data(x, y)
    # Remove the scratch table's series from every axis so only the
    # graphicmodes artist remains.
    for i in range(scratch.MAX_NUM_SERIES):
        for nx in range(len(self.axarr)):
            self.axarr[nx].lines.remove(scratch.series[nx][i])
def plot_theory_stuff(self):
    """Plot the LVE envelope for a tiny shear rate over a fixed time grid."""
    scratch = DataTable(self.axarr)
    scratch.num_columns = 2
    scratch.num_rows = 100
    scratch.data = np.zeros((100, 2))
    times = np.logspace(-2, 3, 100)
    scratch.data[:, 0] = times
    scratch.data[:, 1] = 0
    fparamaux = {"gdot": 1e-8}
    G = self.parameters["G"].value
    tauD = self.parameters["tauD"].value
    # Single-mode stress growth: G * gdot * tauD * (1 - exp(-t/tauD)).
    scratch.data[:, 1] += G * fparamaux["gdot"] * tauD * (1 - np.exp(-times / tauD))
    if self.flow_mode == FlowMode.uext:
        scratch.data[:, 1] *= 3.0
    view = self.parent_dataset.parent_application.current_view
    try:
        x, y, success = view.view_proc(scratch, fparamaux)
    except TypeError as e:
        print(e)
        return
    self.LVEenvelopeseries.set_data(x[:, 0], y[:, 0])
def do_new_dummy_file(self,
                      fname="",
                      xrange=None,
                      yval=0,
                      zval=None,
                      fparams=None,
                      file_type=None):
    """Create File from xrange and file parameters

    xrange: list of x points
    yval: float or list
    fparams: dict containing file parameter names and values

    BUG FIX: the original defaults (xrange=[], zval=[], fparams={}) were
    mutable objects shared across calls; they are now None sentinels expanded
    per call. Callers passing explicit lists/dicts are unaffected.
    """
    if xrange is None:
        xrange = []
    if zval is None:
        zval = []
    if fparams is None:
        fparams = {}
    # File name encodes the parameter values, e.g. "dummy_T300.ext".
    suffix = "_".join([pname + "%.3g" % fparams[pname] for pname in fparams])
    base = "dummy_" if fname == "" else fname
    filename = base + suffix + "." + file_type.extension
    f = File(file_name=filename,
             file_type=file_type,
             parent_dataset=self,
             axarr=self.parent_application.axarr)
    f.file_parameters = fparams
    dt = f.data_table
    dt.num_columns = len(file_type.col_names)
    dt.num_rows = len(xrange)
    dt.data = np.zeros((dt.num_rows, dt.num_columns))
    dt.data[:, 0] = xrange
    # Fill every y column with yval (scalar, or a per-row list).
    if isinstance(yval, list):
        for i in range(1, dt.num_columns):
            dt.data[:, i] = yval[:]
    else:
        for i in range(1, dt.num_columns):
            dt.data[:, i] = yval
    if zval != [] and dt.num_columns > 2:
        dt.data[:, 2] = zval[:]
    # Register the file unless one with the same short name already exists.
    unique = True
    for file in self.files:
        if f.file_name_short == file.file_name_short:  # check if file already exists in current ds
            unique = False
    if unique:
        self.files.append(f)
        self.current_file = f
        for th_name in self.theories:  # add a theory table
            self.theories[th_name].tables[f.file_name_short] = DataTable(
                self.parent_application.axarr, "TH_" + f.file_name_short)
            self.theories[th_name].function(f)
        if CmdBase.mode == CmdMode.GUI:
            return f, True
    else:
        return None, False
def openFunc(self, fileName):
    """Open the file at the specified path.

    The file has to be JSON (for now it is parsed as CSV though).
    """
    print(fileName)
    # TODO temporary, to be changed by json call
    try:
        parsed, name = csvParser(fileName)
    except Exception as inst:
        ErrorMessage(str(inst)).exec_()
        return
    if isinstance(parsed, Group):
        dataTable = DataTable(name, parsed, "Members")
    elif isinstance(parsed, Table):
        dataTable = DataTable(name, parsed, "Tresury")
    else:
        # Unknown parse result: silently ignore the file.
        return
    self.mainWindow_.contentTab.addTable(dataTable)
def default(self, obj):
    """JSON serialization hook for the project's data classes.

    BUG FIX: the method was named `defaulf`, a typo that prevented
    json.JSONEncoder from ever calling it. It is renamed to `default`
    and the old name is kept as an alias for any direct callers.
    """
    if isinstance(obj, DataBaseConnection):
        e = DataBaseConnection()
        e.fromJson(obj)
        return e
    elif isinstance(obj, DataBase):
        return DataBase(obj)
    elif isinstance(obj, DataTable):
        return DataTable(obj)
    elif isinstance(obj, DataRow):
        return DataRow(obj)
    elif isinstance(obj, set):
        # NOTE(review): returning a set from default() is not JSON
        # serializable; a list is probably intended — behavior preserved.
        return set(obj)
    else:
        # BUG FIX: the base-class call was missing `return`, so unknown
        # types serialized as None instead of raising TypeError.
        return JSONEncoder.default(self, obj)

defaulf = default  # backward-compatible alias for the original (misspelled) name
def download_csv():
    """Run the requested SQL query and send the result set as a CSV attachment.

    FIX: all three files are now opened with context managers — the original
    leaked the counter-file and CSV handles if an exception occurred between
    open() and close().
    """
    if request.args.get('sql'):
        sql = urllib.unquote(request.args.get('sql'))
        x = DataTable(db_uri)
        results = x.makeTable(sql)
        headers = x.columns
        # Bump the persistent counter used to give each export a unique name.
        with open('auto_increment.txt', 'r') as f:
            auto_increment = int(f.read()) + 1
        print(auto_increment)
        with open('auto_increment.txt', 'w') as f:
            f.write(str(auto_increment))
        fn = 'static/output_{}.csv'.format(auto_increment)
        with open(fn, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(headers)
            for row in results:  # index from enumerate() was unused; dropped
                writer.writerow(row)
        return send_file(fn,
                         mimetype='text/csv',
                         attachment_filename='output.csv',
                         as_attachment=True)
def newFunc(self):
    """Create a new table; opens popups for the user to choose its type and name.

    BUG FIXES: QInputDialog.getItem/getText return (value, ok) tuples. The
    original compared the whole tuple to "Members" (always False, so a Group
    was never created) and passed the ok-flag tableName[1] instead of the
    entered text to Group().
    """
    items = ["Members", "Finances"]
    tableType = QInputDialog.getItem(self, "New table", "table type", items, 0, False)
    if tableType[1]:
        tableName = QInputDialog.getText(self, "New table", "table name")
        if tableName[1]:
            if tableType[0] == "Members":
                table = Group(tableName[0])
            else:
                table = Table(tableName[0])
            dataTable = DataTable(name=tableName[0],
                                  table=table,
                                  tableType=tableType[0])
            self.mainWindow_.contentTab.addTable(dataTable)
def update_current_view_xrange(self):
    """Refresh the view-space labels for the theory x-range text boxes.

    FIX: the xmin and xmax branches were copy-paste duplicates; they are
    folded into one loop over (input box, output label) pairs. Behavior is
    unchanged: a non-numeric entry shows 'N/A'.
    """
    view = self.parent_dataset.parent_application.current_view
    # Single-row scratch table used to push one x value through the view transform.
    tmp_dt = DataTable(axarr=[])
    tmp_dt.data = np.empty((1, 3))
    tmp_dt.data[:] = np.nan
    tmp_dt.num_rows = 1
    tmp_dt.num_columns = 3
    for box, label in ((self.th_xmin, self.view_xmin),
                       (self.th_xmax, self.view_xmax)):
        try:
            bound = float(box.text())
        except ValueError:
            label.setText('N/A')
        else:
            tmp_dt.data[0, 0] = bound
            x, y, success = view.view_proc(tmp_dt, self.file.file_parameters)
            label.setText("%.4g" % x[0, 0])
def test_add_column_invalid_type(self):
    """add_column with an unknown column type must raise.

    BUG FIX: the arguments were wrapped in a single tuple, so add_column
    received one tuple positional argument instead of ('col', 'invalid')
    as two separate arguments — the test could pass for the wrong reason.
    """
    a_table = DataTable('A')
    self.assertRaises(Exception, a_table.add_column, 'col', 'invalid')
def show_all(self):
    """Pause the game and show every database record in a table widget."""
    self.pause_game()
    all_records = self.database_worker.get_all()
    self.setCentralWidget(DataTable(all_records))
def __init__(self, dbname, sex):
    """Open the underlying table in `dbname` and remember the sex filter."""
    DataTable.__init__(self, dbname)
    # sex selects which subset of records this table represents
    self.sex = sex
class File(object):
    """Basic class that describes elements of a DataSet

    [description]
    """

    def __init__(self,
                 file_name="",
                 file_type=None,
                 parent_dataset=None,
                 axarr=None):
        """
        **Constructor**

        [description]

        Keyword Arguments:
            - file_name {str} -- Full path
            - file_type {[type]} -- [description] (default: {None})
            - parent_dataset {[type]} -- [description] (default: {None})
            - axarr {[type]} -- [description] (default: {None})
        """
        self.file_full_path = os.path.abspath(file_name)
        # Short name is the base name without extension; used as a dict key elsewhere.
        tmpname = os.path.basename(self.file_full_path)
        self.file_name_short = os.path.splitext(tmpname)[0]
        self.file_type = file_type
        self.parent_dataset = parent_dataset
        self.axarr = axarr
        # plot attributes
        self.marker = None
        self.color = None
        self.filled = None
        self.size = None
        # Shift variables (one slot per plotted series)
        self.isshifted = [False] * DataTable.MAX_NUM_SERIES
        self.xshift = [0] * DataTable.MAX_NUM_SERIES
        self.yshift = [0] * DataTable.MAX_NUM_SERIES
        self.header_lines = []
        self.file_parameters = {}
        self.active = True
        self.data_table = DataTable(axarr, self.file_name_short)
        # extra theory xrange
        self.with_extra_x = False
        self.theory_xmin = "None"
        self.theory_xmax = "None"
        self.theory_logspace = True
        self.th_num_pts = 10  # number of points
        self.nextramin = 0
        self.nextramax = 0

    def __str__(self):
        """Readable form: full path plus the file parameters dict.

        [description]
        """
        return '%s: %s' % (self.file_full_path, self.file_parameters)

    def mincol(self, col):
        """Minimum value in data_table column col

        [description]
        """
        return self.data_table.mincol(col)

    def minpositivecol(self, col):
        """Minimum positive value in data_table column col

        [description]
        """
        return self.data_table.minpositivecol(col)

    def maxcol(self, col):
        """Maximum value in data_table column col

        [description]
        """
        return self.data_table.maxcol(col)
def setUp(self):
    """Create a fresh DataTable('A') per test and register a cleanup message."""
    # Note: ('cleanup executado') is a parenthesized string, not a tuple —
    # it is passed as a single positional argument to my_cleanup.
    self.addCleanup(self.my_cleanup, ('cleanup executado'))
    self.table = DataTable('A')
class DataTableTest(unittest.TestCase):
    """Unit tests for DataTable naming and column management."""

    def setUp(self):
        # Fresh table per test; cleanup just prints a confirmation message.
        self.addCleanup(self.my_cleanup, ('cleanup executado'))
        self.table = DataTable('A')

    def my_cleanup(self, msg):
        print(msg)

    def test_get_name(self):
        self.assertEqual("A", self.table._get_name())

    def test_set_name(self):
        self.table._set_name("aurelio")
        self.assertEqual("aurelio", self.table._get_name())

    def test_delete_name(self):
        self.assertRaises(AttributeError, self.table._del_name)

    def test_add_column(self):
        self.assertEqual(0, len(self.table._columns))
        self.table.add_column('BId', 'bigint')
        self.assertEqual(1, len(self.table._columns))
        self.table.add_column('value', 'numeric')
        self.assertEqual(2, len(self.table._columns))
        self.table.add_column('desc', 'varchar')
        self.assertEqual(3, len(self.table._columns))

    def test_add_column_invalid_type(self):
        """add_column with an unknown column type must raise.

        BUG FIX: the arguments were wrapped in a single tuple, so
        add_column received one tuple argument instead of two separate
        positional arguments.
        """
        a_table = DataTable('A')
        self.assertRaises(Exception, a_table.add_column, 'col', 'invalid')

    def test_add_column_invalid_type_fail(self):
        # Equivalent check written without assertRaises.
        error = False
        try:
            self.table.add_column('col', 'invalid')
        except:
            error = True
        if not error:
            self.fail("Chamada não gerou erro, mas deveria")
def recordSession(param, sessionKey, searchType):
    """Persist one CodeScan session record built from `param` into the shelve table.

    FIX: the sixteen copy-pasted try/except blocks (one per optional field)
    are collapsed into a single data-driven loop with identical semantics:
    each optional key is copied from `param` when present and not None,
    otherwise stored as ''.
    """
    # establish connection to the persistent dataTable (shelve file)
    dataTable = DataTable("shelve/codeScanTable")
    rowData = dict()
    rowData['runDate'] = str(today_date)
    rowData['searchType'] = searchType
    rowData['sessionKey'] = sessionKey
    optional_keys = ('elapsedTime', 'top', 'series', 'label', 'command',
                     'commandArgument', 'files', 'contents', 'processSize',
                     'container', 'title', 'commandButton', 'componentName',
                     'tag', 'attribute', 'attribute_value')
    for key in optional_keys:
        # The original used a bare except around param[key]; preserve that
        # best-effort lookup (param may not support every key).
        try:
            value = param[key]
        except Exception:
            value = None
        rowData[key] = '' if value is None else value
    rowNum = dataTable.insertRow(rowData)
    dataTable.closeTable()  # close the file connection
def benchFileNamed(self, name):
    """Benchmark DataTable.readString against the contents of file `name`.

    FIX: the file handle from open() was never closed; it is now wrapped in
    a try/finally (Python 2-compatible, matching the xrange usage here).
    """
    fh = open(name)
    try:
        contents = fh.read()
    finally:
        fh.close()
    for x in xrange(self._iters):
        # we duplicate lines to reduce the overhead of the loop
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
        dt = DataTable()
        dt.readString(contents)
def recordSession(param, sessionKey, searchType):
    """Store one CodeScan session record assembled from `param` in the shelve table.

    FIX: replaces the sixteen duplicated try/except blocks with one loop.
    Semantics preserved: each optional key is taken from `param` when present
    and not None, otherwise recorded as the empty string.
    """
    dataTable = DataTable(
        "shelve/codeScanTable")  # establish connection to the persistent dataTable
    rowData = dict()
    rowData['runDate'] = str(today_date)
    rowData['searchType'] = searchType
    rowData['sessionKey'] = sessionKey
    for key in ('elapsedTime', 'top', 'series', 'label', 'command',
                'commandArgument', 'files', 'contents', 'processSize',
                'container', 'title', 'commandButton', 'componentName',
                'tag', 'attribute', 'attribute_value'):
        # Best-effort lookup, mirroring the original bare except around param[key].
        try:
            value = param[key]
        except Exception:
            value = None
        rowData[key] = value if value is not None else ''
    rowNum = dataTable.insertRow(rowData)
    dataTable.closeTable()  # close the file connection
def __init__(self, name="Theory", parent_dataset=None, axarr=None):
    """
    **Constructor**

    The following variables should be set by the particular realization of the theory:
        - parameters (dict): Parameters of the theory
        - function (func): Function that calculates the theory
        - min (real): min for integration/calculation
        - max (real): max
        - npoints (int): Number of points to calculate
        - point_distribution : all_points, linear, log
        - dt (real): default time step
        - dt_min (real): minimum time step for adaptive algorithms
        - eps (real): precision for adaptive algorithms
        - integration_method : Euler, RungeKutta5, AdaptiveDt
        - stop_steady (bool): Stop calculation if steady state of component 0 is attained

    Keyword Arguments:
        - name {str} -- Name of theory (default: {"Theory"})
        - parent_dataset {DataSet} -- DataSet that contains the Theory (default: {None})
        - ax {matplotlib axes} -- matplotlib graph (default: {None})
    """
    super().__init__()
    self.name = name
    self.parent_dataset = parent_dataset
    self.axarr = axarr
    self.ax = axarr[0]  # theory calculation only on this plot
    self.parameters = OrderedDict(
    )  # keep the dictionary key in order for the parameter table
    self.tables = {}
    self.function = None
    self.active = True  # defines if the theory is plotted
    self.calculate_is_busy = False
    self.axarr[0].autoscale(False)
    self.autocalculate = True
    self.extra_data = {}  # Dictionary saved during "Save Project"

    # THEORY OPTIONS
    self.npoints = 100
    self.dt = 0.001
    self.dt_min = 1e-6
    self.eps = 1e-4
    self.stop_steady = False
    self.is_fitting = False
    self.has_modes = False

    ax = self.ax

    # XRANGE for FIT — a draggable yellow span plus its two boundary lines,
    # all hidden until the user enables range fitting.
    self.xmin = -np.inf
    self.xmax = np.inf
    self.xrange = ax.axvspan(self.xmin,
                             self.xmax,
                             facecolor='yellow',
                             alpha=0.3,
                             visible=False)
    self.xminline = ax.axvline(self.xmin,
                               color='black',
                               linestyle='--',
                               marker='o',
                               visible=False)
    self.xmaxline = ax.axvline(self.xmax,
                               color='black',
                               linestyle='--',
                               marker='o',
                               visible=False)
    self.xminlinedrag = DraggableVLine(self.xminline, DragType.horizontal,
                                       self.change_xmin, self)
    self.xmaxlinedrag = DraggableVLine(self.xmaxline, DragType.horizontal,
                                       self.change_xmax, self)
    self.is_xrange_visible = False

    # YRANGE for FIT — same construction as the x-range, as a pink horizontal span.
    self.ymin = -np.inf
    self.ymax = np.inf
    self.yrange = ax.axhspan(self.ymin,
                             self.ymax,
                             facecolor='pink',
                             alpha=0.3,
                             visible=False)
    self.yminline = ax.axhline(self.ymin,
                               color='black',
                               linestyle='--',
                               marker='o',
                               visible=False)
    self.ymaxline = ax.axhline(self.ymax,
                               color='black',
                               linestyle='--',
                               marker='o',
                               visible=False)
    self.yminlinedrag = DraggableHLine(self.yminline, DragType.vertical,
                                       self.change_ymin, self)
    self.ymaxlinedrag = DraggableHLine(self.ymaxline, DragType.vertical,
                                       self.change_ymax, self)
    self.is_yrange_visible = False

    # Pre-create as many tables as files in the dataset
    for f in parent_dataset.files:
        self.tables[f.file_name_short] = DataTable(
            axarr, "TH-" + f.file_name_short)
        # initiallize theory table: important for 'single_file' theories
        ft = f.data_table
        tt = self.tables[f.file_name_short]
        tt.num_columns = ft.num_columns
        tt.num_rows = ft.num_rows
        tt.data = np.zeros((tt.num_rows, tt.num_columns))

    self.do_cite("")

    if CmdBase.mode == CmdMode.GUI:
        self.print_signal.connect(
            self.print_qtextbox)  # Asynchronous print when using multithread

    # flag for requesting end of computations
    self.stop_theory_flag = False