def load_filter_choice(vessel):
    # If nothing selected. Prevents loading empty filter options
    if vessel is not None:
        # Load Vessel Data
        if vessel not in dfs:
            dfs[vessel] = SQL().get_vessel(vessel=vessel)
        # Populate Options
        options = [{
            'label': i[0],
            'value': i[0]
        } for i in SQL().get_filter_options()]
        return generate_vessel_filter(options=options)
def generate_table_container(contents=[]):
    children = [
        html.Div([
            html.H4('Database Tables',
                    className='header-title panel-left table-col-3'),
            html.H4('Status',
                    className='header-title panel-right table-col-3'),
            html.H4('Details',
                    className='header-title panel-left table-col-3'),
            html.H4('Action',
                    className='header-title panel-right table-col-1'),
        ], className='table-heading overflow-auto item-element-margin')
    ]
    status = "In Database"
    for row_id, item in enumerate(contents, start=1):
        detail = 'Rows: {}'.format(SQL().get_table_rows(item))
        children.append(
            generate_row(row_name='{}'.format(item),
                         row_id='{}'.format(row_id),
                         status=status,
                         details=detail,
                         ucode=False))
    return html.Div(
        id='database-tables',
        children=children,
        className='item-wrapper'
    )
def load_vessel_df(vessels, table):
    if vessels is not None:
        for vessel in vessels:
            if vessel == "All":
                continue
            if vessel not in dfs:
                dfs[vessel] = SQL().get_vessel(table=table, vessel=vessel)
def update_filter(value, db_table):
    options = [{
        'label': label2,
        'value': value2
    } for label2, value2 in SQL().get_attributes('{}'.format(db_table)).items()]
    return generate_axis_parameters(value, options)
def get_specification(dump, vessel, option, input_value):
    # TODO: Account for multiple specifications
    # Prepare data for specification
    option_type = SQL().get_column_datatypes(column=option, singular=True)
    # Default to equality so `comparison` is always defined; only the
    # varchar case was handled explicitly so far.
    comparison = "=="
    if option_type == 'varchar':
        comparison = "=="
    # Assemble specifications
    conditions = [(option, comparison, input_value)]
    df = dfs[vessel].get_filtered(conditions=conditions)
    return df.to_json()
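# For reference, a minimal sketch of how a list of (column, comparison, value)
# tuples like the `conditions` above could be applied to a plain pandas
# DataFrame. This is NOT the project's own get_filtered (that belongs to its
# dataframe wrapper); the helper name and the example column 'Vessel' are
# illustrative only.
import pandas as pd


def apply_conditions(df, conditions):
    # Apply each (column, comparison, value) tuple as a boolean mask.
    for column, comparison, value in conditions:
        if comparison == "==":
            df = df[df[column] == value]
        elif comparison == ">":
            df = df[df[column] > value]
        elif comparison == "<":
            df = df[df[column] < value]
    return df


# Example: apply_conditions(df, [('Vessel', '==', 'A')]) keeps only rows
# whose hypothetical 'Vessel' column equals 'A'.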
def load_vessel_field(series, db_table):
    # Only query when both a series and a database table have been chosen
    if series is not None and series != u'None' \
            and db_table is not None and db_table != u'None':
        vessels_in_series = SQL().get_vessel_from_series(series=series,
                                                         db_table=db_table)
        if vessels_in_series is not None:
            vesselsInSeries = [{
                'label': i,
                'value': i
            } for i in vessels_in_series]
            vesselsInSeries.append({'value': 'All', 'label': 'All'})
            return vesselsInSeries
    return
def load_series_field(dump):
    return [{
        'label': series,
        'value': series
    } for series in SQL().get_all_series()]
def load_vessel_choice(dump):
    return [{'label': i, 'value': i} for i in SQL().get_vessels()]
def start_menu(self) -> None:
    incorrect_input = True
    while incorrect_input:
        self.my_view.print_menu()
        user_input = self.my_view.get_user_menu_option()
        # Press 1 to load your text file
        if user_input == "1":
            self.data = FileHandler.read_file()
            if self.data == FileNotFoundError:
                self.my_view.file_not_found_message()
            else:
                self.my_view.file_loaded_message()
        # Press 2 to write from plantuml text to python code
        elif user_input == "2":
            if self.data != "":
                self.find_all()
                directory_name = FileHandler.choose_directory()
                if directory_name == TypeError:
                    self.my_view.file_not_found_message()
                elif directory_name == "":
                    self.my_view.exit_file_directory()
                elif directory_name == Exception:
                    self.my_view.generic_error_message()
                else:
                    self.write_all(directory_name)
            else:
                self.my_view.file_not_loaded_warning()
        # Press 3 to start command line interpreter
        elif user_input == "3":
            self.command_line_interpreter()
        # Press 4 to write file to database
        elif user_input == "4":
            if self.data != "":
                error_message = SQL.connect_to_db("assignment1")
                if error_message == PermissionError:
                    self.my_view.user_has_no_file_permission()
                elif error_message == FileNotFoundError:
                    self.my_view.file_not_found_message()
                elif error_message == Exception:
                    self.my_view.generic_error_message()
                else:
                    SQL.c.execute("""DROP TABLE if exists class;""")
                    SQL.create_class_table()
                    classes = self.get_class_names()
                    SQL.insert_data_into_table(classes)
                    self.my_view.database_connected_message()
            else:
                self.my_view.file_not_loaded_warning()
        # Press 5 to print PEP8 class file to screen from database
        elif user_input == "5":
            if self.data != "":
                sql_database_table = SQL.fetch_all_class_data()
                if sql_database_table == PermissionError:
                    self.my_view.user_has_no_file_permission()
                elif sql_database_table == FileNotFoundError:
                    self.my_view.file_not_found_message()
                elif sql_database_table == TypeError:
                    self.my_view.file_not_loaded_warning()
                elif sql_database_table == AttributeError:
                    self.my_view.file_not_loaded_warning()
                elif sql_database_table == Exception:
                    self.my_view.generic_error_message()
                else:
                    self.my_view.read_database_file(sql_database_table)
            else:
                self.my_view.file_not_loaded_warning()
        # Press 6 to load text file, convert data to PEP8 python format,
        # then convert file to pickle format in the same directory
        elif user_input == "6":
            self.data = FileHandler.read_file()
            if self.data == FileNotFoundError:
                self.my_view.file_not_found_message()
            else:
                self.my_view.file_loaded_message()
                self.prep_pep8()
                pickle_status = Pickler.pickle_file(self.pep8_content)
                if pickle_status == PermissionError:
                    self.my_view.user_has_no_file_permission()
                elif pickle_status == FileNotFoundError:
                    self.my_view.file_not_found_message()
                elif pickle_status == Exception:
                    self.my_view.generic_error_message()
                else:
                    self.my_view.pickle_success_message()
        # Press 7 to load data from pickle file
        elif user_input == "7":
            if self.data != "":
                pickle_content = Pickler.unpickle_file()
                if pickle_content == PermissionError:
                    self.my_view.user_has_no_file_permission()
                elif pickle_content == FileNotFoundError:
                    self.my_view.file_not_found_message()
                elif pickle_content == Exception:
                    self.my_view.generic_error_message()
                else:
                    self.my_view.print_my_pickle_content(pickle_content)
                    self.my_view.file_loaded_message()
            else:
                self.my_view.file_not_loaded_warning()
        # Exit
        elif user_input == "8":
            incorrect_input = False
            self.my_view.exit_program()
        else:
            self.my_view.user_has_wrong_input()
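# Note on the error-handling convention above: the menu compares return values
# against exception *classes* (e.g. `== FileNotFoundError`), which implies the
# handlers catch exceptions internally and return the class rather than
# raising. A minimal sketch of that convention, assuming a read_file() that
# asks the user for a path via input(); the prompt text and this FileHandler
# body are illustrative, not the project's actual code.
class FileHandler:
    @staticmethod
    def read_file():
        path = input("Enter the path of the text file to load: ")
        try:
            with open(path) as handle:
                return handle.read()
        except FileNotFoundError:
            # Returned (not raised) so callers can test `== FileNotFoundError`
            return FileNotFoundError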
def get_option_type(option):
    if option is None:
        return None
    return SQL().get_column_datatypes(column=option, singular=True)
def __upload_content(table_name, content):
    return SQL().excel_to_sql(table_name=table_name,
                              excel_file=__parse_excel(content))
import pandas as pd
from config.important_attributes import attributes
from model.database import SQL
from model.dataframe import *
from controller.graph_components.regression import GraphMode

sql = SQL()
db_tables = sql.get_table_names()

row = {
    u'namespace': u'dash_html_components',
    u'type': u'Div',
    u'props': {
        u'className': u'item-wrapper',
        u'n_clicks_timestamp': 1532117798075L,
        u'n_clicks': 1,
        u'children': [{
            u'namespace': u'dash_html_components',
            u'type': u'Div',
            u'props': {
                u'className': u'table-heading overflow-auto item-element-margin',
                u'children': [{
                    u'namespace': u'dash_html_components',
                    u'type': u'H4',
                    u'props': {
                        u'className': u'header-title panel-left table-col-3',
def load_series_field(dump):
    return [{
        'label': table,
        'value': table
    } for table in SQL().get_table_names()]
def delete_button(click, container):
    if click is not None and click != u'None':
        table = container[0]['props']['children']
        SQL().delete_table(table)
    return
def load_table_container(dump):
    return generate_table_container(SQL().get_table_names())
def test(dummy):
    test_sql = SQL()
    df = test_sql.get_table("testtable")
    return generate_table(df)
def load_filter_specification(option):
    # If nothing selected. Prevents loading empty filter options
    if option is not None:
        option_type = SQL().get_column_datatypes(column=option, singular=True)
        return generate_filter_input(option_type)
def load_filter(dump):
    return [{'label': i, 'value': i} for i in SQL().get_column_names()]
def obtain_data(dummy):
    sql = SQL()
    dff = sql.get_table(table_name)
    return dff.to_json()
def update_graph(filtered_df_json, value, settings, graph_mode, clusters,
                 threshold, graphName, xLabel, yLabel, zLabel, extraMin,
                 extraMax, seriesInput, dbTableInput, figure, vessels,
                 *filter_settings):
    # These module-level values are written here and read elsewhere, so they
    # are declared global once up front rather than inside the loops below.
    global gformula, gsols, gr_squared, valuesOrigin
    if figure is not None:
        figure['data'] = []
        minSet = []
        gformula = ""
        gsols = 0.0
        gr_squared = 0.0
        valuesOrigin = ""
        # Populate with 2D data when X and Y are set
        # TODO: Remove hardcode + account for 3D
        if value[1] is None or value[2] is None:
            figure['data'] = []
        else:
            # Create the dataset for the vessels selected
            # Get specifications
            specifications = []
            for i in range(1, len(filter_settings), 3):
                specifications.append(
                    get_condition(filter_settings[i], filter_settings[i + 1],
                                  filter_settings[i + 2]))
            # Clean up and prepare conditions
            conditions = []
            for specification in specifications:
                for condition in specification:
                    conditions.append(condition)
            # Obtain filtered df
            df = []
            singleLineAll = False
            secondAll = False
            for vessel in filter_settings[0]:
                if vessel == "All" and 'multiline' not in settings:
                    df = SQL().get_df_from_series(dbTableInput, seriesInput)
                    singleLineAll = True
                    break
                elif vessel == "All":
                    secondAll = True
                    continue
                df.append(dfs[vessel].get_filtered(conditions=conditions))
            if len(df) == 0 and secondAll:
                singleLineAll = True
                df = SQL().get_df_from_series(dbTableInput, seriesInput)
            if singleLineAll:
                dfsDF = df
            else:
                dfsDF = pd.concat(df)
            # Remove any NaN values
            print("THIS IS VALUE: {}".format(value))
            if value[0] == "2D":
                dfsDF = dfsDF.dropna(subset=[value[1], value[2]])
            else:
                dfsDF = dfsDF.dropna(subset=[value[1], value[2], value[3]])
            if threshold != "None":
                # Remove outliers. NOTE: Adjust the threshold to control how
                # strictly the data is filtered; 1, 1.5 and 3 have been tested
                # (strict to lax).
                mean = np.mean(dfsDF[value[1]])
                stdio = np.std(dfsDF[value[1]])
                print("Mean: " + str(mean) + " Std: " + str(stdio))
                dfsDF = dfsDF[np.abs(dfsDF[value[1]] - mean) <=
                              (threshold * stdio)]
                mean = np.mean(dfsDF[value[2]])
                stdio = np.std(dfsDF[value[2]])
                print("Mean: " + str(mean) + " Std: " + str(stdio))
                dfsDF = dfsDF[np.abs(dfsDF[value[2]] - mean) <=
                              (threshold * stdio)]
                if value[0] == "3D":
                    mean = np.mean(dfsDF[value[3]])
                    stdio = np.std(dfsDF[value[3]])
                    print("Mean: " + str(mean) + " Std: " + str(stdio))
                    dfsDF = dfsDF[np.abs(dfsDF[value[3]] - mean) <=
                                  (threshold * stdio)]
            # unqVessels = dfsDF['Vessel'].unique().tolist()
            unqVessels = filter_settings[0]
            print(unqVessels)
            # Set axis labels if any
            if xLabel == "":
                xName = value[1]
            else:
                xName = xLabel
            if yLabel == "":
                yName = value[2]
            else:
                yName = yLabel
            if graphName == "":
                if value[0] == "2D":
                    gName = value[1] + " vs " + value[2]
                else:
                    gName = value[1] + " vs " + value[2] + " vs " + value[3]
            else:
                gName = graphName
            # Multiline logic
            if 'multiline' in settings:
                counter = 0
                benchmark = 0
                annotation = []
                sfList = SQL().get_vessel_codes()
                for vessel in unqVessels:
                    valuesOrigin = vessel
                    print(list(dfsDF))
                    if vessel == "All":
                        vesselRow = SQL().get_df_from_series(
                            dbTableInput, seriesInput)
                        if value[0] == "2D":
                            vesselRow = vesselRow.dropna(
                                subset=[value[1], value[2]])
                        else:
                            vesselRow = vesselRow.dropna(
                                subset=[value[1], value[2], value[3]])
                    else:
                        vesselRow = dfsDF.loc[dfsDF['Vessel'] == vessel]
                    # Clustering & hover data generation
                    hoverData = []
                    if 'clustering' in settings:
                        vesselRow = k_means(value, vesselRow, clusters)
                        vesselRow['Vessel'] = pd.Series(
                            vessel, index=vesselRow.index)
                        hoverData = np.c_[vesselRow[value[1]],
                                          vesselRow[value[2]]]
                    else:
                        # Generate the hover data
                        # Iterate each row from the db
                        for key, value1 in vesselRow.iterrows():
placeholderText = "" # Iterate each column in the row for index, row in value1.items(): # Compare the value in important_attributes for col in full_attributes: if col == index: if isinstance(row, float): placeholderText += "<b>" + index + "</b>: " + str( round(row, 3)) + "<br>" else: placeholderText += "<b>" + index + "</b>: " + str( row) + "<br>" break hoverData.append(placeholderText) if 'datapoints' in settings: if value[0] == "2D": scatterPoints = go.Scatter( x=vesselRow[value[1].encode('utf8')], y=vesselRow[value[2].encode('utf8')], name=vessel + " Marker", mode='markers', marker=go.Marker(color=colorList[counter]), text=hoverData, ) else: scatterPoints = go.Scatter3d( x=vesselRow[value[1].encode('utf8')], y=vesselRow[value[2].encode('utf8')], z=vesselRow[value[3].encode('utf8')], name=vessel + " Marker", mode='markers', marker=go.Marker(color=colorList[counter]), text=hoverData, ) figure['data'].append(scatterPoints) if 'regression' in settings: if value[0] == "2D": line_data, r_squared, sols, formula = regression( vesselRow[value[1].encode('utf8')], vesselRow[value[2].encode('utf8')], graph_mode, extraMin, extraMax) # tmpLst = [] # tmpLst.append(r_squared) # tmpLst.append(sols) # tmpLst.append(formula) # gformula[vessel] = tmpLst # gformula[vessel] = [r_squared, sols, formula] global gr_squared, gsols, gformula gr_squared = r_squared gsols = sols gformula = formula eqString, supScript = generateEquationString( formula) bestFit = go.Scatter( x=line_data['x'], y=line_data['y'], name=vessel + ' Line', mode='lines', marker=go.Marker(color=colorList[counter]), ) figure['data'].append(bestFit) if benchmark == 0: benchmark = max(line_data['y']) if vessel == "All": vslCde = "All" else: vslCde = sfList[vessel] annotation.append( go.Annotation(x=min(line_data['x']) + 10, y=benchmark - counter * benchmark * 0.1, text=vslCde + ": " + eqString.format(*supScript), showarrow=False)) layout2d = go.Layout( title=gName, plot_bgcolor='rgb(229, 229, 229)', xaxis=go.XAxis( title=xName, zerolinecolor='rgb(255,255,255)', gridcolor='rgb(255,255,255)'), yaxis=dict(title=yName, zerolinecolor='rgb(255,255,255)', gridcolor='rgb(255,255,255)'), annotations=annotation, # yaxis2=dict(title='Percentage', gridcolor='blue', overlaying='y', side='right', range=[100,0]), ) figure['layout'] = layout2d else: surfacePlot, surfaceLayout = plot_3d( vesselRow[value[1].encode('utf8')], vesselRow[value[2].encode('utf8')], vesselRow[value[3].encode('utf8')], value[1], value[2], value[3]) figure['data'].append(surfacePlot) figure['layout'] = surfaceLayout counter += 1 else: # If no multiline # Clustering & Hover Data Generation hoverData = [] if 'clustering' in settings: dfsDF = k_means(value, dfsDF, clusters) hoverData = np.c_[dfsDF[value[1]], dfsDF[value[2]]] else: # Generate the Hover Data # Iterate each row from db for key, value1 in dfsDF.iterrows(): placeholderText = "" # Iterate each column in the row for index, row in value1.items(): # Compare the value in important_attributes for col in full_attributes: if col == index: if isinstance(row, float): placeholderText += "<b>" + index + "</b>: " + str( round(row, 3)) + "<br>" else: placeholderText += "<b>" + index + "</b>: " + str( row) + "<br>" break hoverData.append(placeholderText) if value[0] == "2D": # Add scatter from data set if 'datapoints' toggled if len(figure['data']) < 1: figure['data'].append({}) if 'datapoints' in settings: figure['data'][0] = go.Scatter( x=dfsDF[value[1].encode('utf8')], y=dfsDF[value[2].encode('utf8')], name='Data Marker', mode='markers', 
                            text=hoverData,
                        )
                    else:
                        figure['data'][0] = None
                    # Add line/curve if 'regression' toggled
                    if len(figure['data']) < 2:
                        figure['data'].append({})
                    if 'regression' in settings:
                        line_data, r_squared, sols, formula = regression(
                            dfsDF[value[1].encode('utf8')],
                            dfsDF[value[2].encode('utf8')],
                            graph_mode, extraMin, extraMax)
                        print("R-Squared: " + str(r_squared))
                        print("Sum of Least Squares: " + str(sols))
                        print("A Formula: ")
                        print(formula)
                        gr_squared = r_squared
                        gsols = sols
                        gformula = formula
                        eqString, supScript = generateEquationString(formula)
                        figure['data'][1] = go.Scatter(
                            x=line_data['x'],
                            y=line_data['y'],
                            name='Line',
                            mode='lines',
                        )
                        annotation = go.Annotation(
                            x=min(line_data['x']) + 10,
                            y=max(line_data['y']),
                            text="y=" + eqString.format(*supScript),
                            showarrow=False)
                        layout2d = go.Layout(
                            title=gName,
                            plot_bgcolor='rgb(229, 229, 229)',
                            xaxis=go.XAxis(
                                title=xName,
                                zerolinecolor='rgb(255,255,255)',
                                gridcolor='rgb(255,255,255)'),
                            yaxis=dict(
                                title=yName,
                                zerolinecolor='rgb(255,255,255)',
                                gridcolor='rgb(255,255,255)'),
                            annotations=[annotation],
                            # yaxis2=dict(title='Percentage',
                            #             gridcolor='blue',
                            #             overlaying='y', side='right',
                            #             range=[100, 0]),
                        )
                        figure['layout'] = layout2d
                    else:
                        figure['data'][1] = None
                else:
                    # 3D
                    # Add scatter from data set if 'datapoints' toggled
                    if len(figure['data']) < 1:
                        figure['data'].append({})
                    if 'datapoints' in settings:
                        figure['data'][0] = go.Scatter3d(
                            x=dfsDF[value[1].encode('utf8')],
                            y=dfsDF[value[2].encode('utf8')],
                            z=dfsDF[value[3].encode('utf8')],
                            name='Data Marker',
                            mode='markers',
                            text=hoverData,
                        )
                    else:
                        figure['data'][0] = None
                    # Add line/curve if 'regression' toggled
                    if len(figure['data']) < 2:
                        figure['data'].append({})
                    if 'regression' in settings:
                        surfacePlot, surfaceLayout, minimumSet = plot_3d(
                            dfsDF[value[1].encode('utf8')],
                            dfsDF[value[2].encode('utf8')],
                            dfsDF[value[3].encode('utf8')],
                            value[1], value[2], value[3])
                        figure['data'][1] = surfacePlot
                        figure['layout'] = surfaceLayout
                        minSet = minimumSet
                        print("MINSET")
                        print(minSet)
                    else:
                        figure['data'][1] = None
                    figure['layout']['scene']['xaxis']['title'] = xName
                    figure['layout']['scene']['yaxis']['title'] = yName
                    figure['layout']['scene']['zaxis']['title'] = zName
                    figure['layout']['title'] = gName
        # Clean figure data
        figure['data'] = [i for i in figure['data'] if i is not None]
        return figure
    return default_figure
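# The outlier removal in update_graph repeats the same mean/standard-deviation
# threshold test once per plotted axis. A standalone sketch of that z-score
# style filter, assuming the same numpy/pandas imports as above; the helper
# name remove_outliers is hypothetical and not part of the codebase.
import numpy as np
import pandas as pd


def remove_outliers(df, columns, threshold):
    # Keep only rows within `threshold` standard deviations of the column
    # mean, applied column by column (later columns see the filtered frame).
    for column in columns:
        mean = np.mean(df[column])
        std = np.std(df[column])
        df = df[np.abs(df[column] - mean) <= (threshold * std)]
    return df


# Example: remove_outliers(dfsDF, [value[1], value[2]], 1.5) mirrors the 2D
# case above with the mid-range threshold the comment says was tested.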
    0,   # 2. Sea State
    0,   # 3. Swell
    10,  # 4. Speed
    14,  # 5. RPM
    34,  # 6. Trim
    0,   # 7. Power
]


# Display column names with index numbers
def get_names(names):
    for name in names:
        print("{}. {}".format(names.index(name), name))


sql = SQL()

# # Read Attributes
# get_names(attributes)

# Get database tables
db_tables = sql.get_table_names()
# get_names(db_tables)

# Get column names for the given database table
column_names = sql.get_column_names(db_tables[0])
# get_names(column_names)

# Insert Speed
for i in range(len(col_no)):
    if col_no[i] > 0:
def update_filer(value, mode):
    if value > 0:
        options = [{'label': i, 'value': i} for i in SQL().get_column_names()]
        return generate_graph(mode, options)
def get_table(dummy):
    sql = SQL()
    df = sql.get_table('testtable')
    return generate_table(df)