def meta_data_text(clicked, sites_open, environment, region, year_min, year_max):
    species = 'Ozone'
    if year_min and year_max:
        sites_df = LoadData.Get_Species_Sites(species, environment, region,
                                              sites_open, year_min, year_max)
        table = dash_table.DataTable(
            id='o3_site_table',
            columns=[{"name": i, "id": i} for i in sites_df.columns],
            data=sites_df.to_dict('records'),
            sort_action='native',
            style_table={'maxHeight': 288, 'overflowX': 'scroll'},
            style_header={
                'textAlign': 'center',
                'backgroundColor': 'white',
                'fontWeight': 'bold'},
            style_cell={'textAlign': 'left'},
            style_as_list_view=True,
            # row_selectable="multi",
        )
        count_estimate = str(LoadData.Estimate_data_count(species, sites_df,
                                                          year_min, year_max))
        if len(count_estimate) > 6:
            message = ('There are approximately {} million data points. '
                       'This will take a while to load and process.').format(count_estimate[:-6])
        elif 4 < len(count_estimate) <= 6:
            message = 'Approximately {} data points found'.format(
                int(count_estimate) - int(count_estimate) % 1000)
        else:
            message = 'Approximately {} data points found'.format(count_estimate)
        output = html.Div(children=[html.P(message, style={'color': 'red'}),
                                    html.Br(), table])
    else:
        output = html.P('Select years to analyse')
    return output
def GetData(sites):
    """
    Load the data for the requested sites and return it as a dataframe.

    Function IN:
        sites (LIST): names of the sites to load.
    Function OUT:
        df: dataframe of the site data, with the last (usually bogus) row dropped.
    """
    # Needs a function to specify the filenames and how the data should
    # be opened. For now let's just keep it simple with our Heathfield data.
    # If there are no parameters given (and there should be)
    # then use some sample data.
    # Get a dataframe from LoadData.FromCSV. Leaving the input blank will get
    # the Heathfield data.
    if 'Heathfield' in sites:
        df = LoadData.FromCSV()
        # Use the DateClean function to make the date into a datetime format
        df = TidyData.DateClean_Heathfeild(df)
    elif 'Edinburgh' in sites:
        df = LoadData.Edinburgh_Data()
        df.set_index('Date and Time', inplace=True)
    # Drop the last line as this is usually bogus data
    return df[:-1]
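# Hedged usage sketch of GetData: the site name is one of the two handled above,
# and the list-of-sites argument is an assumption about how the app calls it.
def _example_get_data():
    """Illustration only; never called by the app."""
    df = GetData(['Heathfield'])
    print(df.tail())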
def Site_Summary(site_name, species):
    site_object = LoadData.get_site_info_object(site_name)
    site_variables_list = LoadData.Get_Site_Variables(site_name)
    # Filter out modelled, temperature and pressure variables. (Build a new
    # list rather than removing items while iterating, which skips elements.)
    excluded_words = ('modelled', 'temperature', 'pressure')
    site_variables_list = [
        v for v in site_variables_list
        if not any(word in v.lower().split() for word in excluded_words)
    ]
    # 'a' or 'an' depending on the environment type, e.g. 'an urban' site
    if site_object.environment_type.lower()[0] in ['a', 'e', 'i', 'o', 'u']:
        prefix = 'an'
    else:
        prefix = 'a'
    summary = html.Div(
        id='site_summary',
        children=[
            html.Br(),
            html.P('%s is %s %s site in the %s region, opened in %s.' % (
                site_name, prefix, site_object.environment_type.lower(),
                site_object.region, site_object.date_open.year)),
            html.Br(),
            html.P('This is a %s site measuring the following species:' %
                   site_object.site_type),
            html.Ul([html.Li(x) for x in site_variables_list])
        ])
    return summary
def Site_Week_Summary(site_name, species):
    df = LoadData.get_recent_site_data(site_name, species, days_ago=7)
    plot_data = [
        go.Scatter(x=df.index,
                   y=df.Concentration.values,
                   mode='lines',
                   name=species)
    ]
    plot_title = '%s at %s between %s and %s' % (
        species, site_name, df.index[0].date(), df.index[-1].date())
    # Find the unit for the species
    unit = LoadData.Get_Unit('AURN', species)
    ytitle = '%s (%s)' % (species, unit)
    layout = go.Layout(
        title=plot_title,
        xaxis=dict(title='Date'),
        yaxis=dict(title=ytitle),
        images=[
            dict(source="assets/UoE_Geosciences_2_colour.jpg",
                 xref="paper", yref="paper",
                 x=.6, y=0.95,
                 sizex=0.25, sizey=0.25,
                 xanchor="right", yanchor="bottom"),
            dict(source="assets/ukri-nerc-logo-600x160.png",
                 xref="paper", yref="paper",
                 x=0.83, y=0.95,
                 sizex=0.2, sizey=0.2,
                 xanchor="right", yanchor="bottom"),
            dict(source="assets/DEFRA-logo.png",
                 xref="paper", yref="paper",
                 x=1, y=0.95,
                 sizex=0.13, sizey=0.13,
                 xanchor="right", yanchor="bottom"),
        ],
    )
    plot = dcc.Graph(id='map_site_timeseries',
                     figure={
                         'data': plot_data,
                         'layout': layout
                     })
    return plot
def comparison_plot_renderer(data, variable_options, site_choice, DataResample,
                             start_date, end_date, medianswitch, title, xtitle,
                             ytitle, label_format):
    if not data:
        return ''
    data = data.split(',')
    # Find the minimum year for the site
    start_year, end_year = LoadData.get_site_year_range_db(data[1])
    df = load_station_data(data[0], data[1], [start_year, int(data[3])], data[4:])
    if not isinstance(df, pd.DataFrame):
        return ''
    variable_options = data[4:]
    from dataplot.DataTools.AnalysisTools import ComparisonPlots
    # if comparison_tabs == 'week_comp':
    return ComparisonPlots.CompareWeeks(df,
                                        variable_options=variable_options,
                                        site_choice=site_choice,
                                        DataResample=DataResample,
                                        start_date=start_date,
                                        end_date=end_date,
                                        show_median=medianswitch,
                                        title=title,
                                        xtitle=xtitle,
                                        ytitle=ytitle,
                                        label_format=label_format)
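# A minimal sketch of the comma-joined string that comparison_plot_renderer
# (and get_colourbychoices / site_info_message below) appears to unpack:
# 'site_type,site_name,min_year,max_year,variable1,variable2,...'. The values
# used here are made-up placeholders, not necessarily sites in the database.
def _example_unpack_data_string():
    """Illustration only; never called by the app."""
    example = 'DEFRA AURN,Example Site,2015,2019,Ozone,Nitrogen dioxide'
    parts = example.split(',')
    site_type, site = parts[0], parts[1]
    min_year, max_year = int(parts[2]), int(parts[3])
    variables = parts[4:]
    return site_type, site, min_year, max_year, variables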
def varaible_list(site):
    site_vars = LoadData.get_site_variables_db(site)
    var_options = [{
        'label': i.replace('<sub>', '').replace('</sub>', ''),
        'value': i
    } for i in site_vars]
    return var_options, False
def get_site_minimum_year(site):
    start_year, end_year = LoadData.get_site_year_range_db(site)
    options = [{
        'label': i,
        'value': i
    } for i in range(start_year, end_year + 1)]
    return options
def load_station_data(site_type, sites, years, variables):
    # df stays 0 when no sites are chosen or no matching network exists;
    # callers treat that as "no data" via their isinstance(df, pd.DataFrame) check.
    df = 0
    if sites:
        if site_type == 'DEFRA AURN':
            # df = LoadData.Get_AURN_data( sites, years, variables)
            df = LoadData.Get_One_Site_Data(sites, years, variables)
        else:
            print("Don't have any other data yet")
    return df
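# Hedged usage sketch of load_station_data; the site name, years and variable
# below are placeholders for illustration and may not exist in the database.
def _example_load_station_data():
    """Illustration only; never called by the app."""
    df = load_station_data('DEFRA AURN', 'Example Site', [2018, 2019], ['Ozone'])
    if isinstance(df, pd.DataFrame):
        print(df.head())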
def get_colourbychoices(data, value):
    if not data:
        return ''
    data = data.split(',')
    df = load_station_data(data[0], data[1], [int(data[2]), int(data[3])], data[4:])
    if not isinstance(df, pd.DataFrame):
        return ''
    variable_list = LoadData.get_site_variables_db(data[1])
    var_options = [{'label': i, 'value': i} for i in variable_list]
    return var_options
def site_info_message(site_info_string):
    ## Currently only works with one site chosen
    info = site_info_string.split(',')
    site_type, sites, min_year, max_year = info[0], info[1], str(info[2]), str(info[3])
    site_info = LoadData.get_site_info_object(sites)
    env_type = site_info.environment_type
    gov_region = site_info.region
    message = ("Plotting data for the %s site %s between %s and %s, "
               "which is a %s site in %s.\n" % (
                   site_type, sites, min_year, max_year, env_type, gov_region))
    return message
def Fill_Year_DEFRA_Data(year):
    ## This will be a module to fill up the db with the past values
    ## Likely/hopefully only need this the once.
    all_sites_query = site_info.objects.all()
    ## Need to prioritise input as this takes an absolute age.
    for site in all_sites_query:
        site_name = site.site_name
        site_code = site.site_code
        site_open = site.site_open
        date_open = site.date_open
        date_closed = site.date_closed
        # This skips sites that have already been added to the database
        ### THIS IS NOT A SMART WAY OF DOING THIS BUT IS A TEMP BODGE
        if measurement_data.objects.filter(date_and_time__year=year).filter(
                site_id=site_info.objects.filter(
                    site_name=site_name)).exists():
            continue
        # Don't include sites that are just a quick PM10 site
        # Only includes Brighton Roadside PM10 & Northampton PM10
        if 'PM10' in site_name:
            continue
        if date_open.year > year:
            continue
        # Load in dataframe - could be a memory issue here with the
        # site open the longest
        if site_open:
            date_closed = dt.now()
        # For the time being only get 2018 data
        if site_open:
            print('Getting data for %s: %d - %d (%s)' % (
                site_name, date_open.year, date_closed.year, site_code))
            df = LoadData.Get_AURN_data(site_name, [year, year],
                                        drop_status_and_units=False)
            DEFRA_AURN_data_to_db(df, site_code)
            print('Submitted to database')
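# Hedged sketch of how the back-fill above might be run; the project gives no
# entry point here, so the year range and calling context are assumptions
# (e.g. a one-off run from a Django shell, one year at a time).
def _example_backfill_run():
    """Illustration only; never called by the app."""
    for backfill_year in range(2015, 2019):
        Fill_Year_DEFRA_Data(backfill_year)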
def Get_Latest_AURN_Data(site_name, year):
    # Just add the latest data to the database. This relies on all variables
    # in a site being updated at the same time. Which I think is correct.
    df = LoadData.Get_AURN_data(site_name, [year, year],
                                drop_status_and_units=False)
    # Get the site code - I need a cleaner way of doing this...
    filename = 'dataplot/InfoFiles/DEFRA_AURN_sites_info.csv'
    sites = pd.read_csv(filename)
    site_code = sites['Site Code'].loc[sites['Site Name'] == site_name]
    site_code = site_code.values[0]
    # Query the site info based on the site code
    site_id = site_info.objects.get(site_code=site_code)
    # Get the latest date and time in the database for a given site
    site_measurements = measurement_data.objects.filter(site_id=site_id)
    most_recent_date = site_measurements.latest('date_and_time').date_and_time
    trimmed_df = df.loc[df.index > most_recent_date]
    DEFRA_AURN_data_to_db(trimmed_df, site_code)
def fill_maximum_year(site):
    start_year, end_year = LoadData.get_site_year_range_db(site)
    return end_year
def Update_DEFRA_Data(site_name):
    ## Find where the database still has unverified data in and see
    ## if the DEFRA site has been updated.
    site_id = site_info.objects.get(site_name=site_name)
    site_data = measurement_data.objects.filter(site_id=site_id)
    # Only get data that hasn't been verified but isn't unknown
    queried_data = site_data.exclude(verified='V').exclude(verified='U')
    # Into a dataframe for ease of use
    current_data = pd.DataFrame.from_records(queried_data.values(
        'date_and_time', 'value', 'verified'), index='date_and_time')
    # Get the years that still have unverified data
    years = current_data.index.year.unique().values
    # If the range of years is more than two then it's unlikely it'll
    # ever be verified so only keep the first two
    if len(years) > 2:
        years = [years[0], years[1]]
    if max(years) < dt.now().year - 1:
        # If the maximum year is more than a year ago then don't bother doing anything
        return
    if min(years) < dt.now().year - 1:
        # If the minimum year is more than a year ago then only use recent year
        years = [years[0]]
    # Load in the data again
    new_df = LoadData.Get_AURN_data(site_name, [years[0], years[-1]],
                                    drop_status_and_units=False)
    pollutant_cols = []
    status_cols = []
    unit_cols = []
    for c in new_df.columns:
        if c.split('.')[0].lower() == 'status':
            status_cols.append(c)
        elif c.split('.')[0].lower() == 'unit':
            unit_cols.append(c)
        else:
            pollutant_cols.append(c)
    # Need to now update the database
    for i, col in enumerate(pollutant_cols):
        # Get the relevant status for the measurement
        status_col = new_df[status_cols[i]]
        status_col.replace('R', 'V', inplace=True)
        # Fill the nan values with 'U' for unknown - although this will rarely be
        # a problem as all nan status have a matching nan measurement
        status_col.dropna(inplace=True)
        status_col = status_col.fillna('U')
        chemical_formula = Get_Chemical_Formula(col)
        measurement_name = 'DEFRA_AURN_%s' % chemical_formula
        temp_col = new_df[col]
        temp_col.dropna(inplace=True)
        for x in range(len(temp_col)):
            # Filter the data by measurement_id, site and time
            # There should only be one data entry for each of these
            data_entry = measurement_data.objects.filter(
                measurement_id=measurement_name).filter(
                    site_id=site_id).filter(
                        date_and_time=temp_col.index[x])[0]
            if data_entry.verified == 'V':
                continue
            elif status_col[x] in ['U', 'N']:
                continue
            elif status_col[x] == data_entry.verified:
                continue
            else:
                data_entry.verified = status_col[x]
                data_entry.value = temp_col[x]
                data_entry.save()
def main_site_map(environment, region, species):
    mapbox_access_token = 'pk.eyJ1IjoiZG91Z2ZpbmNoIiwiYSI6ImNqZHhjYnpqeDBteDAyd3FsZXM4ZGdqdTAifQ.xLS22vmqzVYR0SAEDWdLpQ'
    # site_df = LoadData.get_all_site_info(environment, region)
    # random_sizes = np.random.randint(20, size = len(site_df))
    # For the time being lets just set variable and time
    date = datetime(2017, 12, 14, 12)
    variable = species
    vals_df = LoadData.all_sites_one_var_data(date, variable, region, environment)
    unit = LoadData.Get_Unit('AURN', species)
    size_scale = 1
    variable_vals = vals_df.value * size_scale
    hover_text = [
        '%s: %.3f %s' % (vals_df.index.tolist()[x], variable_vals[x], unit)
        for x in range(len(variable_vals))
    ]
    data = [
        go.Scattermapbox(
            lat=vals_df.latitude.tolist(),
            lon=vals_df.longitude.tolist(),
            mode='markers',
            # customdata = final_df.index.tolist(),
            marker=go.scattermapbox.Marker(
                color=variable_vals.tolist(),
                colorscale='Viridis',
                showscale=True,
                size=14,
                colorbar=dict(title=species + ' ' + unit, titleside='right'),
                # opacity = 0.85,
                # color = chosen_hour,
                # cmax = last_day.max(axis = 1).max(),
                # colorbar = {'title':var_choice}
            ),
            text=hover_text,
        )
    ]
    layout = go.Layout(
        showlegend=False,
        autosize=True,
        # showlegend = True,
        height=750,
        hovermode='closest',
        margin={'l': 0.2, 'r': 0.2, 't': 0.2, 'b': 0.2},
        mapbox=dict(accesstoken=mapbox_access_token,
                    bearing=0,
                    center=dict(lat=55, lon=-3.2),
                    pitch=0,
                    zoom=4.5),
    )
    fig = dict(data=data, layout=layout)
    return len(vals_df), fig
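# Hedged sketch of how main_site_map's two return values could be wired into
# the map page layout defined in DEFRA_map_page below. The callback signature
# and wording are assumptions, not the app's actual registration code.
def _example_map_callback(environment, region, species):
    """Illustration only; never called by the app."""
    n_sites, fig = main_site_map(environment, region, species)
    counter_text = '%d sites shown' % n_sites   # -> html.Div(id='site_counter_output')
    return counter_text, fig                    # -> dcc.Graph(id='main_map').figure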
        # (fragment: continues inside a loop over site_info records)
            continue
        site_year_open = site.date_open.year
        site_year_closed = site.date_closed
        if site_year_closed:
            site_year_closed = site_year_closed.year
        else:
            site_year_closed = dt.now().year
        site_year_open = 2020  # Overrides the start year so only 2020 onwards is processed
        for year in range(site_year_open, site_year_closed + 1):
            print('Processing {} data for site {}'.format(year, site.site_name))
            try:
                df = LoadData.Get_AURN_data(site.site_name, [year, year],
                                            drop_status_and_units=False)
            except (HTTPError, URLError) as e:
                print('No web data for {} {}'.format(site.site_name, year))
                continue
            pollutant_cols = []
            status_cols = []
            unit_cols = []
            for c in df.columns:
                if c.split('.')[0].lower() == 'status':
                    status_cols.append(c)
                elif c.split('.')[0].lower() == 'unit':
                    unit_cols.append(c)
                else:
                    pollutant_cols.append(c)
def site_count_data(species, split_by):
    site_count_df = LoadData.Yearly_Site_Count(species, split_by=split_by)
    return site_count_df
def uk_ozone():
    ## Get the sites available from the DEFRA AURN network
    site_regions = LoadData.AURN_regions()
    region_choices = ['All'] + site_regions
    region_options = [{'label': i.strip(), 'value': i.strip()}
                      for i in region_choices]
    site_envs = LoadData.AURN_environment_types()
    env_choices = ['All'] + site_envs
    env_options = [{'label': i.strip(), 'value': i.strip()}
                   for i in env_choices]

    #### Start the page layout
    page_layout = html.Div(id = 'full_page_container', children =
        ### The first items are for the common attributes (ie site)
        [
        html.Div(className = 'page-header', children = [
            html.Div(id = 'home-logo-holder',
                     children = [html.A(id = 'home-logo', href="/")]),
            html.Div(id = 'page-header-holder',
                     children = [html.A('UK Atmosphere',
                                        id = "page-header-text", href = "/")]),
            ]),

        html.Div(className = 'page-body', children = [
            html.H3('Analysis for UK ozone from DEFRA AURN sites.'),
            html.Br(),

            html.Label('Select a region:'),
            dcc.Dropdown(id = 'o3_region_choice',
                         multi = True,
                         options = region_options,
                         value = 'All'),
            html.Br(),

            html.Label('Select an environment type:'),
            dcc.Dropdown(id = 'o3_env_choice',
                         multi = True,
                         options = env_options,
                         value = 'All'),
            html.Br(),

            html.Label('Select a range of years:'),
            dcc.Dropdown(id = 'o3_minimum_year',
                         placeholder = 'Select start year...',
                         value = 2000),
            html.P('To'),
            dcc.Dropdown(id = 'o3_maximum_year',
                         placeholder = 'Select end year...'),
            html.Br(),

            daq.BooleanSwitch(id = 'o3_site_open',
                              on = False,
                              label = 'Only use sites currently open',
                              labelPosition = 'top'),
            html.Br(),

            html.Button('Find Ozone Data', id = 'o3_go_button'),
            html.Br(),
            html.Br(),
            dcc.Loading(id = "o3_meta_data_load",
                        children = [html.Div(id = 'o3_meta_data_text')],
                        type = "dot"),
            html.Hr(),

            html.Button('Load Ozone Data', id = 'o3_load_button'),  # disabled = True),
            html.Hr(),

            html.Div(id = 'o3_data_values_holder', children = [
                daq.GraduatedBar(id = 'o3_load_bar',
                                 size = 500,
                                 # max = 100,
                                 value = 0,
                                 showCurrentValue = True),
                html.Div(id = 'o3_dataframe-holder'),
                dcc.Interval(id = 'Interval', interval = 500),
                dcc.Store(id = 'load_id_store'),
                html.Div(id = 'loaded_sites2'),
                html.Div(id = 'tester_output')]),

            ### Create a div to place the dataframe while it's being used but not
            ### viewable by the user. Make data Json - very slow when being read
            html.Div(id = 'o3_metadata-holder', style = {'display': 'none'}),

            ### ************************** Site Count ***************************
            html.Div(id = 'O3_SiteCountHolder', className = 'plot_holder', children = [
                dcc.Loading(id = "loading-sitecount",
                            children = [html.Div(id = 'O3_SiteCountPlot')],
                            type = "dot",
                            className = 'main_plot'),
                html.Div(id = 'O3_SiteCountTools', className = 'plot_tools', children = [
                    html.H3('Site Count Plot Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_SiteCountTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    html.Br(),
                    html.Label('Split by:'),
                    dcc.RadioItems(id = 'Site_Count_Split',
                                   options = [{'label': i, 'value': i} for i in
                                              ['Total', 'Environment Type', 'Region',]],
                                   value = 'Total'),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),
            html.Br(),

            html.Label(),
            dcc.RadioItems(id = 'O3_Env_or_Regions',
                           options = [{'label': i, 'value': i} for i in
                                      ['Environment Type', 'Region']],
                           value = 'Environment Type',
                           labelStyle = {'display': 'inline-block'}),

            ### Each placeholder for plots and their individual controls go below
            ### ************************** TimeSeries ***************************
            html.Div(id = 'O3_TimeSeriesHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_TimeSeries', className = 'main_plot'),
                html.Div(id = 'TimeSeriesTools', className = 'plot_tools', children = [
                    html.H3('Time Series Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_TimeSeriesTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    html.Label('X Axis Label'),
                    dcc.Input(id = 'O3_TimeSeriesXTitle',
                              placeholder = 'Enter X axis label',
                              value = 'Year'),
                    html.Br(),
                    html.Label('Y Axis Label'),
                    dcc.Input(id = 'O3_TimeSeriesYTitle',
                              placeholder = 'Enter Y axis label',
                              value = ''),
                    html.Br(),
                    dcc.RadioItems(id = 'O3_TimeSeriesLabelFormat',
                                   options = [{'label': i, 'value': i} for i in
                                              ['Variable Name', 'Chemical Formula',]],
                                   value = 'Variable Name'),
                    html.Br(),
                    html.Label('Value Type'),
                    dcc.RadioItems(id = 'O3_ValueType',
                                   options = [{'label': i, 'value': i} for i in
                                              ['Annual Mean', 'Annual Maximum', 'Annual Minimum']],
                                   value = 'Annual Mean'),
                    html.Br(),
                    html.Label('Line Type'),
                    dcc.RadioItems(id = 'O3_TimeSeriesLineOrScatter',
                                   options = [{'label': i, 'value': i} for i in
                                              ['Scatter', 'Line', 'Line & Scatter']],
                                   value = 'Line & Scatter'),
                    ])
                ]),
            html.Hr(),

            ### ********************* Trend Table *********************************
            html.Div(id = 'o3_trend_table'),
            html.Hr(),

            ### ********************* Gamma plot *********************************
            html.Div(id = 'O3_Gamma_Plot_Holder', className = 'plot_holder', children = [
                html.Div(id = 'O3_Gamma_Plot', className = 'main_plot'),
                html.Div(id = 'O3_GammaTools', className = 'plot_tools', children = [
                    html.H3('Gamma Plot Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_GammaTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),

            ### ********************* YearlyExceed *********************************
            html.Div(id = 'O3_YearlyExceedHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_YearlyExceed', className = 'main_plot'),
                # html.Div(id = 'Correlation', className = 'main_plot'),
                html.Div(id = 'O3_YearlyExceedTools', className = 'plot_tools', children = [
                    html.H3('Yearly Exceedance Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_YearlyExceedTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),

            ### ********************* Yearly siteExceed *********************************
            html.Div(id = 'O3_YearlySiteExceedHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_YearlySiteExceed', className = 'main_plot'),
                # html.Div(id = 'Correlation', className = 'main_plot'),
                html.Div(id = 'O3_YearlySiteExceedTools', className = 'plot_tools', children = [
                    html.H3('Yearly Site Exceedance Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_YearlySiteExceedTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),

            ### ********************* MonthlyExceed *********************************
            html.Div(id = 'O3_MonthlyExceedHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_MonthlyExceed', className = 'main_plot'),
                # html.Div(id = 'DiurnalCycle', className = 'main_plot'),
                html.Div(id = 'O3_MonthlyExceedTools', className = 'plot_tools', children = [
                    html.H3('Monthly Exceedance Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_MonthlyExceedTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),

            ### ******************* WeeklyExceed ***************************
            html.Div(id = 'O3_WeeklyExceedHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_WeeklyExceed', className = 'main_plot'),
                # html.Div(id = 'HourlyBoxplots', className = 'main_plot'),
                html.Div(id = 'O3_WeeklyExceedTools', className = 'plot_tools', children = [
                    html.H3('Weekly Exceedance Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_WeeklyExceedTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Br(),
            html.Hr(),

            ### ********************* ExceedMap *********************************
            html.Div(id = 'ExceedMapHolder', className = 'plot_holder', children = [
                html.Div(id = 'O3_ExceedMap', className = 'main_plot'),
                # html.Div(id = 'WeeklyCycle', className = 'main_plot'),
                html.Div(id = 'O3_ExceedMapTools', className = 'plot_tools', children = [
                    html.H3('Exceedance Map Tools:'),
                    html.Br(),
                    html.Label('Plot Title'),
                    dcc.Input(id = 'O3_ExceedMapTitle',
                              placeholder = 'Enter Title',
                              value = ''),
                    html.Br(),
                    ]),
                ]),
            html.Hr(),
            ])])

    return page_layout
def get_o3_meta_data(species, environment, region, year_start, year_end):
    if year_start and year_end:
        df = LoadData.get_all_species_obvs(species, environment, region,
                                           year_start, year_end)
    else:
        df = 0
    return df
def list_available_sites(site_region, env_choice, open_sites_only=True):
    sites = LoadData.AURN_site_list_db(site_region, env_choice)
    sites = list(sites)
    sites.sort()
    options = [{'label': i, 'value': i} for i in sites]
    return options
        # (fragment: continues inside a loop over site_info records)
        site_year_closed = site.date_closed
        if site_year_open < 2010:
            site_year_open = 2010  # This is when modelled met started
        site_year_open = 2020  # Overrides the start year so only 2020 onwards is processed
        if site_year_closed:
            site_year_closed = site_year_closed.year
        else:
            site_year_closed = dt.now().year
        for year in range(site_year_open, site_year_closed + 1):
            # print(year)
            site_code = site.site_code
            # try:
            df = LoadData.Get_AURN_Met_Data(site_code, year)
            # except (HTTPError, URLError, KeyError) as e:
            #     continue
            if not isinstance(df, pd.DataFrame):
                continue
            all_entries = []
            for var in df.columns:
                filters = {
                    'site_id': site,
                    'date_and_time__year': year,
                    'measurement_id': var_ids[var]
                }
                avail_data = measurement_data.objects.filter(**filters)
                if len(avail_data):
def DEFRA_map_page():
    site_regions = LoadData.AURN_regions()
    region_choices = ['All'] + site_regions
    region_options = [{'label': i.strip(), 'value': i.strip()}
                      for i in region_choices]
    site_envs = LoadData.AURN_environment_types()
    env_choices = ['All'] + site_envs
    env_options = [{'label': i.strip(), 'value': i.strip()}
                   for i in env_choices]
    all_species = LoadData.get_all_aurn_species()
    species_options = [{'label': i.replace('<sub>', '').replace('</sub>', ''),
                        'value': i} for i in all_species]

    page_layout = html.Div(id = 'full_page_container', children = [
        html.Div(className = 'page-header', children = [
            html.Div(id = 'home-logo-holder',
                     children = [html.A(id = 'home-logo', href="/")]),
            html.Div(id = 'page-header-holder',
                     children = [html.A('UK Atmosphere',
                                        id = "page-header-text", href = "/")]),
            ]),

        html.Div(className = 'page-body', children = [
            html.H3('Map of DEFRA AURN sites'),

            html.Div(className = 'tool_explainer', children = [
                html.P('Select a pollutant and location on the map to see recent measurements'),
                ]),

            html.Div(className = 'map_data_selection', children = [
                html.Label('Select an environment:'),
                dcc.Dropdown(id = 'map_env_choice',
                             multi = False,
                             options = env_options,
                             value = 'All'),
                html.Br(),
                html.Label('Select a region:'),
                dcc.Dropdown(id = 'map_region_choice',
                             multi = False,
                             options = region_options,
                             value = 'All'),
                html.Br(),
                html.Label('Select a species:'),
                dcc.Dropdown(id = 'map_species_choice',
                             multi = False,
                             options = species_options,
                             value = 'Ozone'),
                html.Br(),
                ]),

            # Map layout will go here
            html.Div(id = 'map_output_holder', children = [
                html.Div(id = 'main_map_holder', children = [
                    html.Div(id = 'site_counter_output'),
                    dcc.Loading(id = "loading-main-map",
                                children = [dcc.Graph(id = 'main_map',
                                                      config = {'scrollZoom': True})],
                                type = "dot"),
                    ]),
                html.Div(id = 'site_plot_from_map', children = [
                    html.H4(id = 'site_name_from_map'),
                    dcc.Tabs(id = "map_tabs", value = 'site_sum', children = [
                        dcc.Tab(label = 'Site Summary', value = 'site_sum'),
                        dcc.Tab(label = 'Last 7 Days', value = 'site_week'),
                        dcc.Tab(label = 'Yearly Stats', value = 'yearly_stats'),
                        ]),
                    html.Div(id = 'map_site_info'),
                    ]),
                ])
            ])
        ])

    return page_layout
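# Hedged sketch of how this layout might be served. The app's actual routing
# and callback registration are not shown in this file, so the Dash app setup
# below is an assumption for illustration only.
def _example_serve_map_page():
    """Illustration only; never called by the app."""
    import dash
    app = dash.Dash(__name__)
    app.layout = DEFRA_map_page()
    return app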