def update_output(clicks, x_axis, y_axis, start_date, end_date, nbins_x, nbins_y):
    """Return an updated heatmap based on the changes in the configuration.

    Keyword arguments:
    clicks -- Number of clicks on the apply button
    x_axis -- The column used for the x-axis
    y_axis -- The column used for the y-axis
    start_date -- String from the date picker representing the start date
    end_date -- String from the date picker representing the end date
    nbins_x -- Number of bins used for the x-axis
    nbins_y -- Number of bins used for the y-axis
    """
    if clicks is None:
        # Callback fired on initial page load, before the apply button
        # has been pressed: keep the current figure.
        raise PreventUpdate

    start_date = get_datetime_from_str(start_date)
    # Add one day so events on the selected end date are included.
    end_date = get_datetime_from_str(end_date) + timedelta(days=1)

    session_id = session.get_session_id()
    eq_data = earthquake_data.get_earthquake_data(session_id)
    filtered_data = eq_data.filter_by_dates(start_date, end_date)

    # Use distinct names for the column Series instead of shadowing the
    # x_axis/y_axis column-name parameters.
    x_column = filtered_data.data[x_axis]
    y_column = filtered_data.data[y_axis]

    z, xbins, ybins = filtered_data.get_weight_matrix(
        x_column.name, y_column.name, nbins_x, nbins_y
    )
    return heatmap.get_component(z, xbins, ybins)
def get_component(session_id):
    """Fetch catalog data from cache and return it as a dash DataTable.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    # Guard clause: nothing uploaded (or an empty frame) yields a message.
    if eq_data is None or eq_data.data.shape == (0, 0):
        return 'No uploaded data found'

    table = dash_table.DataTable(
        data=eq_data.data.to_dict('records'),
        columns=[{'name': col, 'id': col} for col in eq_data.data.columns],
        style_table={
            'overflow': 'auto',
            'padding': '1em'
        },
        page_action='native',
        page_current=0,
        page_size=20,
        sort_action='native',
        sort_mode='single',
        sort_by=[],
        filter_action='native'
    )
    return dcc.Loading(table)
def update_output(contents, filename, session_id, use_sample_data=False):
    """Return a success or an error message depending on the success of
    parsing.

    Keyword arguments:
    contents -- The contents of the uploaded file as a binary string
    filename -- Name of the uploaded file
    session_id -- ID of the current session
    use_sample_data -- Whether the sample data set should be used
    """
    if use_sample_data:
        filename = SAMPLE_DATA_FILENAME

    if contents is not None or use_sample_data:
        try:
            dataparser.parse_contents(
                contents, filename, session_id, use_sample_data
            )
            eq_data = earthquake_data.get_earthquake_data(session_id)
            return dbc.Alert(
                """File {} uploaded successfully, {} rows.
                Please select a tool from the menu to inspect the data.
                """.format(filename, eq_data.data.shape[0]),
                color='success'
            )
        except Exception as ex:
            # Best-effort: log the parse failure and fall through to the
            # generic error alert below.
            print('Uploader:', ex)

    return html.Div([
        dbc.Alert(
            'The file could not be parsed, please try another one',
            color='danger'
        )
    ])
def update_map(slider_value, apply_clicks, session_id, start_date, end_date,
               timestep_value, timestep_seconds):
    """Update the map based on the slider position and the configuration.

    This is a callback function invoked by changes to either
    the time slider or the configuration.

    Keyword arguments:
    slider_value -- The value of the current slider position.
        Between 0 and steps-1.
    apply_clicks -- The number of clicks on the apply button.
    session_id -- ID of the current session
    start_date -- String from the date picker representing the start date
    end_date -- String from the date picker representing the end date.
        Accepted for the callback signature but not used here: the visible
        window is derived from start_date, the time step and slider_value.
    timestep_value -- The time step in some time unit. Earthquakes that
        happened within the time window of this size are shown.
    timestep_seconds -- The number of seconds the selected time unit
        is equal to
    """
    timestep = timestep_seconds * timestep_value
    start_date = get_datetime_from_str(start_date)
    # NOTE(review): end_date used to be parsed here as well, but the
    # parsed value was never read — the dead conversion was removed.
    eq_data = earthquake_data.get_earthquake_data(session_id)
    filtered_data = filter_data(eq_data, start_date, timestep, slider_value)
    return quake_map.get_component(filtered_data)
def get_layout(session_id):
    """Return map layout with interactive map, time slider and configuration.

    This method draws the earthquakes that happened during the first
    week of the data. To update the map, use method `update_map`.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    # Guard clause: nothing to draw without uploaded rows.
    if eq_data.data.shape[0] == 0:
        return 'No uploaded data found'

    start_date, end_date = eq_data.get_daterange()
    # Snap the start of the window to midnight.
    start_date = start_date.replace(hour=0, minute=0,
                                    second=0, microsecond=0)
    default_end_date = start_date + eq_data.get_default_timedelta()

    filtered_data = filter_data(eq_data, start_date, DEFAULT_TIMESTEP, 0)
    templates = eq_data.filter_by_dates(
        start_date, end_date).get_templateids()
    sizes = get_sizes(filtered_data.data)
    opacities = get_opacities(filtered_data.get_datetimes())
    california_data = is_california_data(eq_data.catalog_type)

    map_row = dbc.Row(
        html.Div(id='map-wrapper', children=[
            quake_map.get_component(filtered_data, sizes, opacities)
        ]))
    slider_row = dbc.Row(
        html.Div(id='slider-wrapper', children=[
            time_slider.get_component(
                start_date,
                default_end_date + timedelta(days=1),
                DEFAULT_TIMESTEP)
        ]))
    config_col = dbc.Col(
        map_config.get_component(
            start_date, end_date, default_end_date,
            filtered_data.data.select_dtypes(include='number').columns,
            california_data, templates))

    return html.Div([
        dbc.Row([dbc.Col([map_row, slider_row]), config_col]),
        # Hidden store for the currently selected slider time.
        html.Div(id='current_slider_time', style={'display': 'none'})
    ])
def update_output(session_id, time_frame, xaxis, yaxis, color, size):
    """Return an updated scatter plot for the session's data.

    NOTE(review): time_frame, xaxis, yaxis, color and size are currently
    ignored and replaced with hard-coded defaults — see the TODO below.
    """
    # TODO: Get these from user configuration
    min_time = datetime(2008, 1, 1, 0, 4)
    max_time = datetime(2008, 1, 9, 5)

    eq_data = earthquake_data.get_earthquake_data(session_id)

    sample_limit = 1000
    x_axis = eq_data.get_datetimes()[:sample_limit]
    y_axis = eq_data.get_depths()[:sample_limit]
    color = 'red'
    size = eq_data.get_magnitudes()[:sample_limit]

    return scatterplot.update_output(session_id, min_time, max_time,
                                     x_axis, y_axis, color, size)
def get_layout(session_id):
    """Return the layout for a scatter plot and its configurations.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    # No cached data (or an empty frame): show a placeholder message.
    if eq_data is None or eq_data.data.shape == (0, 0):
        return 'No uploaded data found'
    start_date, end_date = eq_data.get_daterange()
    # Snap the start of the default window to midnight.
    start_date = start_date.replace(hour=0, minute=0,
                                    second=0, microsecond=0)
    default_end_date = start_date + eq_data.get_default_timedelta()
    # Extra day so the default end date itself is included in the window.
    filtered_data = eq_data.filter_by_dates(
        start_date, default_end_date + timedelta(days=1))
    x_axis = filtered_data.get_datetimes()
    y_axis = filtered_data.get_depths()
    if CatalogTypes(filtered_data.catalog_type) == CatalogTypes.OTA_EXT:
        # OTA_EXT catalogs appear to store depth with the opposite sign
        # convention, so flip it for plotting — TODO confirm.
        y_axis = -y_axis
    event_ids = filtered_data.get_eventids()
    color = 'red'
    # Column parameters for the default point-size source (presumably a
    # (name, ...) tuple — index 0 is used as the column name below).
    default_size_column = eq_data.get_column_params(
        eq_data.get_magnitudes().name)
    sizes = get_sizes(filtered_data.data, default_size_column, is_map=False)
    size_data = filtered_data.data[default_size_column[0]]
    return dcc.Loading(
        html.Div([
            dbc.Row([
                # Left column: the scatter plot itself, wrapped in a
                # loading spinner.
                dbc.Col(
                    dcc.Loading(id='scatter-plot-loading',
                                className='plot_sidebar_open',
                                children=html.Div(
                                    id='scatter-plot',
                                    className='plot_sidebar_open',
                                    children=scatterplot.get_component(
                                        x_axis, y_axis, event_ids, color,
                                        sizes, size_data)))),
                # Right column: the configuration sidebar.
                dbc.Col(
                    scatterplot_config.get_component(
                        start_date, end_date, default_end_date,
                        filtered_data.data.columns,
                        filtered_data.data.select_dtypes(include='number'),
                        x_axis.name, y_axis.name,
                        default_size_column[0]))
            ])
        ]))
def get_layout(session_id):
    """Return the scatter plot layout for the session's data.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    if eq_data is None or eq_data.data.shape == (0, 0):
        return 'No uploaded data found'

    # TODO: Get these from user configuration
    min_time = datetime(2008, 1, 1, 0, 4)
    max_time = datetime(2008, 1, 9, 5)

    sample_limit = 1000
    x_axis = eq_data.get_datetimes()[:sample_limit]
    y_axis = eq_data.get_depths()[:sample_limit]
    color = 'red'
    size = eq_data.get_magnitudes()[:sample_limit]

    return scatterplot.get_component(session_id, min_time, max_time,
                                     x_axis, y_axis, color, size)
def get_table(session_id):
    """Fetch catalog data from cache and return it as a dash DataTable.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    if eq_data is None or eq_data.data.shape == (0, 0):
        # Previously this fell through and implicitly returned None;
        # return the same placeholder the other layout helpers use.
        return 'No uploaded data found'
    # Only the first 100 rows are shown.
    return dash_table.DataTable(
        data=eq_data.data[:100].to_dict('records'),
        columns=[{'name': i, 'id': i} for i in eq_data.data.columns],
        style_table={
            'overflow': 'auto',
            'padding': '1em'
        })
def update_template_options(start_date, end_date):
    """Update the list of template IDs to choose from to include all
    template IDs occurring at least once in the selected time period.

    Keyword arguments:
    start_date -- String from the date picker representing the start date
    end_date -- String from the date picker representing the end date
    """
    session_id = session.get_session_id()
    eq_data = earthquake_data.get_earthquake_data(session_id)

    window_start = get_datetime_from_str(start_date)
    # Include the selected end day itself in the range.
    window_end = get_datetime_from_str(end_date) + timedelta(days=1)

    templates = eq_data.filter_by_dates(
        window_start, window_end).get_templateids()
    if templates is None:
        templates = []

    return [
        {'label': template, 'value': template}
        for template in templates
    ]
def get_layout(session_id):
    """Return the layout for a heatmap and its configurations.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    if eq_data is None or eq_data.data.shape == (0, 0):
        return 'No uploaded data found'

    # NOTE(review): unlike the date-picker callbacks, no extra day is
    # added to end_date here — confirm whether the last day is intended
    # to be included.
    start_date, end_date = eq_data.get_daterange()
    filtered_data = eq_data.filter_by_dates(start_date, end_date)

    default_x = filtered_data.get_magnitudes()
    default_y = filtered_data.get_depths()
    default_nbins_x = 40
    default_nbins_y = 40

    z, xbins, ybins = filtered_data.get_weight_matrix(
        default_x.name, default_y.name, default_nbins_x, default_nbins_y
    )

    plot_col = dbc.Col(
        html.Div(
            id='heatmap',
            className='plot_sidebar_open',
            children=heatmap.get_component(z, xbins, ybins)
        )
    )
    config_col = dbc.Col(heatmap_config.get_component(
        start_date, end_date,
        eq_data.data.select_dtypes(include=['number', 'datetime']).columns,
        default_x.name, default_y.name,
        default_nbins_x, default_nbins_y))

    return html.Div([dbc.Row([plot_col, config_col])])
def update_output(clicks, start_date, end_date, x_axis, y_axis, size_column,
                  color_column):
    """Return an updated scatter plot based on changes in the configuration.

    Keyword arguments:
    clicks -- Number of clicks on the apply button
    start_date -- String from the date picker representing the start date
    end_date -- String from the date picker representing the end date
    x_axis -- Name of the column to use for x-axis
    y_axis -- Name of the column to use for y-axis
    size_column -- The column for computing the size of each data point
    color_column -- The column for computing the color of each data point
    """
    if clicks is None:
        # Do not redraw before the apply button has been pressed.
        raise PreventUpdate

    window_start = get_datetime_from_str(start_date)
    # Add a day so events on the chosen end date are kept.
    window_end = get_datetime_from_str(end_date) + timedelta(days=1)

    session_id = session.get_session_id()
    eq_data = earthquake_data.get_earthquake_data(session_id)
    filtered_data = eq_data.filter_by_dates(window_start, window_end)

    sizes = get_sizes(filtered_data.data,
                      eq_data.get_column_params(size_column), False)
    size_data = (None if size_column is None
                 else filtered_data.data[size_column])
    event_ids = filtered_data.get_eventids()
    # Fall back to a uniform color when no color column was selected.
    colors = ('red' if color_column is None
              else filtered_data.data[color_column])

    x_values = filtered_data.data[x_axis]
    y_values = filtered_data.data[y_axis]
    return scatterplot.get_component(x_values, y_values, event_ids,
                                     colors, sizes, size_data)
def update_output(clicks, column, nbins, start_date, end_date):
    """Return an updated histogram based on the changes in the configuration.

    Keyword arguments:
    clicks -- Number of clicks on the apply button
    column -- Name of the column to use for the histogram
    nbins -- Maximum number of bins used
    start_date -- String from the date picker representing the start date
    end_date -- String from the date picker representing the end date
    """
    if clicks is None:
        # Callback fired before the user pressed apply.
        raise PreventUpdate

    window_start = get_datetime_from_str(start_date)
    # Extend by one day so the selected end date is included.
    window_end = get_datetime_from_str(end_date) + timedelta(days=1)

    session_id = session.get_session_id()
    eq_data = earthquake_data.get_earthquake_data(session_id)
    filtered_data = eq_data.filter_by_dates(window_start, window_end)

    return histogram.get_component(filtered_data.data[column], nbins)
def get_layout(session_id):
    """Return map layout with interactive map, time slider and configuration.

    This method draws the earthquakes that happened during the first
    week of the data. To update the map, use method `update_map`.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    if eq_data.data.shape[0] == 0:
        return 'No uploaded data found'

    start_date, end_date = eq_data.get_daterange()
    # Default window: the first week of the data.
    default_end_date = start_date + timedelta(weeks=1)
    filtered_data = filter_data(eq_data, start_date, DEFAULT_TIMESTEP, 0)

    map_row = dbc.Row([
        dbc.Col(
            html.Div(id='map-wrapper',
                     children=[quake_map.get_component(filtered_data)])),
        dbc.Col(
            map_config.get_component(start_date, end_date,
                                     default_end_date))
    ])
    slider_row = dbc.Row([
        dbc.Col(
            html.Div(id='slider-wrapper', children=[
                time_slider.get_component(
                    start_date, default_end_date, DEFAULT_TIMESTEP)
            ]))
    ])
    return html.Div([map_row, slider_row])
def get_layout(session_id):
    """Return the layout for a histogram and its configurations.

    Keyword arguments:
    session_id -- ID of the current session
    """
    eq_data = earthquake_data.get_earthquake_data(session_id)
    if eq_data is None or eq_data.data.shape == (0, 0):
        return 'No uploaded data found'

    start_date, end_date = eq_data.get_daterange()
    default_end_date = start_date + eq_data.get_default_timedelta()
    # Extra day so the default end date itself is included.
    filtered_data = eq_data.filter_by_dates(
        start_date, default_end_date + timedelta(days=1))

    default_column = filtered_data.get_magnitudes()
    default_nbins = 10

    plot_col = dbc.Col(
        dcc.Loading(
            id='histogram-loading',
            className='plot_sidebar_open',
            children=html.Div(
                id='histogram',
                className='plot_sidebar_open',
                children=histogram.get_component(
                    default_column, default_nbins))))
    config_col = dbc.Col(
        histogram_config.get_component(
            eq_data.data.columns, start_date, end_date,
            default_end_date, default_column.name))

    return dcc.Loading(html.Div([dbc.Row([plot_col, config_col])]))
def get_data(session_id):
    """Return the earthquake data cached for the given session.

    Keyword arguments:
    session_id -- ID of the current session
    """
    return earthquake_data.get_earthquake_data(session_id)