def _save_to_dat_callback( self, graph_id, datnum_id):
    """Register a callback that saves the displayed figure into the dat's Figures group."""
    # FIXME: This is dat specific... should not be in here

    def save_to_dat(n_clicks, figure, datnum, name):
        # Require a real click plus both a dat number and a figure; otherwise no-op.
        if not (n_clicks and datnum and figure):
            raise PreventUpdate
        dat = get_dat(datnum)
        figure = go.Figure(figure)
        # Name resolution: explicit input -> figure title -> auto-generated name.
        name = name or figure.layout.title.text
        if not name:
            name = dat.Figures._generate_fig_name(figure, overwrite=False)
        dat.Figures.save_fig(figure, name, sub_group_name='Dash', overwrite=True)
        return True

    dependencies = (
        Output(f'{graph_id}_div-fake-output', 'hidden'),
        Input(f'{graph_id}_but-dat-save', 'n_clicks'),
        State(graph_id, 'figure'),
        State(datnum_id, 'value'),
        State(f'{graph_id}_inp-download-name', 'value'),
    )
    app.callback(*dependencies)(save_to_dat)
def _save_to_shared_callback(self, graph_id):
    """Register a callback that copies the displayed figure into the shared-figures store."""

    def save_to_file(n_clicks, figure, name):
        # Only act on a real click with a figure present.
        if not (n_clicks and figure):
            raise PreventUpdate
        if not name:
            # Fall back to the figure title, then to an auto-generated resetting name.
            name = dictor(figure, 'layout.title.text',
                          NameResetter().get_resetting_fig_name())
        SharedFigs().add_fig(name, figure)
        return True

    app.callback(
        Output(f'{graph_id}_div-fake-output2', 'hidden'),
        Input(f'{graph_id}_but-shared-save', 'n_clicks'),
        State(graph_id, 'figure'),
        State(f'{graph_id}_inp-download-name', 'value'),
    )(save_to_file)
def _download_callback(self, graph_id, file_type: str):
    """https://pypi.org/project/dash-extensions/"""

    def make_file(n_clicks, fig: dict, filename: str):
        # Ignore the initial (un-clicked) invocation.
        if not n_clicks:
            raise PreventUpdate
        fig = go.Figure(fig)
        if not filename:
            # Fall back to the figure title, then a generic default.
            filename = fig.layout.title.text or 'DashFigure'
        fname = filename + f'.{file_type}'
        if file_type == 'html':
            # html is small enough to return inline as a download dict.
            bytes_ = False
            data = fig.to_html()
            mtype = 'text/html'
            return dict(content=data, filename=fname, mimetype=mtype, byte=bytes_)
        if file_type == 'jpg':
            fig.write_image('temp/dash_temp.jpg', format='jpg')
            return send_file('temp/dash_temp.jpg', filename=fname, mime_type='image/jpg')
        if file_type == 'svg':
            fig.write_image('temp/dash_temp.svg', format='svg')
            return send_file('temp/dash_temp.svg', fname, 'image/svg+xml')
        raise ValueError(f'{file_type} not supported')

    # Validate eagerly so a bad file_type fails at registration, not on click.
    if file_type not in ['html', 'jpg', 'svg']:
        raise ValueError(f'{file_type} not supported')
    download_id = f'{graph_id}_download-{file_type}'
    button_id = f'{graph_id}_but-{file_type}-download'
    name_input_id = f'{graph_id}_inp-download-name'
    app.callback(
        Output(download_id, 'data'),
        Input(button_id, 'n_clicks'),
        State(graph_id, 'figure'),
        State(name_input_id, 'value'),
    )(make_file)
def make_callback(self,
                  inputs: Union[List[CALLBACK_TYPE], CALLBACK_TYPE] = None,
                  outputs: Union[List[CALLBACK_TYPE], CALLBACK_TYPE] = None,
                  func: Callable = None,
                  states: Union[List[CALLBACK_TYPE], CALLBACK_TYPE] = None,
                  triggers: Union[List[CALLBACK_TYPE], CALLBACK_TYPE] = None):
    """
    Helper function for attaching callbacks more easily

    Args:
        inputs (List[CALLBACK_TYPE]): The tuples that would go into dash.dependencies.Input()
            (i.e. (<id>, <property>)
        outputs (List[CALLBACK_TYPE]): Similar, (<id>, <property>)
        states (List[CALLBACK_TYPE]): Similar, (<id>, <property>)
        func (Callable): The function to wrap with the callback (make sure it takes the right
            number of inputs in order and returns the right number of outputs in order)
        triggers (): Triggers callback but is not passed to function

    Returns:
        None
    """

    def ensure_list(val) -> List[CALLBACK_TYPE]:
        # Normalize a single (<id>, <property>) tuple or None into a list.
        if isinstance(val, tuple):
            return [val]
        elif val is None:
            return []
        elif isinstance(val, list):
            return val
        else:
            raise TypeError(f'{val} is not valid')

    if inputs is None and triggers is None:
        raise ValueError(
            f"Can't have both inputs and triggers set as None... "
            f"\n{inputs, triggers, outputs, states}")
    if func is None:
        # Fail fast with a clear message instead of app.callback(...)(None).
        raise ValueError('func must be provided')
    inputs, outputs, states, triggers = [
        ensure_list(v) for v in [inputs, outputs, states, triggers]
    ]
    Inputs = [Input(*inp) for inp in inputs]
    Outputs = [Output(*out) for out in outputs]
    States = [State(*s) for s in states]
    # BUG FIX: triggers were previously normalized but never registered, so a
    # callback created with only `triggers` could never fire. Trigger (from
    # dash_extensions.enrich, already used elsewhere in this module) fires the
    # callback without passing its value to `func`, matching the docstring.
    Triggers = [Trigger(*t) for t in triggers]
    app.callback(*Outputs, *Inputs, *Triggers, *States)(func)  # Makes callback here
def add_dash(app):
    """Attach the video upload / caption-preview / processing callbacks to `app`.

    Returns:
        The same Dash app, with callbacks registered.
    """

    @du.callback(
        output=Output(f'{APP_ID}_large_upload_fn_store', 'data'),
        id=f'{APP_ID}_large_upload',
    )
    def get_a_list(filenames):
        # Map index -> uploaded path so the store content is JSON-serializable.
        return {i: filenames[i] for i in range(len(filenames))}

    @app.callback(
        [
            Output(f'{APP_ID}_process_video_button', 'disabled'),
            Output(f'{APP_ID}_t_start_input', 'value'),
            Output(f'{APP_ID}_t_end_input', 'value'),
            Output(f'{APP_ID}_vid_w_input', 'value')
        ],
        [
            Input(f'{APP_ID}_large_upload_fn_store', 'data'),
        ],
    )
    def upload_video(dic_of_names):
        """Enable processing and seed start/end/width inputs from the uploaded clip."""
        if dic_of_names is None:
            return True, 0., None, None
        clip_1 = mpy.VideoFileClip(dic_of_names[list(dic_of_names)[0]])
        return False, 0., clip_1.duration, clip_1.size[0]

    @app.callback(
        Output(f'{APP_ID}_image_div', 'children'),
        [
            Input(f'{APP_ID}_font_select', 'value'),
            Input(f'{APP_ID}_vid_w_input', 'value'),
            Input(f'{APP_ID}_large_upload_fn_store', 'data'),
            Input(f'{APP_ID}_text_input', 'value'),
            Input(f'{APP_ID}_t_start_input', 'value'),
            Input(f'{APP_ID}_t_end_input', 'value'),
            Input(f'{APP_ID}_crop_bot_input', 'value'),
            Input(f'{APP_ID}_crop_top_input', 'value'),
        ],
    )
    def frame_out(font, video_width, dic_of_names, text, clip_1_start, clip_1_end,
                  crop_bot, crop_top):
        """Render a single preview frame (t=0) with the text mask applied, as an <img>."""
        if any(v is None for v in [font, video_width, dic_of_names, text, crop_bot, crop_top]):
            raise PreventUpdate
        clip_1 = mpy.VideoFileClip(dic_of_names[list(dic_of_names)[0]])
        clip_1 = clip_1.fx(mpy.vfx.resize, width=video_width)
        clip_1 = clip_1.subclip(t_start=clip_1_start, t_end=clip_1_end)
        clip_1 = clip_1.fx(mpy.vfx.crop, y1=crop_top, y2=clip_1.size[1] - crop_bot)
        txt_clip = mpy.TextClip(text,
                                size=clip_1.size,
                                color='white',
                                bg_color='black',
                                font=font
                                ).set_duration(clip_1.duration)
        # for image export in memory using PIL (for base64 convert), need to apply mask manually
        f = clip_1.fx(mpy.vfx.resize, width=540).get_frame(t=0)
        mask = 255 * txt_clip.fx(mpy.vfx.resize, width=540).to_mask().get_frame(t=0)
        ff = np.dstack([f, mask]).astype('uint8')
        im = Image.fromarray(ff)
        rawBytes = io.BytesIO()
        im.save(rawBytes, "PNG")
        rawBytes.seek(0)
        return html.Img(src=f"data:image/PNG;base64, {b64encode(rawBytes.read()).decode('utf-8')}")

    @app.callback(
        [
            Output(f'{APP_ID}_video_div', 'children'),
            Output(f'{APP_ID}_download_link', 'href'),
            Output(f'{APP_ID}_download_button', 'disabled'),
        ],
        [
            Input(f'{APP_ID}_process_video_button', 'n_clicks'),
        ],
        [
            State(f'{APP_ID}_large_upload_fn_store', 'data'),
            State(f'{APP_ID}_t_start_input', 'value'),
            State(f'{APP_ID}_t_end_input', 'value'),
            State(f'{APP_ID}_vid_w_input', 'value'),
            State(f'{APP_ID}_text_input', 'value'),
            State(f'{APP_ID}_font_select', 'value'),
            State(f'{APP_ID}_crop_bot_input', 'value'),
            State(f'{APP_ID}_crop_top_input', 'value'),
        ]
    )
    def process_pre_video(n_clicks, dic_of_names, clip_1_start, clip_1_end,
                          video_width, text, font, crop_bot, crop_top):
        """Write the captioned video (full mp4 + low-fps webm preview) and return player/link."""
        if n_clicks is None:
            raise PreventUpdate
        # BUG FIX: this callback declares three outputs; returning a bare None
        # made Dash raise an invalid-return error. PreventUpdate is the correct no-op.
        if dic_of_names is None:
            raise PreventUpdate
        if text is None:
            text = ''
        clip_1 = mpy.VideoFileClip(dic_of_names[list(dic_of_names)[0]])
        clip_1 = clip_1.fx(mpy.vfx.resize, width=video_width)
        clip_1 = clip_1.subclip(t_start=clip_1_start, t_end=clip_1_end)
        clip_1 = clip_1.fx(mpy.vfx.crop, y1=crop_top, y2=clip_1.size[1] - crop_bot)
        txt_clip = mpy.TextClip(text,
                                size=clip_1.size,
                                color='white',
                                bg_color='black',
                                font=font
                                ).set_duration(clip_1.duration)
        clip_1 = clip_1.set_mask(txt_clip.to_mask())
        ffname = Path("downloads") / f'{str(uuid.uuid4())}.mp4'
        Path.mkdir(ffname.parent, parents=True, exist_ok=True)
        cvc = mpy.CompositeVideoClip([clip_1], bg_color=(255, 255, 255))
        # preview video set to 540 width and 5 fps
        fn_pre = '.'.join(str(ffname).split('.')[:-1]) + 'preview_.webm'
        cvc.fx(mpy.vfx.resize, width=540).write_videofile(fn_pre, audio=False, fps=5)
        # write full deal
        cvc.write_videofile(str(ffname), audio=False, fps=clip_1.fps)
        # BUG FIX: close the preview file instead of leaking the handle.
        with open(fn_pre, 'rb') as vid:
            base64_data = b64encode(vid.read())
        base64_string = base64_data.decode('utf-8')
        return [html.Video(src=f'data:video/webm;base64,{base64_string}',
                           controls=True)], f'/{ffname}', False

    return app
def init_dashboard(server, login_reg=True):
    """Create a Plotly Dash dashboard mounted on the given Flask server.

    Args:
        server: Flask application to attach the Dash app to.
        login_reg: When True, wrap the dash views with login protection.

    Returns:
        The Flask server with the dashboard attached.
    """
    # Meta tags for viewport responsiveness
    meta_viewport = {
        "name": "viewport",
        "content": "width=device-width, initial-scale=1, shrink-to-fit=no"
    }
    dash_app = dash.Dash(server=server,
                         url_base_pathname='/admin/dashboard/',
                         suppress_callback_exceptions=True,
                         external_stylesheets=[
                             "/static/css/styles.css",
                             "https://fonts.googleapis.com/css?family=Lato",
                         ],
                         meta_tags=[meta_viewport])
    df = create_dataframe()
    elements = build_nodes_edges_for_cytoscape(df)
    # Create Layout
    dash_app.layout = html.Div(
        children=[
            dcc.Dropdown(id='dropdown', multi=True, placeholder="Select user(s)"),
            html.Div(id='dd-output-container'),
            dcc.Graph(id="histogram-graph", config={'displayModeBar': False}),
            dcc.Interval(
                id='interval-component',
                interval=60 * 1000,  # in milliseconds
                n_intervals=0),
            cyto.Cytoscape(
                id='cytoscape-users-kinds',
                layout={
                    "title": "Actions Per User",
                    'name': 'cose'
                },
                style={
                    'width': '100%',
                    'height': '400px'
                },
                elements=elements,
                stylesheet=[
                    # Group selectors
                    {
                        'selector': 'node',
                        'style': {
                            'content': 'data(label)'
                        }
                    },
                    # Class selectors
                    {
                        'selector': '.blue',
                        'style': {
                            'background-color': 'blue',
                            'line-color': 'blue'
                        }
                    },
                    {
                        'selector': '.green',
                        'style': {
                            'background-color': 'green',
                            'line-color': 'green'
                        }
                    }
                ]),
            dash_table.DataTable(
                id="database-table",
                columns=[
                    {
                        "name": i,
                        "id": i
                    } for i in
                    ['id', 'created_on', 'session_id', 'user', 'kind', 'text']
                    # omit the id column if i != 'id'
                ],
                style_table={'overflowX': 'auto'},
                style_cell_conditional=[
                    {
                        'if': {
                            'column_id': 'session_id'
                        },
                        'width': '20px'
                    },
                    {
                        'if': {
                            'column_id': 'kind'
                        },
                        'width': '20px'
                    },
                    {
                        'if': {
                            'column_id': 'user'
                        },
                        'width': '40px'
                    },
                ],
                style_cell={
                    'minWidth': '20px',
                    'width': '30px',
                    'maxWidth': '250px',
                    'whiteSpace': 'normal',
                },
                data=[],
                sort_action="native",
                sort_mode="multi",
                page_action="native",
                page_size=10,
                tooltip_data=[{
                    column: {
                        'value': str(value),
                        'type': 'markdown'
                    } for column, value in row.items()
                } for row in (df.to_dict('records') if not df.empty else {})],
                tooltip_duration=None,
                css=[{
                    'selector': '.dash-spreadsheet td div',
                    'rule': '''
                        line-height: 15px; max-height: 30px; min-height: 30px; height: 30px;
                        display: block; overflow-y: hidden;
                    '''
                }],
            ),
        ],
        id="dash-container",
    )

    # , prevent_initial_call=True
    @dash_app.callback([
        Output('histogram-graph', 'figure'),
        Output('cytoscape-users-kinds', 'elements'),
        Output('database-table', 'data')
    ], [Input('dropdown', 'value')],
                       State('cytoscape-users-kinds', 'elements'))
    def update_output(value, elements):
        """Filter the data by the selected user(s) and refresh graph, network and table."""
        df1 = create_dataframe()
        if df1.empty:
            raise PreventUpdate
        if value:
            # `value` may be a single user or a list of users.
            val_lst = value if isinstance(value, list) else [value]
            df1 = df1[df1["user"].isin(val_lst)]
        elements = build_nodes_edges_for_cytoscape(df1)
        fig = get_histogram_kind_figure(df1)
        return [fig, elements, df1.to_dict("records")]

    @dash_app.callback(Output("dropdown", "options"),
                       [Input("dropdown", "search_value")])
    def update_options(search_value):
        """Populate the user dropdown with one option per distinct user."""
        df = create_dataframe()
        if df.empty:
            raise PreventUpdate
        # Group keys are the unique user names; i[0] is the key of each group.
        # (A dead local reassignment of `search_value` was removed here — it
        # shadowed the callback input and was never used.)
        return [{
            'label': i[0],
            'value': i[0]
        } for i in df.groupby("user")['user']]

    if login_reg:
        _protect_dashviews(dash_app)
    return dash_app.server
# else: # spec = np.roll(existing_store['spec'],-1,0) # spec[-1] = newLine # existing_store['spec'] = spec # existing_store['freqs'] = freqs # existing_store['timestamp'] = timestamp # return existing_store @app.callback( ServersideOutput("userServerStore", "data"), [Trigger("check_for_data", "n_intervals")], [State("spec", "relayoutData"), State("userServerStore", "data")], prevent_initial_call=True) def update_server_store(relayoutData, userServerStore): existing_store = userServerStore latest_message = numpy_from_Redis(redis_client, 'latest') latest_integration = np.array(latest_message[:-1], dtype=const.DTYPE) latest_timestamp = latest_message[-1] if existing_store == None: existing_store = { 'spec': start_spec, 'freqs': start_freqs, 'timestamp': 0.0 }
@app.callback(ServersideOutput("store", "data"), Input("btn", "n_clicks"))
def query_data(n_clicks):
    """Simulate a slow query; the DataFrame stays server-side (no JSON serialization)."""
    time.sleep(1)
    return px.data.gapminder()  # no JSON serialization here


# FIX: declare the Output before the Input — the original listed Input first,
# which violates Dash's documented dependency ordering convention.
@app.callback(Output("dd", "options"), Input("store", "data"))
def update_dd(df):
    """Populate the dropdown from the stored frame (no JSON de-serialization here)."""
    # NOTE(review): this iterates the values of the `year` column (duplicates
    # included), matching the original behavior — confirm whether unique years
    # were intended.
    return [{
        "label": column,
        "value": column
    } for column in df["year"]]


@app.callback(
    Output("graph", "figure"),
    [Input("dd", "value"), State("store", "data")])
def update_graph(value, df):
    """Draw the sunburst for the selected year (no JSON de-serialization here)."""
    df = df.query("year == {}".format(value))
    return px.sunburst(df,
                       path=['continent', 'country'],
                       values='pop',
                       color='lifeExp',
                       hover_data=['iso_alpha'])


if __name__ == '__main__':
    app.run_server()
def register_callbacks(app, dcc):
    """Register all policy-dashboard callbacks on `app`.

    Args:
        app: The Dash application.
        dcc: The dash_core_components module (passed in by the caller).
    """

    def _forecast_marker_layout():
        # Both CO2 figures share the same "Forecasting from here" marker, so the
        # layout kwargs are built once here instead of being duplicated.
        split = pd.to_datetime('2020-06-11')
        return dict(annotations=[
            dict(yref='paper',
                 y=1,
                 xref='x',
                 x=split,
                 text='Forecasting from here')
        ],
                    shapes=[
                        dict(type='line',
                             yref='paper',
                             y0=0,
                             y1=1,
                             xref='x',
                             x0=split,
                             x1=split,
                             line=dict(dash="dot"))
                    ],
                    transition_duration=500)

    @app.callback(
        Output(component_id='stringency_index_show', component_property='children'),
        [Input(component_id='stringency_index', component_property='value')])
    def update_stringency_index(input_value):
        """Echo the current stringency-index slider value."""
        return 'Stringency Index: {}'.format(input_value)

    # NOTE: the next two callbacks were both named `update_output` in the
    # original; they are renamed so the second no longer shadows the first.
    @app.callback(Output('social-indicators-scroll', 'style'),
                  [Input('input-switch', 'value')])
    def toggle_social_indicators(value):
        """Show the social-indicator list only when the switch is off."""
        if not value:
            return {
                'maxHeight': '250px',
                'overflow': 'scroll',
                'display': 'block'
            }
        else:
            return {
                'maxHeight': '250px',
                'overflow': 'scroll',
                'display': 'none'
            }

    @app.callback(Output('stringency-slider-container', 'style'),
                  [Input('input-switch', 'value')])
    def toggle_stringency_slider(value):
        """Show the stringency slider only when the switch is on."""
        if value:
            return {'display': 'block'}
        else:
            return {'display': 'none'}

    @app.callback([
        Output(component_id='left_co2', component_property='children'),
        Output(component_id='right_co2', component_property='children')
    ], [Input('submit_policy_selection', 'n_clicks')], [
        State('input-switch', 'value'),
        State('stringency_index', 'value'),
        State('school-closing', 'value'),
        State('workplace-closing', 'value'),
        State('public-events', 'value'),
        State('gatherings', 'value'),
        State('public-transport', 'value'),
        State('stay-home', 'value'),
        State('internal-movement', 'value'),
        State('international-travel', 'value'),
        State('country-dropdown', 'value')
    ])
    def submit_button_controller(n_clicks, input_switcher_state, stringency_idx,
                                 school_closing_score, workspace_closing_score,
                                 public_events_score,
                                 gathering_restrictions_score,
                                 public_transport_score, stay_home_score,
                                 internal_movement_score,
                                 international_travel_score, countries):
        """Build the two CO2 forecast graphs from the selected policy inputs."""
        # BUG FIX: n_clicks is None before the first click, and `None > 0`
        # raises TypeError. Treat the initial call like "not clicked yet".
        if not n_clicks:
            return [None, None]
        parse_model_input = ParseModelInputs()
        parse_model_input.countries = countries
        if input_switcher_state:
            # when the toggle button is on right(stringency slider is on display)
            parse_model_input.model_type = DataAnalysingModels.STRINGENCY_INDEX_MODEL
            parse_model_input.stringency_idx = float(stringency_idx)
        else:
            parse_model_input.model_type = DataAnalysingModels.SOCIAL_POLICY_MODEL
            parse_model_input.school_closing_score = int(school_closing_score)
            parse_model_input.workspace_closing_score = int(workspace_closing_score)
            parse_model_input.public_events_score = int(public_events_score)
            parse_model_input.gathering_restrictions_score = int(gathering_restrictions_score)
            parse_model_input.public_transport_score = int(public_transport_score)
            parse_model_input.stay_home_score = int(stay_home_score)
            parse_model_input.internal_movement_score = int(internal_movement_score)
            parse_model_input.international_travel_score = int(international_travel_score)
        out = GenerateOutput(pred_steps=142)
        df = out.get_dataframe_for_plotting(parse_model_input, countries)
        fig1 = px.line(df,
                       x='Date',
                       y='MtCO2/day',
                       color='Country',
                       title="CO<sub>2</sub> Emission per day")
        fig1.update_layout(**_forecast_marker_layout())
        fig2 = px.line(df,
                       x='Date',
                       y='MtCO2 reduced/day',
                       color='Country',
                       title='Reduction in CO<sub>2</sub> '
                             'emission per day')
        fig2.update_layout(**_forecast_marker_layout())
        return [
            dcc.Graph(id='absolute-graph', figure=fig1),
            dcc.Graph(id='reduction-graph', figure=fig2)
        ]
def add_dash(app):
    """Attach the data-upload / process / plot callbacks to `app`.

    Returns:
        The same Dash app, with callbacks registered.
    """

    @du.callback(
        output=Output(f'{APP_ID}_large_upload_fn_store', 'data'),
        id=f'{APP_ID}_large_upload',
    )
    def get_a_list(filenames):
        # Map index -> uploaded path so the store content is JSON-serializable.
        return {i: filenames[i] for i in range(len(filenames))}

    @app.callback(
        Output(f'{APP_ID}_process_data_button', 'disabled'),
        [
            Input(f'{APP_ID}_large_upload_fn_store', 'data'),
            Input(f'{APP_ID}_dcc_upload', 'contents')
        ],
        [
            State(f'{APP_ID}_dcc_upload', 'filename')
        ]
    )
    def upload_data(dic_of_names, list_contents, list_names):
        """Enable the process button once a first non-blank line exists in each upload."""
        # who done it?
        ctx = dash.callback_context
        if not ctx.triggered:
            raise PreventUpdate
        if dic_of_names is None and list_contents is None:
            return True
        lines = []
        trigger_id = ctx.triggered[0]['prop_id'].split('.')[0]
        # dcc.upload component
        if trigger_id == f'{APP_ID}_dcc_upload':
            for i, fn in enumerate(list_names):
                content_type, content_string = list_contents[i].split(',')
                decoded = base64.b64decode(content_string)
                # BUG FIX: the original rebuilt the StringIO on every loop pass
                # and re-read the same first line, hanging forever when that
                # line was blank. Scan the buffer once for the first non-blank
                # line instead (empty string if none).
                line = next(
                    (l for l in io.StringIO(decoded.decode('utf-8')) if l.strip() != ''),
                    '')
                lines.append(line)
        # dash-uploader component
        elif trigger_id == f'{APP_ID}_large_upload_fn_store':
            for k in dic_of_names.keys():
                fn = dic_of_names[k]
                with open(fn) as f:
                    # BUG FIX: a bare next(f) raised StopIteration on an
                    # all-blank file; default to '' instead.
                    line = next((l for l in f if l.strip() != ''), '')
                lines.append(line)
        else:
            return True
        return False

    @app.callback(
        [
            ServersideOutput(f'{APP_ID}_session_store', 'data'),
            Output(f'{APP_ID}_xaxis_select', 'options'),
            Output(f'{APP_ID}_yaxis_select', 'options'),
        ],
        [
            Input(f'{APP_ID}_process_data_button', 'n_clicks'),
        ],
        [
            State(f'{APP_ID}_large_upload_fn_store', 'data'),
            State(f'{APP_ID}_dcc_upload', 'contents'),
            State(f'{APP_ID}_dcc_upload', 'filename')
        ]
    )
    def process_data(n_clicks, dic_of_names, list_contents, list_names):
        """Parse the uploaded record-per-line JSON files into one DataFrame + axis options."""
        if n_clicks is None:
            raise PreventUpdate
        if dic_of_names is None and list_contents is None:
            return [{}], None, None
        dfs = []
        if list_names is not None:
            # Files arrived through the dcc.Upload component (base64-encoded).
            for i, fn in enumerate(list_names):
                content_type, content_string = list_contents[i].split(',')
                decoded = base64.b64decode(content_string)
                lines = [
                    l for l in io.StringIO(decoded.decode('utf-8')).readlines()
                    if l.strip() != ''
                ]
                # One JSON object per line -> wrap in [] and join to parse as records.
                df = pd.read_json('[' + ','.join(lines) + ']', orient='records')
                df['fn'] = fn
                dfs.append(df)
        else:
            # Files arrived through dash-uploader and are already on disk.
            for k in dic_of_names.keys():
                fn = dic_of_names[k]
                with open(fn) as f:
                    lines = [l for l in f.readlines() if l.strip() != '']
                df = pd.read_json('[' + ','.join(lines) + ']', orient='records')
                df['fn'] = Path(fn).stem
                dfs.append(df)
        df = pd.concat(dfs, axis=0, ignore_index=True, sort=True)
        cols = df.columns
        cols_axes = [{'label': c, 'value': c} for c in cols]
        return df, cols_axes, cols_axes

    @app.callback(
        Output(f'{APP_ID}_graph_div', 'children'),
        [
            Input(f'{APP_ID}_xaxis_select', 'value'),
            Input(f'{APP_ID}_yaxis_select', 'value'),
        ],
        [
            State(f'{APP_ID}_session_store', 'data'),
        ]
    )
    def plot_data(xaxis, yaxis, df):
        """Plot y vs x per source file, with alert placeholders for missing selections."""
        if df is None:
            return [dbc.Alert('Upload & Process Data', color='primary')]
        if xaxis is None:
            return [dbc.Alert('Select x axis data', color='primary')]
        if yaxis is None:
            return [dbc.Alert('Select y axis data', color='primary')]
        if xaxis not in df.columns:
            return [dbc.Alert('x axis not in columns', color='danger')]
        if yaxis not in df.columns:
            return [dbc.Alert('y axis not in columns', color='danger')]
        fig = go.Figure()
        fig.update_layout(showlegend=True)
        for name, dfi in df.groupby('fn'):
            # Cap each trace at the last 200k points to keep the figure responsive.
            fig.add_trace(
                go.Scattergl(
                    x=dfi[xaxis].tail(200000),
                    y=dfi[yaxis].tail(200000),
                    name=name
                )
            )
        return [dcc.Graph(figure=fig,
                          config={'modeBarButtonsToAdd': ['drawline', 'drawrect',
                                                          'drawopenpath', 'eraseshape']})]

    return app
# initialize staff result = ResultProcessing() result.load_models_directly(io.BytesIO(decoded)) return result except Exception as e: print(e) raise ValueError('There was an error processing this file.') # return html.Div([ # 'There was an error processing this file.' # ], # ) @app.callback([ServersideOutput('raw-result-store', 'data')], [Input('upload-result-data', 'contents')], [State('upload-result-data', 'filename')], memoize=True) def update_file_output(contents, filename): # display read file status and update main visualization Div if contents is None: raise PreventUpdate global_result = parse_contents_result(contents[0], filename[0]) return global_result def parse_contents_ori(contents, filename): content_type, content_string = contents.split(',') decoded = base64.b64decode(content_string) try: if 'csv' in filename: df = pd.read_csv(io.StringIO(decoded.decode('utf-8')))
html.Button(id="btn-run", children="Take a nap"), # Container for storing the result of the async job. html.Div(id="div-result"), # Container for storing a reference to the async job. dcc.Store(id="result-tuple"), # Interval component for polling updates on the status of the async job. dcc.Interval(id="poller", max_intervals=0), ]) @app.callback([ Output("btn-run", "disabled"), Output("btn-run", "children"), Output("result-tuple", "data"), Output("poller", "max_intervals") ], [Input("btn-run", "n_clicks")], [State("nap-duration", "value")]) def launch_job(n_clicks, value): # Run the job asynchronously (note the .delay syntax). result = take_a_nap.delay(value) # Disable button and set text (or start a spinner, etc.), save result reference, and start polling. return True, "Napping...", result.as_tuple(), -1 @app.callback([ Output("btn-run", "disabled"), Output("btn-run", "children"), Output("div-result", "children"), Output("poller", "max_intervals") ], [Input("poller", "n_intervals")], [State("result-tuple", "data")]) def poll_result(n_intervals, data): result = result_from_tuple(data, app=celery_app)