Example #1
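# Excerpt from a class: register_app wires two callbacks onto a
# dash-extensions app. The first loads a CSV into a ServersideOutput-backed
# store on an interval; the second fills a DataTable with the columns
# selected via the category dropdowns.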
    def register_app(self, theapp):
        @theapp.callback([ServersideOutput(self.main_store.id, 'data')],
                         [Trigger(self.trigger_interval.id, 'n_intervals')])
        def _update_main_store():
            print(f"EiaAccess._update_main_store")
            df = pd.read_csv(self.input_pet_file)
            return df

        @theapp.callback([
            Output(self.data_dt.id, 'data'),
            Output(self.data_dt.id, 'columns')
        ], [
            Input(self.main_store.id, 'data'),
            Input(self.num_displayable_columns.id, 'value'),
            Input(self.eia_cat_div.dropdowns_store.id, 'data')
        ])
        def _populate_data_dt(df_main_store_data, num_displayable_columns,
                              dropdowns_store):

            print(
                f"EiaAccess._populate_data_dt number of columns:{len(df_main_store_data.columns.values)}"
            )
            if dropdowns_store is None:
                columns = df_main_store_data.columns.values[:num_displayable_columns]
            else:
                padd_name = dropdowns_store['padd_name']
                padd_location = None  #dropdowns_store['padd_location']
                padd_production_type = dropdowns_store['padd_production_type']
                padd_fuel_type = dropdowns_store['padd_fuel_type']
                gas_price_region = dropdowns_store['gas_price_region']
                gas_price_gas_type = dropdowns_store['gas_price_gas_type']
                gas_price_grade = dropdowns_store['gas_price_grade']

                df_cols = self.eia_cat_div.eia_categories.get_column_set(
                    padd_name, padd_location, padd_production_type,
                    padd_fuel_type, gas_price_region, gas_price_gas_type,
                    gas_price_grade)
                columns = list(set(df_cols.col.values))
                # make date the first column
                columns = ['date'] + [c for c in columns if c != 'date']
                columns = columns[:num_displayable_columns]

            df_ret = df_main_store_data[columns]
            ret_columns = [{'name': c, 'id': c} for c in columns]  # DataTable columns take 'name', not 'label'

            dict_ret = df_ret.to_dict('records')
            return dict_ret, ret_columns

        self.eia_cat_div.register_app(theapp)
        self.logger.info('registered eia_access')
Example #2
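# A rolling-spectrogram store updated from Redis on an interval tick; the
# commented-out block below is an earlier revision of the update logic.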
#     if timestamp == old_timestamp:
#         raise PreventUpdate
#     else:

#         spec = np.roll(existing_store['spec'],-1,0)
#         spec[-1] = newLine

#         existing_store['spec'] = spec
#         existing_store['freqs'] = freqs
#         existing_store['timestamp'] = timestamp

#         return existing_store


@app.callback(
    ServersideOutput("userServerStore", "data"),
    [Trigger("check_for_data", "n_intervals")],
    [State("spec", "relayoutData"),
     State("userServerStore", "data")],
    prevent_initial_call=True)
def update_server_store(relayoutData, userServerStore):
    existing_store = userServerStore

    latest_message = numpy_from_Redis(redis_client, 'latest')
    latest_integration = np.array(latest_message[:-1], dtype=const.DTYPE)
    latest_timestamp = latest_message[-1]

    if existing_store is None:
        existing_store = {
            'spec': start_spec,
            'freqs': start_freqs,
            'timestamp': 0.0
        }

    # The source snippet is truncated here; the remainder is reconstructed
    # from the commented-out logic at the top of this example (names adapted).
    if latest_timestamp == existing_store['timestamp']:
        raise PreventUpdate

    spec = np.roll(existing_store['spec'], -1, 0)
    spec[-1] = latest_integration

    existing_store['spec'] = spec
    existing_store['timestamp'] = latest_timestamp
    return existing_store

Example #3
import time
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
from dash_extensions.enrich import Dash, Output, Input, State, ServersideOutput

app = Dash(prevent_initial_callbacks=True)
app.layout = html.Div([
    html.Button("Query data", id="btn"),
    dcc.Dropdown(id="dd"),
    dcc.Graph(id="graph"),
    dcc.Loading(dcc.Store(id='store'), fullscreen=True, type="dot")
])
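# dcc.Loading wraps the Store, so the fullscreen spinner shows while the
# server-side query below is filling it.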


@app.callback(ServersideOutput("store", "data"), Input("btn", "n_clicks"))
def query_data(n_clicks):
    time.sleep(1)
    return px.data.gapminder()  # no JSON serialization here


@app.callback(Input("store", "data"), Output("dd", "options"))
def update_dd(df):
    return [{"label": column, "value": column}
            for column in df["year"].unique()]  # no JSON de-serialization here


@app.callback(
    Output("graph", "figure"),
    [Input("store", "data"), Input("dd", "value")])
def update_graph(df, value):
    # body truncated in the source; a minimal plausible completion:
    return px.scatter(df[df["year"] == value], x="gdpPercap", y="lifeExp")
Example #4
import time
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
from dash_extensions.enrich import Dash, Output, Input, Trigger, ServersideOutput

app = Dash(prevent_initial_callbacks=True)
app.layout = html.Div([
    html.Button("Query data", id="btn"),
    dcc.Dropdown(id="dd"),
    dcc.Graph(id="graph"),
    dcc.Loading(dcc.Store(id='store'), fullscreen=True, type="dot")
])


@app.callback(ServersideOutput("store", "data"),
              Trigger("btn", "n_clicks"),
              memoize=True)
def query_data():
    time.sleep(1)
    return px.data.gapminder()

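# With memoize=True the cached result is reused for identical trigger
# arguments (even across page loads), so the one-second query is not re-run.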

@app.callback(Input("store", "data"), Output("dd", "options"))
def update_dd(df):
    return [{"label": column, "value": column} for column in df["year"].unique()]


@app.callback(
    Output("graph", "figure"),
    [Input("store", "data"), Input("dd", "value")])
def update_graph(df, value):
    # body truncated in the source; a minimal plausible completion:
    return px.scatter(df[df["year"] == value], x="gdpPercap", y="lifeExp")
Example #5
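# Imports assumed by this excerpt (not shown in the source); APP_ID is a
# hypothetical placeholder for the constant defined elsewhere in the app.
import base64
import io
from pathlib import Path

import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_uploader as du
import pandas as pd
import plotly.graph_objects as go
from dash.exceptions import PreventUpdate
from dash_extensions.enrich import Input, Output, ServersideOutput, State

APP_ID = 'dash_uploader_demo'  # hypothetical
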
def add_dash(app):

    @du.callback(
        output=Output(f'{APP_ID}_large_upload_fn_store', 'data'),
        id=f'{APP_ID}_large_upload',
    )
    def get_a_list(filenames):
        return {i: filenames[i] for i in range(len(filenames))}


    @app.callback(
        Output(f'{APP_ID}_process_data_button', 'disabled'),
        [
            Input(f'{APP_ID}_large_upload_fn_store', 'data'),
            Input(f'{APP_ID}_dcc_upload', 'contents')
        ],
        [
            State(f'{APP_ID}_dcc_upload', 'filename')
        ]
    )
    def upload_data(dic_of_names, list_contents, list_names):
        # who done it?
        ctx = dash.callback_context
        if not ctx.triggered:
            raise PreventUpdate

        if dic_of_names is None and list_contents is None:
            return True

        lines = []
        # dcc.upload component
        if ctx.triggered[0]['prop_id'].split('.')[0] == f'{APP_ID}_dcc_upload':
            for i, fn in enumerate(list_names):
                content_type, content_string = list_contents[i].split(',')
                decoded = base64.b64decode(content_string)
                # take the first non-blank line; the original rebuilt the
                # StringIO on every pass, re-reading line one forever
                for line in io.StringIO(decoded.decode('utf-8')):
                    if line.strip() != '':
                        break
                lines.append(line)
        # dash-uploader component
        elif ctx.triggered[0]['prop_id'].split('.')[0] == f'{APP_ID}_large_upload_fn_store':
            for k in dic_of_names.keys():
                fn = dic_of_names[k]
                with open(fn) as f:
                    while True:
                        line = next(f)
                        if line.strip() != '':
                            break
                lines.append(line)

        else:
            return True

        return False


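    # ServersideOutput below lets process_data return the DataFrame itself;
    # it is cached server-side rather than JSON-serialized to the browser.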
    @app.callback(
        [
            ServersideOutput(f'{APP_ID}_session_store', 'data'),
            Output(f'{APP_ID}_xaxis_select', 'options'),
            Output(f'{APP_ID}_yaxis_select', 'options'),
        ],
        [
            Input(f'{APP_ID}_process_data_button', 'n_clicks'),
        ],
        [
            State(f'{APP_ID}_large_upload_fn_store', 'data'),
            State(f'{APP_ID}_dcc_upload', 'contents'),
            State(f'{APP_ID}_dcc_upload', 'filename')
        ]
    )
    def process_data(n_clicks, dic_of_names, list_contents, list_names):
        if n_clicks is None:
            raise PreventUpdate

        if dic_of_names is None and list_contents is None:
            return [{}], None, None

        dfs = []
        if list_names is not None:
            for i, fn in enumerate(list_names):
                content_type, content_string = list_contents[i].split(',')
                decoded = base64.b64decode(content_string)
                # with open(decoded, 'rb') as f:
                lines = [l for l in io.StringIO(decoded.decode('utf-8')).readlines() if l.strip() != '']
                df = pd.read_json('[' + ','.join(lines) + ']', orient='records')
                df['fn'] = fn
                dfs.append(df)
        else:
            for k in dic_of_names.keys():
                fn = dic_of_names[k]
                with open(fn) as f:
                    lines = [l for l in f.readlines() if l.strip() != '']
                df = pd.read_json('[' + ','.join(lines) + ']', orient='records')
                df['fn'] = Path(fn).stem
                dfs.append(df)
        df = pd.concat(dfs, axis=0, ignore_index=True, sort=True)

        cols = df.columns
        cols_axes = [{'label': c, 'value': c} for c in cols]

        return df, cols_axes, cols_axes


    @app.callback(
        Output(f'{APP_ID}_graph_div', 'children'),
        [
            Input(f'{APP_ID}_xaxis_select', 'value'),
            Input(f'{APP_ID}_yaxis_select', 'value'),
        ],
        [
            State(f'{APP_ID}_session_store', 'data'),
        ]
    )
    def plot_data(xaxis, yaxis, df):
        if df is None:
            return [dbc.Alert('Upload & Process Data', color='primary')]
        if xaxis is None:
            return [dbc.Alert('Select x axis data', color='primary')]
        if yaxis is None:
            return [dbc.Alert('Select y axis data', color='primary')]

        if xaxis not in df.columns:
            return [dbc.Alert('x axis not in columns', color='danger')]
        if yaxis not in df.columns:
            return [dbc.Alert('y axis not in columns', color='danger')]

        fig = go.Figure()
        fig.update_layout(showlegend=True)
        for name, dfi in df.groupby('fn'):
            fig.add_trace(
                go.Scattergl(
                    x=dfi[xaxis].tail(200000),
                    y=dfi[yaxis].tail(200000),
                    name=name
                )
            )

        return [dcc.Graph(figure=fig, config={'modeBarButtonsToAdd':['drawline', 'drawrect', 'drawopenpath', 'eraseshape']})]

    return app
Example #6
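# Upload flow: an uploaded pickle is parsed into a ResultProcessing object
# and cached in a server-side store; memoize=True avoids re-parsing repeat
# uploads of the same contents.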
def parse_contents_result(contents, filename):
    # opening reconstructed from parse_contents_ori below; the original
    # excerpt begins mid-function
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)
    try:
        if 'pkl' in filename:
            # initialize the result-processing object
            result = ResultProcessing()
            result.load_models_directly(io.BytesIO(decoded))
            return result
    except Exception as e:
        print(e)
        raise ValueError('There was an error processing this file.')
        # return html.Div([
        #     'There was an error processing this file.'
        # ],
        # )


@app.callback([ServersideOutput('raw-result-store', 'data')],
              [Input('upload-result-data', 'contents')],
              [State('upload-result-data', 'filename')],
              memoize=True)
def update_file_output(contents, filename):
    # display read file status and update main visualization Div
    if contents is None:
        raise PreventUpdate
    global_result = parse_contents_result(contents[0], filename[0])
    return global_result


def parse_contents_ori(contents, filename):
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)
    try:
Example #7
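# Tail of a ticker-validation callback (its opening is truncated in the
# source), followed by a callback that loads financial data into a
# server-side store once the ticker input is marked valid.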
                             '\nValid Tickers from listed Exchanges:\n' +
                             '\n'.join(exchange_list()))
    except Exception as InvalidTicker:
        # dbc.Alert(
        #     str(InvalidTicker),
        #     id="alert-invalid-ticker",
        #     dismissable=True,
        #     is_open=True,
        # )
        logger.exception(InvalidTicker)
        return False, True, '', handler_data_message(
            'See Error Message(s) below:', traceback.format_exc())


@app.callback([
    ServersideOutput('fin-store', 'data'),
    Output('select-column', 'options'),
    Output('status-info', 'loading_state'),
    Output('handler-past-data', 'data')
], [Input('ticker-input', 'valid')], [
    State('ticker-input', 'value'),
    State('analysis-mode', 'value'),
    State('snapshot-uuid', 'value')
])
def fin_report(ticker_valid, ticker, live_analysis_mode, snapshot_uuid):
    if not ticker_valid:
        return [], [], {'is_loading': True}, dash.no_update
    try:
        ticker_allcaps = ticker.upper()
        db_key = ticker_allcaps + '-' + snapshot_uuid
        if 1 in live_analysis_mode or not db.exists(db_key):