Example #1
def test_update_master_table():
    init_master_table('amazing_database2.db')
    update_dict = {
        'Dataset_Name': 'my_dataset',
        'Raw_Data_Prefix': 'raw',
        'Cleaned_Data_Prefix': 'clean',
        'Cleaned_Cycles_Prefix': 'cycles',
        'Descriptors_Prefix': 'desc'
    }
    update_master_table(update_dict, 'amazing_database2.db')
    test_df = get_file_from_database('master_table', 'amazing_database2.db')
    expected = pd.DataFrame({
        'Dataset_Name': ['my_dataset'],
        'Raw_Data_Prefix': ['raw'],
        'Cleaned_Data_Prefix': ['clean'],
        'Cleaned_Cycles_Prefix': ['cycles'],
        'Descriptors_Prefix': ['desc']
    })
    assert test_df.equals(expected)

    neg_result = update_master_table(
        None,
        'amazing_database2.db',
    )
    assert neg_result == [{}]

    os.remove('amazing_database2.db')
    return
Example #2
def test_get_file_from_database():
    df = pd.DataFrame({
        'A': [1, 2, 3],
        'B': [10, 20, 30],
        'C': [100, 200, 300]
    })
    upload_filename = 'my_other_amazing_file'
    database_name = 'another_amazing_database.db'
    update_database_newtable(df, upload_filename, database_name)
    assert os.path.exists('another_amazing_database.db')
    result = get_file_from_database('my_other_amazing_file',
                                    'another_amazing_database.db')
    assert result.equals(df)
    neg_result = get_file_from_database('something_else',
                                        'another_amazing_database.db')
    assert neg_result is None
    os.remove('another_amazing_database.db')
    return
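For context, a helper with this signature can be written with sqlite3 and pandas; the sketch below is only an assumption about its shape (the project's real get_file_from_database is not shown in these examples), matching the behavior the test relies on: a DataFrame for an existing table, None otherwise.

import sqlite3
import pandas as pd

def get_file_from_database_sketch(table_name, database_name):
    # Hypothetical stand-in for get_file_from_database.
    con = sqlite3.connect(database_name)
    try:
        # table names in these examples can contain '-', so quote them
        return pd.read_sql_query(f'SELECT * FROM "{table_name}"', con)
    except Exception:
        # missing table: mirrors the `neg_result is None` assertion above
        return None
    finally:
        con.close()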
Example #3
def update_model_indb(filename, n_clicks, new_peak_thresh):
    if n_clicks is not None:
        cleanset_name = filename.split('.')[0] + 'CleanSet'
        df_clean = get_file_from_database(cleanset_name, database)
        feedback_str = generate_model(df_clean, filename, new_peak_thresh,
                                      database)
        feedback = html.Div([feedback_str])
    else:
        feedback = html.Div(['Model has not been updated yet.'])
    return feedback
Example #4
def test_init_master_table():
    init_master_table('new_database.db')
    assert os.path.exists('new_database.db')
    init_table = get_file_from_database('master_table', 'new_database.db')
    expected_cols = [
        'Dataset_Name', 'Raw_Data_Prefix', 'Cleaned_Data_Prefix',
        'Cleaned_Cycles_Prefix', 'Descriptors_Prefix'
    ]
    assert init_table.empty
    assert set(expected_cols) == set(init_table.columns)
    os.remove('new_database.db')
    return
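Judging by this test, init_master_table only needs to create the database file and write an empty master_table with the five prefix columns. A minimal sketch under that assumption, reusing the update_database_newtable helper seen in the other examples:

def init_master_table_sketch(database_name):
    # Hypothetical sketch, not the project's implementation.
    master_df = pd.DataFrame(columns=[
        'Dataset_Name', 'Raw_Data_Prefix', 'Cleaned_Data_Prefix',
        'Cleaned_Cycles_Prefix', 'Descriptors_Prefix'
    ])
    update_database_newtable(master_df, 'master_table', database_name)
    return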
Example #5
def update_link_3(value):
    if value is not None:
        mod_points_df = get_file_from_database(
            value.split('.')[0] + '-ModPoints', database)
        if mod_points_df is not None:
            csv_string = mod_points_df.to_csv(index=False, encoding='utf-8')
        else:
            # no ModPoints table was found, so fall back to an empty CSV
            csv_string = pd.DataFrame().to_csv(index=False, encoding='utf-8')
        csv_string = "data:text/csv;charset=utf-8," + \
            urllib.parse.quote(csv_string)
        return csv_string
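In the Dash layout, the data URI returned by a callback like this is usually assigned to the href of a download anchor; a hedged snippet (the component id and filename here are made up for illustration):

# Hypothetical layout component that update_link_3 could target:
download_link = html.A('Download Model Points CSV',
                       id='download-modpoints-link',  # made-up id
                       href='',                       # filled by the callback
                       download='model_points.csv',
                       target='_blank')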
def pop_with_db(filename, database):
    """Returns dataframes that can be used to populate the app graphs.
    Finds the already existing file in the database and returns
    the cleaned version (as a dataframe) and the raw version
    (also as a dataframe)."""
    cleanset_name = get_filename_pref(filename) + 'CleanSet'
    rawset_name = get_filename_pref(filename) + 'Raw'
    if if_file_exists_in_db(database, filename):
        # then the file exists in the database and we can just read it
        df_clean = get_file_from_database(cleanset_name, database)
        df_raw = get_file_from_database(rawset_name, database)
        datatype = df_clean['datatype'].iloc[0]
        (cycle_ind_col, data_point_col, volt_col, curr_col, dis_cap_col,
         char_cap_col, charge_or_discharge) = col_variables(datatype)

    else:
        df_clean = None
        df_raw = None
        peakloc_dict = {}

    return df_clean, df_raw
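A hedged usage example, assuming a file that has already been processed into the database (the file and database names below are placeholders):

# Hypothetical usage of pop_with_db:
df_clean, df_raw = pop_with_db('test_data.csv', 'dQdV.db')
if df_clean is None:
    print('That file has not been processed into the database yet.')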
def parse_contents(decoded,
                   filename,
                   datatype,
                   database,
                   windowlength=9,
                   polyorder=3):
    """Checks if the uploaded file exists in the database yet. Will
    process and add that file to the database if it doesn't appear in
    the master table yet. Otherwise will return html.Div that the
    file already exists in the database. """

    cleanset_name = get_filename_pref(filename) + 'CleanSet'
    # get_filename_pref strips any file path, leaving the clean set name as
    # it appears in the database. Then check whether the database exists
    # and, if it does, whether the file is already in it.
    ans_p = if_file_exists_in_db(database, filename)
    if ans_p:
        df_clean = get_file_from_database(cleanset_name, database)
        new_peak_thresh = 0.7
        feedback = generate_model(df_clean, filename, new_peak_thresh,
                                  database)
        return 'That file exists in the database: ' + \
            str(get_filename_pref(filename))
    else:
        try:
            decoded_dataframe = decoded_to_dataframe(decoded, datatype,
                                                     filename)
            process_data(filename, database, decoded_dataframe, datatype,
                         windowlength, polyorder)
            df_clean = get_file_from_database(cleanset_name, database)
            new_peak_thresh = 0.7
            feedback = generate_model(df_clean, filename, new_peak_thresh,
                                      database)
            return 'New file has been processed: ' + \
                str(get_filename_pref(filename))
        except Exception as e:
            return 'There was a problem uploading that file. ' + \
                'Check the format of the upload file is as expected.' + \
                str(e)
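In a Dash app, the decoded argument typically comes from an Upload component's base64-encoded contents string; a hedged sketch of that glue (the exact payload format expected by decoded_to_dataframe is an assumption):

import base64

def handle_upload(contents, filename, datatype, database):
    # Dash Upload 'contents' looks like 'data:<mime>;base64,<payload>'
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)
    return parse_contents(decoded, filename, datatype, database)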
def test_parse_update_master():
    """Tests the parse update master function"""
    process_data(test_filename, test_db, decoded_dataframe, test_datatype)
    core_test_filename = get_filename_pref(test_filename)
    ans = parse_update_master(core_test_filename, test_db, test_datatype,
                              decoded_dataframe)
    assert ans is None
    master_table = get_file_from_database('master_table', test_db)
    name = get_filename_pref(test_filename)
    assert name + 'Raw' in list(master_table['Raw_Data_Prefix'])
    assert name + 'CleanSet' in list(master_table['Cleaned_Data_Prefix'])
    assert name + '-CleanCycle' in list(master_table['Cleaned_Cycles_Prefix'])
    assert name + '-descriptors' in list(master_table['Descriptors_Prefix'])
    os.remove(test_db)
    return
Example #9
def test_param_dicts_to_df():
    """Tests the parameter dictionaries generated by the model
    functions are parsed nicely and added to the database in the 
    modparams table"""

    process_data(test_filename, test_db, decoded_dataframe, test_datatype)
    core_test_filename = get_filename_pref(test_filename)
    new_peak_thresh = 0.7
    df_clean = get_file_from_database(core_test_filename + 'CleanSet', test_db)
    feedback = generate_model(
        df_clean, core_test_filename, new_peak_thresh, test_db)
    assert core_test_filename + 'ModParams' in get_table_names(test_db)
    param_dicts_to_df(core_test_filename + 'ModParams', test_db)
    assert core_test_filename + '-descriptors' in get_table_names(test_db)
    desc_df = get_file_from_database(
        core_test_filename + '-descriptors', test_db)
    expected_cols = ['d_gauss_sigma', 'd_gauss_center', 'd_gauss_amplitude',
                     'd_gauss_fwhm', 'd_gauss_height', 'd_cycle_number',
                     'c_gauss_sigma', 'c_gauss_center', 'c_gauss_amplitude',
                     'c_gauss_fwhm', 'c_gauss_height', 'c_cycle_number']
    for col in expected_cols:
        assert col in desc_df.columns
    os.remove(test_db)
    return
Example #10
def load_sep_cycles(core_file_name, database_name, datatype):
    """Loads cycles from an existing uploaded file from the
    database, and saves them as separate dataframes
    with the cycle number as the key."""
    (cycle_ind_col, data_point_col, volt_col, curr_col, dis_cap_col,
     char_cap_col, charge_or_discharge) = col_variables(datatype)
    name = core_file_name + 'Raw'
    df_single = get_file_from_database(name, database_name)
    gb = df_single.groupby(by=[cycle_ind_col])
    cycle_dict = dict(iter(gb))
    for key in cycle_dict.keys():
        cycle_dict[key]['Battery_Label'] = core_file_name
        update_database_newtable(cycle_dict[key],
                                 core_file_name + '-' + 'Cycle' + str(key),
                                 database_name)
    return cycle_dict
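A hedged usage example, reusing the fixture names that appear in the tests above (test_filename, test_db, test_datatype):

# Hypothetical usage of load_sep_cycles:
cycle_dict = load_sep_cycles(get_filename_pref(test_filename), test_db,
                             test_datatype)
for cycle_number, cycle_df in cycle_dict.items():
    print(cycle_number, len(cycle_df))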
Example #11
def update_link_1(value):
    if value is not None:
        peak_vals_df = get_file_from_database(
            value.split('.')[0] + '-descriptors', database)
        if peak_vals_df is not None:
            # only need a subset of the columns:
            cols_to_keep = [
                col for col in peak_vals_df if col.startswith('sorted')
            ]
            baseline = [col for col in peak_vals_df if 'gauss' in col]
            cols_to_keep.extend(baseline)
            cols_to_keep.extend(['d_cycle_number', 'c_cycle_number'])
            peak_vals_df = peak_vals_df[cols_to_keep]
            csv_string = peak_vals_df.to_csv(index=False, encoding='utf-8')
        else:
            # no descriptors table was found, so fall back to an empty CSV
            csv_string = pd.DataFrame().to_csv(index=False, encoding='utf-8')
        csv_string = "data:text/csv;charset=utf-8," + \
            urllib.parse.quote(csv_string)
        return csv_string
def test_get_table_names():
    """Tests that the correct table names are returned"""
    # first make sure all data is processed
    process_data(test_filename, test_db, decoded_dataframe, test_datatype)
    new_peak_thresh = 0.7
    core_filename = get_filename_pref(test_filename)
    df_clean = get_file_from_database(core_filename + 'CleanSet', test_db)
    feedback = generate_model(df_clean, core_filename, new_peak_thresh,
                              test_db)
    assert core_filename + 'ModParams' in get_table_names(test_db)
    param_dicts_to_df(core_filename + 'ModParams', test_db)

    names_list = get_table_names(test_db)
    expected_list = [
        'master_table', 'test_data-CleanCycle1', 'test_data-Cycle1',
        'test_data-ModPoints', 'test_data-descriptors', 'test_dataCleanSet',
        'test_dataModParams', 'test_dataRaw', 'test_dataUnalteredRaw'
    ]
    assert set(names_list) == set(expected_list)
    os.remove(test_db)
    return
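get_table_names itself is not shown in these examples; a plausible sqlite3-based stand-in (an assumption, not the project's code) would query sqlite_master:

import sqlite3

def get_table_names_sketch(database_name):
    # Hypothetical stand-in: list the table names in a SQLite database.
    con = sqlite3.connect(database_name)
    try:
        rows = con.execute(
            "SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        return [row[0] for row in rows]
    finally:
        con.close()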
Example #13
def param_dicts_to_df(mod_params_name, database):
    """Uses the already generated parameter dictionaries stored in
    the filename+ModParams datatable in the database, to add in the
    dictionary data table with those parameter dictionaries
    formatted nicely into one table. """
    mod_params_df = get_file_from_database(mod_params_name, database)
    charge_descript = pd.DataFrame()
    discharge_descript = pd.DataFrame()
    for i in range(len(mod_params_df)):
        param_dict_charge = ast.literal_eval(
            mod_params_df.loc[i, ('Model_Parameters_charge')])
        param_dict_discharge = ast.literal_eval(
            mod_params_df.loc[i, ('Model_Parameters_discharge')])
        charge_peak_heights = ast.literal_eval(
            mod_params_df.loc[i, ('charge_peak_heights')])
        discharge_peak_heights = ast.literal_eval(
            mod_params_df.loc[i, ('discharge_peak_heights')])
        charge_keys = []
        new_dict_charge = {}
        if param_dict_charge is not None:
            for key, value in param_dict_charge.items():
                if '_amplitude' in key and 'base_' not in key:
                    charge_keys.append(key.split('_')[0])
            c_update_dict = {
                'c_gauss_sigma': param_dict_charge['base_sigma'],
                'c_gauss_center': param_dict_charge['base_center'],
                'c_gauss_amplitude': param_dict_charge['base_amplitude'],
                'c_gauss_fwhm': param_dict_charge['base_fwhm'],
                'c_gauss_height': param_dict_charge['base_height'],
            }
            new_dict_charge.update(c_update_dict)
            new_dict_charge.update(
                {'c_cycle_number': float(mod_params_df.loc[i, ('Cycle')])})
        peaknum = 0
        for item in charge_keys:
            peaknum = peaknum + 1
            center = param_dict_charge[item + '_center']
            amp = param_dict_charge[item + '_amplitude']
            fract = param_dict_charge[item + '_fraction']
            sigma = param_dict_charge[item + '_sigma']
            height = param_dict_charge[item + '_height']
            fwhm = param_dict_charge[item + '_fwhm']
            raw_peakheight = charge_peak_heights[peaknum - 1]
            PeakArea, PeakAreaError = scipy.integrate.quad(my_pseudovoigt,
                                                           0.0,
                                                           100,
                                                           args=(center, amp,
                                                                 fract, sigma))
            new_dict_charge.update({
                'c_area_peak_' + str(peaknum):
                PeakArea,
                'c_center_peak_' + str(peaknum):
                center,
                'c_amp_peak_' + str(peaknum):
                amp,
                'c_fract_peak_' + str(peaknum):
                fract,
                'c_sigma_peak_' + str(peaknum):
                sigma,
                'c_height_peak_' + str(peaknum):
                height,
                'c_fwhm_peak_' + str(peaknum):
                fwhm,
                'c_rawheight_peak_' + str(peaknum):
                raw_peakheight
            })
        new_dict_df = pd.DataFrame(columns=new_dict_charge.keys())
        for key1, val1 in new_dict_charge.items():
            new_dict_df.at[0, key1] = new_dict_charge[key1]
        charge_descript = pd.concat([charge_descript, new_dict_df], sort=True)
        charge_descript = charge_descript.reset_index(drop=True)
        charge_descript2 = dfsortpeakvals(charge_descript, 'c')
        discharge_keys = []
        if param_dict_discharge is not None:
            for key, value in param_dict_discharge.items():
                if '_amplitude' in key and 'base_' not in key:
                    discharge_keys.append(key.split('_')[0])
            new_dict_discharge = {}
            update_dict = {
                'd_gauss_sigma': param_dict_discharge['base_sigma'],
                'd_gauss_center': param_dict_discharge['base_center'],
                'd_gauss_amplitude': param_dict_discharge['base_amplitude'],
                'd_gauss_fwhm': param_dict_discharge['base_fwhm'],
                'd_gauss_height': param_dict_discharge['base_height'],
            }
            new_dict_discharge.update(update_dict)
            new_dict_discharge.update(
                {'d_cycle_number': float(mod_params_df.loc[i, ('Cycle')])})
            peaknum = 0
            for item in discharge_keys:
                peaknum = peaknum + 1
                center = param_dict_discharge[item + '_center']
                amp = param_dict_discharge[item + '_amplitude']
                fract = param_dict_discharge[item + '_fraction']
                sigma = param_dict_discharge[item + '_sigma']
                height = param_dict_discharge[item + '_height']
                fwhm = param_dict_discharge[item + '_fwhm']
                raw_peakheight = discharge_peak_heights[peaknum - 1]
                PeakArea, PeakAreaError = scipy.integrate.quad(
                    my_pseudovoigt, 0.0, 100, args=(center, amp, fract, sigma))
                new_dict_discharge.update({
                    'd_area_peak_' + str(peaknum):
                    PeakArea,
                    'd_center_peak_' + str(peaknum):
                    center,
                    'd_amp_peak_' + str(peaknum):
                    amp,
                    'd_fract_peak_' + str(peaknum):
                    fract,
                    'd_sigma_peak_' + str(peaknum):
                    sigma,
                    'd_height_peak_' + str(peaknum):
                    height,
                    'd_fwhm_peak_' + str(peaknum):
                    fwhm,
                    'd_rawheight_peak_' + str(peaknum):
                    raw_peakheight
                })
        else:
            new_dict_discharge = None
        if new_dict_discharge is not None:
            new_dict_df_d = pd.DataFrame(columns=new_dict_discharge.keys())
            for key1, val1 in new_dict_discharge.items():
                new_dict_df_d.at[0, key1] = new_dict_discharge[key1]
            discharge_descript = pd.concat([discharge_descript, new_dict_df_d],
                                           sort=True)
            discharge_descript = discharge_descript.reset_index(drop=True)
            discharge_descript2 = dfsortpeakvals(discharge_descript, 'd')
        else:
            discharge_descript2 = None
            # append the two dfs (charge and discharge) before putting them in
            # database
        full_df_descript = pd.concat([charge_descript2, discharge_descript2],
                                     sort=True,
                                     axis=1)
        update_database_newtable(full_df_descript,
                                 mod_params_name[:-9] + '-descriptors',
                                 database)
    return
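The peak areas above come from integrating my_pseudovoigt over voltage. That function is not shown here; assuming it follows the standard pseudo-Voigt profile in the lmfit parameterization (amplitude as peak area, fraction mixing a Lorentzian and a Gaussian of matched FWHM) and the argument order used in the quad() calls, a sketch would be:

import numpy as np

def my_pseudovoigt_sketch(x, center, amplitude, fraction, sigma):
    # Hypothetical pseudo-Voigt; the project's my_pseudovoigt may differ.
    sigma_g = sigma / np.sqrt(2 * np.log(2))  # Gaussian sigma for matched FWHM
    gauss = ((1 - fraction) * amplitude / (sigma_g * np.sqrt(2 * np.pi)) *
             np.exp(-(x - center) ** 2 / (2 * sigma_g ** 2)))
    lorentz = (fraction * amplitude / np.pi *
               sigma / ((x - center) ** 2 + sigma ** 2))
    return gauss + lorentz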
Example #14
def update_figure2(filename, peak_thresh, n_clicks, show_gauss, desc_to_plot,
                   cd_to_plot, peaknum_to_plot):
    """ This is  a function to evaluate the model on a sample plot before updating the database"""
    if filename is None:
        filename = 'ExampleData'
        database_sel = init_db
    else:
        database_sel = database
    data, raw_data = pop_with_db(filename, database_sel)
    datatype = data['datatype'].iloc[0]
    (cycle_ind_col, data_point_col, volt_col, curr_col, dis_cap_col,
     char_cap_col, charge_or_discharge) = col_variables(datatype)
    selected_step = round(data[cycle_ind_col].max() / 2) + 1
    # select a cycle in the middle of the set
    dff_data = data[data[cycle_ind_col] == selected_step]
    if len(data[cycle_ind_col].unique()) > 1:
        lenmax = max([
            len(data[data[cycle_ind_col] == cyc])
            for cyc in data[cycle_ind_col].unique() if cyc != 1
        ])
    else:
        lenmax = len(data)
    dff_raw = raw_data[raw_data[cycle_ind_col] == selected_step]
    peak_vals_df = get_file_from_database(
        filename.split('.')[0] + '-descriptors', database_sel)

    fig = plotly.subplots.make_subplots(
        rows=1,
        cols=2,
        subplot_titles=('Descriptors',
                        'Example Data for Model Tuning (Cycle ' +
                        str(int(selected_step)) + ')'),
        shared_xaxes=True)
    marker = {'color': ['#0074D9']}
    if peak_vals_df is not None:
        if n_clicks is not None:
            # if the user has hit the update-model-button - remake model
            new_df_mody, model_c_vals, model_d_vals, peak_heights_c, peak_heights_d = get_model_dfs(
                dff_data, datatype, selected_step, lenmax, peak_thresh)
            dff_mod = new_df_mody
            c_sigma = model_c_vals['base_sigma']
            c_center = model_c_vals['base_center']
            c_amplitude = model_c_vals['base_amplitude']
            c_fwhm = model_c_vals['base_fwhm']
            c_height = model_c_vals['base_height']

            d_sigma = model_d_vals['base_sigma']
            d_center = model_d_vals['base_center']
            d_amplitude = model_d_vals['base_amplitude']
            d_fwhm = model_d_vals['base_fwhm']
            d_height = model_d_vals['base_height']
        else:
            # if user hasn't pushed the button, populate with original model
            # from database
            modset_name = filename.split('.')[0] + '-ModPoints'
            df_model = get_file_from_database(modset_name, database_sel)
            dff_mod = df_model[df_model[cycle_ind_col] == selected_step]

            filtpeakvals = peak_vals_df[peak_vals_df['c_cycle_number'] ==
                                        selected_step]
            filtpeakvals = filtpeakvals.reset_index(drop=True)
            # grab values for the underlying gaussian in the charge:
            try:
                c_sigma = filtpeakvals['c_gauss_sigma'].iloc[0]
                c_center = filtpeakvals['c_gauss_center'].iloc[0]
                c_amplitude = filtpeakvals['c_gauss_amplitude'].iloc[0]
                c_fwhm = filtpeakvals['c_gauss_fwhm'].iloc[0]
                c_height = filtpeakvals['c_gauss_height'].iloc[0]
            except BaseException:
                # there may not be a model
                pass
            # grab values for the underlying discharge gaussian:
            try:
                d_sigma = filtpeakvals['d_gauss_sigma'].iloc[0]
                d_center = filtpeakvals['d_gauss_center'].iloc[0]
                d_amplitude = filtpeakvals['d_gauss_amplitude'].iloc[0]
                d_fwhm = filtpeakvals['d_gauss_fwhm'].iloc[0]
                d_height = filtpeakvals['d_gauss_height'].iloc[0]
            except BaseException:
                pass

        fig.append_trace(
            {
                'x': dff_data[volt_col],
                'y': dff_data['Smoothed_dQ/dV'],
                'type': 'scatter',
                'marker': marker,
                'name': 'Smoothed Data'
            }, 1, 2)
        if len(peaknum_to_plot) > 0:
            for value in peaknum_to_plot:
                try:
                    fig.append_trace(
                        {
                            'x':
                            peak_vals_df['c_cycle_number'],
                            'y':
                            peak_vals_df[str(''.join(desc_to_plot)) +
                                         str(''.join(cd_to_plot)) + value],
                            'type':
                            'scatter',
                            'marker':
                            marker,
                            'name':
                            value
                        }, 1, 1)
                except KeyError:
                    pass
        fig.append_trace(
            {
                'x': dff_mod[volt_col],
                'y': dff_mod['Model'],
                'type': 'scatter',
                'name': 'Model of One Cycle'
            }, 1, 2)
        # if the checkbox is selected, show the underlying Gaussian baselines
        if 'show' in show_gauss:
            try:
                fig.append_trace(
                    {
                        'x':
                        dff_mod[volt_col],
                        'y': ((c_amplitude /
                               (c_sigma * ((2 * 3.14159)**0.5))) * np.exp(
                                   (-(dff_mod[volt_col] - c_center)**2) /
                                   (2 * c_sigma**2))),
                        'type':
                        'scatter',
                        'name':
                        'Charge Gaussian Baseline'  # plot the charge baseline
                    },
                    1,
                    2)
            except BaseException:
                pass
            # add the plot of the discharge Gaussian:
            try:
                fig.append_trace(
                    {
                        'x':
                        dff_mod[volt_col],
                        'y':
                        -((d_amplitude /
                           (d_sigma * ((2 * 3.14159)**0.5))) * np.exp(
                               (-(dff_mod[volt_col] - d_center)**2) /
                               (2 * d_sigma**2))),
                        'type':
                        'scatter',
                        'name':
                        'Discharge Gaussian Baseline'  # plot the discharge baseline
                    },
                    1,
                    2)
            except BaseException:
                pass

    fig['layout']['showlegend'] = True
    fig['layout']['xaxis1'].update(title='Cycle Number')
    fig['layout']['xaxis2'].update(title='Voltage (V)')
    fig['layout']['yaxis1'].update(title='Descriptor Value')
    fig['layout']['yaxis2'].update(title='dQ/dV',
                                   range=[
                                       dff_data['Smoothed_dQ/dV'].min(),
                                       dff_data['Smoothed_dQ/dV'].max()
                                   ])
    fig['layout']['height'] = 600
    fig['layout']['margin'] = {'l': 40, 'r': 10, 't': 60, 'b': 200}
    return fig
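Both baseline traces above evaluate the same area-normalized Gaussian (the lmfit convention, where amplitude is the area under the peak), with the discharge curve negated. Factoring it into a helper is one way to avoid repeating the expression; a small sketch under that assumption:

import numpy as np

def gauss_baseline(x, amplitude, center, sigma):
    # Area-normalized Gaussian used for the charge/discharge baselines above.
    return (amplitude / (sigma * np.sqrt(2 * np.pi))) * np.exp(
        -(x - center) ** 2 / (2 * sigma ** 2))

# e.g. 'y': gauss_baseline(dff_mod[volt_col], c_amplitude, c_center, c_sigma)
#      'y': -gauss_baseline(dff_mod[volt_col], d_amplitude, d_center, d_sigma)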
Example #15
def update_figure1(selected_step, filename, showmodel):
    fig = plotly.subplots.make_subplots(rows=1,
                                        cols=2,
                                        subplot_titles=('Raw Cycle',
                                                        'Smoothed Cycle'),
                                        shared_xaxes=True)
    marker = {'color': ['#0074D9']}
    if filename is None or filename == 'options':
        filename = 'ExampleData'
        database_sel = init_db
    else:
        database_sel = database
    data, raw_data = pop_with_db(filename, database_sel)
    datatype = data['datatype'].iloc[0]
    (cycle_ind_col, data_point_col, volt_col, curr_col, dis_cap_col,
     char_cap_col, charge_or_discharge) = col_variables(datatype)
    modset_name = filename.split('.')[0] + '-ModPoints'
    df_model = get_file_from_database(modset_name, database_sel)
    if df_model is not None:
        filt_mod = df_model[df_model[cycle_ind_col] == selected_step]

    if data is not None:
        filtered_data = data[data[cycle_ind_col] == selected_step]
    if raw_data is not None:
        raw_filtered_data = raw_data[raw_data[cycle_ind_col] == selected_step]

    for i in filtered_data[cycle_ind_col].unique():
        if data is not None:
            dff = filtered_data[filtered_data[cycle_ind_col] == i]
        if raw_data is not None:
            dff_raw = raw_filtered_data[raw_filtered_data[cycle_ind_col] == i]
        if df_model is not None:
            dff_mod = filt_mod[filt_mod[cycle_ind_col] == i]

        if data is not None:
            fig.append_trace(
                {
                    'x': dff[volt_col],
                    'y': dff['Smoothed_dQ/dV'],
                    'type': 'scatter',
                    'marker': marker,
                    'name': 'Smoothed Data'
                }, 1, 2)
        if raw_data is not None:
            fig.append_trace(
                {
                    'x': dff_raw[volt_col],
                    'y': dff_raw['dQ/dV'],
                    'type': 'scatter',
                    'marker': marker,
                    'name': 'Raw Data'
                }, 1, 1)
        if df_model is not None and showmodel == 'showmodel':
            fig.append_trace(
                {
                    'x': dff_mod[volt_col],
                    'y': dff_mod['Model'],
                    'type': 'scatter',
                    'name': 'Model'
                }, 1, 2)

        fig['layout']['showlegend'] = False
        fig['layout']['xaxis1'].update(title='Voltage (V)')
        fig['layout']['xaxis2'].update(title='Voltage (V)')
        fig['layout']['yaxis1'].update(title='dQ/dV')
        fig['layout']['yaxis2'].update(title='dQ/dV')
        fig['layout']['height'] = 600
        fig['layout']['margin'] = {'l': 40, 'r': 10, 't': 60, 'b': 200}
    return fig