Beispiel #1
0
def mastercal_plot(cpd, low_coord, mid_coord, high_coord, curve, middle_y_offset, date):
    """
    This function creates a plot for each master calibration event that displays a line between the low and high
    standards as well as a table with statistics gathered in create_curve. It then saves the figure for display on
    the daily website.

    :param cpd: str, compound name used in the plot title and output filename
    :param low_coord: (x, y) tuple/sequence for the low standard
    :param mid_coord: (x, y) tuple/sequence for the mid standard
    :param high_coord: (x, y) tuple/sequence for the high standard
    :param curve: fitted curve object exposing .intercept and .m (slope)
    :param middle_y_offset: float, offset of the mid standard from the fitted line
    :param date: date used to name the saved figure
    :return: None
    """
    import seaborn as sns
    import matplotlib.pyplot as plt
    import pandas as pd
    from summit_core import picarro_dir, TempDir

    # create dataframes required for plotting
    calData = pd.DataFrame(columns=['x', 'y'])
    calData['x'] = [low_coord[0], mid_coord[0], high_coord[0]]
    calData['y'] = [low_coord[1], mid_coord[1], high_coord[1]]

    # drop the mid point so the regression line spans only low -> high
    calData1 = calData.drop(calData.index[1], axis=0)

    sns.set()  # seaborn plot setup
    f, ax = plt.subplots(nrows=1)  # setup subplot
    sns.despine(f)  # remove right/top axes

    # plot the regression line; fit statistics go into the line label for the legend
    sns.regplot(x='x', y='y', data=calData1, ax=ax,
                line_kws={'label': ' Intercept: {:1.5f}\n Slope: {:1.5f}\n Mid Offset: {:1.5f}\n'.format(
                          curve.intercept, curve.m, middle_y_offset)})

    # plot the three points
    sns.scatterplot(x='x', y='y', data=calData, ax=ax, s=70)

    # plot details
    ax.set_title(f'{cpd} Master Calibration Event')                                        # title
    ax.set_ylabel('Standard', fontsize=14)                                                 # ylabel
    ax.set_xlabel('Calibration Event', fontsize=14)                                        # xlabel
    ax.get_lines()[0].set_color('purple')                                                  # line color
    ax.legend()                                                                            # legend
    ax.set(xlim=((calData['x'].iloc[0] - 10), (calData['x'].iloc[-1] + 10)))
    ax.set(ylim=((calData['y'].iloc[0] - 10), (calData['y'].iloc[-1] + 10)))

    # Save the figure by the low cal date
    plotdir = picarro_dir / 'plots'
    with TempDir(plotdir):
        f.savefig(f'{cpd}_masterCal_{date}.png', format='png')
        # BUGFIX: Figure has no .close() method (f.close() raised AttributeError);
        # figures must be released through pyplot to free their memory
        plt.close(f)
Beispiel #2
0
def get_last_processor_date(processor, logger):
    """
    Retrieves the latest high-level date for the specified processor. It looks at GcRuns for VOCs (complete runs),
    5-second Datums for the Picarro, and matched GcRuns for methane.
    :param processor: str, in ['voc', 'picarro', 'methane']
    :param logger: logging logger
    :return: datetime, date of last data point for the specified processor; None if the table is empty
    """

    from summit_core import connect_to_db, TempDir

    # BUGFIX: compare strings with == rather than 'is'; identity checks against
    # string literals rely on CPython interning and are not guaranteed to match
    if processor == 'voc':
        from summit_core import voc_dir as directory
        from summit_voc import GcRun as DataType
    elif processor == 'picarro':
        from summit_core import picarro_dir as directory
        from summit_picarro import Datum as DataType
    elif processor == 'methane':
        from summit_core import methane_dir as directory
        from summit_methane import GcRun as DataType
    else:
        logger.error('Invalid processor supplied to get_last_processor_date()')
        assert False, 'Invalid processor supplied to get_last_processor_date()'

    with TempDir(directory):
        engine, session = connect_to_db(f'sqlite:///summit_{processor}.sqlite',
                                        directory)
        # newest date of the processor's primary data type
        val = session.query(DataType.date).order_by(
            DataType.date.desc()).first()

        if val:
            val = val[0]  # unwrap the single-column result row

    session.close()
    engine.dispose()

    return val
Beispiel #3
0
async def plot_dailies(logger):
    """
    Loads dailies for the last 3 weeks and plots with ticks for every three days and minor ticks for every day.
    Plots are registered with the core database so they're uploaded to the Taylor drive.

    :param logger: logger, to log events to
    :return: Boolean, True if it ran without error and created data, False if not
    """

    try:
        from pathlib import Path
        import datetime as dt
        from summit_core import connect_to_db, core_dir, TempDir, Config, Plot, add_or_ignore_plot, create_daily_ticks
        plotdir = core_dir / 'plots/daily'
        remotedir = r'/data/web/htdocs/instaar/groups/arl/summit/protected/plots'

        # NOTE(review): 'os' is assumed to be imported at module level — confirm.
        # After a FileNotFoundError the directory is created but no chdir happens;
        # the TempDir(plotdir) context below is what actually enters the directory.
        try:
            os.chdir(plotdir)
        except FileNotFoundError:
            os.mkdir(plotdir)

    except ImportError as e:
        logger.error(f'ImportError occurred in plot_dailies()')
        send_processor_email(PROC, exception=e)
        return False

    try:
        # connect to the daily-data database and make sure its tables exist
        engine, session = connect_to_db('sqlite:///summit_daily.sqlite',
                                        core_dir)
        Base.metadata.create_all(engine)
    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the database in plot_dailies()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        # connect to the core database, where Plot/Config records are kept
        core_engine, core_session = connect_to_db(
            'sqlite:///summit_core.sqlite', core_dir)
        Plot.__table__.create(core_engine, checkfirst=True)
        Config.__table__.create(core_engine, checkfirst=True)

        daily_config = core_session.query(Config).filter(
            Config.processor == PROC).one_or_none()

        if not daily_config:
            daily_config = Config(
                processor=PROC, days_to_plot=21
            )  # use all default values except processor on init
            core_session.add(daily_config)
            core_session.commit()

    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the core database in plot_new_data()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        logger.info('Running plot_dailies()')

        # NOTE(review): bare 'datetime' (not the local 'dt' alias) is assumed
        # imported at module level — confirm.
        date_ago = datetime.now() - dt.timedelta(
            days=daily_config.days_to_plot +
            1)  # set a static for retrieving data at beginning of plot cycle

        date_limits, major_ticks, minor_ticks = create_daily_ticks(
            daily_config.days_to_plot, minors_per_day=1)

        major_ticks = [t for ind, t in enumerate(major_ticks)
                       if ind % 3 == 0]  # use every third daily tick

        # all Daily rows within the plotting window, oldest first
        dailies = session.query(Daily).filter(Daily.date >= date_ago).order_by(
            Daily.date).all()

        # pivot the ORM rows into {parameter: [values...]} lists for plotting
        dailydict = {}
        for param in daily_parameters:
            dailydict[param] = [getattr(d, param) for d in dailies]

        with TempDir(plotdir):  ## PLOT i-butane, n-butane, acetylene

            # each summit_daily_plot call below saves a figure and returns its
            # filename; the file is then staged for upload via Plot records

            # hot-side temperatures
            name = summit_daily_plot(dailydict.get('date'), ({
                'Ads Xfer A': [None, dailydict.get('ads_xfer_a')],
                'Ads Xfer B': [None, dailydict.get('ads_xfer_b')],
                'Valves Temp': [None, dailydict.get('valves_temp')],
                'GC Xfer Temp': [None, dailydict.get('gc_xfer_temp')],
                'Catalyst': [None, dailydict.get('catalyst')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': 0,
                                         'top': 475
                                     },
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            hot_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(hot_plot, core_session)

            # room/ambient temperatures
            name = summit_daily_plot(dailydict.get('date'), ({
                'CJ1 Temp': [None, dailydict.get('cj1')],
                'CJ2 Temp': [None, dailydict.get('cj2')],
                'Standard Temp': [None, dailydict.get('std_temp')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': 10,
                                         'top': 50
                                     },
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            room_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(room_plot, core_session)

            # gas line pressures
            name = summit_daily_plot(dailydict.get('date'), ({
                'H2 Gen Pressure': [None, dailydict.get('h2_gen_p')],
                'Line Pressure': [None, dailydict.get('line_p')],
                'Zero Pressure': [None, dailydict.get('zero_p')],
                'FID Pressure': [None, dailydict.get('fid_p')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': 0,
                                         'top': 75
                                     },
                                     y_label_str='Pressure (PSI)',
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            pressure_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(pressure_plot, core_session)

            # inlet temperature
            name = summit_daily_plot(dailydict.get('date'), ({
                'Inlet Short Temp': [None, dailydict.get('inlet_short')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': 0,
                                         'top': 60
                                     },
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            inlet_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(inlet_plot, core_session)

            # supply voltages
            name = summit_daily_plot(dailydict.get('date'), ({
                'Battery V': [None, dailydict.get('battv')],
                '12Va': [None, dailydict.get('v12a')],
                '15Va': [None, dailydict.get('v15a')],
                '15Vb': [None, dailydict.get('v15b')],
                '24V': [None, dailydict.get('v24')],
                '5Va': [None, dailydict.get('v5a')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': 0,
                                         'top': 30
                                     },
                                     y_label_str='Voltage (v)',
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            voltage_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(voltage_plot, core_session)

            # mass flow controllers
            name = summit_daily_plot(dailydict.get('date'), ({
                'MFC1': [None, dailydict.get('mfc1')],
                'MFC2': [None, dailydict.get('mfc2')],
                'MFC3a': [None, dailydict.get('mfc3a')],
                'MFC3b': [None, dailydict.get('mfc3b')],
                'MFC4': [None, dailydict.get('mfc4')],
                'MFC5': [None, dailydict.get('mfc5')]
            }),
                                     limits={
                                         'right':
                                         date_limits.get('right', None),
                                         'left': date_limits.get('left', None),
                                         'bottom': -1,
                                         'top': 3.5
                                     },
                                     y_label_str='Flow (Ml/min)',
                                     major_ticks=major_ticks,
                                     minor_ticks=minor_ticks)

            flow_plot = Plot(plotdir / name, remotedir, True)
            add_or_ignore_plot(flow_plot, core_session)

        # persist staged Plot records, then release both connections
        core_session.commit()
        core_session.close()
        core_engine.dispose()

        session.close()
        engine.dispose()
        return True

    except Exception as e:
        logger.error(f'Exception {e.args} occurred in plot_dailies()')
        send_processor_email(PROC, exception=e)
        session.close()
        engine.dispose()
        return False
Beispiel #4
0
async def plot_new_data(logger):
    """
    Checks data against the last plotting time, and creates new plots for CO, CO2, and CH4 if new data exists.

    :param logger: logging logger at module level
    :return: boolean, did it run/process new data?
    """

    logger.info('Running plot_new_data()')

    try:
        from pathlib import Path
        from summit_core import picarro_dir as rundir
        from summit_core import create_daily_ticks, connect_to_db, TempDir, Plot, core_dir, Config, add_or_ignore_plot
        from summit_picarro import Base, Datum, summit_picarro_plot

        plotdir = rundir / 'plots'
        remotedir = r'/data/web/htdocs/instaar/groups/arl/summit/plots'

    except Exception as e:
        # NOTE(review): catches Exception but logs 'ImportError' — message may
        # be misleading for non-import failures
        logger.error('ImportError occurred in plot_new_data()')
        send_processor_email(PROC, exception=e)
        return False

    try:
        # connect to the picarro database and ensure its tables exist
        engine, session = connect_to_db('sqlite:///summit_picarro.sqlite',
                                        rundir)
        Base.metadata.create_all(engine)
    except Exception as e:
        logger.error(f'Exception {e.args} occurred in plot_new_data()')
        send_processor_email(PROC, exception=e)
        return False

    try:
        # connect to the core database, where Plot/Config records are kept
        core_engine, core_session = connect_to_db(
            'sqlite:///summit_core.sqlite', core_dir)
        Plot.__table__.create(core_engine, checkfirst=True)
        Config.__table__.create(core_engine, checkfirst=True)

        picarro_config = core_session.query(Config).filter(
            Config.processor == PROC).one_or_none()

        if not picarro_config:
            picarro_config = Config(
                processor=PROC
            )  # use all default values except processor on init
            core_session.add(picarro_config)
            core_session.commit()

    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the core database in plot_new_data()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        # NOTE(review): .first() returns None when the table is empty, so the
        # [0] subscript raises TypeError; it is caught by the handler below and
        # reported by email rather than handled explicitly
        newest_data_point = (session.query(Datum.date).filter(
            Datum.mpv_position == 1).order_by(Datum.date.desc()).first()[0])

        if newest_data_point <= picarro_config.last_data_date:
            # nothing newer than the last plotted point; close all connections
            logger.info('No new data was found to plot.')
            core_session.close()
            core_engine.dispose()
            session.close()
            engine.dispose()
            return False

        # record the newest point so the next run can skip unchanged data
        picarro_config.last_data_date = newest_data_point
        core_session.add(picarro_config)

        date_limits, major_ticks, minor_ticks = create_daily_ticks(
            picarro_config.days_to_plot)

        # mpv_position 0 or 1, instrument OK (status 963) and no alarms,
        # restricted to the plotting window
        all_data = (
            session.query(Datum.date, Datum.co, Datum.co2, Datum.ch4).filter((
                Datum.mpv_position == 0) | (Datum.mpv_position == 1)).filter(
                    (Datum.instrument_status == 963),
                    (Datum.alarm_status == 0)).filter(
                        Datum.date >= date_limits['left']
                    )  # grab only data that falls in plotting period
            .all())

        if not all_data:
            logger.info('No new data was found to plot.')
            core_session.close()
            core_engine.dispose()
            session.close()
            engine.dispose()
            return False

        # get only ambient data
        dates = []
        co = []
        co2 = []
        ch4 = []
        for result in all_data:
            dates.append(result.date)
            co.append(result.co)
            co2.append(result.co2)
            ch4.append(result.ch4)

        with TempDir(plotdir):

            # CO is noisy at 5s resolution, so it is median-filtered to 5 min
            from summit_core import five_minute_medians
            dates_co, co = five_minute_medians(dates, co)

            name = summit_picarro_plot(None, ({
                'Summit CO': [dates_co, co]
            }),
                                       limits={
                                           'right':
                                           date_limits.get('right', None),
                                           'left':
                                           date_limits.get('left', None),
                                           'bottom': 60,
                                           'top': 180
                                       },
                                       major_ticks=major_ticks,
                                       minor_ticks=minor_ticks)

            co_plot = Plot(plotdir / name, remotedir,
                           True)  # stage plots to be uploaded
            add_or_ignore_plot(co_plot, core_session)

            name = summit_picarro_plot(None, ({
                'Summit CO2': [dates, co2]
            }),
                                       limits={
                                           'right':
                                           date_limits.get('right', None),
                                           'left':
                                           date_limits.get('left', None),
                                           'bottom': 400,
                                           'top': 420
                                       },
                                       major_ticks=major_ticks,
                                       minor_ticks=minor_ticks,
                                       unit_string='ppmv')

            co2_plot = Plot(plotdir / name, remotedir,
                            True)  # stage plots to be uploaded
            add_or_ignore_plot(co2_plot, core_session)

            name = summit_picarro_plot(None, ({
                'Summit Methane [Picarro]': [dates, ch4]
            }),
                                       limits={
                                           'right':
                                           date_limits.get('right', None),
                                           'left':
                                           date_limits.get('left', None),
                                           'bottom': 1850,
                                           'top': 2050
                                       },
                                       major_ticks=major_ticks,
                                       minor_ticks=minor_ticks)

            ch4_plot = Plot(plotdir / name, remotedir,
                            True)  # stage plots to be uploaded
            add_or_ignore_plot(ch4_plot, core_session)

        logger.info('New data plots were created.')

        session.close()
        engine.dispose()

        # commit the updated config and staged plots, then release
        core_session.commit()
        core_session.close()
        core_engine.dispose()
        return True
    except Exception as e:
        logger.error(f'Exception {e.args} occurred in plot_new_data()')
        send_processor_email(PROC, exception=e)

        session.close()
        engine.dispose()

        core_session.close()
        core_engine.dispose()
        return False
async def dual_plot_methane(logger):
    """
    Connects to both the methane [gc] and picarro databases to create an overlayed plot of both data.

    :param logger: logger, to log events to
    :return: Boolean, True if it ran without error and created data, False if not
    """

    PROC = 'Methane DualPlotter'  # shadows the module-level PROC for emails/config lookups

    try:
        from pathlib import Path
        from summit_core import core_dir, Config
        from summit_core import methane_dir
        from summit_core import picarro_dir
        from summit_core import connect_to_db, create_daily_ticks, TempDir, Plot, add_or_ignore_plot
        from summit_picarro import Datum
        from summit_methane import Base, GcRun, summit_methane_plot

        from summit_picarro import Base as PicarroBase

        remotedir = r'/data/web/htdocs/instaar/groups/arl/summit/plots'

    except ImportError as e:
        logger.error('ImportError occurred in dual_plot_methane()')
        send_processor_email(PROC, exception=e)
        return False

    try:
        # open both instrument databases and ensure their tables exist
        gc_engine, gc_session = connect_to_db(
            'sqlite:///summit_methane.sqlite', methane_dir)
        Base.metadata.create_all(gc_engine)

        picarro_engine, picarro_session = connect_to_db(
            'sqlite:///summit_picarro.sqlite', picarro_dir)
        PicarroBase.metadata.create_all(picarro_engine)
    except Exception as e:
        logger.error(
            f'Exception {e.args} prevented connection to the database in dual_plot_methane()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        # core database holds the Plot upload queue and per-processor Config
        core_engine, core_session = connect_to_db(
            'sqlite:///summit_core.sqlite', core_dir)
        Plot.__table__.create(core_engine, checkfirst=True)
        Config.__table__.create(core_engine, checkfirst=True)

        twoplot_config = core_session.query(Config).filter(
            Config.processor == PROC).one_or_none()

        if not twoplot_config:
            twoplot_config = Config(
                processor=PROC
            )  # use all default values except processor on init
            core_session.add(twoplot_config)
            core_session.commit()

    except Exception as e:
        # BUGFIX: message previously referenced plot_new_data()
        logger.error(
            f'Error {e.args} prevented connecting to the core database in dual_plot_methane()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        logger.info('Running dual_plot_methane()')

        newest_picarro_data_point = (picarro_session.query(Datum.date).filter(
            Datum.mpv_position == 1).order_by(Datum.date.desc()).first()[0])
        try:
            # != None is required by SQLAlchemy's filter expression protocol
            newest_gc_data_point = (gc_session.query(GcRun.date).filter(
                GcRun.median != None).filter(GcRun.standard_rsd < .02).filter(
                    GcRun.rsd < .02).order_by(GcRun.date.desc()).first()[0])
        except TypeError:
            # .first() returned None: no GcRuns pass the quality filters
            logger.error(
                'NoneType not subscriptable encountered due to lack of methane data to query.'
            )
            from summit_errors import send_processor_warning
            send_processor_warning(
                PROC, 'Dual Plotter',
                '''The Methane Dual Plotter could not query any GcRuns for methane data.\n
                                   Check the database to make sure there are in fact GcRuns with medians and valid rsds.
                                   \nThis often happens when the methane database is remade without re-setting 
                                   the filesize and pa_startlie in the config table of Core database, 
                                   thus no peaks are found.''')
            # BUGFIX: previously returned without closing any connection,
            # leaking all three engine/session pairs
            core_session.close()
            core_engine.dispose()
            gc_session.close()
            gc_engine.dispose()
            picarro_session.close()
            picarro_engine.dispose()
            return False

        newest_data_point = max(newest_picarro_data_point,
                                newest_gc_data_point)

        if newest_data_point <= twoplot_config.last_data_date:
            logger.info('No new data was found to plot.')
            core_session.close()
            core_engine.dispose()
            # BUGFIX: gc_session/gc_engine were previously leaked on this early return
            gc_session.close()
            gc_engine.dispose()
            picarro_session.close()
            picarro_engine.dispose()
            return False

        date_limits, major_ticks, minor_ticks = create_daily_ticks(
            twoplot_config.days_to_plot)

        if newest_data_point > twoplot_config.last_data_date:

            # all quality-filtered GC runs, oldest first
            runs_with_medians = (gc_session.query(GcRun).filter(
                GcRun.median != None).filter(GcRun.standard_rsd < .02).filter(
                    GcRun.rsd < .02).order_by(GcRun.date).all())

            gc_dates = [run.date for run in runs_with_medians]
            gc_ch4 = [run.median for run in runs_with_medians]

            # ambient picarro data: mpv 0/1, instrument OK (963), no alarms
            picarro_data = (picarro_session.query(
                Datum.date, Datum.ch4).filter((Datum.mpv_position == 0) | (
                    Datum.mpv_position == 1)).filter(
                        (Datum.instrument_status == 963),
                        (Datum.alarm_status == 0)).filter(
                            Datum.date >= date_limits['left']).all()
                            )  # grab only data that falls in plotting period

            picarro_dates = [p.date for p in picarro_data]
            picarro_ch4 = [p.ch4 for p in picarro_data]

            with TempDir(methane_dir / 'plots'):
                name = summit_methane_plot(
                    None, {
                        'Summit Methane [Picarro]':
                        [picarro_dates, picarro_ch4],
                        'Summit Methane [GC]': [gc_dates, gc_ch4]
                    },
                    title='Summit Methane [Picarro & GC]',
                    limits={
                        'bottom': 1850,
                        'top': 2050,
                        'right': date_limits.get('right', None),
                        'left': date_limits.get('left', None)
                    },
                    major_ticks=major_ticks,
                    minor_ticks=minor_ticks)

                methane_plot = Plot(methane_dir / 'plots' / name, remotedir,
                                    True)  # stage plots to be uploaded
                add_or_ignore_plot(methane_plot, core_session)

                # remember the newest point so the next run can skip unchanged data
                twoplot_config.last_data_date = newest_data_point
                core_session.merge(twoplot_config)

            logger.info('New data plots created.')
        else:
            logger.info('No new data found to be plotted.')

        gc_session.close()
        gc_engine.dispose()

        picarro_session.close()
        picarro_engine.dispose()

        core_session.commit()

        core_session.close()
        core_engine.dispose()
        return True

    except Exception as e:
        logger.error(f'Exception {e.args} occurred in dual_plot_methane()')
        send_processor_email(PROC, exception=e)

        core_session.close()
        core_engine.dispose()

        gc_session.close()
        gc_engine.dispose()

        picarro_session.close()
        picarro_engine.dispose()
        return False
async def plot_new_data(logger):
    """
    If newer data exists, plot it going back one week from the day of the plotting.

    :param logger: logger, to log events to
    :return: Boolean, True if it ran without error and created data, False if not
    """

    try:
        from pathlib import Path
        from summit_core import core_dir, Config
        from summit_core import methane_dir as rundir
        from summit_core import connect_to_db, create_daily_ticks, TempDir, Plot, add_or_ignore_plot
        from summit_methane import Sample, GcRun, Base, plottable_sample, summit_methane_plot

        remotedir = r'/data/web/htdocs/instaar/groups/arl/summit/plots'

    except ImportError as e:
        logger.error('ImportError occurred in plot_new_data()')
        send_processor_email(PROC, exception=e)
        return False

    try:
        # connect to the methane database once; this connection is reused below
        engine, session = connect_to_db('sqlite:///summit_methane.sqlite',
                                        rundir)
        Base.metadata.create_all(engine)
    except Exception as e:
        logger.error(
            f'Exception {e.args} prevented connection to the database in plot_new_data()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        # core database holds the Plot upload queue and per-processor Config
        core_engine, core_session = connect_to_db(
            'sqlite:///summit_core.sqlite', core_dir)
        Plot.__table__.create(core_engine, checkfirst=True)
        Config.__table__.create(core_engine, checkfirst=True)

        ch4_config = core_session.query(Config).filter(
            Config.processor == PROC).one_or_none()

        if not ch4_config:
            ch4_config = Config(
                processor=PROC
            )  # use all default values except processor on init
            core_session.add(ch4_config)
            core_session.commit()

    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the core database in plot_new_data()'
        )
        send_processor_email(PROC, exception=e)
        return False

    try:
        logger.info('Running plot_new_data()')

        # BUGFIX: previously reconnected to summit_methane.sqlite a second time
        # here, leaking the engine/session pair opened above; reuse them instead

        # all quality-filtered GC runs, oldest first
        # (!= None is required by SQLAlchemy's filter expression protocol)
        runs_with_medians = (session.query(GcRun).filter(
            GcRun.median != None).filter(GcRun.standard_rsd < .02).filter(
                GcRun.rsd < .02).order_by(GcRun.date).all())

        if not runs_with_medians:
            # BUGFIX: previously raised IndexError on [-1] below when no runs
            # passed the filters; bail out cleanly instead
            logger.info('No plottable methane data was found.')
            session.close()
            engine.dispose()
            core_session.close()
            core_engine.dispose()
            return False

        last_ambient_date = runs_with_medians[-1].date
        # get date after filtering, ie don't plot if there's no new data getting plotted

        date_limits, major_ticks, minor_ticks = create_daily_ticks(
            ch4_config.days_to_plot)

        if last_ambient_date > ch4_config.last_data_date:

            ambient_dates = [run.date for run in runs_with_medians]
            ambient_mrs = [run.median for run in runs_with_medians]

            with TempDir(rundir / 'plots'):
                name = summit_methane_plot(
                    None,
                    {'Summit Methane [GC]': [ambient_dates, ambient_mrs]},
                    limits={
                        'bottom': 1850,
                        'top': 2050,
                        'right': date_limits.get('right', None),
                        'left': date_limits.get('left', None)
                    },
                    major_ticks=major_ticks,
                    minor_ticks=minor_ticks)

                methane_plot = Plot(rundir / 'plots' / name, remotedir,
                                    True)  # stage plots to be uploaded
                add_or_ignore_plot(methane_plot, core_session)

                # remember the newest point so the next run can skip unchanged data
                ch4_config.last_data_date = last_ambient_date
                core_session.merge(ch4_config)

            logger.info('New data plots created.')
        else:
            logger.info('No new data found to be plotted.')

        session.close()
        engine.dispose()

        core_session.commit()
        core_session.close()
        core_engine.dispose()
        return True

    except Exception as e:
        logger.error(f'Exception {e.args} occurred in plot_new_data()')
        send_processor_email(PROC, exception=e)
        core_session.close()
        core_engine.dispose()
        session.close()
        engine.dispose()
        return False