def plot_new_data(logger):
    """
    Plots mixing ratio data, creating plot files and queueing the files for upload.

    This will plot data regardless of whether there is any new data, since it is not run continuously.

    :param logger: logging logger to record to
    :return: bool, True if ran correctly, False if exited on error
    """
    logger.info('Running plot_new_data()')
    try:
        engine, session = connect_to_db(DB_NAME, CORE_DIR)
    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the database in plot_new_data()'
        )
        return False

    remotedir = BOULDAIR_BASE_PATH + '/MR_plots'

    compounds_to_plot = (session.query(Quantification.name).join(
        Standard, Quantification.standard_id == Standard.id).filter(
            Standard.name == 'quantlist').all())
    compounds_to_plot[:] = [q.name for q in compounds_to_plot]

    date_limits, major_ticks, minor_ticks = create_monthly_ticks(
        6, days_per_minor=7)

    with open(JSON_PUBLIC_DIR / 'zug_plot_info.json', 'r') as file:
        compound_limits = json.loads(file.read())

    for name in compounds_to_plot:
        params = (GcRun.date, Compound.mr)
        filters = (Compound.name == name, GcRun.date >= date_limits['left'],
                   *ambient_filters)

        results = abstract_query(params, filters, GcRun.date)

        dates = [r.date for r in results]
        mrs = [r.mr for r in results]

        p = MixingRatioPlot({name: (dates, mrs)},
                            limits={
                                **date_limits,
                                **compound_limits[name]
                            },
                            major_ticks=major_ticks,
                            minor_ticks=minor_ticks,
                            filepath=MR_PLOT_DIR / f'{name}_plot.png')

        p.plot()

        file_to_upload = FileToUpload(p.filepath, remotedir, staged=True)
        add_or_ignore_plot(file_to_upload, session)

    session.commit()
    session.close()
    engine.dispose()

    return True
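
# Hypothetical usage sketch: plot_new_data() only needs a configured logger; the
# database connection, per-compound queries, and upload staging all happen inside.
# The logger name and level below are illustrative, not part of the original code.
import logging

logger = logging.getLogger('plotting')
logger.setLevel(logging.INFO)

if not plot_new_data(logger):
    logger.warning('plot_new_data() exited early on an error')
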
def get_final_single_sample_data(compounds):
    """
    Return final single-sample data for the given compounds as {name: ([dates], [mixing ratios])}.

    Queries ambient dates and mixing ratios for each compound between 2018-03-01 and 2018-12-20.

    :param compounds: iterable of compound names to query
    :return: dict of compound name to (dates, mixing ratios)
    """

    final_single_sample_data = {}

    for compound in compounds:
        params = (GcRun.date, Compound.mr)
        filters = (
            Compound.name == compound,
            GcRun.date >= datetime(2018, 3, 1),
            GcRun.date < datetime(2018, 12, 20),
            *ambient_filters  # includes filtering data for filtered = False
        )

        results = abstract_query(params, filters, GcRun.date)

        final_single_sample_data[compound] = ([
            r.date.replace(second=0) for r in results
        ], [r.mr for r in results])

    return final_single_sample_data
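
# Hypothetical usage sketch: build the per-compound dict of (dates, mixing ratios)
# for the fixed 2018 single-sample window and unpack one compound's series.
# The compound names here are illustrative, not a definitive list.
single_sample_data = get_final_single_sample_data(['ethane', 'propane'])
ethane_dates, ethane_mrs = single_sample_data['ethane']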
Example #3
engine, session = connect_to_db(DB_NAME, CORE_DIR)

standard_to_quantify_with = session.query(Standard).filter(
    Standard.name == 'cc416168').one_or_none()
vocs = session.query(Standard).filter(Standard.name == 'vocs').one_or_none()
vocs = [q.name for q in vocs.quantifications]

samples = {
    "2019_12_24a_02.D", "2019_12_24_Blank2500.D", "2019_12_24_CC464566_02.D",
    "2019_12_24_Trap600.D", "2019_12_24_CC464566_01.D", "2019_12_24_04.D"
}

filters = (GcRun.integration.has(Integration.filename.in_(samples)), )

runs = abstract_query((GcRun, ), filters, GcRun.date)

for run in runs:
    run.blank_subtract(session=session,
                       blank=None,
                       compounds_to_subtract=ALL_COMPOUNDS)
    # force a "blank subtraction" with no blank so every sample keeps its as-integrated values

session.commit()

session.close()
engine.dispose()

df = get_df_with_filters(use_mrs=False,
                         filters=filters,
                         compounds=ALL_COMPOUNDS)
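
# Hedged follow-up sketch, assuming get_df_with_filters() returns a pandas DataFrame
# of per-run peak areas (use_mrs=False) for the selected samples; write it out for
# manual inspection. The output filename is illustrative.
df.to_csv('single_sample_peak_areas.csv')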
def plot_history(logger):
    """
    Plot long-term plots containing data from 2013 onward.

    Queries the database to get all OldData as well as newer data processed by this system and plots them together.

    If OldData exists for a compound, it is combined with newer data and plotted from 2013 to the most recent data. One
    set of plots with a zeroed axis is created to show scale, as well as one with more appropriate bounds for viewing.

    :param logger: logging logger to record to
    :return: bool, True if ran correctly, False if exited on error
    """
    logger.info('Running plot_history()')

    try:
        engine, session = connect_to_db(DB_NAME, CORE_DIR)
    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the database in plot_history()'
        )
        return False

    remotedir = BOULDAIR_BASE_PATH + '/full_plots'

    compounds_to_plot = (session.query(Quantification.name).join(
        Standard, Quantification.standard_id == Standard.id).filter(
            Standard.name == 'quantlist').all())
    compounds_to_plot[:] = [q.name for q in compounds_to_plot]

    date_limits, major_ticks, minor_ticks = create_monthly_ticks(
        84, days_per_minor=0)

    major_ticks = major_ticks[::6]

    with open(JSON_PUBLIC_DIR / 'zug_long_plot_info.json', 'r') as file:
        compound_limits = json.loads(file.read())

    for name in compounds_to_plot:
        old_results = (session.query(OldData.date, OldData.mr).filter(
            OldData.name == name).order_by(OldData.date).all())

        params = (GcRun.date, Compound.mr)
        filters = (Compound.name == name, GcRun.date >= date_limits['left'],
                   *ambient_filters)

        new_results = abstract_query(params, filters, GcRun.date)

        dates = [o.date for o in old_results] + [n.date for n in new_results]
        mrs = [o.mr for o in old_results] + [n.mr for n in new_results]

        limits = {**date_limits, **compound_limits[name]}

        # Create full plot w/ limits from file.
        fullplot = MixingRatioPlot({name: (dates, mrs)},
                                   limits=limits,
                                   major_ticks=major_ticks,
                                   minor_ticks=minor_ticks,
                                   filepath=FULL_PLOT_DIR / f'{name}_plot.png')

        fullplot.plot()

        file_to_upload = FileToUpload(fullplot.filepath,
                                      remotedir,
                                      staged=True)
        add_or_ignore_plot(file_to_upload, session)

        limits['bottom'] = 0

        # Create full plot w/ 0 limit for the bottom and top limit from file.
        fullplot_zeroed = MixingRatioPlot({name: (dates, mrs)},
                                          limits=limits,
                                          major_ticks=major_ticks,
                                          minor_ticks=minor_ticks,
                                          filepath=FULL_PLOT_DIR /
                                          f'{name}_plot_zeroed.png')

        fullplot_zeroed.plot()

        file_to_upload = FileToUpload(fullplot_zeroed.filepath,
                                      remotedir,
                                      staged=True)
        add_or_ignore_plot(file_to_upload, session)

    session.commit()
    session.close()
    engine.dispose()

    return True
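
# Illustrative sketch of the limits merge used in plot_history(): date_limits supplies
# the x-axis bounds from create_monthly_ticks() (at least a 'left' key is used above),
# while the per-compound JSON entry supplies the y-axis bounds, so the merged dict
# carries both; setting 'bottom' to 0 afterwards produces the zeroed variant.
# The keys other than 'left' and the values below are made up for illustration.
from datetime import datetime

date_limits_example = {'left': datetime(2013, 1, 1), 'right': datetime(2020, 1, 1)}
compound_limits_example = {'bottom': 250, 'top': 2500}

limits_example = {**date_limits_example, **compound_limits_example}
limits_example['bottom'] = 0  # zeroed variant keeps 'top' but pins the bottom at 0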
def plot_standard_and_ambient_peak_areas(logger):
    """
    Plots peak area responses for both ambient samples and standard samples.

    Standard peak areas are plotted to show response over time, whereas ambient peak areas are seldom used but still
    useful on occasion. Plots are queued to be uploaded the next time a call to upload files is made.

    :param logger: logging logger to record to
    :return: bool, True if ran correctly, False if exited on error
    """
    logger.info('Running plot_standard_and_ambient_peak_areas()')

    try:
        engine, session = connect_to_db(DB_NAME, CORE_DIR)
    except Exception as e:
        logger.error(
            f'Error {e.args} prevented connecting to the database in plot_standard_and_ambient_peak_areas()'
        )
        return False

    date_limits, major_ticks, minor_ticks = create_monthly_ticks(
        18, days_per_minor=7)
    major_ticks[:] = major_ticks[::2]  # use only every other major tick

    remote_pa_dir = BOULDAIR_BASE_PATH + '/PA_plots'
    remote_std_dir = BOULDAIR_BASE_PATH + '/std_PA_plots'

    for compound in ALL_COMPOUNDS:
        # Plot Ambient Peak Areas

        params = (GcRun.date, Compound.pa)
        filters = (*ambient_filters, GcRun.date >= date_limits['left'],
                   Compound.name == compound)

        results = abstract_query(params, filters, GcRun.date)

        dates = [r.date for r in results]
        pas = [r.pa for r in results]

        pa_plot = PeakAreaPlot({compound: [dates, pas]},
                               limits=date_limits,
                               major_ticks=major_ticks,
                               minor_ticks=minor_ticks,
                               filepath=PA_PLOT_DIR /
                               f'{compound}_pa_plot.png')

        pa_plot.plot()
        file_to_upload = FileToUpload(pa_plot.filepath,
                                      remote_pa_dir,
                                      staged=True)
        add_or_ignore_plot(file_to_upload, session)

        # Plot Standard Peak Areas

        filters = (GcRun.date >= date_limits['left'],
                   GcRun.type.in_((1, 2, 3)),
                   Compound.name == compound)

        results = abstract_query(params, filters, GcRun.date)

        dates = [r.date for r in results]
        pas = [r.pa for r in results]

        std_pa_plot = StandardPeakAreaPlot({compound: [dates, pas]},
                                           limits=date_limits,
                                           major_ticks=major_ticks,
                                           minor_ticks=minor_ticks,
                                           filepath=STD_PA_PLOT_DIR /
                                           f'{compound}_plot.png')

        std_pa_plot.plot()
        file_to_upload = FileToUpload(std_pa_plot.filepath,
                                      remote_std_dir,
                                      staged=True)
        add_or_ignore_plot(file_to_upload, session)

    session.commit()
    session.close()
    engine.dispose()
    return True
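
# Hypothetical driver sketch: plot_history() and plot_standard_and_ambient_peak_areas()
# share the same logger-in, bool-out signature, so a scheduled job could run them back
# to back and log any failures. The logger setup below is illustrative.
import logging

logger = logging.getLogger('plot_all')

for job in (plot_history, plot_standard_and_ambient_peak_areas):
    if not job(logger):
        logger.error(f'{job.__name__} exited on an error')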
Example #6
from datetime import datetime
from random import randint

from scratch_plotting import (TimeSeries, TwoAxisTimeSeries, LinearityPlot,
                              MixingRatioPlot, PeakAreaPlot,
                              StandardPeakAreaPlot, LogParameterPlot,
                              TwoAxisLogParameterPlot)

from IO.db.models import Compound, GcRun, LogFile
from plotting import create_daily_ticks
from reporting import abstract_query

ethane = abstract_query(
    [GcRun.date, Compound.mr],
    [Compound.name == 'ethane',
     GcRun.date.between(datetime(2019, 2, 1), datetime(2019, 2, 14)),
     GcRun.type == 5,
     Compound.filtered == False])

propane = abstract_query(
    [GcRun.date, Compound.mr],
    [Compound.name == 'propane',
     GcRun.date.between(datetime(2019, 2, 1), datetime(2019, 2, 14)),
     GcRun.type == 5,
     Compound.filtered == False])

iButane = abstract_query(
    [GcRun.date, Compound.mr],
    [Compound.name == 'i-butane',
     GcRun.date.between(datetime(2019, 2, 1), datetime(2019, 2, 14)),
     GcRun.type == 5,
     Compound.filtered == False])
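
# Hedged continuation sketch: the three series queried above can be drawn with the
# imported MixingRatioPlot, reusing the {name: (dates, mrs)} constructor shape seen
# in the plotting routines. Assumptions: the series dict may hold several compounds,
# create_daily_ticks() mirrors create_monthly_ticks() in returning (limits, major
# ticks, minor ticks), and the output filepath below is illustrative.
limits, major_ticks, minor_ticks = create_daily_ticks(14)

series = {
    'ethane': ([r.date for r in ethane], [r.mr for r in ethane]),
    'propane': ([r.date for r in propane], [r.mr for r in propane]),
    'i-butane': ([r.date for r in iButane], [r.mr for r in iButane]),
}

p = MixingRatioPlot(series,
                    limits=limits,
                    major_ticks=major_ticks,
                    minor_ticks=minor_ticks,
                    filepath='light_alkanes_2019_02.png')
p.plot()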