Example #1
def fetch_and_validate_observation(access_token, observation_id, start, end,
                                   only_missing=False, base_url=None):
    """Task that will run immediately after Observation values are
    uploaded to the API to validate the data. If over a day of data is
    present, daily validation will be applied.

    For the last day of a multiday series that only has a partial day's
    worth of data, if `only_missing` is False, the data is evaluated as
    one series and daily validation is applied. If `only_missing` is True,
    any discontinuous periods of data with less than one day of data will
    only have immediate validation applied. If the period is longer than
    a day, the full daily validation is applied.

    Parameters
    ----------
    access_token : str
        Token to access the API
    observation_id : str
        ID of the observation to fetch values and validate
    start : datetime-like
        Start time to limit observation fetch
    end : datetime-like
        End time to limit observation fetch
    only_missing : boolean, default False
        If True, only periods that have not had daily validation applied
        are fetched and validated. Otherwise all data between start and end
        is validated.
    base_url : str, default None
        URL for the API to fetch and post data
    """
    session = APISession(access_token, base_url=base_url)
    observation = session.get_observation(observation_id)
    _split_validation(session, observation, start, end, only_missing)
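A minimal usage sketch: the access token, observation ID, and time window below are hypothetical placeholders, and pandas Timestamps are assumed to satisfy the datetime-like parameters.

import pandas as pd

# Hypothetical values for illustration only
access_token = '<api-access-token>'
observation_id = '<observation-uuid>'

# Validate the values uploaded over the last hour
end = pd.Timestamp.now(tz='UTC')
start = end - pd.Timedelta('1h')
fetch_and_validate_observation(access_token, observation_id, start, end)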
Example #2
def fetch_and_validate_all_observations(access_token, start, end,
                                        only_missing=True, base_url=None):
    """
    Run the observation validation for all observations that the user
    has access to in their organization. See further discussion in
    :py:func:`solarforecastarbiter.validation.tasks.fetch_and_validate_observation`

    Parameters
    ----------
    access_token : str
        Token to access the API
    start : datetime-like
        Start time to limit observation fetch
    end : datetime-like
        End time to limit observation fetch
    only_missing : boolean, default True
        If True, only periods that have not had daily validation applied
        are fetched and validated. Otherwise all data between start and end
        is validated.
    base_url : str, default None
        URL for the API to fetch and post data

    """
    session = APISession(access_token, base_url=base_url)
    user_info = session.get_user_info()
    observations = [obs for obs in session.list_observations()
                    if obs.provider == user_info['organization']]
    for observation in observations:
        _split_validation(session, observation, start, end, only_missing)
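A sketch of a scheduled run over the previous UTC day; the token is a hypothetical placeholder, and only_missing is left at its default of True so periods that already received daily validation are skipped.

import pandas as pd

access_token = '<api-access-token>'  # hypothetical placeholder

# Validate the previous full UTC day
end = pd.Timestamp.now(tz='UTC').floor('1D')
start = end - pd.Timedelta('1D')
fetch_and_validate_all_observations(access_token, start, end)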
Example #3
def daily_observation_validation(access_token, start, end, base_url=None):
    """
    Run the daily observation validation for all observations that the user
    has access to.
    """
    session = APISession(access_token, base_url=base_url)
    observations = session.list_observations()
    for observation in observations:
        try:
            _daily_validation(session, observation, start, end, base_url)
        except IndexError:
            logger.warning(('Skipping daily validation of %s: '
                            'not enough values'), observation.name)
            continue
Example #4
def daily_single_observation_validation(access_token,
                                        observation_id,
                                        start,
                                        end,
                                        base_url=None):
    """
    Task that expects a longer, likely daily timeseries of Observation values
    that will be validated.
    """
    session = APISession(access_token, base_url=base_url)
    observation = session.get_observation(observation_id)
    try:
        out = _daily_validation(session, observation, start, end, base_url)
    except IndexError:
        logger.warning('Daily validation for %s failed: not enough values',
                       observation.name)
    else:
        return out
Example #5
def immediate_observation_validation(access_token,
                                     observation_id,
                                     start,
                                     end,
                                     base_url=None):
    """
    Task that will run immediately after Observation values are uploaded to the
    API to validate the data.
    """
    session = APISession(access_token, base_url=base_url)
    observation = session.get_observation(observation_id)
    observation_values = session.get_observation_values(
        observation_id, start, end)
    value_series = observation_values['value']
    quality_flags = observation_values['quality_flag'].copy()

    validation_func = IMMEDIATE_VALIDATION_FUNCS.get(observation.variable,
                                                     validate_timestamp)
    validation_flags = validation_func(observation, value_series)

    for flag in validation_flags:
        quality_flags |= flag

    quality_flags.name = 'quality_flag'
    observation_values.update(quality_flags)
    session.post_observation_values(observation_id,
                                    observation_values,
                                    params='donotvalidate')
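The loop above combines flags with pandas' element-wise bitwise OR: each validation function returns one or more integer Series of bitmask values that are folded into the existing quality_flag column without discarding flags already set. A standalone sketch of that idea, using illustrative bitmask values rather than the project's actual quality-flag definitions:

import pandas as pd

# Illustrative bitmask values, not the project's quality-flag mapping
USER_FLAGGED = 0b01
NIGHTTIME = 0b10

index = pd.date_range('2024-06-01 00:00', periods=3, freq='1h', tz='UTC')
quality_flags = pd.Series([0, USER_FLAGGED, 0], index=index)

# Suppose a validation function flags the first two points as nighttime
nighttime_flags = pd.Series([NIGHTTIME, NIGHTTIME, 0], index=index)

quality_flags |= nighttime_flags
# quality_flags is now [0b10, 0b11, 0b00]; previously set flags are preserved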
Example #6
def compute_report(access_token, report_id, base_url=None):
    """
    Create a raw report using data from the API. Typically called as a task.
    On failure, a message describing the failure is posted to the API
    in an empty RawReport.

    Parameters
    ----------
    access_token : str
        Token to access the API
    report_id : str
        ID of the report to fetch from the API and generate the raw
        report for
    base_url : str, default None
        URL for the API to fetch and post data

    Returns
    -------
    raw_report : :py:class:`solarforecastarbiter.datamodel.RawReport`
    """
    session = APISession(access_token, base_url=base_url)
    fail_wrapper = capture_report_failure(report_id, session)
    report = fail_wrapper(session.get_report, err_msg=(
        'Failed to retrieve report. Perhaps the report does not exist, '
        'the user does not have permission, or the connection failed.')
    )(report_id)
    data = fail_wrapper(get_data_for_report, err_msg=(
        'Failed to retrieve data for report which may indicate a lack '
        'of permissions or that an object does not exist.')
    )(session, report)
    raw_report = fail_wrapper(create_raw_report_from_data, err_msg=(
        'Unhandled exception when computing report.')
    )(report, data)
    fail_wrapper(session.post_raw_report, err_msg=(
        'Computation of report completed, but failed to upload result to '
        'the API.')
    )(report.report_id, raw_report)
    return raw_report
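Each step above is wrapped so that a failure message is posted to the API before the exception propagates. A minimal sketch of the general wrap-and-call pattern; this illustrates the idea, not the library's actual capture_report_failure implementation:

def capture_failure_sketch(report_id, post_failure):
    """Return a wrapper factory so that wrapper(func, err_msg=...)(*args)
    calls func and, on any exception, posts err_msg for report_id
    before re-raising."""
    def wrapper(func, err_msg):
        def wrapped(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception:
                post_failure(report_id, err_msg)
                raise
        return wrapped
    return wrapper

In compute_report, each step is invoked as fail_wrapper(step, err_msg=...)(args), so the first step that raises determines the message that accompanies the empty RawReport.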
Example #7
def report(verbose, user, password, base_url, report_file, output_file, format,
           serialization_roundtrip, orca_server_url):
    """
    Make a report. See the API documentation's POST reports section for
    REPORT_FILE requirements.
    """
    set_log_level(verbose)
    token = cli_access_token(user, password)
    with open(report_file) as f:
        metadata = json.load(f)
    session = APISession(token, base_url=base_url)
    report = session.process_report_dict(metadata)
    if orca_server_url is not None:
        import plotly.io as pio
        pio.orca.config.server_url = orca_server_url
    if serialization_roundtrip:
        with mock_raw_report_endpoints(base_url):
            session.create_report(report)
            reports.compute_report(token, 'no_id', base_url)
            full_report = session.get_report('no_id')
    else:
        data = reports.get_data_for_report(session, report)
        raw_report = reports.create_raw_report_from_data(report, data)
        full_report = report.replace(raw_report=raw_report, status='complete')
    # assumed dashboard url based on api url
    dash_url = base_url.replace('api', 'dashboard')
    if ((format == 'detect' and output_file.endswith('.html'))
            or format == 'html'):
        html_report = template.render_html(full_report,
                                           dash_url,
                                           with_timeseries=True,
                                           body_only=False)
        with open(output_file, 'w') as f:
            f.write(html_report)
    elif ((format == 'detect' and output_file.endswith('.pdf'))
          or format == 'pdf'):
        pdf_report = template.render_pdf(full_report, dash_url)
        with open(output_file, 'wb') as f:
            f.write(pdf_report)
    else:
        raise ValueError("Unable to detect format")


def get_apisession(token, base_url=None):
    return APISession(token, base_url=base_url)