Beispiel #1
0
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    """All warnings and problems are selected from regObs or the avalanche api and neatly pickel'd for later use.
    This method also gets all warnings in english for the english main message.

    :param regions:             [int or list of ints] RegionID as given in the forecast api
    :param date_from:           [date or string as yyyy-mm-dd]
    :param date_to:             [date or string as yyyy-mm-dd]
    :param pickle_file_name:    filename including directory as string

    :return:
    """

    warnings = []

    # get all warnings and problems for each region, then join norwegian and
    # english forecasts on date.
    for r in regions:
        warnings_no = gfa.get_avalanche_warnings(r, date_from, date_to)
        warnings_en = gfa.get_avalanche_warnings(r, date_from, date_to, lang_key=2)

        # index the english forecasts by date so each norwegian forecast is
        # matched in O(1) instead of re-scanning the whole english list.
        # (The original loop also used `continue` where `break` was intended,
        # so it kept scanning after a match.)
        warnings_en_by_date = {w.date: w for w in warnings_en}

        for w_no in warnings_no:
            # add english main message from the forecast with the same date
            w_en = warnings_en_by_date.get(w_no.date)
            if w_en is not None:
                w_no.set_main_message_en(w_en.main_message_en)

        warnings += warnings_no

    mp.pickle_anything(warnings, pickle_file_name)
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    '''All forecasted warnings and problems are selected from regObs or the avalanche api.
    Dangers and problems are connected and neatly pickel'd for later use.

    :param regions:            list [int] RegionID as given in regObs [101-199]
    :param date_from:          string as 'yyyy-mm-dd'
    :param date_to:            string as 'yyyy-mm-dd'
    :param pickle_file_name:   filename including directory as string
    :return:
    '''

    warnings = []

    # get all warnings for each region and collect them in one flat list.
    # (Dead, commented-out code joining separate problem objects onto the
    # warnings was removed — problems now come with the warnings themselves.)
    for r in regions:
        warnings += gfa.get_avalanche_warnings(r, date_from, date_to)

    mp.pickle_anything(warnings, pickle_file_name)
def test_get_avalanche_warnings():
    """Request the same forecasts both as objects and as dicts, and dump the dicts to csv.

    Exercises both values of the `as_dict` flag against the live api.
    """
    region_ids = [3003]
    from_date = dt.date(2018, 12, 3)
    to_date = dt.date(2018, 12, 7)

    # forecasts as AvalancheWarning objects (kept in a local for inspection)
    aw = gf.get_avalanche_warnings(region_ids,
                                   from_date,
                                   to_date,
                                   lang_key=1,
                                   as_dict=False)

    # the same forecasts as plain dicts, suitable for a DataFrame
    aw_dict = gf.get_avalanche_warnings(region_ids,
                                        from_date,
                                        to_date,
                                        lang_key=1,
                                        as_dict=True)

    df = pandas.DataFrame(aw_dict)
    df.to_csv('../localstorage/test_aw_dict.csv', index_label='index')
def get_svalbard_regional_forecasts():
    """Fetch forecasts for the four Svalbard regions (2016-12-01 to 2019-04-30) and write them to csv."""
    svalbard_regions = [3001, 3002, 3003, 3004]
    start = dt.date(2016, 12, 1)
    end = dt.date(2019, 4, 30)

    forecasts = gf.get_avalanche_warnings(svalbard_regions,
                                          start,
                                          end,
                                          lang_key=1,
                                          as_dict=True)

    pandas.DataFrame(forecasts).to_csv('../localstorage/svalbard_forecasts.csv',
                                       index_label='index')
def get_svalbard_regional_forecasts_2015():
    """Fetch forecasts for the old Svalbard region id 130 (2014-15 season) and write them to csv."""
    old_svalbard_region = [130]
    season_start = dt.date(2014, 12, 1)
    season_end = dt.date(2015, 5, 31)

    forecasts = gf.get_avalanche_warnings(old_svalbard_region,
                                          season_start,
                                          season_end,
                                          lang_key=1,
                                          as_dict=True)

    pandas.DataFrame(forecasts).to_csv('../localstorage/svalbard_forecasts_2015.csv',
                                       index_label='index')
Beispiel #6
0
def test_AvalancheDanger_to_dict():
    """Exercise AvalancheWarning.to_dict() on a live forecast for Trollheimen."""
    region_ids = [3022]  # Trollheimen

    from_date = dt.date(2018, 12, 1)
    to_date = dt.date(2018, 12, 5)

    warnings_ = gf.get_avalanche_warnings(region_ids,
                                          from_date,
                                          to_date,
                                          lang_key=1)

    # conversion itself is the thing under test; the result is only inspected manually
    _d = warnings_[0].to_dict()
Beispiel #7
0
def _get_raw_varsom(year, date, days, max_file_age=23):
    """Get avalanche forecasts, either a window of days around a date or a whole season.

    :param year:          [string] season, e.g. '2018-19'; used when `date` is None
    :param date:          [date or None] if given, fetch [date - days - 1, date] for that date's season
    :param days:          [int] number of days back from `date`
    :param max_file_age:  [int] hrs before the cached season pickle is refreshed
    :return:              [list of AvalancheWarning]
    """
    if date:
        season = gm.get_season_from_date(date)
        regions = gm.get_forecast_regions(year=season, get_b_regions=True)
        from_date = date - dt.timedelta(days=days + 1)
        to_date = date
        # keep only forecasts with a danger level set; level 0 means "not assessed"
        aw = [
            sw for sw in gf.get_avalanche_warnings(regions, from_date, to_date)
            if sw.danger_level > 0
        ]
    else:
        aw = gvp.get_all_forecasts(year=year, max_file_age=max_file_age)
    return aw
def get_season_18_19():
    """Fetch all regional forecasts for the 2018-19 season and write them to csv."""
    season_regions = [
        3003, 3007, 3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016, 3017, 3022,
        3023, 3024, 3027, 3028, 3029, 3031, 3032, 3034, 3035
    ]
    season_start = dt.date(2018, 12, 1)
    season_end = dt.date(2019, 3, 11)

    forecasts = gf.get_avalanche_warnings(season_regions,
                                          season_start,
                                          season_end,
                                          lang_key=1,
                                          as_dict=True)

    pandas.DataFrame(forecasts).to_csv(
        '../localstorage/norwegian_avalanche_warnings_season_18_19.csv',
        index_label='index')
Beispiel #9
0
def test_AvalancheDanger_as_df():
    """
    Put class data into a pandas.DataFrame and write it to csv.
    :return:
    """
    region_ids = [3022]  # Trollheimen

    from_date = dt.date(2018, 12, 1)
    to_date = dt.date(2018, 12, 6)

    # as_dict=True returns plain dicts, which DataFrame.from_dict can consume directly
    warnings_ = gf.get_avalanche_warnings(region_ids,
                                          from_date,
                                          to_date,
                                          lang_key=1,
                                          as_dict=True)

    df = pandas.DataFrame.from_dict(warnings_)
    df.to_csv(r'../localstorage/aval_danger.csv', header=True)
Beispiel #10
0
def test_MountainWeather_class():
    """Exercise MountainWeather.from_dict() on a live forecast.

    Requires "forecast_api_version" : "v4.0.1" in /config/api.json
    """
    region_ids = [3022]  # Trollheimen

    from_date = dt.date(2018, 12, 1)
    to_date = dt.date(2018, 12, 4)

    # raw json variant is what MountainWeather.from_dict consumes
    warnings_as_json = gf.get_avalanche_warnings_as_json(region_ids,
                                                         from_date,
                                                         to_date,
                                                         lang_key=1)
    # parsed variant fetched alongside to exercise the same request path
    warnings_ = gf.get_avalanche_warnings(region_ids,
                                          from_date,
                                          to_date,
                                          lang_key=1)

    w = warnings_as_json[0]
    mw = gf.MountainWeather()
    mw.from_dict(w['MountainWeather'])
Beispiel #11
0
def get_forecasted_dangers(region_ids, from_date, to_date, include_ikke_vurdert=False, lang_key=1):
    """Gets forecasted dangers for multiple regions.

    :param region_ids:              [int or list of ints] region ID(s) as given in regObs
    :param from_date:               [date or string as yyyy-mm-dd] gets dates [from, to>
    :param to_date:                 [date or string as yyyy-mm-dd] gets dates [from, to>
    :param include_ikke_vurdert:    [bool] if true, it includes forecasts where danger_level = 0
    :param lang_key:                [int] 1 is norwegian, 2 is english

    :return:                        [list of AvalancheWarning]
    """

    # get all warning and problems for this region and then loop though them joining them where dates match.
    region_warnings = gfa.get_avalanche_warnings(region_ids, from_date, to_date, lang_key=lang_key)

    if not include_ikke_vurdert:
        # danger_level == 0 means the forecast was not assessed ("ikke vurdert")
        region_warnings = [w for w in region_warnings if w.danger_level != 0]

    return region_warnings
Beispiel #12
0
def _get_all_snow(get_new=False):
    """Return (observations, forecasts) for the 2012-13 through 2016-17 seasons.

    When get_new is False the data is read from a local pickle; otherwise it is
    fetched from the apis and the pickle is rewritten.
    """
    file_name = '{}observations and forecasts 2012-17.pickle'.format(
        env.local_storage)

    if not get_new:
        # cached path: everything comes straight from the pickle
        [all_observations, all_forecasts] = mp.unpickle_anything(file_name)
        return all_observations, all_forecasts

    all_observations = go.get_all_registrations('2012-12-01',
                                                '2017-07-01',
                                                geohazard_tids=10)

    all_forecasts = []
    for season in ['2012-13', '2013-14', '2014-15', '2015-16', '2016-17']:
        from_date, to_date = gm.get_forecast_dates(season)
        region_ids = gm.get_forecast_regions(season)
        all_forecasts += gfa.get_avalanche_warnings(
            region_ids, from_date, to_date)

    mp.pickle_anything([all_observations, all_forecasts], file_name)

    return all_observations, all_forecasts
# -*- coding: utf-8 -*-
__author__ = 'raek'

from varsomdata import getforecastapi as gfa
from varsomdata import getkdvelements as gkdv
from datetime import date as date

if __name__ == '__main__':
    """Lists how many times different forecasters have made a forecast for a given region."""

    # Get forecasts for Nordenskiold Land over two seasons.
    # NOTE(review): the region id changed between seasons (130 -> 3003) — presumably
    # an api region-id migration; confirm against the forecast api docs.
    nordenskiold_warnings_201516 = gfa.get_avalanche_warnings(
        130, '2016-01-27', '2016-07-01')
    nordenskiold_warnings_201617 = gfa.get_avalanche_warnings(
        3003, '2016-12-02', date.today())
    nordenskiold_warnings = nordenskiold_warnings_201516 + nordenskiold_warnings_201617

    # Count occurrences per forecaster, skipping empty forecasts (danger_level == 0)
    dict_of_forecasters = {}
    for w in nordenskiold_warnings:
        if w.danger_level > 0:
            dict_of_forecasters[w.nick] = dict_of_forecasters.get(w.nick, 0) + 1
# Ad-hoc script: fetch one season of english forecasts for region 3003 and
# save danger levels and problems to csv.
import sys
# make the locally checked-out varsomdata package importable
sys.path.insert(0, '/home/hans/Desktop/varsomdata')
#import dangerlevelsandproblems as dlp
from varsomdata import getforecastapi as gfa
from varsomscripts import dangerlevelsandproblems as dlp

# lang_key=2 requests the english forecasts
a = gfa.get_avalanche_warnings(3003, "2020-10-1", "2021-04-13", lang_key=2)
# NOTE(review): calls a private helper (leading underscore) of dangerlevelsandproblems
b = dlp._save_danger_and_problem_to_file(a, "my_own_warnings.csv")

# quick sanity check of what the api returned
print(a[0])
print(type(a[0]))
Beispiel #15
0
def get_all_forecasts(year, lang_key=1, max_file_age=23):
    """Specialized method for getting all forecasts for one season.
    For the current season, if a request has been made within the last
    `max_file_age` hours, data is retrieved from a locally stored pickle;
    if not, a new request is made to the regObs api. Previous seasons are not
    requested if a pickle is found in local storage.

    :param year:                [string] Eg. season '2017-18'
    :param lang_key:            [int] 1 is norwegian, 2 is english
    :param max_file_age:        [int] hrs how old the file is before new is retrieved

    :return valid_forecasts:    [list of AvalancheWarning]
    """

    from_date, to_date = gm.get_forecast_dates(year=year)
    file_name = '{0}all_forecasts_{1}_lk{2}.pickle'.format(
        env.local_storage, year, lang_key)
    file_date_limit = dt.datetime.now() - dt.timedelta(hours=max_file_age)

    # if we are well out of the current season (30 days) its little chance the data set has changed.
    current_season = gm.get_season_from_date(dt.date.today() -
                                             dt.timedelta(30))

    # Get forecast regions used in the current year
    region_ids = gm.get_forecast_regions(year, get_b_regions=True)

    get_new = True

    if os.path.exists(file_name):
        if year != current_season:
            # a pickle for a season long gone will not change — reuse it.
            get_new = False
        else:
            file_age = dt.datetime.fromtimestamp(os.path.getmtime(file_name))
            # reuse the file only if it is fresh enough AND larger than a
            # nearly empty pickle (100 bytes).
            if file_age > file_date_limit and os.path.getsize(file_name) > 100:
                get_new = False

    if get_new:
        lg.info(
            "getvarsompickles.py -> get_all_forecasts: Get new {0} forecasts and pickle."
            .format(year))

        all_forecasts = gfa.get_avalanche_warnings(region_ids,
                                                   from_date,
                                                   to_date,
                                                   lang_key=lang_key)

        # Valid forecasts have a danger level. The others are empty.
        valid_forecasts = [f for f in all_forecasts if f.danger_level > 0]

        mp.pickle_anything(valid_forecasts, file_name)

    else:
        valid_forecasts = mp.unpickle_anything(file_name)

    return valid_forecasts