Example #1
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    """All warnings and problems are selected from regObs or the avalanche api and neatly pickel'd for later use.
    This method also gets all warnings in english for the english main message.

    :param regions:             [int or list of ints] RegionID as given in the forecast api
    :param date_from:           [date or string as yyyy-mm-dd]
    :param date_to:             [date or string as yyyy-mm-dd]
    :param pickle_file_name:    filename including directory as string

    :return:
    """

    warnings = []

    # get all warnings and problems for this region and then loop through them, joining them on date
    for r in regions:
        warnings_no = gfa.get_avalanche_warnings(r, date_from, date_to)
        warnings_en = gfa.get_avalanche_warnings(r, date_from, date_to, lang_key=2)

        # loop through all the Norwegian forecasts
        for i in range(len(warnings_no)):

            # add the English main message from the forecast with the same date
            for k in range(len(warnings_en)):

                if warnings_no[i].date == warnings_en[k].date:
                    warnings_no[i].set_main_message_en(warnings_en[k].main_message_en)
                    break

        warnings = warnings + warnings_no

    mp.pickle_anything(warnings, pickle_file_name)
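

# Usage sketch (hedged, not part of the original module): a typical call that pickles one
# season of warnings, mirroring the __main__ block later in this file. gm and env are the
# helper modules used elsewhere in these examples and are assumed to be imported here.
# regions = gm.get_forecast_regions(year='2017-18')
# date_from, date_to = gm.get_dates_from_season(year='2017-18')
# pickle_warnings(regions, date_from, date_to,
#                 '{0}{1}'.format(env.local_storage, 'runmainmessage warnings.pickle'))

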
def get_2016_17_warnings(how_to_get_data='Get new and dont pickle', pickle_file_name=None):
    '''Gets the forecasted avalanche dangers for the 2016-17 season.

    :param how_to_get_data:     'Get new and dont pickle', 'Get new and save pickle' or 'Load pickle'
    :param pickle_file_name:    not needed if no pickles are involved
    :return:
    '''

    if 'Get new' in how_to_get_data:

        from_date = dt.date(2016, 11, 30)
        #to_date = dt.date.today()
        to_date = dt.date(2017, 5, 31)

        #region_ids = [3012, 3013]
        region_ids = gm.get_forecast_regions(year='2016-17')

        all_warnings = []
        for region_id in region_ids:
            all_warnings += gd.get_forecasted_dangers(region_id, from_date, to_date, include_ikke_vurdert=False)

        # Sort by date
        all_warnings = sorted(all_warnings, key=lambda danger: danger.date)

        if 'and save pickle' in how_to_get_data:
            mp.pickle_anything(all_warnings, pickle_file_name)

    elif 'Load pickle' in how_to_get_data:
        all_warnings = mp.unpickle_anything(pickle_file_name)

    else:
        all_warnings = 'No valid data retrieval method given in get_2016_17_warnings.'

    return all_warnings
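
# Usage sketch (hedged): how_to_get_data selects one of three modes by substring matching,
# so the exact strings from the docstring must be used. local_pickle is a hypothetical file name.
# warnings_1617 = get_2016_17_warnings('Get new and save pickle', pickle_file_name=local_pickle)
# warnings_1617 = get_2016_17_warnings('Load pickle', pickle_file_name=local_pickle)
# warnings_1617 = get_2016_17_warnings()  # default: get new data without pickling
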
Example #3
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    '''All forecasted warnings and problems are selected from regObs or the avalanche API.
    Dangers and problems are connected and neatly pickled for later use.

    :param regions:            list [int] RegionID as given in regObs [101-199]
    :param date_from:          string as 'yyyy-mm-dd'
    :param date_to:            string as 'yyyy-mm-dd'
    :param pickle_file_name:   filename including directory as string
    :return:
    '''

    warnings = []

    for r in regions:

        # get all warnings and problems for this region and then loop through them, joining them where dates match.
        region_warnings = gfa.get_avalanche_warnings(r, date_from, date_to)
        #name = gro.get_forecast_region_name(r)
        '''
        problems = gro.get_problems_from_AvalancheWarnProblemV(r, date_from, date_to)

        print('matrix.py -> pickle_warnings: {0} problems found for {1}'.format(len(problems), name))

        for i in range(0, len(region_warnings), 1):
            for j in range(0, len(problems), 1):
                if region_warnings[i].date == problems[j].date:
                    region_warnings[i].add_problem(problems[j])
        '''
        warnings += region_warnings
    '''
    # make sure all problems are ordered from lowest id (main problem) to largest.
    for w in warnings:
        w.avalanche_problems = sorted(w.avalanche_problems, key=lambda AvalancheProblem: AvalancheProblem.order)
    '''
    mp.pickle_anything(warnings, pickle_file_name)
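
# Usage sketch (hedged): this variant takes regObs region ids (101-199) and dates as
# 'yyyy-mm-dd' strings. The region ids and the pickle file name below are hypothetical;
# env.local_storage is the storage folder used elsewhere in these examples.
# pickle_warnings([116, 117, 128], '2016-12-01', '2017-05-31',
#                 '{0}{1}'.format(env.local_storage, 'matrix warnings.pickle'))
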
Example #4
def pickle_M3(data_set, config_file_name, pickle_m3_file_name):
    """Makes a list of elements matching the m3 matrix. Uses a configuration file as for the matrix elements and
     runs through all warnings adding occurances and danger level used at each combination of the matrix.

    :param data_set:
    :param pickle_m3_file_name:
    :return:
    """

    config_file_name = '{0}{1}'.format(env.input_folder, config_file_name)
    m3_elements = rf.read_configuration_file(config_file_name, M3Element)

    # read out the data_set and add to M3Elements
    for i in range(len(data_set['level']['values'])):

        size = data_set['size']['values'][i]
        if size is None:
            size = '0 - Ikke gitt'
            print('matrix.py -> pickle_M3 -> Warning: Encountered occurrence where avalanche size is None. Set to 0 - Ikke gitt.')
        trigger = data_set['trigger']['values'][i]
        probability = data_set['probability']['values'][i]
        distribution = data_set['distribution']['values'][i]

        for e in m3_elements:

            m3_size = e.avalanche_size
            m3_trigger = e.trigger
            m3_probability = e.probability
            m3_distribution = e.distribution

            if (size.strip() in m3_size and trigger.strip() in m3_trigger
                    and probability.strip() in m3_probability
                    and distribution.strip() in m3_distribution):
                level = data_set['level']['values'][i]
                e.add_danger_level(level)

    # count all added danger levels (for control/debugging only) and make some stats
    count = 0
    for e in m3_elements:
        count += len(e.danger_level_list)
        e.set_level_average()
        e.set_level_standard_dev()

    mp.pickle_anything(m3_elements, pickle_m3_file_name)
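
# Usage sketch (hedged): data_set is the dictionary of parallel value lists made by
# pickle_data_set (see the last example in this file); config_file_name is resolved
# relative to env.input_folder inside pickle_M3. The file names below are hypothetical.
# data_set = mp.unpickle_anything('{0}m3_data_set.pickle'.format(env.local_storage))
# pickle_M3(data_set, 'm3_configuration.csv', '{0}m3_elements.pickle'.format(env.local_storage))
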
Example #5
            aval_types, m.main_message_no, m.main_message_en)

        l.write(s)
    l.close()


if __name__ == "__main__":

    year = '2017-18'
    regions = gm.get_forecast_regions(year=year)
    date_from, date_to = gm.get_dates_from_season(year=year)

    # file names
    file_name_for_warnings_pickle = '{0}{1}'.format(
        env.local_storage, 'runmainmessage warnings.pickle')
    file_name_for_main_messages_pickle = '{0}{1}'.format(
        env.local_storage, 'runmainmessage main messages.pickle')
    file_name_for_main_messages_csv = '{0}{1}'.format(
        env.output_folder, 'Alle hovedbudskap {}.csv'.format(year))

    ##### pickle the warnings and dataset with main messages
    # pickle_warnings(regions, date_from, date_to, file_name_for_warnings_pickle)
    main_messages = select_messages_with_more(file_name_for_warnings_pickle)
    mp.pickle_anything(main_messages, file_name_for_main_messages_pickle)
    main_messages = mp.unpickle_anything(file_name_for_main_messages_pickle)

    # write to file
    save_main_messages_to_file(main_messages, file_name_for_main_messages_csv)

    pass
Example #6
def pickle_data_set(warnings, file_name, use_ikke_gitt=False):
    '''Data preparation continued. Takes the warnings, which is a list of AvalancheDanger objects, and makes a
    dictionary data set of it. The value indexes relate to each other, i.e. distribution, level, probability etc.
    at the nth index originate from the same problem.

    The data set also includes information on what the xKDV tables in regObs contain and preferred colors for
    plotting.

    :param warnings:        list of AvalancheDanger objects
    :param file_name:       full path and filename to pickle the data to
    :param use_ikke_gitt:   if the ID = 0 (Ikke gitt) values are not wanted, they can be omitted altogether

    :return:
    '''

    level_list = []
    size_list = []
    trigger_list = []
    probability_list = []
    distribution_list = []

    for w in warnings:
        if w.danger_level > 0 and len(w.avalanche_problems) > 0:
            # The first problem in avalanche_problems is used. This is the main problem.
            level_list.append(w.danger_level)
            try:
                size_list.append(w.avalanche_problems[0].aval_size)
            except Exception:
                # not all problems carry aval_size; fall back to 'Ikke gitt' (not given)
                size_list.append('Ikke gitt')
            trigger_list.append(w.avalanche_problems[0].aval_trigger)
            probability_list.append(w.avalanche_problems[0].aval_probability)
            distribution_list.append(w.avalanche_problems[0].aval_distribution)

        # Test if lengths match and give a warning if not.
        control = (len(level_list) + len(size_list) + len(trigger_list) +
                   len(probability_list) + len(distribution_list)) / 5
        if control != len(level_list):
            print("runForMatrix -> pickle_data_set: list lengths don't match. Error in data.")

    level_keys = list(gkdv.get_kdv('AvalancheDangerKDV').keys())
    size_keys = [v.Name for v in gkdv.get_kdv('DestructiveSizeKDV').values()]
    triggers_keys = [
        v.Name for v in gkdv.get_kdv('AvalTriggerSimpleKDV').values()
    ]
    probability_keys = [
        v.Name for v in gkdv.get_kdv('AvalProbabilityKDV').values()
    ]
    distribution_keys = [
        v.Name for v in gkdv.get_kdv('AvalPropagationKDV').values()
    ]

    level_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    if not use_ikke_gitt:
        level_keys.pop(0)
        size_keys.pop(0)
        triggers_keys.pop(0)
        probability_keys.pop(0)
        distribution_keys.pop(0)

        level_colors.pop(0)

    data_set = {
        'level': {
            'values': level_list,
            'keys': level_keys,
            'colors': level_colors
        },
        'size': {
            'values': size_list,
            'keys': size_keys,
            'colors': ['0.7']
        },
        'trigger': {
            'values': trigger_list,
            'keys': triggers_keys,
            'colors': ['0.7']
        },
        'probability': {
            'values': probability_list,
            'keys': probability_keys,
            'colors': ['0.7']
        },
        'distribution': {
            'values': distribution_list,
            'keys': distribution_keys,
            'colors': ['0.7']
        }
    }

    mp.pickle_anything(data_set, file_name)
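

# Illustration (hedged, not from the original module): the pickled data_set holds parallel
# lists, so index i in every 'values' list describes the same warning and its main problem.
# data_set = mp.unpickle_anything(file_name)   # file_name as passed to pickle_data_set
# i = 0
# print(data_set['level']['values'][i],          # danger level of warning i
#       data_set['size']['values'][i],           # size of its main problem
#       data_set['trigger']['values'][i],        # trigger of its main problem
#       data_set['distribution']['values'][i])   # distribution of its main problem

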
def make_forecaster_data(warnings, save_for_web=False):
    '''Make the forecaster dictionary with all the necessary data.
    The method also makes the dict needed for the menu on the pythonanywhere website.

    :param warnings:        list of warnings (AvalancheDanger objects)
    :param save_for_web:    if True, also pickle a {observer_id: nick} dict for the website menu
    :return:                forecaster_dict
    '''

    # get nicknames and ids of all regObs observers as a dict {id: nick}
    observer_nicks = gm.get_observer_v()

    # Make a data set as a dict {nick: Forecaster} and add warnings to each Forecaster object.
    # Note: the keys of this dictionary are all the forecaster names.
    forecaster_dict = {}
    for w in warnings:
        if w.nick not in forecaster_dict:
            forecaster_dict[w.nick] = Forecaster(w.nick)
        forecaster_dict[w.nick].add_warning(w)

    # needed below for forecaster statistics
    nowcast_lengths_all = []
    forecast_lengths_all = []
    danger_levels_all = []
    problems_pr_warning_all = []
    for w in warnings:
        nowcast_lengths_all.append(len(w.avalanche_nowcast))
        forecast_lengths_all.append(len(w.avalanche_forecast))
        danger_levels_all.append(w.danger_level)
        problems_pr_warning_all.append(len(w.avalanche_problems))

    # Add more data to the forecaster objects in the dict
    for f in forecaster_dict.values():

        # add the number of warnings made
        forecaster_dict[f.nick].add_warnings_count(len(f.warnings))
        for o_i, o_n in observer_nicks.items():
            if o_n == f.nick:
                forecaster_dict[f.nick].add_observer_id(o_i)

        # find how many warnings per date
        dates = {}
        for w in f.warnings:
            if w.date not in dates:
                dates[w.date] = 1
            else:
                dates[w.date] += 1
        forecaster_dict[f.nick].add_dates(dates)

        # Add lists of danger levels, nowcast lengths, forecast lengths and problems
        # for this forecaster, together with the corresponding lists for all forecasters (for averages).
        nowcast_lengths = []
        forecast_lengths = []
        danger_levels = []
        problems_pr_warning = []

        for w in f.warnings:
            nowcast_lengths.append(len(w.avalanche_nowcast))
            forecast_lengths.append(len(w.avalanche_forecast))
            danger_levels.append(w.danger_level)
            problems_pr_warning.append(len(w.avalanche_problems))

        forecaster_dict[f.nick].add_nowcast_lengths(nowcast_lengths, nowcast_lengths_all)
        forecaster_dict[f.nick].add_forecast_lengths(forecast_lengths, forecast_lengths_all)
        forecaster_dict[f.nick].add_danger_levels(danger_levels, danger_levels_all)
        forecaster_dict[f.nick].add_problems_pr_warning(problems_pr_warning, problems_pr_warning_all)

    # Save a dict of forecasters for the website menu. Keep only observers whose nick matches a forecaster.
    if save_for_web:
        forecaster_nicknid_dict = {-1: '_OVERSIKT ALLE_'}
        for o_i, o_n in observer_nicks.items():
            if o_n in forecaster_dict:
                forecaster_nicknid_dict[o_i] = o_n
        mp.pickle_anything(forecaster_nicknid_dict, '{0}forecasterlist.pickle'.format(env.web_root_folder))

    return forecaster_dict
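
# Usage sketch (hedged): build the forecaster dictionary from a season of warnings and list
# how many warnings each forecaster made. all_warnings is assumed to be a list of warnings
# such as the one returned by get_2016_17_warnings earlier in this file.
# forecaster_dict = make_forecaster_data(all_warnings, save_for_web=False)
# for nick, forecaster in forecaster_dict.items():
#     print(nick, len(forecaster.warnings))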