Example #1
def get_avalanche_index(from_date, to_date, region_ids=None, observer_ids=None):
    '''Collects avalanche activity, single avalanche and danger sign observations in the given
    period and maps each observation to an avalanche index.

    :param from_date:       start of the period (date)
    :param to_date:         end of the period (date)
    :param region_ids:      optional filter on forecast regions
    :param observer_ids:    optional filter on observers
    :return:                list of AvalancheIndex objects
    '''

    # get all data
    avalanche_activities = go.get_avalanche_activity(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    avalanche_activities_2 = go.get_avalanche_activity_2(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    avalanches = go.get_avalanche(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)

    # get index definition
    index_definition = rf.read_configuration_file('{0}aval_dl_order_of_size_and_num.csv'.format(env.input_folder), AvalancheIndex)

    avalanche_indexes = []

    for aa in avalanche_activities:

        ai = AvalancheIndex()
        ai.set_num_and_size_and_index(aa.EstimatedNumName, aa.DestructiveSizeName, index_definition)
        ai.set_date_region_observation(aa.DtAvalancheTime, aa.ForecastRegionName, aa)
        avalanche_indexes.append(ai)

    for aa in avalanche_activities_2:

        ai = AvalancheIndex()
        ai.set_num_and_size_and_index(aa.EstimatedNumName, aa.DestructiveSizeName, index_definition)
        # Activity date is the average of DtStart and DtEnd
        activity_date = aa.DtStart+(aa.DtEnd-aa.DtStart)/2
        ai.set_date_region_observation(activity_date.date(), aa.ForecastRegionName, aa)
        avalanche_indexes.append(ai)

    for aa in avalanches:

        ai = AvalancheIndex()
        # Single avalanches count as one ('Ett (1)'); make sure size is not None
        ai.set_num_and_size_and_index("Ett (1)", aa.DestructiveSizeName, index_definition)
        ai.set_date_region_observation(aa.DtAvalancheTime, aa.ForecastRegionName, aa)
        avalanche_indexes.append(ai)

    for ds in danger_signs:

        ai = AvalancheIndex()
        if 'Ferske skred' in ds.DangerSignName:
            ai.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in ds.DangerSignName:
            ai.set_no_avalanche_activity()
        else:
            continue
        ai.set_date_region_observation(ds.DtObsTime, ds.ForecastRegionName, ds)
        avalanche_indexes.append(ai)

    return avalanche_indexes
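
A minimal usage sketch for get_avalanche_index, assuming this module and its helpers (go, rf, env) are importable and configured as in the surrounding project; the region id used is only a placeholder:

import datetime as dt

if __name__ == '__main__':
    from_date = dt.date(2014, 11, 30)
    to_date = dt.date(2015, 6, 1)
    # region_ids is optional; 117 is a placeholder id, not a value taken from the project
    avalanche_indexes = get_avalanche_index(from_date, to_date, region_ids=[117])
    print('Found {0} avalanche index entries.'.format(len(avalanche_indexes)))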
Example #2
def pickle_M3(data_set, config_file_name, pickle_m3_file_name):
    """Makes a list of elements matching the m3 matrix. Uses a configuration file as for the matrix elements and
     runs through all warnings adding occurances and danger level used at each combination of the matrix.

    :param data_set:
    :param pickle_m3_file_name:
    :return:
    """

    config_file_name = '{0}{1}'.format(env.input_folder, config_file_name)
    m3_elements = rf.read_configuration_file(config_file_name, M3Element)

    # read out the data_set and add to M3Elements
    for i in range(len(data_set['level']['values'])):

        size = data_set['size']['values'][i]
        if size is None:
            size = '0 - Ikke gitt'
            print 'runmatrix.py -> pickle_M3 -> Warning: Encountered occurrence where avalanche size is None. Set to 0 - Ikke gitt.'
        trigger = data_set['trigger']['values'][i]
        probability = data_set['probability']['values'][i]
        distribution = data_set['distribution']['values'][i]

        for e in m3_elements:

            m3_size = e.avalanche_size
            m3_trigger = e.trigger
            m3_probability = e.probability
            m3_distribution = e.distribution

            if size in m3_size and trigger in m3_trigger and probability in m3_probability and distribution in m3_distribution:
                level = data_set['level']['values'][i]
                e.add_danger_level(level)

    # count all danger levels added (for control/debugging) and compute average and standard deviation per element
    count = 0
    for e in m3_elements:
        count += len(e.danger_level_list)
        e.set_level_average()
        e.set_level_standard_dev()

    mp.pickle_anything(m3_elements, pickle_m3_file_name)
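
A minimal sketch of how pickle_M3 might be called, assuming the data_set dictionary is shaped as the loop above expects (parallel 'values' lists under 'level', 'size', 'trigger', 'probability' and 'distribution'); the labels and file names below are placeholders, not values from the real data set:

# Placeholder data_set with a single occurrence in each parallel list.
data_set = {
    'level': {'values': [2]},
    'size': {'values': ['2 - Small']},                   # placeholder size label
    'trigger': {'values': ['Natural']},                  # placeholder trigger label
    'probability': {'values': ['Possible']},             # placeholder probability label
    'distribution': {'values': ['Some steep slopes']},   # placeholder distribution label
}

# Placeholder file names; the configuration file is expected to exist in env.input_folder.
pickle_M3(data_set, 'm3_configuration.csv', '{0}m3_elements.pickle'.format(env.local_storage))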
Example #3
def step_2_find_most_valued(date_region):

    # 'most valued observation' could be renamed to 'observation with the highest index'

    index = rf.read_configuration_file('{0}aval_dl_order_of_size_and_num.csv'.format(env.input_folder), IndexOfSizeAndNumber)
    for d in date_region:

        for aa in d.avalanche_activity:
            max_index = 0
            for i in index:
                if i.estimated_num in aa.EstimatedNumName and i.destructive_size in aa.DestructiveSizeName:
                    max_index = i.index
            if d.highest_value < max_index:
                d.highest_value = max_index
                d.most_valued_observation = aa

        for a in d.avalanche:
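            # a single avalanche observation counts as one ('Ett (1)')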
            max_index = 0
            for i in index:
                if i.estimated_num in 'Ett (1)' and i.destructive_size in a.DestructiveSizeName:
                    max_index = i.index
            if d.highest_value < max_index:
                d.highest_value = max_index
                d.most_valued_observation = a

        for ds in d.danger_sign:
            if 'Ferske skred' in ds.DangerSignName:
                if d.highest_value < 2:
                    d.highest_value = 2
                    d.most_valued_observation = ds
            if 'Ingen faretegn observert' in ds.DangerSignName:
                if d.highest_value < 1:
                    d.highest_value = 1
                    d.most_valued_observation = ds

    return date_region
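
The loop above only relies on a handful of attributes per date/region entry. A minimal stand-in (purely for illustration; the real objects are built in step_1_make_data_set) could look like this:

class DateRegionStub(object):
    """Minimal stand-in for the objects step_2_find_most_valued expects."""

    def __init__(self):
        self.avalanche_activity = []         # observations with EstimatedNumName and DestructiveSizeName
        self.avalanche = []                  # single avalanche observations with DestructiveSizeName
        self.danger_sign = []                # danger sign observations with DangerSignName
        self.highest_value = 0               # updated to the highest index found
        self.most_valued_observation = None  # the observation that produced highest_value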
Example #4
    from_date = dt.date(2014, 11, 30)
    to_date = dt.date(2015, 6, 1)
    #to_date = dt.date.today()

    ### get and make the data set
    # date_region, forecasted_dangers = step_1_make_data_set(region_id, from_date, to_date)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    #
    # ## Find the observation of highest value per region per date
    # date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    # date_region = step_2_find_most_valued(date_region)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    ## ready to count occurrences on the configuration elements
    date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    elements = rf.read_configuration_file('{0}aval_dl_configuration.csv'.format(env.input_folder), ActivityAndDanger)
    elements = step_3_count_occurances(date_region, elements)
    mp.pickle_anything([date_region, forecasted_dangers, elements], '{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))

    ### ready to plot?
    date_region, forecasted_dangers, elements = mp.unpickle_anything('{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))
    step_4_plot(date_region, forecasted_dangers, elements, '{0}Avalanches and dangers {1} to {2}'.format(env.plot_folder, from_date, to_date))

    total_a = 0
    total_aa = 0
    total_ds = 0
    for d in date_region:
        total_a += len(d.avalanche)
        total_aa += len(d.avalanche_activity)
        total_ds += len(d.danger_sign)
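
The fragment stops after tallying the totals; a minimal completion (an assumption, since the original listing is cut off here) would simply report them:

    print('Period {0} to {1}: {2} avalanches, {3} avalanche activity observations and {4} danger signs.'.format(
        from_date, to_date, total_a, total_aa, total_ds))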