Esempio n. 1
0
def pickle_data_set(warnings, file_name, use_ikke_gitt=False):
    '''Data preparation continued. Takes the warnings, which is a list of AvalancheDanger objects, and makes
    a dictionary data set of it. The value indexes relate to each other, i.e. distribution, level, probability
    etc. at the nth index originate from the same problem.

    The data set also includes information on what the xKDV tables in regObs contain and preferred colors
    when plotting.

    :param warnings:        list of AvalancheDanger objects
    :param file_name:       full path and filename to pickle the data to
    :param use_ikke_gitt:   If False the ID = 0 ("Ikke gitt" / not given) values are omitted all in all.

    :return:
    '''

    level_list = []
    size_list = []
    trigger_list = []
    probability_list = []
    distribution_list = []

    for w in warnings:
        if w.danger_level > 0 and len(w.avalanche_problems) > 0:
            # The first problem in avalanche_problems is used. This is the main problem.
            level_list.append(w.danger_level)
            size_list.append(w.avalanche_problems[0].aval_size)
            trigger_list.append(w.avalanche_problems[0].aval_trigger)
            probability_list.append(w.avalanche_problems[0].aval_probability)
            distribution_list.append(w.avalanche_problems[0].aval_distribution)

    # Test if list lengths match and give a warning if not. Done once after the loop
    # (the original re-checked on every iteration, which was redundant work).
    lengths = {len(level_list), len(size_list), len(trigger_list), len(probability_list), len(distribution_list)}
    if len(lengths) != 1:
        print("runForMatrix -> pickle_data_set: list-lengths don't match. Error in data.")

    # list() so the .pop(0) calls below also work if get_kdv returns a py3 dict (keys() view).
    level_keys = list(gkdv.get_kdv('AvalancheDangerKDV').keys())
    size_keys = [v.Name for v in gkdv.get_kdv('DestructiveSizeKDV').values()]
    triggers_keys = [v.Name for v in gkdv.get_kdv('AvalTriggerSimpleKDV').values()]
    probability_keys = [v.Name for v in gkdv.get_kdv('AvalProbabilityKDV').values()]
    distribution_keys = [v.Name for v in gkdv.get_kdv('AvalPropagationKDV').values()]

    level_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    if not use_ikke_gitt:
        # Drop the "Ikke gitt" (ID = 0) entry from all key lists and its color.
        level_keys.pop(0)
        size_keys.pop(0)
        triggers_keys.pop(0)
        probability_keys.pop(0)
        distribution_keys.pop(0)

        level_colors.pop(0)

    data_set = {'level': {'values': level_list, 'keys': level_keys, 'colors': level_colors},
                'size': {'values': size_list, 'keys': size_keys, 'colors': ['0.7']},
                'trigger': {'values': trigger_list, 'keys': triggers_keys, 'colors': ['0.7']},
                'probability': {'values': probability_list, 'keys': probability_keys, 'colors': ['0.7']},
                'distribution': {'values': distribution_list, 'keys': distribution_keys, 'colors': ['0.7']}}

    mp.pickle_anything(data_set, file_name)
Esempio n. 2
0
def _make_odata_filter(from_date, to_date, region_id, observer_id, geohazard_tid=None):
    """Builds the odata filter part of the request url based on what is requested.

    :param from_date:       [date] A query returns [from_date, to_date>
    :param to_date:         [date] A query returns [from_date, to_date>
    :param region_id:       [int] If region_id = None, all regions are selected
    :param observer_id:     [int] If observer_id = None, all observers are selected
    :param geohazard_tid:   [int] 10 is snow, 20,30,40 are dirt, 60 is water and 70 is ice

    :return:                [string] filter part of request url
    """

    clauses = ["DtObsTime gt datetime'{0}'".format(from_date),
               "DtObsTime lt datetime'{0}'".format(to_date)]

    if region_id is not None:
        # Regions are filtered by name in the odata service, so look the name up first.
        region_name = kdv.get_kdv("ForecastRegionKDV")[region_id].Name
        clauses.append("ForecastRegionName eq '{0}'".format(region_name))

    if observer_id is not None:
        clauses.append("ObserverId eq {0}".format(observer_id))

    if geohazard_tid is not None:
        clauses.append("GeoHazardTID eq {0}".format(geohazard_tid))

    clauses.append("LangKey eq 1")
    odata_filter = fe.add_norwegian_letters(" and ".join(clauses))

    return odata_filter
Esempio n. 3
0
def make_2015_16_plots():
    """Plots both observations pr observer and pr region for display on webpage for the season 2015-16.
    Method includes a request for list of relevant observers.

    :return:
    """

    # Get the relevant observers to plot and pickle the list in the web folder.
    observer_list = gm.get_observer_dict_for_2015_16_ploting()
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # Build the list of months (the first of each month) from Nov 2015 until today.
    months = []
    month = dt.date(2015, 11, 1)
    while month < dt.date.today():
        months.append(month)
        # Adding 35 days always lands in the next month; snap back to day 1.
        almost_next = month + dt.timedelta(days=35)
        month = dt.date(almost_next.year, almost_next.month, 1)

    # All active regions with IDs in the 101-149 range of ForecastRegionKDV.
    forecast_region_kdv = gkdv.get_kdv('ForecastRegionKDV')
    region_ids = [v.ID for k, v in forecast_region_kdv.iteritems()
                  if 100 < k < 150 and v.IsActive is True]

    make_observer_plots(observer_list, months)
    make_region_plots(region_ids, months)

    return
Esempio n. 4
0
def get_active_forecast_regions():
    '''Get all active forecast regions. IsActive = True in ForecastRegionKDV.
    Only IDs in the 101-149 range of the table are considered.
    '''

    forecast_region_kdv = kdv.get_kdv('ForecastRegionKDV')
    return [v.ID for k, v in forecast_region_kdv.iteritems()
            if 100 < k < 150 and v.IsActive is True]
Esempio n. 5
0
def get_forecast_region_name(region_id):
    """Looks up the name for a region id (same as ForecastRegionTID in regObs) in
    ForecastRegionKDV and returns it with norwegian letters removed.

    :param region_id:    Region ID is an int as given in ForecastRegionKDV
    :return:             Region Name string is returned
    """

    kdv_element = gkdv.get_kdv("ForecastRegionKDV")[region_id]
    return fe.remove_norwegian_letters(kdv_element.Name)
def make_2015_16_plots(run_all=False):
    """Plots both observations pr observer and pr region for display on web page for the season 2015-16.
    Method includes a request for list of relevant observers.

    :param run_all:  [bool] If True all months are plotted for all observers and regions.
                     If False only the most recent month(s) are refreshed for known observers.
    :return:
    """

    # List of months to be plotted: the first of every month from Nov 2015 until today.
    all_months = []
    month = dt.date(2015, 11, 1)
    while month < dt.date.today():
        all_months.append(month)
        # Adding 35 days always lands in the next month; snap back to day 1.
        almost_next = month + dt.timedelta(days=35)
        month = dt.date(almost_next.year, almost_next.month, 1)

    # If not specified run only the last month. Early in a month (day < 5) also
    # include the previous month.
    if not run_all:
        if dt.date.today().day < 5 and len(all_months) > 1:
            last_months = all_months[-2:]
        else:
            last_months = [all_months[-1]]
    else:
        last_months = all_months

    # Get a list of relevant observers to plot and pickle it in the web folder.
    previous_observer_list = mp.unpickle_anything('{0}observerlist.pickle'.format(env.web_root_folder))
    observer_list = gm.get_observer_dict_for_2015_16_ploting()
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # Observers not seen in the previous observer list get all months plotted;
    # known observers only get the last months refreshed.
    new_observers = {}
    for k, v in observer_list.iteritems():
        # Direct dict membership instead of "k not in ...keys()", which builds a
        # throwaway list on every test in python 2.
        if k not in previous_observer_list:
            new_observers[k] = v

    # Get all active regions (IDs 101-149 in ForecastRegionKDV).
    region_ids = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 100 < k < 150 and v.IsActive is True:
            region_ids.append(v.ID)

    # Run the plotting.
    make_observer_plots(new_observers, all_months)
    make_observer_plots(previous_observer_list, last_months)
    make_region_plots(region_ids, last_months)

    return
Esempio n. 7
0
def plot_histogram_on_given_problem(problem_combo, warnings, file_name, figure_title='', file_ext=".png"):
    '''Plots a histogram of occurrences pr danger level for a given avalanche problem (a combination of
    distribution, probability, size and trigger). Delegates the actual plotting to the more generic
    plot_histogram function.

    :param problem_combo:   dictionary of {distribution : value, probability : value, size : value, trigger : value}
    :param warnings:        list of all warnings in selected timespan
    :param file_name:       String. filename including path.
    :param figure_title:    String. If not specified it becomes same as filename
    :param file_ext:        String. Default extension is png if no other extension is specified.

    :return:
    '''

    if figure_title == '':
        figure_title = file_name

    def _matches(p):
        # True when a main avalanche problem equals the requested combination.
        return (p.aval_distribution == problem_combo['distribution']
                and p.aval_probability == problem_combo['probability']
                and p.aval_size == problem_combo['size']
                and p.aval_trigger == problem_combo['trigger'])

    # Collect danger levels from warnings whose main problem matches problem_combo.
    # Only the first problem (the main problem) of each warning is considered.
    level_list = [w.danger_level for w in warnings
                  if w.danger_level > 0
                  and len(w.avalanche_problems) > 0
                  and _matches(w.avalanche_problems[0])]

    level_keys = gkdv.get_kdv('AvalancheDangerKDV').keys()
    level_colors = ['0.5','#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    fig_text = 'Plottet viser tilfeller pr faregrad av \n' \
               'skredproblemer med egenskaper: \n' \
               '{2}, {3}, {4} og\n' \
               '{5}. \n \n' \
               'Denne kombinasjonen har vaert brukt\n' \
               '{0} ganger i totalt {1} produserte\n' \
               'varsel. Utvalget henter bare fra \n' \
               'hovedskredproblemer.' \
        .format(len(level_list), len(warnings),
                problem_combo['distribution'], problem_combo['probability'], problem_combo['size'], problem_combo['trigger'])

    plot_histogram(level_list, level_keys, file_name, data_colors=level_colors, figure_title=figure_title, figure_text=fig_text, file_ext=file_ext)

    return
Esempio n. 8
0
    def set_cause_name(self, cause_name_inn):
        """Sets the avalanche cause name, normalized to ascii (norwegian letters removed).

        The 2014-15 remapping of wrongly used AvalCauseKDV IDs - and the unused
        AvalCauseKDV lookup that only served it - was removed: the inline comment
        stated the db was cleaned up during the summer of 2015 so the mapping is
        no longer needed.

        :param cause_name_inn: [string] cause name as given in AvalCauseKDV
        """

        self.cause_name = fe.remove_norwegian_letters(cause_name_inn)
def make_2015_16_plots():
    """Makes all plots for all regions and saves to web-app folder

    :return:
    """

    from_date = dt.date(2015, 11, 15)
    to_date = dt.date.today() + dt.timedelta(days=2)

    # All active regions with IDs in the 101-149 range of ForecastRegionKDV.
    forecast_region_kdv = gkdv.get_kdv("ForecastRegionKDV")
    region_id = [v.ID for k, v in forecast_region_kdv.iteritems()
                 if 100 < k < 150 and v.IsActive is True]

    # All regions span from 6 (Alta) to 33 (Salten).
    for i in region_id:
        problems, dangers, aval_indexes = get_data(i, from_date, to_date, data_from="request")
        make_plots_for_region(i, problems, dangers, aval_indexes, from_date, to_date)

    return
Esempio n. 10
0
        plb.text(x+20, -200, '**  Ferske skred som faretegn ble observert {0} ganger.'.format(danger_sign))

        plb.text(x+20, -270, '*** {0} ganger er ett snoeskred med hoeysete index. \n'
                             '    {1} som skredaktivitet og {2} med skjema for \n'
                             '    enkeltskred.'.format(est_num_1, est_num_1_aval_act, est_num_1_aval))

    return


# Script entry point: sets up regions and dates for the 2014-15 avalanche activity run.
if __name__ == "__main__":

    # NOTE(review): this hand-picked selection is immediately overwritten by the full
    # region lookup below - looks like leftover debug config; confirm before removing.
    region_id = [112, 117, 116, 128]

    ### Get all regions (active entries with IDs 100-149 in ForecastRegionKDV)
    region_id = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 99 < k < 150 and v.IsActive is True:
            region_id.append(v.ID)

    from_date = dt.date(2014, 11, 30)
    to_date = dt.date(2015, 6, 1)
    #to_date = dt.date.today()

    ### get and make the data set (steps kept commented out; re-enable to rebuild the pickles)
    # date_region, forecasted_dangers = step_1_make_data_set(region_id, from_date, to_date)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    #
    # ## Find the observation of highest value pr region pr date
    # date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    # date_region = step_2_find_most_valued(date_region)
Esempio n. 11
0
        self.danger_elrapp = danger_elrapp_inn

    def set_index_and_observation(self, index_inn, observation_inn, regid_inn):
        """Sets the avalanche index together with the observation it was derived from.

        :param index_inn:        the avalanche index value
        :param observation_inn:  the observation behind the index
        :param regid_inn:        the regObs registration id (RegID) of that observation
        """
        self.avalanche_index = index_inn
        self.avalanche_observation = observation_inn
        self.regid_observation = regid_inn


# Script entry point: collects avalanche dangers for all active regions for the 2015-16
# season. File names suggest the data is matched against Elrapp data - TODO confirm.
if __name__ == "__main__":

    # True: request fresh data; the pickle below is presumably used otherwise - verify.
    get_new = True
    ### Get all regions (active entries with IDs 100-149 in ForecastRegionKDV)
    #region_ids = [107, 108, 110]

    region_ids = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 99 < k < 150 and v.IsActive is True:
            region_ids.append(v.ID)

    from_date = dt.date(2015, 11, 30)
    to_date = dt.date(2016, 6, 1)
    #to_date = dt.date.today()

    # The drift@svv observer nick; the numeric id alternative is kept commented out.
    drift_nick = 'drift@svv'
    #drift_id = 237
    pickle_file_name = '{0}runelrappdata.pickle'.format(env.local_storage)
    output_file = '{0}elrappdata 2015-16.csv'.format(env.output_folder)

    if get_new:
        dangers = gd.get_all_dangers(region_ids, from_date, to_date)
# Script entry point: configuration for the danger level vs danger sign run, season 2015-16.
if __name__ == "__main__":

    ## Get new or load from pickle.
    get_new = False
    ## Use already made data set. Remember to make get_new = False
    make_new = False

    ## Set dates
    from_date = dt.date(2015, 11, 30)
    to_date = dt.date(2016, 6, 1)
    #to_date = dt.date.today()

    ## Get regions (active entries with IDs 100-149 in ForecastRegionKDV)
    # region_id = [112, 117, 116, 128]
    region_id = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 99 < k < 150 and v.IsActive is True:
            region_id.append(v.ID)

    ## The output
    plot_file_name = 'Danger level and danger sign 2015-16.png'

    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
Esempio n. 13
0
 def set_danger_level(self, danger_level):
     """Sets the danger level and looks up its display name in AvalancheDangerKDV.

     :param danger_level: [int] danger level id, used as key into AvalancheDangerKDV
     """
     self.danger_level = danger_level
     AvalancheDangerKDV = gkdv.get_kdv("AvalancheDangerKDV")
     self.danger_level_name = AvalancheDangerKDV[danger_level].Name
def get_node_list(pickle_file_name_3, make_new_node_list):
    '''Makes a list of NodesAndValues objects. All nodes get an object and relations between the nodes are
    calculated. Lots of looping.

    NOTE(review): this relies on the names incident_list and desired_damage_extent_kdv being
    defined at module level before the call - confirm against the rest of the module.

    :param pickle_file_name_3:   [string] file the node list is pickled to (or unpickled from)
    :param make_new_node_list:   [bool] if True the node list is rebuilt, otherwise it is unpickled
    :return:                     [list] of NodesAndValues objects
    '''

    if make_new_node_list:
        # Hard-coded avalanche problem ids and names.
        problem_kdv = {0: 'Ikke gitt',
                       3: 'Toerre loessnoeskred',
                       5: 'Vaate loessnoeskred',
                       7: 'Nysnoeflak',
                       10: 'Fokksnoe',
                       20: 'Nysnoe',
                       30: 'Vedvarende svakt lag',
                       37: 'Dypt vedvarende svakt lag',
                       40: 'Vaat snoe',
                       45: 'Vaate flakskred',
                       50: 'Glideskred'}

        cause_kdv = gkdv.get_kdv('AvalCauseKDV')
        danger_kdv = gkdv.get_kdv('AvalancheDangerKDV')
        activity_influenced_kdv = gkdv.get_kdv('ActivityInfluencedKDV')

        # Give every node a unique integer id. The generic "Ikke gitt" (not given) names
        # get a category prefix so the same label stays unique across categories.
        nodes_dict = {}
        id_counter = -1

        for cause_tid, cause_kdve in cause_kdv.iteritems():
            cause_name = cause_kdve.Name
            if 'kke gitt' in cause_name:
                cause_name = 'Svakt lag {0}'.format(cause_name)
            if cause_kdve.IsActive:
                id_counter += 1
                nodes_dict[cause_name] = id_counter

        for problem_tid, problem_name in problem_kdv.iteritems():
            if 'kke gitt' in problem_name:
                problem_name = 'Skredproblem {0}'.format(problem_name)
            id_counter += 1
            nodes_dict[problem_name] = id_counter

        for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
            if 'kke gitt' in desired_damage_extent_name:
                desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
            id_counter += 1
            nodes_dict[desired_damage_extent_name] = id_counter

        for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
            if activity_influenced_tid < 200:  # only snow
                activity_influenced_name = activity_influenced_kdve.Name
                if 'kke gitt' in activity_influenced_name:
                    activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                if activity_influenced_kdve.IsActive:
                    id_counter += 1
                    nodes_dict[activity_influenced_name] = id_counter

        for danger_tid, danger_kdve in danger_kdv.iteritems():
            danger_name = danger_kdve.Name
            if 'kke gitt' in danger_name:
                # Bug fix: the prefixed name was built but discarded (bare expression);
                # assign it like the other categories do.
                danger_name = 'Faregrad {0}'.format(danger_name)
            if danger_kdve.IsActive:
                id_counter += 1
                nodes_dict[danger_name] = id_counter

        make_nodes = True
        nodes_and_values = []
        print_counter = 0

        for i in incident_list:

            # Parenthesized so this also parses on python 3.
            print('Index {0} of 192 in incidentlist'.format(print_counter))
            print_counter += 1

            if i.forecast:
                cause = i.forecast.avalanche_problems[0].cause_name
                if 'kke gitt' in cause:
                    cause = 'Svakt lag {0}'.format(cause)
                problem = i.forecast.avalanche_problems[0].main_cause
                if 'kke gitt' in problem:
                    problem = 'Skredproblem {0}'.format(problem)

                # Loop through the cause and problem list.
                # If it is the first run make the nodes.
                # If the causes in the lists match what is in the list of actual incidents, add one to the node.
                for cause_tid, cause_kdve in cause_kdv.iteritems():
                    if cause_kdve.IsActive:
                        cause_name = cause_kdve.Name
                        if 'kke gitt' in cause_name:
                            cause_name = 'Svakt lag {0}'.format(cause_name)
                        for problem_tid, problem_name in problem_kdv.iteritems():
                            if 'kke gitt' in problem_name:
                                problem_name = 'Skredproblem {0}'.format(problem_name)
                            if make_nodes:  # the run of the first item of incident_list covers all nodes
                                nodes_and_values.append(NodesAndValues(cause_name, nodes_dict[cause_name], problem_name,
                                                                       nodes_dict[problem_name]))
                            if cause in cause_name and problem in problem_name:
                                for nv in nodes_and_values:
                                    if cause in nv.node_name and problem in nv.target_name:
                                        nv.add_one()

                damage_extent = i.incident.DamageExtentName
                if 'kke gitt' in damage_extent:
                    damage_extent = 'Skadeomfang {0}'.format(damage_extent)

                for problem_tid, problem_name in problem_kdv.iteritems():
                    if 'kke gitt' in problem_name:
                        problem_name = 'Skredproblem {0}'.format(problem_name)
                    for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                        if 'kke gitt' in desired_damage_extent_name:
                            desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                        if make_nodes:
                            nodes_and_values.append(
                                NodesAndValues(problem_name, nodes_dict[problem_name], desired_damage_extent_name,
                                               nodes_dict[desired_damage_extent_name]))
                        if problem in problem_name and damage_extent in desired_damage_extent_name:
                            for nv in nodes_and_values:
                                if problem in nv.node_name and damage_extent in nv.target_name:
                                    nv.add_one()

                activity_influenced = i.incident.ActivityInfluencedName
                if 'kke gitt' in activity_influenced:
                    activity_influenced = 'Aktivitet {0}'.format(activity_influenced)

                for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                    if 'kke gitt' in desired_damage_extent_name:
                        desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                    for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                        if activity_influenced_tid < 200:  # only snow
                            activity_influenced_name = activity_influenced_kdve.Name
                            if 'kke gitt' in activity_influenced_name:
                                activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                            if activity_influenced_kdve.IsActive:
                                if make_nodes:
                                    nodes_and_values.append(NodesAndValues(desired_damage_extent_name,
                                                                           nodes_dict[desired_damage_extent_name],
                                                                           activity_influenced_name,
                                                                           nodes_dict[activity_influenced_name]))
                                if desired_damage_extent_name in damage_extent and activity_influenced_name in activity_influenced:
                                    for nv in nodes_and_values:
                                        if desired_damage_extent_name in nv.node_name and activity_influenced_name in nv.target_name:
                                            nv.add_one()

                danger = i.forecast.danger_level_name
                if 'kke gitt' in danger:
                    danger = 'Faregrad {0}'.format(danger)

                for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                    if activity_influenced_tid < 200:
                        activity_influenced_name = activity_influenced_kdve.Name
                        if 'kke gitt' in activity_influenced_name:
                            activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                        if activity_influenced_kdve.IsActive:
                            for danger_tid, danger_kdve in danger_kdv.iteritems():
                                danger_name = danger_kdve.Name
                                if 'kke gitt' in danger_name:
                                    # Bug fix: same discarded-format bug as in the node id loop above;
                                    # assign the prefixed name so it matches the nodes_dict keys.
                                    danger_name = 'Faregrad {0}'.format(danger_name)
                                if danger_kdve.IsActive:
                                    if make_nodes:
                                        nodes_and_values.append(
                                            NodesAndValues(activity_influenced_name,
                                                           nodes_dict[activity_influenced_name],
                                                           danger_name, nodes_dict[danger_name]))
                                    if activity_influenced_name in activity_influenced and danger_name in danger:
                                        for nv in nodes_and_values:
                                            if activity_influenced_name in nv.node_name and danger_name in nv.target_name:
                                                nv.add_one()

            make_nodes = False

        mp.pickle_anything(nodes_and_values, pickle_file_name_3)
    else:
        nodes_and_values = mp.unpickle_anything(pickle_file_name_3)

    return nodes_and_values
Esempio n. 15
0
def __get_cause_from_old_cause(old_cause_parameter_name, old_cause_tid):
    '''
    INCOMPLETE

    This method returns the value of a new avalanche cause given an old one.
    This transformation is going to induce some errors because the old avalanche causes are not exactly
    projected/compatible with the new ones. That is why we made new ones...

    Note: combinations not handled below fall through and return None implicitly.

    :param old_cause_parameter_name:    [string] name of the old parameter, e.g. 'AvalancheProblem'
    :param old_cause_tid:               [int] id of the old cause
    :return:                            [string] Name of the matching entry in AvalCauseKDV (or None)

    aval_cause_kdv{
        0: 'Ikke gitt',
        1: 'Regn',
        2: 'Oppvarming',
        3: 'Ingen gjenfrysing',
        4: 'Paalagring',
        5: 'Svake lag',
        6: 'Regn + oppvarming',
        7: 'Regn + oppvarm + ingen gj.frys',
        8: 'Vind',
        9: 'Oppvarm + ingen gj.frys',
        10: 'Lag med loes nysnoe',
        11: 'Lag med overflaterim',
        12: 'Lag med sproehagl',
        13: 'Lag med kantkornet snoe',
        14: 'Glatt skare',
        15: 'I fokksnoeen',
        16: 'Kantkornet ved bakken',
        17: 'Kantkornet rundt vegetasjon',
        18: 'Kantkornet over skaren',
        19: 'Kantkornet under skaren',
        20: 'Gjennomfuktet fra bakken',
        21: 'Gjennomfuktet fra overflaten',
        22: 'Opphopning over skaren',
        23: 'Snoedekket er overmettet av vann',
        24: 'Ubundet loes snoe',
        25: 'Regn/temperaturstigning',
        26: 'Smelting fra bakken',
        27: 'Vannmettet snoe',
        28: 'Loes toerr snoe',
        29: 'Regn / temperaturstigning / soloppvarming'
    }



    avalanche_problem_kdv{
        0: 'Ikke gitt',
        1: 'Nysnoe',
        2: 'Fokksnoe',
        3: 'Tynt snoedekke',
        4: 'Ustabile lag i dekket',
        5: 'Ustabile lag naer bakken',
        7: 'Regn',
        8: 'Rask temperaturstigning',
        9: 'Mye vann i dekket',
        10: 'Solpaavirkning'
    }
    '''
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')

    if old_cause_parameter_name == 'AvalancheProblem':
        # avalanche_problem_kdv = gkdv.get_kdv('AvalancheProblemKDV')
        if old_cause_tid == 0:
            # 'Ikke gitt' maps to 'Ikke gitt'.
            return aval_cause_kdv[0].Name
        elif old_cause_tid == 1:
            # old problem 1 ('Nysnoe') is mapped to new cause 28 ('Loes toerr snoe'),
            # per the tables in the docstring above.
            return aval_cause_kdv[28].Name
def plot_causes(region_name, from_date, to_date, causes):
    """Plots a timeline of forecasted (red) and observed (blue) avalanche causes for one
    region and saves the figure to the web images folder.

    :param region_name:  [string] used in the plot title and file name
    :param from_date:    [date] start of the plotted period
    :param to_date:      [date] end of the plotted period
    :param causes:       [list] cause objects with .cause_tid, .date and .source, where
                         source contains "Varsel" (forecast) and/or "Observasjon" (observation)
    :return:
    """

    filename = r"{0} skredproblemer {1}-{2}".format(region_name, from_date.strftime("%Y"), to_date.strftime("%y"))
    print ("Plotting {0}".format(filename))

    # Fixed selection of cause ids to plot, one row each.
    AvalCauseKDV = gkdv.get_kdv("AvalCauseKDV")
    list_of_causes = [0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
    # list_of_causes = set([c.cause_tid for c in causes])
    list_of_cause_names = [fe.add_norwegian_letters(AvalCauseKDV[tid].Name) for tid in list_of_causes]

    # Group the incoming causes by cause_tid.
    dict_of_causes = {}
    for c in list_of_causes:
        dict_of_causes[c] = []
    for c in causes:
        dict_of_causes[c.cause_tid].append(c)

    # Start plotting
    fsize = (16, 7)
    plt.figure(figsize=fsize)
    plt.clf()

    # plot lines and left and bottom ticks
    # NOTE(review): rows are drawn in dict iteration order while the y-labels below follow
    # list_of_causes order - on python 2 plain dicts these are not guaranteed to agree; verify.
    y = 0
    for k, values in dict_of_causes.iteritems():
        for v in values:
            x = (v.date - from_date).days
            if "Varsel" in v.source:
                plt.hlines(y - 0.1, x, x + 1, lw=4, color="red")  # offset the line 0.1 up
            if "Observasjon" in v.source:
                plt.hlines(y + 0.1, x, x + 1, lw=4, color="blue")  # offset the line 0.1 down
        y += 1

    # Left y-axis labels
    plt.ylim(len(list_of_causes) - 1, -1)  # 16 skredproblemer
    plt.yticks(range(len(list_of_causes) + 1), list_of_cause_names)

    # x-axis labels: one tick on the first day of each month
    axis_dates = []
    axis_positions = []
    for i in range(0, (to_date - from_date).days, 1):
        date = from_date + dt.timedelta(days=i)
        if date.day == 1:
            axis_dates.append(date.strftime("%b %Y"))
            axis_positions.append(i)
    plt.xticks(axis_positions, axis_dates)

    # Right hand side y-axis: per row "v{forecasts} o{observations} s{same-day match}%"
    right_ticks = []
    correlation_sum = 0.0
    for k, values in dict_of_causes.iteritems():
        values_obs = [vo for vo in values if "Observasjon" in vo.source]
        values_fc = [vf for vf in values if "Varsel" in vf.source]
        # Count observations that have a forecast of the same cause on the same date.
        correlation = 0.0
        for obs in values_obs:
            for fc in values_fc:
                if obs.date == fc.date and obs.cause_tid == fc.cause_tid:
                    correlation += 1
        if len(values_obs) == 0 and len(values_fc) == 0:
            right_ticks.append("")
        else:
            if len(values_obs) == 0:
                right_ticks.append("v{0} o{1} s{2}%".format(len(values_fc), len(values_obs), 0))
            else:
                right_ticks.append(
                    "v{0} o{1} s{2}%".format(len(values_fc), len(values_obs), int(correlation / len(values_obs) * 100))
                )
        correlation_sum += correlation
    right_ticks.reverse()
    plt.twinx()
    plt.ylim(-1, len(right_ticks) - 1)
    plt.yticks(range(len(right_ticks) + 1), right_ticks)

    # the title, with the overall match percentage across all causes
    num_obs = len([c for c in causes if "Observasjon" in c.source])
    num_fc = len([c for c in causes if "Varsel" in c.source])
    if num_obs == 0:
        correlation_prct = 0
    else:
        correlation_prct = int(correlation_sum / num_obs * 100)

    title = "Skredproblemer for {0} ({1} - {2}) \n Totalt {3} varslede problemer (roed) og {4} observerte problemer (blaa) \n og det er {5}% samsvar mellom det som er observert og det som er varselt.".format(
        region_name, from_date.strftime("%Y%m%d"), to_date.strftime("%Y%m%d"), num_fc, num_obs, correlation_prct
    )
    title = fe.add_norwegian_letters(title)
    plt.title(title)

    fig = plt.gcf()
    fig.subplots_adjust(left=0.2)
    plt.savefig("{0}{1}".format(env.web_images_folder, filename))
    plt.close(fig)

    return
Esempio n. 17
0
def get_problems_from_AvalancheEvalProblem2V(region_id, start_date, end_date):
    '''Fetches observed avalanche problems from the regObs OData view
    AvalancheEvalProblem2V. The view is used from 2014-02-10 up to today.

    Example request:
    http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheEvalProblem2V?$filter=DtObsTime%20gt%20datetime%272012-01-10%27%20and%20DtObsTime%20lt%20datetime%272015-01-15%27%20and%20ForecastRegionName%20eq%20%27Senja%27%20and%20LangKey%20eq%201&$format=json

    :param region_id:   [int] forecast region id as in the regObs ForecastRegionKDV table
    :param start_date:  [date] start of the requested period
    :param end_date:    [date] end of the requested period
    :return:            [list of AvalancheProblem] one element pr observed problem with
                        a cause given (AvalCauseTID != 0)

    Datasample:
    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            type: "RegObsModel.AvalancheEvalProblem2V"
        },
        RegID: 34540,
        AvalancheEvalProblemID: 0,
        DtObsTime: "/Date(1398625200000)/",
        DtRegTime: "/Date(1398633678170)/",
        ObsLocationID: 11031,
        LocationName: "Steinskardtind",
        UTMZone: 33,
        UTMEast: 639918,
        UTMNorth: 7731868,
        ForecastRegionName: "Tromsø",
        MunicipalName: "TROMSØ",
        NickName: "MagnusH@obskorps",
        CompetenceLevelName: "****",
        AvalancheExtTID: 0,
        AvalancheExtName: "Ikke gitt ",
        AvalCauseTID: 22,
        AvalCauseName: "Opphopning av vann over skarelag",
        AvalCauseDepthTID: 2,
        AvalCauseDepthName: "Innen en meter",
        AvalCauseAttributes: 4,
        AvalCauseAttributeName: "Det overliggende laget er mykt.  ",
        DestructiveSizeTID: 2,
        DestructiveSizeName: "2 - Små",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        ValidExposition: "00000000",
        ExposedHeight1: 600,
        ExposedHeight2: 300,
        Comment: "Gammelt skarelag i ferd med å gå i oppløsning. Laget over og under er omvandlet til fuktig/våt grovkornet snø. Skarelaget bærer ikke lenger og kan kollapse. Ser ut til å ha liten evne til propagering, men glir ut som lite flak.",
        LangKey: 1
}

    '''

    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheEvalProblem2V"

    # Build the OData $filter expression. Dates are compared as strings in the
    # datetime'...' literals, so start_date/end_date must render as ISO dates.
    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    # .decode('utf8') turns the format string into unicode (python 2) so formatting
    # in the norwegian region name does not raise UnicodeDecodeError.
    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)

    result = requests.get(url).json()
    result = result['d']['results']

    print 'getregobs.py -> get_problems_from_AvalancheEvalProblem2V: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)


    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    # by splitting the period in two and recursing on each half.
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheEvalProblem2V(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheEvalProblem2V(region_id, date_in_middle, end_date)
    else:

        problems = []

        if len(result) != 0:
            for p in result:
                cause = int(p['AvalCauseTID'])
                # TID 0 is "Ikke gitt" (not given) in the KDV tables; such problems are skipped.
                if cause != 0:

                    # DtObsTime comes as "/Date(1398625200000)/"; [6:-2] strips the
                    # "/Date(" prefix and ")/" suffix, leaving the epoch in milliseconds.
                    # unix_time_2_normal presumably handles the millisecond scale - TODO confirm.
                    date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()
                    order = int(p["AvalancheEvalProblemID"])
                    cause_tid = p['AvalCauseTID']
                    cause_name = aval_cause_kdv[cause_tid].Name
                    source = "Observasjon"

                    prob = gp.AvalancheProblem(region_id, region_name, date, order, cause_name, source)

                    prob.set_cause_tid(cause_tid)
                    prob.set_municipal(p['MunicipalName'])
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    prob.set_aval_size(p["DestructiveSizeName"])
                    prob.set_problem_combined(p['AvalCauseName'])
                    prob.set_regobs_view(view)
                    prob.set_nick_name(p['NickName'])

                    problems.append(prob)

    return problems
Esempio n. 18
0
        9: 'Mye vann i dekket',
        10: 'Solpaavirkning'
    }
    '''
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')

    if old_cause_parameter_name == 'AvalancheProblem':
        # avalanche_problem_kdv = gkdv.get_kdv('AvalancheProblemKDV')
        if old_cause_tid == 0:
            return aval_cause_kdv[0].Name
        elif old_cause_tid == 1:
            return aval_cause_kdv[28].Name


if __name__ == "__main__":

    import datetime as dt

    # Ad hoc smoke test: load the KDV lookup tables, then pull problems for
    # region 108 (Tromsoe) from the two regObs views.
    # __get_cause_from_old_cause('AvalancheProblem', 3)
    region_list = gkdv.get_kdv("ForecastRegionKDV")
    aval_cause_kdv = gkdv.get_kdv("AvalCauseKDV")

    avalanche_warning = get_problems_from_AvalancheWarningV(108, dt.date(2011, 11, 30), dt.date(2013, 3, 15))
    avalache_problems2_v = get_problems_from_AvalancheEvalProblem2V(108, dt.date(2014, 11, 30), dt.date(2015, 3, 15))
    # AvalancheWarnProblem = get_problems_from_AvalancheWarnProblemV(108, '2015-01-01','2015-01-15')

    a = 1  # anchor line for setting a breakpoint when inspecting the results



    def set_cause_name(self):
        '''Looks up the display name matching self.cause_tid in the regObs
        AvalCauseKDV table and stores it on self.cause_name.'''
        self.cause_name = gkdv.get_kdv("AvalCauseKDV")[self.cause_tid].Name
Esempio n. 20
0
def get_problems_from_AvalancheWarnProblemV(region_id, start_date, end_date):
    '''AvalancheWarnProblemV used from 2012-11-15 to today. It selects only problems linked to published
    warnings.

    There was made changes to the view in des 2013 which I think affected destructive size and avalanche cause.
    Changes were made to the data model before startup in december 2014. Changes involve use of cause_name
    and avalanche size.

    Notes to do:
    * aval_type distingushes if aval type is dry or wet. Varsom does not this any more.
    * lots of typos in KDV_names. Inconsistent capitalization, blankspace at end of line.
    * exposed terain not included
    * no mapping to the manin categroies of avalanche problems.

    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            type: "RegObsModel.AvalancheWarnProblemV"
        },
        RegID: 45272,
        AvalancheWarnProblemID: 1,
        DtObsTime: "/Date(1420717404593)/",
        DtRegTime: "/Date(1420717404320)/",
        ObsLocationID: 39,
        LocationName: "Tromsø",
        UTMZone: 33,
        UTMEast: 655106,
        TMNorth: 7763208,
        ForecastRegionTID: 108,
        ForecastRegionName: "Tromsø",
        MunicipalName: "INGEN KOMMUNE",
        NickName: "Silje@svv",
        ObserverGroupID: 1,
        CompetenceLevelTID: 120,
        CompetenceLevelName: "***",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        DestructiveSizeExtTID: 0,
        DestructiveSizeExtName: "Ikke gitt",
        AvalancheExtTID: 20,
        AvalancheExtName: "Tørre flakskred",
        AvalCauseTID: 15,
        AvalCauseName: "Dårlig binding mellom lag i fokksnøen",
        AvalCauseExtTID: 0,
        AvalCauseExtName: "ikke gitt ",
        AvalReleaseHeightTID: 0,
        AvalReleaseHeighName: "Ikke gitt ",
        ProbabilityCombined: null,
        CauseCombined: null,
        ReleaseHeightCombined: "",
        AvalancheProblemCombined: "  ",
        Comment: null,
        LangKey: 1,
        ValidExposistion: "11111111",
        ExposedHeight1: 400,
        ExposedHeight2: 0,
        ExposedHeightComboTID: 1,
        DestructiveSizeTID: 1,
        DestructiveSizeName: "1 - Harmløst",
        SortOrder: 1,
        AvalPropagationTID: 1,
        AvalPropagationName: "Isolerte faresoner",
        AvalWeakLayerId: null,
        AdviceText: "Se etter områder hvor vinden nylig har lagt fra seg fokksnø, typisk bak rygger, i renneformasjoner og søkk. Lokale vindeffekter og skiftende vindretning kan gi stor variasjon i hvor fokksnøen legger seg. Snø som sprekker opp rundt skiene/brettet er et typisk tegn. Unngå områder med fokksnø til den har fått stabilisert seg. Det er størst sannsynlighet for å løse ut skred på kul-formasjoner i terrenget og der fokksnøen er myk."
    }


    '''
    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheWarnProblemV"

    # Note this view queries on LocationName and not on ForeCastRegionName as the other views
    odata_query = "DtObsTime gt datetime'{1}' and " \
             "DtObsTime lt datetime'{2}' and " \
             "LocationName eq '{0}' and " \
             "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)
    result = requests.get(url).json()
    try:
        result = result['d']['results']
    except:
        result = []

    print 'getregobs.py -> get_problems_from_AvalancheWarnProblemV: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheWarnProblemV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheWarnProblemV(region_id, date_in_middle, end_date)
    else:
        valid_regids = fa.get_valid_regids(region_id-100, start_date, end_date)
        problems = []
        if len(result) != 0:
            for p in result:
                regid = p["RegID"]

                if regid in valid_regids:

                    date = datetime.datetime.strptime(valid_regids[regid][0:10], '%Y-%m-%d').date()
                    source = "Varsel"

                    aval_cause_tid = int(p['AvalCauseTID']) + int(p['AvalCauseExtTID'])
                    cause_name = p["CauseCombined"]
                    aval_size = fe.remove_norwegian_letters(p['DestructiveSizeExtName'])
                    aval_type = p['AvalancheExtName']
                    aval_trigger = fe.remove_norwegian_letters(p['AvalTriggerSimpleName'])
                    aval_probability = fe.remove_norwegian_letters(p['AvalProbabilityName'])
                    aval_distribution = fe.remove_norwegian_letters(p['AvalPropagationName'])
                    aval_cause_combined = p['AvalancheProblemCombined']
                    problem_url = 'http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter=RegID eq {2} and LangKey eq 1&$format=json'.format(api_version, view, regid)

                    # from late november 2013 there was a change in data model
                    if date > datetime.datetime.strptime('2013-11-15', '%Y-%m-%d').date():
                        aval_cause_tid = int(p['AvalCauseTID'])
                        cause_name = aval_cause_kdv[aval_cause_tid].Name
                        aval_size = fe.remove_norwegian_letters(p['DestructiveSizeName'])
                        aval_probability = fe.remove_norwegian_letters(p['AvalProbabilityName'])
                        aval_distribution = fe.remove_norwegian_letters(p['AvalPropagationName'])
                        aval_cause_combined = p['AvalCauseName']

                        # http://www.varsom.no/Snoskred/Senja/?date=18.03.2015
                        varsom_name = region_name.replace('æ','a').replace('ø','o').replace('å','a')
                        varsom_date = date.strftime("%d.%m.%Y")
                        problem_url = "http://www.varsom.no/Snoskred/{0}/?date={1}".format(varsom_name, varsom_date)

                    if cause_name is not None and aval_cause_tid != 0:
                        order = int(p["AvalancheWarnProblemID"])
                        prob = gp.AvalancheProblem(region_id, region_name, date, order, cause_name, source)
                        prob.set_aval_type(aval_type)
                        prob.set_aval_size(aval_size)
                        prob.set_aval_trigger(aval_trigger)
                        prob.set_aval_distribution(aval_distribution)
                        prob.set_aval_probability(aval_probability)
                        prob.set_problem_combined(aval_cause_combined)
                        prob.set_regobs_view(view)
                        prob.set_url(problem_url)
                        prob.set_cause_tid(aval_cause_tid)
                        prob.set_main_cause(cause_name)
                        problems.append(prob)

    return problems
Esempio n. 21
0
if __name__ == "__main__":

    ## Get new or load from pickle.
    get_new = False
    ## Use already made data set. Remember to make get_new = False
    make_new = False

    ## Set dates
    from_date = dt.date(2015, 11, 30)
    to_date = dt.date(2016, 6, 1)
    #to_date = dt.date.today()

    ## Get regions
    # region_id = [112, 117, 116, 128]
    region_id = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 99 < k < 150 and v.IsActive is True:
            region_id.append(v.ID)

    ## The output
    plot_file_name = 'Danger level and danger sign 2015-16.png'

    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(
        env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(
        env.local_storage)

    if get_new: