Example #1
def plot_histogram_on_given_problem(problem_combo,
                                    warnings,
                                    file_name,
                                    figure_title='',
                                    file_ext=".png"):
    '''
    Plots a histogram of occurrences per danger level for a given avalanche problem (combination of distribution,
    probability, size and trigger). This method uses the more generic plot_histogram function.

    :param problem_combo:   dictionary of {distribution : value, probability : value, size : value, trigger : value}
    :param warnings:        list of all warnings in the selected timespan
    :param file_name:       String. Filename including path.
    :param figure_title:    String. If not specified it becomes the same as the filename.
    :param file_ext:        String. Default extension is .png if no other extension is specified.

    :return:
    '''

    if figure_title == '':
        figure_title = file_name

    # Loop through the warnings and look up problems matching the problem_combo. Plot the result.
    level_list = []

    for w in warnings:
        if w.danger_level > 0 and len(w.avalanche_problems) > 0:
            p = w.avalanche_problems[0]
            if (p.aval_distribution == problem_combo['distribution']
                    and p.aval_probability == problem_combo['probability']
                    and p.aval_size == problem_combo['size']
                    and p.aval_trigger == problem_combo['trigger']):
                level_list.append(w.danger_level)

    level_keys = gkdv.get_kdv('AvalancheDangerKDV').keys()
    level_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    fig_text = 'Plottet viser tilfeller pr faregrad av \n' \
               'skredproblemer med egenskaper: \n' \
               '{2}, {3}, {4} og\n' \
               '{5}. \n \n' \
               'Denne kombinasjonen har vaert brukt\n' \
               '{0} ganger i totalt {1} produserte\n' \
               'varsel. Utvalget henter bare fra \n' \
               'hovedskredproblemer.' \
        .format(len(level_list), len(warnings),
                problem_combo['distribution'], problem_combo['probability'], problem_combo['size'], problem_combo['trigger'])

    plot_histogram(level_list,
                   level_keys,
                   file_name,
                   data_colors=level_colors,
                   figure_title=figure_title,
                   figure_text=fig_text,
                   file_ext=file_ext)

    return
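
# Minimal usage sketch for plot_histogram_on_given_problem. Assumptions: `warnings`
# is a list of AvalancheDanger objects obtained elsewhere (e.g. unpickled from an
# earlier data run), and the combo values below are illustrative KDV names, not a
# verified combination.
problem_combo = {'distribution': 'Isolerte faresoner',
                 'probability': 'Mulig',
                 'size': '2 - Små',
                 'trigger': 'Stor tilleggsbelastning'}

plot_histogram_on_given_problem(problem_combo,
                                warnings,
                                file_name='plots/problem_histogram',
                                figure_title='Eksempel')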
Example #2
def get_forecast_region_name(region_id):
    """Method takes in the region id (same as ForecastRegionTID in regObs). It looks up the name in ForecastRegionKDV
    and returns the region name.

    :param region_id:    Region ID is an int as given in ForecastRegionKDV
    :return:             Region Name string is returned
    """

    forecast_region_kdv = kdv.get_kdv('ForecastRegionKDV')
    forecast_region_kdvelement = forecast_region_kdv[region_id]
    forecast_region_name = forecast_region_kdvelement.Name

    return forecast_region_name
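
# Minimal usage sketch. Assumes the ForecastRegionKDV lookup is available; region
# id 108 maps to "Tromsø" in the data samples further down in these examples.
region_name = get_forecast_region_name(108)
print(region_name)  # -> "Tromsø"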
Example #3
def get_forecast_region_name(region_id):
    """Method takes in the region id (same as ForecastRegionTID in regObs). It looks up the name in ForecastRegionKDV
    and returns the region name.

    :param region_id:    Region ID is an int as given in ForecastRegionKDV
    :return:             Region Name string is returned
    """

    forecast_region_kdv = gkdv.get_kdv("ForecastRegionKDV")
    forecast_region_kdvelement = forecast_region_kdv[region_id]
    #forecast_region_name = fe.remove_norwegian_letters(forecast_region_kdvelement.Name)
    forecast_region_name = forecast_region_kdvelement.Name

    return forecast_region_name
def get_node_list(pickle_file_name_3, make_new_node_list,
                  desired_damage_extent_kdv, incident_list):
    """Makes a list of NodesAndValues objects. All nodes get an object and relations between the nodes are
    calculated. Lots of looping.

    :param pickle_file_name_3:
    :param make_new_node_list:
    :param desired_damage_extent_kdv:
    :param incident_list:

    :return:
    """

    if make_new_node_list:
        problem_kdv = {
            0: 'Ikke gitt',
            3: 'Toerre loessnoeskred',
            5: 'Vaate loessnoeskred',
            7: 'Nysnoeflak',
            10: 'Fokksnoe',
            20: 'Nysnoe',
            30: 'Vedvarende svakt lag',
            37: 'Dypt vedvarende svakt lag',
            40: 'Vaat snoe',
            45: 'Vaate flakskred',
            50: 'Glideskred'
        }

        cause_kdv = gkdv.get_kdv('AvalCauseKDV')
        danger_kdv = gkdv.get_kdv('AvalancheDangerKDV')
        activity_influenced_kdv = gkdv.get_kdv('ActivityInfluencedKDV')

        nodes_dict = {}
        id_counter = -1

        for cause_tid, cause_kdve in cause_kdv.items():
            cause_name = cause_kdve.Name
            if 'kke gitt' in cause_name:
                cause_name = 'Svakt lag {0}'.format(cause_name)
            if cause_kdve.IsActive:
                id_counter += 1
                nodes_dict[cause_name] = id_counter

        for problem_tid, problem_name in problem_kdv.items():
            if 'kke gitt' in problem_name:
                problem_name = 'Skredproblem {0}'.format(problem_name)
            id_counter += 1
            nodes_dict[problem_name] = id_counter

        for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.items(
        ):
            if 'kke gitt' in desired_damage_extent_name:
                desired_damage_extent_name = 'Skadeomfang {0}'.format(
                    desired_damage_extent_name)
            id_counter += 1
            nodes_dict[desired_damage_extent_name] = id_counter

        for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.items(
        ):
            if activity_influenced_tid < 200:  # only snow
                activity_influenced_name = activity_influenced_kdve.Name
                if 'kke gitt' in activity_influenced_name:
                    activity_influenced_name = 'Aktivitet {0}'.format(
                        activity_influenced_name)
                if activity_influenced_kdve.IsActive:
                    id_counter += 1
                    nodes_dict[activity_influenced_name] = id_counter

        for danger_tid, danger_kdve in danger_kdv.items():
            danger_name = danger_kdve.Name
            if 'kke gitt' in danger_name:
                danger_name = 'Faregrad {0}'.format(danger_name)
            if danger_kdve.IsActive:
                id_counter += 1
                nodes_dict[danger_name] = id_counter
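
        # At this point nodes_dict maps every node label (weak layer, avalanche
        # problem, damage extent, activity influenced and danger level) to a
        # unique integer id, assigned in the order the groups were added above.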

        make_nodes = True
        nodes_and_values = []
        print_counter = 0

        for i in incident_list:

            print('Index {0} of {1} in incident_list'.format(print_counter, len(incident_list)))
            print_counter += 1

            if i.forecast:
                cause = i.forecast.avalanche_problems[0].cause_name
                if 'kke gitt' in cause: cause = 'Svakt lag {0}'.format(cause)
                problem = i.forecast.avalanche_problems[0].main_cause
                if 'kke gitt' in problem:
                    problem = 'Skredproblem {0}'.format(problem)

                # Loop through the cause and problem list.
                # If it is the first run make the nodes.
                # If the causes in the lists match what is in the list of actual incidents, add one to the node.
                for cause_tid, cause_kdve in cause_kdv.items():
                    if cause_kdve.IsActive:
                        cause_name = cause_kdve.Name
                        if 'kke gitt' in cause_name:
                            cause_name = 'Svakt lag {0}'.format(cause_name)
                        for problem_tid, problem_name in problem_kdv.items():
                            if 'kke gitt' in problem_name:
                                problem_name = 'Skredproblem {0}'.format(
                                    problem_name)
                            if make_nodes:  # the run of the first item of incident_list covers all nodes
                                nodes_and_values.append(
                                    NodesAndValues(cause_name,
                                                   nodes_dict[cause_name],
                                                   problem_name,
                                                   nodes_dict[problem_name]))
                            if cause in cause_name and problem in problem_name:
                                for nv in nodes_and_values:
                                    if cause in nv.node_name and problem in nv.target_name:
                                        nv.add_one()

                damage_extent = i.incident.DamageExtentName
                if 'kke gitt' in damage_extent:
                    damage_extent = 'Skadeomfang {0}'.format(damage_extent)

                for problem_tid, problem_name in problem_kdv.items():
                    if 'kke gitt' in problem_name:
                        problem_name = 'Skredproblem {0}'.format(problem_name)
                    for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.items(
                    ):
                        if 'kke gitt' in desired_damage_extent_name:
                            desired_damage_extent_name = 'Skadeomfang {0}'.format(
                                desired_damage_extent_name)
                        if make_nodes:
                            nodes_and_values.append(
                                NodesAndValues(
                                    problem_name, nodes_dict[problem_name],
                                    desired_damage_extent_name,
                                    nodes_dict[desired_damage_extent_name]))
                        if problem in problem_name and damage_extent in desired_damage_extent_name:
                            for nv in nodes_and_values:
                                if problem in nv.node_name and damage_extent in nv.target_name:
                                    nv.add_one()

                activity_influenced = i.incident.ActivityInfluencedName
                if 'kke gitt' in activity_influenced:
                    activity_influenced = 'Aktivitet {0}'.format(
                        activity_influenced)

                for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.items(
                ):
                    if 'kke gitt' in desired_damage_extent_name:
                        desired_damage_extent_name = 'Skadeomfang {0}'.format(
                            desired_damage_extent_name)
                    for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.items(
                    ):
                        if activity_influenced_tid < 200:  # only snow
                            activity_influenced_name = activity_influenced_kdve.Name
                            if 'kke gitt' in activity_influenced_name:
                                activity_influenced_name = 'Aktivitet {0}'.format(
                                    activity_influenced_name)
                            if activity_influenced_kdve.IsActive:
                                if make_nodes:
                                    nodes_and_values.append(
                                        NodesAndValues(
                                            desired_damage_extent_name,
                                            nodes_dict[
                                                desired_damage_extent_name],
                                            activity_influenced_name,
                                            nodes_dict[
                                                activity_influenced_name]))
                                if desired_damage_extent_name in damage_extent and activity_influenced_name in activity_influenced:
                                    for nv in nodes_and_values:
                                        if desired_damage_extent_name in nv.node_name and activity_influenced_name in nv.target_name:
                                            nv.add_one()

                danger = i.forecast.danger_level_name
                if 'kke gitt' in danger: danger = 'Faregrad {0}'.format(danger)

                for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.items(
                ):
                    if activity_influenced_tid < 200:
                        activity_influenced_name = activity_influenced_kdve.Name
                        if 'kke gitt' in activity_influenced_name:
                            activity_influenced_name = 'Aktivitet {0}'.format(
                                activity_influenced_name)
                        if activity_influenced_kdve.IsActive:
                            for danger_tid, danger_kdve in danger_kdv.items():
                                danger_name = danger_kdve.Name
                                if 'kke gitt' in danger_name:
                                    danger_name = 'Faregrad {0}'.format(danger_name)
                                if danger_kdve.IsActive:
                                    if make_nodes:
                                        nodes_and_values.append(
                                            NodesAndValues(
                                                activity_influenced_name,
                                                nodes_dict[
                                                    activity_influenced_name],
                                                danger_name,
                                                nodes_dict[danger_name]))
                                    if activity_influenced_name in activity_influenced and danger_name in danger:
                                        for nv in nodes_and_values:
                                            if activity_influenced_name in nv.node_name and danger_name in nv.target_name:
                                                nv.add_one()

            make_nodes = False

        mp.pickle_anything(nodes_and_values, pickle_file_name_3)
    else:
        nodes_and_values = mp.unpickle_anything(pickle_file_name_3)

    return nodes_and_values
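
# get_node_list() relies on a NodesAndValues class that is not shown in these
# examples. The loops above only read node_name and target_name and call
# add_one(); the constructor signature is taken from how it is called. The id
# attributes and the value counter are assumptions about how the result is later
# turned into a node/link (Sankey-style) diagram, so treat this as a sketch, not
# the project's actual implementation.
class NodesAndValues:

    def __init__(self, node_name, node_id, target_name, target_id):
        self.node_name = node_name
        self.node_id = node_id
        self.target_name = target_name
        self.target_id = target_id
        self.value = 0  # number of incidents counted on this node -> target link

    def add_one(self):
        self.value += 1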
Example #5
def _plot_causes(region_name,
                 causes,
                 year='2018-19',
                 plot_folder=env.plot_folder + 'regionplots/'):
    """Plots observed and forecasted causes for a region for a given year.

    :param region_name:
    :param year:            [string]
    :param causes:
    :param plot_folder:
    :return:
    """

    if not os.path.exists(plot_folder):
        os.makedirs(plot_folder)

    from_date, to_date = gm.get_forecast_dates(year)

    filename = '{0} skredproblemer {1}'.format(region_name, year)
    ml.log_and_print(
        "[info] plotdangerandproblem.py -> plot_causes: Plotting {0}".format(
            filename))

    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    # list_of_causes = [0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
    list_of_causes = [10, 15, 14, 11, 13, 18, 19, 16, 22, 20, 24, 0]
    # list_of_causes = set([c.cause_tid for c in causes])
    list_of_cause_names = [aval_cause_kdv[tid].Name for tid in list_of_causes]

    dict_of_causes = {}
    for c in list_of_causes:
        dict_of_causes[c] = []
    for c in causes:
        dict_of_causes[c.cause_tid].append(c)

    # Start plotting
    fsize = (16, 7)
    plt.figure(figsize=fsize)
    plt.clf()

    # plot lines and left and bottom ticks
    y = 0
    for k, values in dict_of_causes.items():
        for v in values:
            x = (v.date - from_date).days
            if 'Forecast' in v.source:
                plt.hlines(y - 0.1, x, x + 1, lw=4,
                           color='red')  # offset the line 0.1 up
            if 'Observation' in v.source:
                plt.hlines(y + 0.1, x, x + 1, lw=4,
                           color='blue')  # offset the line 0.1 down
        y += 1

    # Left y-axis labels
    plt.ylim(len(list_of_causes) - 1, -1)  # 12 avalanche problems
    plt.yticks(range(len(list_of_causes) + 1), list_of_cause_names)

    # x-axis labels
    axis_dates = []
    axis_positions = []
    for i in range(0, (to_date - from_date).days, 1):
        date = from_date + dt.timedelta(days=i)
        if date.day == 1:
            axis_dates.append(date.strftime("%b %Y"))
            axis_positions.append(i)
    plt.xticks(axis_positions, axis_dates)

    # Right hand side y-axis: v = number of forecasts, o = number of observations,
    # s = per cent of observations that have a forecast with the same cause on the same date
    right_ticks = []
    correlation_sum = 0.
    for k, values in dict_of_causes.items():
        values_obs = [vo for vo in values if 'Observation' in vo.source]
        values_fc = [vf for vf in values if 'Forecast' in vf.source]
        correlation = 0.
        for obs in values_obs:
            for fc in values_fc:
                if obs.date == fc.date and obs.cause_tid == fc.cause_tid:
                    correlation += 1
        if len(values_obs) == 0 and len(values_fc) == 0:
            right_ticks.append("")
        else:
            if len(values_obs) == 0:
                right_ticks.append("v{0} o{1} s{2}%".format(
                    len(values_fc), len(values_obs), 0))
            else:
                right_ticks.append("v{0} o{1} s{2}%".format(
                    len(values_fc), len(values_obs),
                    int(correlation / len(values_obs) * 100)))
        correlation_sum += correlation
    right_ticks.reverse()
    plt.twinx()
    plt.ylim(-1, len(right_ticks) - 1)
    plt.yticks(range(len(right_ticks) + 1), right_ticks)

    # the title
    num_obs = len([c for c in causes if 'Observation' in c.source])
    num_fc = len([c for c in causes if 'Forecast' in c.source])
    if num_obs == 0:
        correlation_prct = 0
    else:
        correlation_prct = int(correlation_sum / num_obs * 100)

    title = 'Skredproblemer for {0} ({1} - {2}) \n Totalt {3} varslede problemer (rød) og {4} observerte problemer (blå) \n og det er {5}% samsvar mellom det som er observert og det som er varselt.'\
        .format(region_name, from_date.strftime('%Y%m%d'), to_date.strftime('%Y%m%d'), num_fc, num_obs, correlation_prct)
    plt.title(title)

    # When is the figure made?
    plt.gcf().text(0.85,
                   0.02,
                   'Figur laget {0:%Y-%m-%d %H:%M}'.format(dt.datetime.now()),
                   color='0.5')

    fig = plt.gcf()
    fig.subplots_adjust(left=0.2)
    plt.savefig(u'{0}{1}'.format(plot_folder, filename))
    plt.close(fig)
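
# _plot_causes() only reads three attributes from each element of `causes`:
# cause_tid, date and source (where source contains 'Forecast' or 'Observation').
# A minimal stand-in record and call; the folder path and the dates are placeholder
# assumptions, and the real cause objects in the project carry more fields.
import datetime as dt
from collections import namedtuple

Cause = namedtuple('Cause', ['cause_tid', 'date', 'source'])

example_causes = [
    Cause(10, dt.date(2018, 12, 24), 'Forecast'),
    Cause(10, dt.date(2018, 12, 24), 'Observation'),  # same day and tid -> counted as a match
    Cause(15, dt.date(2019, 1, 3), 'Forecast'),
]

_plot_causes('Tromsø', example_causes, year='2018-19', plot_folder='plots/regionplots/')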
Example #6
    def set_cause_name(self):

        AvalCauseKDV = gkdv.get_kdv("AvalCauseKDV")
        self.cause_name = AvalCauseKDV[self.cause_tid].Name
Example #7
def pickle_data_set(warnings, file_name, use_ikke_gitt=False):
    '''Data preparation continued. Takes the warnings, which is a list of AvalancheDanger objects, and makes a dictionary
    data set of it. The value indexes relate to each other, i.e. distribution, level, probability etc.
    at the nth index originate from the same problem.

    The data set also includes information on what the xKDV tables in regObs contain and preferred colors for
    plotting.

    :param warnings:        list of AvalancheDanger objects
    :param file_name:       full path and filename to pickle the data to
    :param use_ikke_gitt:   If you don't want to use the ID = 0 (Ikke gitt) values they can be omitted altogether.

    :return:
    '''

    level_list = []
    size_list = []
    trigger_list = []
    probability_list = []
    distribution_list = []

    for w in warnings:
        if w.danger_level > 0 and len(w.avalanche_problems) > 0:
            # The first problem in avalanche_problems is used. This is the main problem.
            level_list.append(w.danger_level)
            try:
                size_list.append(w.avalanche_problems[0].aval_size)
            except:
                size_list.append('Ikke gitt')
            trigger_list.append(w.avalanche_problems[0].aval_trigger)
            probability_list.append(w.avalanche_problems[0].aval_probability)
            distribution_list.append(w.avalanche_problems[0].aval_distribution)

        # Test if lengths match and give a warning if not.
        control = (len(level_list) + len(size_list) + len(trigger_list) +
                   len(probability_list) + len(distribution_list)) / 5
        if not control == len(level_list):
            print(
                "runForMatrix -> pickle_data_set: list lengths don't match. Error in data."
            )

    level_keys = [v for v in gkdv.get_kdv('AvalancheDangerKDV').keys()]
    size_keys = [v.Name for v in gkdv.get_kdv('DestructiveSizeKDV').values()]
    triggers_keys = [
        v.Name for v in gkdv.get_kdv('AvalTriggerSimpleKDV').values()
    ]
    probability_keys = [
        v.Name for v in gkdv.get_kdv('AvalProbabilityKDV').values()
    ]
    distribution_keys = [
        v.Name for v in gkdv.get_kdv('AvalPropagationKDV').values()
    ]

    level_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    if not use_ikke_gitt:
        level_keys.pop(0)
        size_keys.pop(0)
        triggers_keys.pop(0)
        probability_keys.pop(0)
        distribution_keys.pop(0)

        level_colors.pop(0)

    data_set = {
        'level': {
            'values': level_list,
            'keys': level_keys,
            'colors': level_colors
        },
        'size': {
            'values': size_list,
            'keys': size_keys,
            'colors': ['0.7']
        },
        'trigger': {
            'values': trigger_list,
            'keys': triggers_keys,
            'colors': ['0.7']
        },
        'probability': {
            'values': probability_list,
            'keys': probability_keys,
            'colors': ['0.7']
        },
        'distribution': {
            'values': distribution_list,
            'keys': distribution_keys,
            'colors': ['0.7']
        }
    }

    mp.pickle_anything(data_set, file_name)
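
# Sketch of reading the pickled data set back and plotting one of the variables.
# Assumes mp.unpickle_anything() is the counterpart of mp.pickle_anything() (both
# are used in get_node_list() above), that plot_histogram has the signature used
# in Example #1, and that the file paths are placeholders.
data_set = mp.unpickle_anything('pickles/matrix_data_set.pickle')

level = data_set['level']
plot_histogram(level['values'],
               level['keys'],
               'plots/danger_level_histogram',
               data_colors=level['colors'],
               figure_title='Faregrad for hovedskredproblemet')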
Example #8
def __get_cause_from_old_cause(old_cause_parameter_name, old_cause_tid):
    '''
    INCOMPLETE

    This method returns the value of a new avalanche cause given an old one.
    This transformation is going to induce some errors because the old avalanche causes are not exactly
    projected onto/compatible with the new ones. That is why we made new ones...

    :param old_cause_parameter_name:
    :param old_cause_tid:
    :return:

    aval_cause_kdv{
        0: 'Ikke gitt',
        1: 'Regn',
        2: 'Oppvarming',
        3: 'Ingen gjenfrysing',
        4: 'Paalagring',
        5: 'Svake lag',
        6: 'Regn + oppvarming',
        7: 'Regn + oppvarm + ingen gj.frys',
        8: 'Vind',
        9: 'Oppvarm + ingen gj.frys',
        10: 'Lag med loes nysnoe',
        11: 'Lag med overflaterim',
        12: 'Lag med sproehagl',
        13: 'Lag med kantkornet snoe',
        14: 'Glatt skare',
        15: 'I fokksnoeen',
        16: 'Kantkornet ved bakken',
        17: 'Kantkornet rundt vegetasjon',
        18: 'Kantkornet over skaren',
        19: 'Kantkornet under skaren',
        20: 'Gjennomfuktet fra bakken',
        21: 'Gjennomfuktet fra overflaten',
        22: 'Opphopning over skaren',
        23: 'Snoedekket er overmettet av vann',
        24: 'Ubundet loes snoe',
        25: 'Regn/temperaturstigning',
        26: 'Smelting fra bakken',
        27: 'Vannmettet snoe',
        28: 'Loes toerr snoe',
        29: 'Regn / temperaturstigning / soloppvarming'
    }



    avalanche_problem_kdv{
        0: 'Ikke gitt',
        1: 'Nysnoe',
        2: 'Fokksnoe',
        3: 'Tynt snoedekke',
        4: 'Ustabile lag i dekket',
        5: 'Ustabile lag naer bakken',
        7: 'Regn',
        8: 'Rask temperaturstigning',
        9: 'Mye vann i dekket',
        10: 'Solpaavirkning'
    }
    '''
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')

    if old_cause_parameter_name == 'AvalancheProblem':
        # avalanche_problem_kdv = gkdv.get_kdv('AvalancheProblemKDV')
        if old_cause_tid == 0:
            return aval_cause_kdv[0].Name
        elif old_cause_tid == 1:
            return aval_cause_kdv[28].Name
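
# __get_cause_from_old_cause() is marked INCOMPLETE and only maps old_cause_tid 0
# and 1 for the 'AvalancheProblem' parameter. A table-driven sketch of the same
# logic; only the two pairs present in the original code are filled in, and every
# other old-to-new mapping still has to be decided.
_OLD_PROBLEM_TID_TO_NEW_CAUSE_TID = {
    0: 0,   # 'Ikke gitt' -> 'Ikke gitt'
    1: 28,  # 'Nysnoe'    -> 'Loes toerr snoe'
}


def _get_cause_from_old_cause_sketch(old_cause_parameter_name, old_cause_tid):
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    if old_cause_parameter_name == 'AvalancheProblem':
        new_tid = _OLD_PROBLEM_TID_TO_NEW_CAUSE_TID.get(old_cause_tid)
        if new_tid is not None:
            return aval_cause_kdv[new_tid].Name
    return None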
Example #9
def get_problems_from_AvalancheWarnProblemV(region_id, start_date, end_date):
    """AvalancheWarnProblemV used from 2012-11-15 to today. It selects only problems linked to published
    warnings.

    * Changes were made to the view in December 2013 which I think affected destructive size
    and avalanche cause.
    * Changes were made to the data model before startup in December 2014. Changes involve use of cause_name
    and avalanche size. In the 2014/15 season we had a list of 4 main problems and 11(?) sub-problems/causes, hence the names
    main_cause and cause_name.
    * In December 2015 we changed again. Now using the list of avalanche problems and the weak layer as "main_cause"
    and "cause_name".

    Notes to do:
    * aval_type distinguishes whether the avalanche type is dry or wet. Varsom no longer does this.
    * lots of typos in the KDV names. Inconsistent capitalization, blank space at end of line.
    * exposed terrain not included

    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            type: "RegObsModel.AvalancheWarnProblemV"
        },
        RegID: 45272,
        AvalancheWarnProblemID: 1,
        DtObsTime: "/Date(1420717404593)/",
        DtRegTime: "/Date(1420717404320)/",
        ObsLocationID: 39,
        LocationName: "Tromsø",
        UTMZone: 33,
        UTMEast: 655106,
        TMNorth: 7763208,
        ForecastRegionTID: 108,
        ForecastRegionName: "Tromsø",
        MunicipalName: "INGEN KOMMUNE",
        NickName: "Silje@svv",
        ObserverGroupID: 1,
        CompetenceLevelTID: 120,
        CompetenceLevelName: "***",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        DestructiveSizeExtTID: 0,
        DestructiveSizeExtName: "Ikke gitt",
        AvalancheExtTID: 20,
        AvalancheExtName: "Tørre flakskred",
        AvalCauseTID: 15,
        AvalCauseName: "Dårlig binding mellom lag i fokksnøen",
        AvalCauseExtTID: 0,
        AvalCauseExtName: "ikke gitt ",
        AvalReleaseHeightTID: 0,
        AvalReleaseHeighName: "Ikke gitt ",
        ProbabilityCombined: null,
        CauseCombined: null,
        ReleaseHeightCombined: "",
        AvalancheProblemCombined: "  ",
        Comment: null,
        LangKey: 1,
        ValidExposistion: "11111111",
        ExposedHeight1: 400,
        ExposedHeight2: 0,
        ExposedHeightComboTID: 1,
        DestructiveSizeTID: 1,
        DestructiveSizeName: "1 - Harmløst",
        SortOrder: 1,
        AvalPropagationTID: 1,
        AvalPropagationName: "Isolerte faresoner",
        AvalWeakLayerId: null,
        AdviceText: "Se etter områder hvor vinden nylig har lagt fra seg fokksnø, typisk bak rygger, i renneformasjoner og søkk. Lokale vindeffekter og skiftende vindretning kan gi stor variasjon i hvor fokksnøen legger seg. Snø som sprekker opp rundt skiene/brettet er et typisk tegn. Unngå områder med fokksnø til den har fått stabilisert seg. Det er størst sannsynlighet for å løse ut skred på kul-formasjoner i terrenget og der fokksnøen er myk."
    }

    """
    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheWarnProblemV"

    # Note this view queries on LocationName and not on ForeCastRegionName as the other views
    odata_query = "DtObsTime gt datetime'{1}' and " \
             "DtObsTime lt datetime'{2}' and " \
             "LocationName eq '{0}' and " \
             "LangKey eq 1".format(region_name, start_date, end_date)
    #odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".format(
        env.api_version, view, odata_query)
    result = requests.get(url).json()
    try:
        result = result['d']['results']
    except:
        result = []

    print('getregobs.py -> get_problems_from_AvalancheWarnProblemV: {0} observations for {1} from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date))

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta / 2
        problems = get_problems_from_AvalancheWarnProblemV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheWarnProblemV(region_id, date_in_middle, end_date)
    else:
        valid_regids = fa.get_valid_regids(region_id, start_date, end_date)
        problems = []
        if len(result) != 0:
            for p in result:
                regid = p["RegID"]

                if regid in valid_regids:

                    date = datetime.datetime.strptime(
                        valid_regids[regid][0:10], '%Y-%m-%d').date()
                    source = "Varsel"

                    aval_cause_tid = int(p['AvalCauseTID']) + int(
                        p['AvalCauseExtTID'])
                    cause_name = p["CauseCombined"]
                    aval_size = p['DestructiveSizeExtName']
                    aval_type = p['AvalancheExtName']
                    aval_trigger = p['AvalTriggerSimpleName']
                    aval_probability = p['AvalProbabilityName']
                    aval_distribution = p['AvalPropagationName']
                    aval_cause_combined = p['AvalancheProblemCombined']
                    problem_url = 'http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter=RegID eq {2} and LangKey eq 1&$format=json'.format(
                        api_version, view, regid)

                    # from late november 2013 there was a change in data model
                    if date > datetime.datetime.strptime(
                            '2013-11-15', '%Y-%m-%d').date():
                        aval_cause_tid = int(p['AvalCauseTID'])
                        cause_name = aval_cause_kdv[aval_cause_tid].Name
                        aval_size = p['DestructiveSizeName']
                        aval_probability = p['AvalProbabilityName']
                        aval_distribution = p['AvalPropagationName']
                        aval_cause_combined = p['AvalCauseName']

                        # http://www.varsom.no/Snoskred/Senja/?date=18.03.2015
                        varsom_name = region_name.replace('æ', 'a').replace(
                            'ø', 'o').replace('å', 'a')
                        varsom_date = date.strftime("%d.%m.%Y")
                        problem_url = "http://www.varsom.no/Snoskred/{0}/?date={1}".format(
                            varsom_name, varsom_date)

                    if cause_name is not None and aval_cause_tid != 0:
                        order = int(p["AvalancheWarnProblemID"])
                        prob = gp.AvalancheProblem(region_id, region_name,
                                                   date, order, cause_name,
                                                   source)
                        prob.set_aval_type(aval_type)
                        prob.set_aval_size(aval_size)
                        prob.set_aval_trigger(aval_trigger)
                        prob.set_aval_distribution(aval_distribution)
                        prob.set_aval_probability(aval_probability)
                        #prob.set_problem_combined(aval_cause_combined)
                        prob.set_regobs_view(view)
                        prob.set_url(problem_url)
                        prob.set_cause_tid(aval_cause_tid)
                        # Maybe this fixes issues with new problems in 2015/16?
                        #if date > datetime.datetime.strptime('2015-11-15', '%Y-%m-%d').date():
                        #    prob.main_cause = p['Problem']
                        #else:
                        #    prob.set_main_cause(cause_name)
                        prob.set_main_cause(cause_name)
                        problems.append(prob)

    return problems
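
# Minimal usage sketch. Region id 108 is Tromsø in these examples. Dates are passed
# as date objects so the recursive split on the 1000-row OData limit keeps working.
# The attribute names read below are assumptions based on the setters used above.
import datetime

warn_problems = get_problems_from_AvalancheWarnProblemV(108, datetime.date(2015, 1, 1), datetime.date(2015, 3, 31))
for prob in warn_problems:
    print(prob.date, prob.cause_name, prob.aval_size)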
Example #10
def get_problems_from_AvalancheEvalProblem2V(region_id, start_date, end_date):
    '''Used from 2014-02-10 up to today

    http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheEvalProblem2V?$filter=DtObsTime%20gt%20datetime%272012-01-10%27%20and%20DtObsTime%20lt%20datetime%272015-01-15%27%20and%20ForecastRegionName%20eq%20%27Senja%27%20and%20LangKey%20eq%201&$format=json
    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    Datasample:
    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            type: "RegObsModel.AvalancheEvalProblem2V"
        },
        RegID: 34540,
        AvalancheEvalProblemID: 0,
        DtObsTime: "/Date(1398625200000)/",
        DtRegTime: "/Date(1398633678170)/",
        ObsLocationID: 11031,
        LocationName: "Steinskardtind",
        UTMZone: 33,
        UTMEast: 639918,
        UTMNorth: 7731868,
        ForecastRegionName: "Tromsø",
        MunicipalName: "TROMSØ",
        NickName: "MagnusH@obskorps",
        CompetenceLevelName: "****",
        AvalancheExtTID: 0,
        AvalancheExtName: "Ikke gitt ",
        AvalCauseTID: 22,
        AvalCauseName: "Opphopning av vann over skarelag",
        AvalCauseDepthTID: 2,
        AvalCauseDepthName: "Innen en meter",
        AvalCauseAttributes: 4,
        AvalCauseAttributeName: "Det overliggende laget er mykt.  ",
        DestructiveSizeTID: 2,
        DestructiveSizeName: "2 - Små",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        ValidExposition: "00000000",
        ExposedHeight1: 600,
        ExposedHeight2: 300,
        Comment: "Gammelt skarelag i ferd med å gå i oppløsning. Laget over og under er omvandlet til fuktig/våt grovkornet snø. Skarelaget bærer ikke lenger og kan kollapse. Ser ut til å ha liten evne til propagering, men glir ut som lite flak.",
        LangKey: 1
}

    '''

    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheEvalProblem2V"

    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    #odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".format(
        api_version, view, odata_query)

    result = requests.get(url).json()
    result = result['d']['results']

    print('getregobs.py -> get_problems_from_AvalancheEvalProblem2V: {0} observations for {1} from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date))

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta / 2
        problems = get_problems_from_AvalancheEvalProblem2V(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheEvalProblem2V(region_id, date_in_middle, end_date)
    else:

        problems = []

        if len(result) != 0:
            for p in result:
                AvalCauseTID = p['AvalCauseTID']

                if AvalCauseTID is None:
                    cause = 0
                else:
                    cause = int(AvalCauseTID)

                if cause != 0:

                    date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()
                    order = int(p["AvalancheEvalProblemID"])
                    cause_tid = p['AvalCauseTID']
                    cause_name = aval_cause_kdv[cause_tid].Name
                    source = "Observasjon"

                    prob = gp.AvalancheProblem(region_id, region_name, date,
                                               order, cause_name, source)

                    prob.set_cause_tid(cause_tid)
                    prob.set_municipal(p['MunicipalName'])
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring,
                                                 prob.regid))
                    prob.set_aval_size(p["DestructiveSizeName"])
                    #prob.set_problem_combined(p['AvalCauseName'])
                    prob.set_regobs_view(view)
                    prob.set_nick_name(p['NickName'])

                    problems.append(prob)

    return problems
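
# get_problems_from_AvalancheEvalProblem2V() relies on a unix_time_2_normal()
# helper that is not shown in these examples. A plausible sketch: regObs JSON
# fields like "/Date(1398625200000)/" are millisecond Unix timestamps, and the
# call site passes int(p['DtObsTime'][6:-2]). Whether local or UTC time is wanted
# is an assumption; UTC reproduces the DtObsTime in the data sample above.
import datetime


def unix_time_2_normal(unix_date_time_in_milliseconds):
    seconds = unix_date_time_in_milliseconds / 1000  # regObs gives milliseconds
    return datetime.datetime.utcfromtimestamp(seconds)

# unix_time_2_normal(int("/Date(1398625200000)/"[6:-2])).date() -> 2014-04-27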
Example #11


if __name__ == "__main__":

    import datetime as dt
    # __get_cause_from_old_cause('AvalancheProblem', 3)
    region_list = gkdv.get_kdv("ForecastRegionKDV")
    aval_cause_kdv = gkdv.get_kdv("AvalCauseKDV")

    avalanche_warning = get_problems_from_AvalancheWarningV(
        108, dt.date(2011, 11, 30), dt.date(2013, 3, 15))
    avalanche_problems2_v = get_problems_from_AvalancheEvalProblem2V(
        108, dt.date(2014, 11, 30), dt.date(2015, 3, 15))
    # AvalancheWarnProblem = get_problems_from_AvalancheWarnProblemV(108, '2015-01-01','2015-01-15')

    a = 1