Example #1
0
    def test_avalanche_activity_2(self):
        """Request avalanche activity (form 2) in all three output modes and
        check that types and counts are consistent with the raw registrations."""
        from_date = '2017-03-01'
        to_date = '2017-03-10'

        as_objects = go.get_avalanche_activity_2(from_date, to_date)
        as_frame = go.get_avalanche_activity_2(from_date, to_date, output='DataFrame')
        as_count = go.get_avalanche_activity_2(from_date, to_date, output='Count')

        self.assertIsInstance(as_objects[0], go.AvalancheActivityObs2)
        self.assertIsInstance(as_frame, pd.DataFrame)

        # The 'Count' output should equal the number of raw registrations of
        # type 33 in the same period.
        raw_observations = go.get_data(from_date, to_date, registration_types=33)
        self.assertEqual(as_count, len(raw_observations))
Example #2
0
# -*- coding: utf-8 -*-
"""This is an example of how to retrieve avalanche observations by using the varsomdata/getobservations.py
module.

It takes time to make requests. Progress can be followed in the log files.
"""

from varsomdata import getobservations as go

__author__ = 'Ragnar Ekker'

# The first avalanche activity form was used between 2012 and 2016.
avalanche_activity = go.get_avalanche_activity('2015-03-01', '2015-03-10')

# The second avalanche activity form was introduced in 2015 and is currently in use (nov 2018).
avalanche_activity_2 = go.get_avalanche_activity_2('2017-03-01', '2017-03-10')

# Each element is an AvalancheActivityObs2 object; print one of its attributes.
print('Std.out from AvalancheActivityObs2 object')
print('\t', avalanche_activity_2[0].DestructiveSizeName)

# Observations of single avalanches
avalanche_obs = go.get_avalanche('2015-03-01', '2015-03-10')

print('Std.out from AvalancheObs object')
print('\t', avalanche_obs[0].HeightStartZone,
      avalanche_obs[0].DestructiveSizeName)

# Observations of avalanches given as a danger sign (DangerSignTID = 2).
# A query might be specified on region, eg region_id=3011 is Tromsø.
all_danger_signs = go.get_danger_sign('2018-03-01',
                                      '2018-03-10',
Example #3
0
def observed_avalanche_activity():
    """Fetch all observed avalanche activity (both regObs form versions) for
    the listed forecast regions over the 2014-2019 winters and write it to
    ../localstorage/obs_aval_activity.csv (';'-separated).

    Prints elapsed time for the requests and for building the table.
    Returns None; the CSV file is the output.
    """
    from_date = dt.date(2014, 12, 1)
    to_date = dt.date(2019, 5, 31)

    region_ids = [
        3003, 3007, 3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016, 3017, 3022,
        3023, 3024, 3027, 3028, 3029, 3031, 3032, 3034, 3035
    ]
    start = dt.datetime.now()

    aval_act = go.get_avalanche_activity(from_date, to_date, region_ids)
    aval_act2 = go.get_avalanche_activity_2(from_date,
                                            to_date,
                                            region_ids,
                                            lang_key=2)
    stop1 = dt.datetime.now()
    print(stop1 - start)

    columns = [
        'Release_type', 'Avalanche_type', 'DestructiveSizeName', 'Date',
        'Elevation', 'EstimatedNumName', 'EstimatedNumTID', 'Latitude',
        'Longitude', 'ForecastRegionName', 'ForecastRegionTID', 'Nick', 'RegID'
    ]

    def _row(av, release_type, avalanche_type):
        """Map one activity observation to a flat table row. The two form
        versions only differ in the names of the trigger/type attributes,
        which are passed in explicitly."""
        return {
            'Release_type': release_type,
            'Avalanche_type': avalanche_type,
            'DestructiveSizeName': av.DestructiveSizeName,
            'Date': av.DtObsTime.strftime('%Y-%m-%d'),
            'Elevation': av.Height,
            'EstimatedNumName': av.EstimatedNumName,
            'EstimatedNumTID': av.EstimatedNumTID,
            'Latitude': av.Latitude,
            'Longitude': av.Longitude,
            'ForecastRegionName': av.ForecastRegionName,
            'ForecastRegionTID': av.ForecastRegionTID,
            'Nick': av.NickName,
            'RegID': av.RegID
        }

    # Build all rows first and construct the frame once: DataFrame.append
    # copied the whole frame on every call (O(n^2)) and was removed in
    # pandas >= 2.0.
    rows = [_row(av, av.AvalancheTriggerName, av.AvalancheName) for av in aval_act]
    rows += [_row(av, av.AvalTriggerSimpleName, av.AvalancheExtName) for av in aval_act2]
    # dtype=object matches the column dtypes the old append-based build
    # produced, so the CSV output is unchanged (no int -> float coercion
    # when a column contains missing values).
    _df = pd.DataFrame(rows, columns=columns, dtype=object)

    stop2 = dt.datetime.now()
    print(stop2 - stop1)
    print(stop2 - start)

    _df.to_csv('../localstorage/obs_aval_activity.csv',
               sep=';',
               index_label='index')
Example #4
0
def get_avalanche_index(from_date,
                        to_date,
                        region_ids=None,
                        observer_ids=None):
    """Map every regObs table carrying avalanche-activity information to an
    avalanche index. The relevant tables are AvalancheActivityObs,
    AvalancheActivityObs2, AvalanchObs and DangerObs. The index itself is
    defined in the input/aval_dl_order_of_size_and_num.csv configuration file.

    :param from_date:
    :param to_date:
    :param region_ids:
    :param observer_ids:
    :return:    [list] of class AvalanchIndex
    """

    # Fetch every observation type that contributes to the index.
    activities = go.get_avalanche_activity(from_date,
                                           to_date,
                                           region_ids=region_ids,
                                           observer_ids=observer_ids)
    activities_2 = go.get_avalanche_activity_2(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    single_avalanches = go.get_avalanche(from_date,
                                         to_date,
                                         region_ids=region_ids,
                                         observer_ids=observer_ids)
    signs = go.get_danger_sign(from_date,
                               to_date,
                               region_ids=region_ids,
                               observer_ids=observer_ids)

    # Load the num/size -> index mapping.
    index_definition = rf.read_configuration_file(
        '{0}aval_dl_order_of_size_and_num.csv'.format(
            env.matrix_configurations), AvalancheIndex)

    indexes = []

    # First-generation activity form: dated by the avalanche time.
    for obs in activities:
        index = AvalancheIndex()
        index.set_num_and_size_and_index(obs.EstimatedNumName,
                                         obs.DestructiveSizeName,
                                         index_definition)
        index.set_date_region_observation(obs.DtAvalancheTime,
                                          obs.ForecastRegionName, obs)
        indexes.append(index)

    # Second-generation form gives a time window; skip entries missing either
    # endpoint and date the rest by the midpoint of DtStart..DtEnd.
    for obs in activities_2:
        if not (obs.DtStart and obs.DtEnd):
            continue
        index = AvalancheIndex()
        index.set_num_and_size_and_index(obs.EstimatedNumName,
                                         obs.DestructiveSizeName,
                                         index_definition)
        midpoint = obs.DtStart + (obs.DtEnd - obs.DtStart) / 2
        index.set_date_region_observation(midpoint.date(),
                                          obs.ForecastRegionName, obs)
        indexes.append(index)

    # Single avalanche observations count as one avalanche ("Ett (1)") so the
    # num argument is never None.
    for obs in single_avalanches:
        index = AvalancheIndex()
        index.set_num_and_size_and_index("Ett (1)", obs.DestructiveSizeName,
                                         index_definition)
        index.set_date_region_observation(obs.DtAvalancheTime,
                                          obs.ForecastRegionName, obs)
        indexes.append(index)

    # Danger signs: fresh avalanches map to an avalanche indicator, an
    # explicit "no danger signs" maps to no activity; anything else is skipped.
    for sign in signs:
        index = AvalancheIndex()
        if 'Ferske skred' in sign.DangerSignName:
            index.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in sign.DangerSignName:
            index.set_no_avalanche_activity()
        else:
            continue
        index.set_date_region_observation(sign.DtObsTime,
                                          sign.ForecastRegionName, sign)
        indexes.append(index)

    return indexes
Example #5
0
def step_1_make_data_set(region_ids, from_date, to_date):
    """Makes the data set of all observed avalanche activity (including signs
    and single avalanche obs) and maps it to forecasts for the days the
    activity was observed.

    :param region_ids:  [int or list of ints]
    :param from_date:   [date]
    :param to_date:     [date]
    :return date_region, forecasted_dangers:    [], []
    """

    # get all data
    dangers = gd.get_forecasted_dangers(region_ids, from_date, to_date)
    avalanches = go.get_avalanche_activity(from_date, to_date, region_ids)
    avalanches += go.get_avalanche_activity_2(from_date, to_date, region_ids)
    single_avalanches = go.get_avalanche(from_date, to_date, region_ids)
    danger_signs = go.get_danger_sign(from_date, to_date, region_ids)

    # Keep only real forecasts that carry a danger level.
    forecasted_dangers = [
        d for d in dangers
        if 'Forecast' in d.data_table and d.danger_level != 0
    ]

    # Valid activity observations: a number estimate must be given ('Ikke
    # gitt' means 'not given'); a missing size is normalised to 'Ikke gitt'.
    observed_activity = []
    for a in avalanches:
        if a.EstimatedNumName is not None and 'Ikke gitt' not in a.EstimatedNumName:
            if a.DestructiveSizeName is None:
                a.DestructiveSizeName = 'Ikke gitt'
            observed_activity.append(a)

    # Relevant danger signs: fresh avalanches, or explicitly no signs observed.
    danger_sign_avalanches = [
        ds for ds in danger_signs
        if 'Ferske skred' in ds.DangerSignName
        or 'Ingen faretegn observert' in ds.DangerSignName
    ]

    # Relevant single avalanches: a size must be given.
    observed_avalanche = [
        sa for sa in single_avalanches
        if sa.DestructiveSizeName is not None
        and 'Ikke gitt' not in sa.DestructiveSizeName
    ]

    def _activity_date(a):
        """Date an activity observation applies to: the (middle of the)
        avalanche time when present, else the observation time."""
        if isinstance(a, go.AvalancheActivityObs) and a.DtAvalancheTime is not None:
            return a.DtAvalancheTime.date()
        if isinstance(a, go.AvalancheActivityObs2) and a.DtMiddleTime is not None:
            return a.DtMiddleTime.date()
        return a.DtObsTime.date()

    def _avalanche_date(oa):
        """Date a single-avalanche observation applies to."""
        if oa.DtAvalancheTime is not None:
            return oa.DtAvalancheTime.date()
        return oa.DtObsTime.date()

    # Group the observations on (date, region name) once, instead of scanning
    # every observation for every forecast (the old nested loops were
    # O(forecasts * observations)). Insertion order preserves the original
    # per-forecast observation order.
    activity_on = {}
    for a in observed_activity:
        activity_on.setdefault((_activity_date(a), a.ForecastRegionName), []).append(a)
    signs_on = {}
    for ds in danger_sign_avalanches:
        signs_on.setdefault((ds.DtObsTime.date(), ds.ForecastRegionName), []).append(ds)
    avalanche_on = {}
    for oa in observed_avalanche:
        avalanche_on.setdefault((_avalanche_date(oa), oa.ForecastRegionName), []).append(oa)

    # Make list of all regions pr date and append forecasts and observations.
    data_date_region = []
    for d in forecasted_dangers:
        danger_date = d.date
        print('{0}'.format(danger_date))
        danger_region_name = d.region_name

        data = DataOnDateInRegion(danger_date, danger_region_name)
        data.forecast.append(d)

        key = (danger_date, danger_region_name)
        data.avalanche_activity.extend(activity_on.get(key, []))
        data.danger_sign.extend(signs_on.get(key, []))
        data.avalanche.extend(avalanche_on.get(key, []))

        data_date_region.append(data)

    # discard days and regions where no observations are present
    date_region = [
        d for d in data_date_region
        if d.avalanche_activity or d.danger_sign or d.avalanche
    ]

    return date_region, forecasted_dangers