Example #1
def get_danger_signs(from_date, to_date, region_ids):
    ds_list = go.get_danger_sign(from_date,
                                 to_date,
                                 region_ids=region_ids,
                                 location_id=None,
                                 group_id=None,
                                 observer_ids=None,
                                 observer_nick=None,
                                 observer_competence=None,
                                 output='List',
                                 geohazard_tids=10,  # geohazard_tid 10 = snow
                                 lang_key=1)
    df = go._make_data_frame(ds_list)

    return df
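
# Usage sketch for get_danger_signs above (not from the original source). The
# import path is an assumption: in the varsomdata project the observations
# module is commonly imported as getobservations.
from varsomdata import getobservations as go

danger_sign_df = get_danger_signs('2018-03-01', '2018-03-10', region_ids=3011)
print(danger_sign_df.head())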
Example #2
avalanche_activity_2 = go.get_avalanche_activity_2('2017-03-01', '2017-03-10')

print('Std.out from AvalancheActivityObs2 object')
print('\t', avalanche_activity_2[0].DestructiveSizeName)

# Observations of single avalanches
avalanche_obs = go.get_avalanche('2015-03-01', '2015-03-10')

print('Std.out from AvalancheObs object')
print('\t', avalanche_obs[0].HeightStartZone,
      avalanche_obs[0].DestructiveSizeName)

# Observations of avalanches given as a danger sign (DangerSignTID = 2).
# A query may be filtered by region, e.g. region_ids=3011 is Tromsø.
all_danger_signs = go.get_danger_sign('2018-03-01',
                                      '2018-03-10',
                                      region_ids=3011)
avalanche_danger_signs = []
for o in all_danger_signs:
    if o.DangerSignTID == 2:  # danger sign is avalanche activity
        avalanche_danger_signs.append(o)

print('Std.out from DangerSign object')
print('\t', avalanche_danger_signs[0].DangerSignName)

# Note that in this example dates are given as strings, but they may also be given as date objects.
# See ex_observations for more examples of queries.

# Picture URLs and metadata are in the pictures list on each observation.
# Note that a picture on a danger sign observation is not linked to a specific danger sign,
# so some pictures might show other danger signs than the one requested.
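
# The notes above as a minimal sketch: querying with date objects instead of
# strings, and listing picture metadata per observation. The attribute name
# 'Pictures' is an assumption and may differ in the actual observation classes.
import datetime as dt

danger_signs_march = go.get_danger_sign(dt.date(2018, 3, 1),
                                        dt.date(2018, 3, 10),
                                        region_ids=3011)

for obs in danger_signs_march:
    for picture in getattr(obs, 'Pictures', []):
        print(picture)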
Example #3
def get_avalanche_index(from_date,
                        to_date,
                        region_ids=None,
                        observer_ids=None):
    """All tables in regObs containing information on avalanche activity is mapped to an avalanche index. These
    are AvalancheActivityObs, AvalancheActivityObs2, AvalanchObs and DangerObs. The definition of the index is
    found in the input/aval_dl_order_of_size_and_num.csv configuration file.

    :param from_date:
    :param to_date:
    :param region_ids:
    :param observer_ids:
    :return avalanche_indexes:  [list] of class AvalanchIndex
    """

    # get all data
    avalanche_activities = go.get_avalanche_activity(from_date,
                                                     to_date,
                                                     region_ids=region_ids,
                                                     observer_ids=observer_ids)
    avalanche_activities_2 = go.get_avalanche_activity_2(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    avalanches = go.get_avalanche(from_date,
                                  to_date,
                                  region_ids=region_ids,
                                  observer_ids=observer_ids)
    danger_signs = go.get_danger_sign(from_date,
                                      to_date,
                                      region_ids=region_ids,
                                      observer_ids=observer_ids)

    # get index definition
    index_definition = rf.read_configuration_file(
        '{0}aval_dl_order_of_size_and_num.csv'.format(
            env.matrix_configurations), AvalancheIndex)

    avalanche_indexes = []

    for aa in avalanche_activities:

        ai = AvalancheIndex()
        ai.set_num_and_size_and_index(aa.EstimatedNumName,
                                      aa.DestructiveSizeName, index_definition)
        ai.set_date_region_observation(aa.DtAvalancheTime,
                                       aa.ForecastRegionName, aa)
        avalanche_indexes.append(ai)

    for aa in avalanche_activities_2:

        if aa.DtStart and aa.DtEnd:
            ai = AvalancheIndex()
            ai.set_num_and_size_and_index(aa.EstimatedNumName,
                                          aa.DestructiveSizeName,
                                          index_definition)
            # Activity date is the average of DtStart and DtEnd
            activity_date = aa.DtStart + (aa.DtEnd - aa.DtStart) / 2
            ai.set_date_region_observation(activity_date.date(),
                                           aa.ForecastRegionName, aa)
            avalanche_indexes.append(ai)

    for aa in avalanches:

        ai = AvalancheIndex()
        # Make sure size is not None. "Ett (1)" is Norwegian for "One (1)", i.e. a single avalanche counts as one.
        ai.set_num_and_size_and_index("Ett (1)", aa.DestructiveSizeName,
                                      index_definition)
        ai.set_date_region_observation(aa.DtAvalancheTime,
                                       aa.ForecastRegionName, aa)
        avalanche_indexes.append(ai)

    for ds in danger_signs:

        ai = AvalancheIndex()
        if 'Ferske skred' in ds.DangerSignName:  # 'Ferske skred' = recent avalanches
            ai.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in ds.DangerSignName:  # = no danger signs observed
            ai.set_no_avalanche_activity()
        else:
            continue
        ai.set_date_region_observation(ds.DtObsTime, ds.ForecastRegionName, ds)
        avalanche_indexes.append(ai)

    return avalanche_indexes
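
# Usage sketch for get_avalanche_index above. Dates and region id are
# illustrative; the sketch assumes the "go", "rf" and "env" modules and the
# AvalancheIndex class used by the function are importable in this scope.
import datetime as dt

avalanche_indexes = get_avalanche_index(dt.date(2018, 3, 1),
                                        dt.date(2018, 3, 10),
                                        region_ids=[3011])
print('Avalanche index entries found:', len(avalanche_indexes))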
Example #4
def test_danger_sign(self):
    danger_signs = go.get_danger_sign('2017-12-13', '2017-12-16', geohazard_tids=10)
    danger_signs_df = go.get_danger_sign('2017-12-13', '2017-12-16', output='DataFrame')
    danger_signs_count = go.get_danger_sign('2017-12-13', '2017-12-16', output='Count')
    self.assertTrue(danger_signs_count <= len(danger_signs) <= 3 * danger_signs_count)
    self.assertIsInstance(danger_signs_df, pd.DataFrame)
Example #5
def step_1_make_data_set(region_ids, from_date, to_date):
    """Makes the data set of all observed avalanche activity (including signs and single avalanches obs) and maps
    to forecasts for the days they are observed.

    :param region_ids:  [int or list of ints]
    :param from_date:   [date]
    :param to_date:     [date]
    :return date_region, forecasted_dangers:    [], []
    """

    # get all data
    dangers = gd.get_forecasted_dangers(region_ids, from_date, to_date)
    avalanches = go.get_avalanche_activity(from_date, to_date, region_ids)
    avalanches += go.get_avalanche_activity_2(from_date, to_date, region_ids)
    single_avalanches = go.get_avalanche(from_date, to_date, region_ids)
    danger_signs = go.get_danger_sign(from_date, to_date, region_ids)

    # List of only forecasts
    forecasted_dangers = []
    for d in dangers:
        if 'Forecast' in d.data_table and d.danger_level != 0:
            forecasted_dangers.append(d)

    # List of only valid activity observations
    observed_activity = []
    for a in avalanches:
        if a.EstimatedNumName is not None:
            if 'Ikke gitt' not in a.EstimatedNumName:  # 'Ikke gitt' = 'Not given'
                if a.DestructiveSizeName is None:
                    a.DestructiveSizeName = 'Ikke gitt'
                observed_activity.append(a)

    # List of relevant danger sign observations ('Ferske skred' = recent avalanches,
    # 'Ingen faretegn observert' = no danger signs observed)
    danger_sign_avalanches = []
    for ds in danger_signs:
        if 'Ferske skred' in ds.DangerSignName or 'Ingen faretegn observert' in ds.DangerSignName:
            danger_sign_avalanches.append(ds)

    # List of relevant single avalanches
    observed_avalanche = []
    for sa in single_avalanches:
        if sa.DestructiveSizeName is not None:
            if 'Ikke gitt' not in sa.DestructiveSizeName:
                observed_avalanche.append(sa)

    # Make list of all regions per date and append forecasts and observations.
    data_date_region = []
    for d in forecasted_dangers:
        danger_date = d.date
        print('{0}'.format(danger_date))
        danger_region_name = d.region_name

        data = DataOnDateInRegion(danger_date, danger_region_name)
        data.forecast.append(d)

        for a in observed_activity:
            aval_date = a.DtObsTime.date()
            if isinstance(a, go.AvalancheActivityObs):
                if a.DtAvalancheTime is not None:
                    aval_date = a.DtAvalancheTime.date()
            if isinstance(a, go.AvalancheActivityObs2):
                if a.DtMiddleTime is not None:
                    aval_date = a.DtMiddleTime.date()

            aval_region_name = a.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.avalanche_activity.append(a)

        for da in danger_sign_avalanches:
            aval_date = da.DtObsTime.date()
            aval_region_name = da.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.danger_sign.append(da)

        for oa in observed_avalanche:
            aval_date = oa.DtObsTime.date()
            if oa.DtAvalancheTime is not None:
                aval_date = oa.DtAvalancheTime.date()
            aval_region_name = oa.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.avalanche.append(oa)

        data_date_region.append(data)

    # Discard days and regions where no observations are present
    date_region = []
    for d in data_date_region:
        if d.avalanche_activity or d.danger_sign or d.avalanche:
            date_region.append(d)

    return date_region, forecasted_dangers
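
# Usage sketch for step_1_make_data_set above. Region ids and dates are
# illustrative; the sketch assumes the "gd" (forecasts) and "go" (observations)
# modules and the DataOnDateInRegion class used by the function are available.
import datetime as dt

date_region, forecasted_dangers = step_1_make_data_set(
    region_ids=[3011, 3012],
    from_date=dt.date(2018, 1, 1),
    to_date=dt.date(2018, 3, 1))

print('Days and regions with observations:', len(date_region))
print('Forecasts in the period:', len(forecasted_dangers))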