Example #1
def get_forecast_region_for_regid(reg_id):
    """Returns the forecast region used at a given place in a given season.

    :param reg_id: [int]            regid in regObs
    :return:       [int]            ForecastRegionTID from regObs
                   [string]         ForecastRegionName from regObs
                   [observation]    The full observation on this regID
    """

    region_id, region_name, observation = None, None, None

    try:
        observation = go.get_data_as_class(reg_ids=reg_id)
        utm33x = observation[0].UTMEast
        utm33y = observation[0].UTMNorth
        date = observation[0].DtObsTime
        season = get_season_from_date(date.date())

        region_id, region_name = get_forecast_region_for_coordinate(
            utm33x, utm33y, season)

    except Exception:
        error_msg = sys.exc_info()[0]
        ml.log_and_print(
            '[error] getmisc.py -> get_forecast_region_for_regid: Exception on RegID={0}: {1}.'
            .format(reg_id, error_msg))

    return region_id, region_name, observation
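
# A minimal usage sketch: look up the forecast region for a single regid and print it.
# The regid 130548 is borrowed from the get_data example further down, purely as an illustration.
region_id, region_name, observation = get_forecast_region_for_regid(130548)
print(region_id, region_name)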
Example #2
def incident_troms_winter_2018_for_markus():
    """Communication dated 2018-11-29

    Hi Ragnar and Jostein

    Could one of you help me produce a plot showing the number of registered accidents and incidents in
    the forecast regions Tromsø, Lyngen, Sør-Troms and Indre-Troms for
    the period 15.02 – 15.05?

    ...

    Are you interested in what is registered in
    regObs, or in the quality-assured data published on varsom?

    Do you want incidents that have had consequences?

    Should the incidents be plotted over time or shown on a map?

    ...

    Varsom
    Not necessarily with consequences
    Time

    :return:
    """

    pickle_file_name = '{0}incident_troms_winter_2018_for_markus.pickle'.format(
        env.local_storage)
    from_date = dt.date(2018, 2, 15)  # '2018-02-15'
    to_date = dt.date(2018, 5, 15)  # '2018-05-15'

    # Tromsø, Lyngen, Sør-Troms and Indre-Troms
    regions = [3011, 3010, 3012, 3013]

    get_new = False

    if get_new:
        all_varsom_incidents = gm.get_varsom_incidents(
            add_forecast_regions=True, add_observations=True)
        all_regobs_avalobs_and_incidents = go.get_data_as_class(
            from_date,
            to_date,
            registration_types=[11, 26],
            region_ids=regions,
            output='Nest')

        mp.pickle_anything(
            [all_varsom_incidents, all_regobs_avalobs_and_incidents],
            pickle_file_name)

    else:
        [all_varsom_incidents, all_regobs_avalobs_and_incidents
         ] = mp.unpickle_anything(pickle_file_name)

    varsom_incidents = mm.make_date_int_dict(start_date=from_date,
                                             end_date=to_date)
    regobs_avalobs_and_incidents = mm.make_date_int_dict(start_date=from_date,
                                                         end_date=to_date)

    for i in all_varsom_incidents:
        if from_date <= i.date <= to_date:
            if i.region_id in regions:
                if i.date in varsom_incidents.keys():
                    varsom_incidents[i.date] += 1

    for i in all_regobs_avalobs_and_incidents:
        if from_date <= i.DtObsTime.date() <= to_date:
            if i.ForecastRegionTID in regions:
                if i.DtObsTime.date() in regobs_avalobs_and_incidents.keys():
                    regobs_avalobs_and_incidents[i.DtObsTime.date()] += 1

    sum_varsom = sum(varsom_incidents.values())
    sum_regobs = sum(regobs_avalobs_and_incidents.values())

    varsom_incident_troms_winter_2018_for_markus = '{0}varsom_incident_troms_winter_2018_for_markus.csv'.format(
        env.output_folder)
    regobs_incident_troms_winter_2018_for_markus = '{0}regobs_incident_troms_winter_2018_for_markus.csv'.format(
        env.output_folder)

    with open(varsom_incident_troms_winter_2018_for_markus,
              'w',
              encoding='utf-8') as f:
        make_header = True
        for k, v in varsom_incidents.items():
            if make_header:
                f.write('date; number\n')
                make_header = False
            f.write('{}; {}\n'.format(k, v))

    with open(regobs_incident_troms_winter_2018_for_markus,
              'w',
              encoding='utf-8') as f:
        make_header = True
        for k, v in regobs_avalobs_and_incidents.items():
            if make_header:
                f.write('date; number\n')
                make_header = False
            f.write('{}; {}\n'.format(k, v))

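# A minimal stand-in sketch for mm.make_date_int_dict as used above, assuming it simply
# maps every date in [start_date, end_date] to 0 so incidents can be counted per day.
# This is an assumption about that helper, not its actual implementation.
def _make_date_int_dict_sketch(start_date, end_date):
    return {start_date + dt.timedelta(days=n): 0
            for n in range((end_date - start_date).days + 1)}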
Example #3
# If multiple IDs are used, give them as a list. The result may be returned as a list of forms (default)
# or nested, i.e. all forms are listed under their respective observations.
two_obs = go.get_data(reg_ids=[130548, 130328], output='Nest')

# A request may specify a time period and specific geohazards.
# Snow is 10 and ice is 70. Water is 60. Dirt is [20, 30, 40]
all_data_snow = go.get_data('2016-12-30', '2017-01-01', geohazard_tids=10)
ice_data = go.get_data(from_date='2016-10-01',
                       to_date='2016-11-01',
                       geohazard_tids=70,
                       output='Nest')

# The data may also be returned as a list of classes, as opposed to the default return from get_data,
# which is the raw dictionaries as given by the regObs web API.
data_as_classes = go.get_data_as_class('2018-05-01', '2018-08-01')
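
# A small illustrative sketch: loop over the returned observation objects and print their
# observation time. DtObsTime is the attribute used in the other examples on this page;
# assume the same structure here.
for obs in data_as_classes:
    print(obs.DtObsTime)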

# We may get observation forms directly. Note that from and to date come first and may also be given
# as positional arguments, although keyword arguments are recommended.
land_slides = go.get_land_slide_obs('2018-01-01', '2018-02-01')
incident = go.get_incident('2012-03-01', '2012-03-10')
ice_thicks = go.get_ice_thickness('2018-01-20', '2018-02-10')
snow_surface = go.get_snow_surface_observation('2018-01-28', '2018-02-01')
problems = go.get_avalanche_problem_2('2017-03-01', '2017-03-10')

# We may request an observation count.
# Remember: if forms are grouped under their observation, it's a nest.
# If the forms are separate items in the list, it's a list.
seasonal_count_regs = go.get_data('2016-08-01',
                                  '2017-08-01',
                                  output='Count nest')
Example #4
def get_snow_obs(from_date, to_date):
    all_data_snow = go.get_data_as_class(from_date, to_date, geohazard_tids=10)
    return all_data_snow
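
# A hedged usage sketch: one month of snow observations, with dates reused from the
# examples above purely as an illustration.
snow_obs = get_snow_obs('2018-01-01', '2018-02-01')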
Example #5
def get_all_observations(year,
                         output='Nest',
                         geohazard_tids=None,
                         lang_key=1,
                         max_file_age=23):
    """Specialized method for getting all observations for one season (1. sept to 31. august).
    For the current season (at the time of writing, 2018-19), if a request has been made within the last 23 hrs,
    data is retrieved from a locally stored pickle; if not, a new request is made to the regObs API. Previous
    seasons are not requested if a pickle is found in local storage.

    :param year:                [string] E.g. season '2017-18' (Sept-Sept) or one single year '2018'
    :param output:              [string] 'Nest' or 'List'
    :param geohazard_tids:      [int or list of ints] Default None gives all. Note, the pickle stores all
                                geohazards, but this option filters what is returned.
    :param lang_key:            [int] 1 is Norwegian, 2 is English
    :param max_file_age:        [int] Max age of the pickle file in hours before a new request is made

    :return:
    """

    from_date, to_date = gm.get_dates_from_season(year=year)
    file_name_list = '{0}all_observations_list_{1}_lk{2}.pickle'.format(
        env.local_storage, year, lang_key)
    file_name_nest = '{0}all_observations_nest_{1}_lk{2}.pickle'.format(
        env.local_storage, year, lang_key)
    get_new = True
    date_limit = dt.datetime.now() - dt.timedelta(hours=max_file_age)

    # If we are well past the current season (by 30 days), there is little chance the data set has changed.
    current_season = gm.get_season_from_date(dt.date.today() -
                                             dt.timedelta(30))

    if geohazard_tids:
        if not isinstance(geohazard_tids, list):
            geohazard_tids = [geohazard_tids]

    if os.path.exists(file_name_list):
        # If the file contains a season long gone, don't make a new one.
        if year == current_season:
            file_age = dt.datetime.fromtimestamp(
                os.path.getmtime(file_name_list))
            # If the file is newer than the given time limit, don't make a new one.
            if file_age > date_limit:
                # If the file size is larger than that of a nearly empty file, don't make a new one.
                if os.path.getsize(file_name_list) > 100:  # 100 bytes limit
                    get_new = False
        else:
            get_new = False

    if get_new:
        # When getting new data, get all geohazards.
        nested_observations = go.get_data_as_class(from_date=from_date,
                                                   to_date=to_date,
                                                   output='Nest',
                                                   geohazard_tids=None,
                                                   lang_key=lang_key)

        mp.pickle_anything(nested_observations, file_name_nest)

        listed_observations = []
        for d in nested_observations:
            for o in d.Observations:
                if _observation_is_not_empty(o):
                    listed_observations.append(o)
            for p in d.Pictures:
                # p['RegistrationName'] = 'Bilde'
                listed_observations.append(p)

        mp.pickle_anything(listed_observations, file_name_list)

    if output == 'Nest':
        all_nested_observations = mp.unpickle_anything(file_name_nest)
        nested_observations = []

        if geohazard_tids:
            for o in all_nested_observations:
                if o.GeoHazardTID in geohazard_tids:
                    nested_observations.append(o)

        else:
            nested_observations = all_nested_observations

        return nested_observations

    elif output == 'List':
        all_listed_observations = mp.unpickle_anything(file_name_list)
        listed_observations = []

        if geohazard_tids:
            for o in all_listed_observations:
                if o.GeoHazardTID in geohazard_tids:
                    listed_observations.append(o)

        else:
            listed_observations = all_listed_observations

        return listed_observations

    else:
        ml.log_and_print(
            '[warning] getvarsompickles.py -> get_all_observations: Unknown output option'
        )
        return []
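
# A hedged usage sketch: all snow observations (geohazard 10) for the 2017-18 season,
# returned as a flat list of forms. Assumes env.local_storage points to a writable
# folder for the season pickles.
all_snow_2017_18 = get_all_observations('2017-18', output='List', geohazard_tids=10)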
Example #6
def get_varsom_incidents(add_forecast_regions=False,
                         add_observations=False,
                         add_forecasts=False):
    """Returns the incidents shown on varsom.no in a list of VarsomIncident objects.
    Data input is a UTF-8 formatted csv file in the input folder. The original file might have newlines and
    semicolons (;) in the cells; these need to be removed before saving as csv.

    :param add_forecast_regions:    [bool] If true, the regid is used to get coordinates, and the forecast region
                                    at the observation date is added. Note: if true, expect getting the data to take some time.
    :param add_observations:        [bool] If true, the observation is added when looking up the region name. This
                                    option is only taken into account if add_forecast_regions is true.
    :param add_forecasts:           [bool] If true, the forecast at that time and place is added to the incident. This
                                    option is only taken into account if add_forecast_regions is true.
    """

    incidents_file = '{}varsomsineskredulykker.csv'.format(
        env.varsom_incidents)
    varsom_incidents = rf.read_csv_file(incidents_file, VarsomIncident)

    # map incident to forecast region
    if add_forecast_regions:
        for i in varsom_incidents:
            if i.regid == []:
                ml.log_and_print(
                    "[warning] getmisc.py -> get_varsom_incidents: No regid on incident on {}. No forecast region found."
                    .format(i.date))
            else:
                region_id, region_name, observation = get_forecast_region_for_regid(
                    i.regid[0])
                i.add_forecast_region(region_id, region_name)
                print("regid {}: {}".format(i.regid[0], i.date))

                if add_observations:
                    i.add_observation(observation[0])
                    if len(i.regid) > 1:
                        observations = go.get_data_as_class(
                            reg_ids=i.regid[1:])
                        for o in observations:
                            i.add_observation(o)

        if add_forecasts:
            years = ['2014-15', '2015-16', '2016-17',
                     '2017-18']  # the years with data

            all_forecasts = []
            for y in years:
                region_ids = get_forecast_regions(year=y)
                from_date, to_date = get_forecast_dates(y)
                all_forecasts += gd.get_forecasted_dangers(
                    region_ids, from_date, to_date)

            for i in varsom_incidents:
                incident_date = i.date
                incident_region_id = i.region_id
                print("{}: {}".format(i.location, incident_date))
                for f in all_forecasts:
                    forecast_date = f.date
                    forecast_region_id = f.region_regobs_id
                    if incident_date == forecast_date:
                        if incident_region_id == forecast_region_id:
                            i.add_forecast(f)

    return varsom_incidents
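
# A hedged usage sketch, mirroring the call in incident_troms_winter_2018_for_markus above:
# incidents enriched with forecast regions and observations. Note that this triggers one
# regObs lookup per incident, so expect it to take some time.
incidents = get_varsom_incidents(add_forecast_regions=True, add_observations=True)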