def get_forecast_region_for_regid(reg_id):
    """Returns the forecast region used at a given place in a given season.

    :param reg_id: [int] regid in regObs
    :return: [int] ForecastRegionTID from regObs
             [string] ForecastRegionName from regObs
             [observation] The full observation on this regID
    """
    region_id, region_name, observation = None, None, None

    try:
        observation = go.get_all_observations(reg_ids=reg_id)
        utm33x = observation[0].UTMEast
        utm33y = observation[0].UTMNorth
        date = observation[0].DtObsTime
        season = get_season_from_date(date.date())
        region_id, region_name = get_forecast_region_for_coordinate(
            utm33x, utm33y, season)
    except Exception as e:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
        # propagate. Log the exception instance itself (the old code logged
        # only sys.exc_info()[0], i.e. just the exception class).
        ml.log_and_print(
            '[error] getmisc.py -> get_forecast_region_for_regid: Exception on RegID={0}: {1}.'
            .format(reg_id, e))

    return region_id, region_name, observation
def get_varsom_incidents(add_forecast_regions=False, add_observations=False, add_forecasts=False):
    """Returns the incidents shown on varsom.no in a list of VarsomIncident objects.

    Data input is a utf-8 formatted csv file in input folder. Original file might have
    newlines and semicolons (;) in the cells. These need to be removed before saving as csv.

    :param add_forecast_regions: [bool] If true the regid is used to get coordinates and the
                                 forecast region at the observation date is added. Note, if
                                 true, some time is to be expected getting data.
    :param add_observations:     [bool] If true the observation is added when looking up the
                                 region name. This option is only taken into account if
                                 add_forecast_regions is true.
    :param add_forecasts:        [bool] If true the forecast at that time and place is added
                                 to the incident. This option is only taken into account if
                                 add_forecast_regions is true.
    """
    # incidents_file = '{}varsomsineskredulykker.csv'.format(env.varsom_incidents)
    incidents_file = '{}varsomincidents3.csv'.format(env.varsom_incidents)
    varsom_incidents = rf.read_csv_file(incidents_file, VarsomIncident)

    # map incident to forecast region
    if add_forecast_regions:
        for i in varsom_incidents:
            if not i.regid:  # idiomatic empty-list test (was `i.regid == []`)
                ml.log_and_print("[warning] getmisc.py -> get_varsom_incidents: No regid on incident on {}. No forecast region found.".format(i.date))
            else:
                region_id, region_name, observation = get_forecast_region_for_regid(i.regid[0])
                i.add_forecast_region(region_id, region_name)
                print("regid {}: {}".format(i.regid[0], i.date))

                if add_observations:
                    # The first regid was already fetched for the region lookup;
                    # fetch the rest (if any) in one request.
                    i.add_observation(observation[0])
                    if len(i.regid) > 1:
                        observations = go.get_all_observations(reg_ids=i.regid[1:])
                        for o in observations:
                            i.add_observation(o)

        if add_forecasts:
            years = ['2014-15', '2015-16', '2016-17', '2017-18', '2018-19']  # the years with data
            all_forecasts = []

            for y in years:
                region_ids = get_forecast_regions(year=y)
                from_date, to_date = get_forecast_dates(y)
                all_forecasts += gd.get_forecasted_dangers(region_ids, from_date, to_date)

            for i in varsom_incidents:
                incident_date = i.date
                incident_region_id = i.region_id
                print("{}: {}".format(i.location, incident_date))
                for f in all_forecasts:
                    # A forecast matches the incident only on both date and region.
                    if incident_date == f.date and incident_region_id == f.region_regobs_id:
                        i.add_forecast(f)

    return varsom_incidents
def pick_winners_at_conference():
    """Pick winners at regObs competition at nordic avalanche conference at Åndalsnes."""
    import random as rand

    romsdalen_konf_obs = go.get_all_observations('2017-11-02', '2017-11-05', region_ids=3023, geohazard_tids=10)
    romsdalen_konf_regs = go.get_data('2017-11-02', '2017-11-05', region_ids=3023, geohazard_tids=10)

    # Keep only observations/registrations from the Rauma municipality.
    rauma_konf_obs = [o for o in romsdalen_konf_obs if o.MunicipalName == 'RAUMA']
    rauma_konf_regs = [r for r in romsdalen_konf_regs if r['MunicipalName'] == 'RAUMA']

    print('Antall obs: {}'.format(len(rauma_konf_regs)))
    print('Antall enkelt obs: {}'.format(len(rauma_konf_obs)))

    # rand.choice fixes the off-by-one in the old rand.randint(0, len(lst)):
    # randint's upper bound is inclusive, so it could raise IndexError.
    for n in range(1, 6):
        print('{} : {}'.format(n, rand.choice(rauma_konf_obs).NickName))
# One observation with a given ID may one_obs = go.get_data(reg_ids=130548) # If multiple id's wil be used, give them as list. Result may be returned as a list of forms (default) or # nested, i.e. all forms are listed under their respective observations. two_obs = go.get_data(reg_ids=[130548, 130328]) # A request may specify a time period and specific geohazards. # Snow is 10 and ice is 70. Water is 60. Dirt is [20, 30, 40] all_data_snow = go.get_data('2016-12-30', '2017-01-01', geohazard_tids=10) ice_data = go.get_data(from_date='2016-10-01', to_date='2016-11-01', geohazard_tids=70) # The data may be returned as a list of classes, as opposed to the default return in get_data which are dictionaries # raw as given on regObs webapi. data_as_classes = go.get_all_observations('2018-05-01', '2018-08-01') # We may get observation forms directly. Note, from and to date are first and may also be given as # positional arguments, even though Is recommend keyword arguments. land_slides = go.get_land_slide_obs('2018-01-01', '2018-02-01') incident = go.get_incident('2012-03-01', '2012-03-10') ice_thicks = go.get_ice_thickness('2018-01-20', '2018-02-10') snow_surface = go.get_snow_surface_observation('2018-01-28', '2018-02-01') problems = go.get_avalanche_problem_2('2017-03-01', '2017-03-10') # We may request an observation count. seasonal_count_regs = go.get_count('2016-08-01', '2017-08-01') # And give date as a date time, and not a string. # Note the argument specifying a part of an Observer Nick. count_registrations_snow_10 = go.get_data(dt.date(2018, 10, 1), dt.date.today(), geohazard_tids=10, observer_nick='obskorps')
        # NOTE(review): tail of a mapping constructor whose `def` is outside this view —
        # copies fields from the source object `d` onto the ORM-mapped instance.
        self.ObserverID = d.ObserverID
        self.RowNumber = d.RowNumber
        self.GeoHazardName = d.GeoHazardName
        self.RegistrationName = d.RegistrationName
        self.TypicalValue1 = d.TypicalValue1
        self.TypicalValue2 = d.TypicalValue2
        self.LangKey = d.LangKey
        self.Picture = d.Picture


if __name__ == "__main__":
    # Fetch one month of observations for one region and persist them to the
    # database via SQLAlchemy.
    from_date = dt.date(2016, 4, 1)
    to_date = dt.date(2016, 5, 1)
    region_ids = 116
    all_data = go.get_all_observations(from_date, to_date, region_ids)

    # Create tables (if missing) and open a session bound to the module-level engine.
    Base.metadata.create_all()
    Session = sessionmaker(bind=engine)
    s = Session()

    # Map every raw observation onto the ORM class and commit in one transaction.
    all_data_ny_mapping = []
    for d in all_data:
        all_data_ny_mapping.append(AllRegistrations(d))
    s.add_all(all_data_ny_mapping)
    s.commit()

    a = 1  # breakpoint anchor for debugging
def get_snow_obs(from_date, to_date):
    """Return all snow observations (geohazard_tid 10) in the given period."""
    return go.get_all_observations(from_date, to_date, geohazard_tids=10)
def _write_date_count_csv(file_name, date_count):
    """Write a {date: count} mapping to a semicolon separated csv with a header line."""
    with open(file_name, 'w', encoding='utf-8') as f:
        f.write('date; number\n')
        for k, v in date_count.items():
            f.write('{}; {}\n'.format(k, v))


def incident_troms_winter_2018_for_markus():
    """Count avalanche incidents per day in four Troms forecast regions, winter 2018.

    Communication dated 2018-11-29 (translated from Norwegian):
    "Hi Ragnar and Jostein. Can one of you help me extract a plot showing the number of
    registered accidents and incidents in the forecast regions Tromsø, Lyngen, Sør-Troms
    and Indre-Troms for the period 15.02 – 15.05?"
    ...
    "Are you interested in what is registered in regObs, or the quality assured data on
    varsom? Do you want incidents that had a consequence? Should the incidents be plotted
    over time or shown on a map?"
    ...
    Answer: Varsom data. Not necessarily with consequence. Over time.

    :return:
    """
    pickle_file_name = '{0}incident_troms_winter_2018_for_markus.pickle'.format(
        env.local_storage)
    from_date = dt.date(2018, 2, 15)  # '2018-02-15'
    to_date = dt.date(2018, 5, 15)  # '2018-05-15'

    # Tromsø, Lyngen, Sør-Troms og Indre-Troms
    regions = [3011, 3010, 3012, 3013]

    get_new = False
    if get_new:
        all_varsom_incidents = gm.get_varsom_incidents(
            add_forecast_regions=True, add_observations=True)
        # Registration types 11 and 26 — presumably avalanche obs and incidents; confirm
        # against the regObs registration-type table.
        all_regobs_avalobs_and_incidents = go.get_all_observations(
            from_date, to_date, registration_types=[11, 26],
            region_ids=regions, output='List')
        mp.pickle_anything(
            [all_varsom_incidents, all_regobs_avalobs_and_incidents],
            pickle_file_name)
    else:
        [all_varsom_incidents, all_regobs_avalobs_and_incidents
         ] = mp.unpickle_anything(pickle_file_name)

    # Dicts mapping every date in the period to an (initially zero) incident count.
    varsom_incidents = mm.make_date_int_dict(start_date=from_date, end_date=to_date)
    regobs_avalobs_and_incidents = mm.make_date_int_dict(start_date=from_date, end_date=to_date)

    for i in all_varsom_incidents:
        if from_date <= i.date <= to_date and i.region_id in regions and i.date in varsom_incidents:
            varsom_incidents[i.date] += 1

    for i in all_regobs_avalobs_and_incidents:
        obs_date = i.DtObsTime.date()
        if from_date <= obs_date <= to_date and i.ForecastRegionTID in regions \
                and obs_date in regobs_avalobs_and_incidents:
            regobs_avalobs_and_incidents[obs_date] += 1

    # Totals kept for inspection/debugging (not written to file).
    sum_varsom = sum(varsom_incidents.values())
    sum_regobs = sum(regobs_avalobs_and_incidents.values())

    varsom_incident_troms_winter_2018_for_markus = '{0}varsom_incident_troms_winter_2018_for_markus.csv'.format(
        env.output_folder)
    regobs_incident_troms_winter_2018_for_markus = '{0}regobs_incident_troms_winter_2018_for_markus.csv'.format(
        env.output_folder)

    # The two files were previously written with duplicated inline loops; use the helper.
    _write_date_count_csv(varsom_incident_troms_winter_2018_for_markus, varsom_incidents)
    _write_date_count_csv(regobs_incident_troms_winter_2018_for_markus, regobs_avalobs_and_incidents)
def get_all_observations(year, output='List', geohazard_tids=None, lang_key=1, max_file_age=23):
    """Specialized method for getting all observations for one season (1. sept to 31. august).
    For the current season (at the time of writing, 2018-19), if request has been made the
    last 23hrs, data is retrieved from a locally stored pickle, if not, new request is made
    to the regObs api. Previous seasons are not requested if a pickle is found in local storage.

    :param year: [string] Eg. season '2017-18' (sept-sept) or one single year '2018'
    :param output: [string] 'List' or 'FlatList'
    :param geohazard_tids: [int or list of ints] Default None gives all. Note, pickle stores all,
                           but this option returns a select
    :param lang_key [int] 1 is norwegian, 2 is english
    :param max_file_age: [int] hrs how old the file is before new is retrieved
    :return:
    """
    from_date, to_date = gm.get_dates_from_season(year=year)

    # Two pickle caches per season/language: nested ('list') and flattened ('flat').
    file_name_list = '{0}all_observations_list_{1}_lk{2}.pickle'.format(
        env.local_storage, year, lang_key)
    file_name_flat = '{0}all_observations_flat_{1}_lk{2}.pickle'.format(
        env.local_storage, year, lang_key)
    get_new = True
    file_date_limit = dt.datetime.now() - dt.timedelta(hours=max_file_age)
    # if we are well out of the current season (30 days) its little chance the data set has changed.
    current_season = gm.get_season_from_date(dt.date.today() - dt.timedelta(30))

    # Normalize a single tid to a one-element list so membership tests below work.
    if geohazard_tids:
        if not isinstance(geohazard_tids, list):
            geohazard_tids = [geohazard_tids]

    if os.path.exists(file_name_list):
        # if file contains a season long gone, dont make new.
        if year == current_season:
            file_age = dt.datetime.fromtimestamp(
                os.path.getmtime(file_name_list))
            # If file is newer than the given time limit, dont make new.
            if file_age > file_date_limit:
                # If file size larger than that of an nearly empty file, dont make new.
                if os.path.getsize(file_name_list) > 100:  # 100 bytes limit
                    get_new = False
        else:
            get_new = False

    if get_new:
        # When get new, get all geo hazards (the pickle always stores all;
        # geohazard filtering happens on the way out).
        listed_observations = go.get_all_observations(from_date=from_date,
                                                      to_date=to_date,
                                                      output='List',
                                                      geohazard_tids=None,
                                                      lang_key=lang_key)
        mp.pickle_anything(listed_observations, file_name_list)
        flat_listed_observations = [
            o for lo in listed_observations for o in lo.Observations
        ]
        mp.pickle_anything(flat_listed_observations, file_name_flat)

    if output == 'List':
        all_listed_observations = mp.unpickle_anything(file_name_list)
        listed_observations = []
        if geohazard_tids:
            for o in all_listed_observations:
                if o.GeoHazardTID in geohazard_tids:
                    listed_observations.append(o)
        else:
            listed_observations = all_listed_observations
        return listed_observations

    elif output == 'FlatList':
        all_flat_listed_observations = mp.unpickle_anything(file_name_flat)
        flat_listed_observations = []
        if geohazard_tids:
            for o in all_flat_listed_observations:
                if o.GeoHazardTID in geohazard_tids:
                    flat_listed_observations.append(o)
        else:
            flat_listed_observations = all_flat_listed_observations
        return flat_listed_observations

    else:
        # Unknown output option: warn and return an empty list.
        lg.warning(
            "getvarsompickles.py -> get_all_observations: Unknown output option."
        )
        return []