def table_regs_obs_numbs():
    """Count registrations per month for the 2017-18 season.

    :return: pr month: list of different forms
        snow: obskorps, svv, elrapp, other_gov, voluntary
        ice: NVE, fjelloppsynet, voluntary, webcam images
        water and dirt: NVE, elrapp, other_gov, voluntary
        *, **, ***, ****, *****
    """
    # The flat list is fetched alongside the nested list; it is currently
    # unused below but warms the same download/pickle cache.
    all_obs_201718_list = gvp.get_all_observations('2017-18', output='FlatList', max_file_age=230)
    all_obs_201718_nest = gvp.get_all_observations('2017-18', output='List', max_file_age=230)

    monthly_numbs = {}

    for o in all_obs_201718_nest:
        month = '{}{}'.format(_str(o.DtObsTime.year), _str(o.DtObsTime.month))
        if month not in monthly_numbs:
            monthly_numbs[month] = MonthlyNumbers(o.DtObsTime.year, o.DtObsTime.month)
        # Bug fix: the add was previously in an else-branch, so the first
        # observation of every month was dropped from its bucket.
        monthly_numbs[month].add_to_all_obs(o)

    pass
def count_of_water_forms_used():
    """Which forms are use for observing water. Which users submit how much."""
    year = '2018'
    all_water_obs_list = gvp.get_all_observations(year, output='List', geohazard_tids=60, max_file_age=230)
    all_water_obs_nest = gvp.get_all_observations(year, output='Nest', geohazard_tids=60, max_file_age=230)

    # Tally forms by class name. Picture forms are additionally tallied
    # under a 'Picture_<registration name>' key (i.e. counted twice).
    form_count = {}
    for form in all_water_obs_list:
        class_name = form.__class__.__name__
        form_count[class_name] = form_count.get(class_name, 0) + 1
        if 'Picture' in class_name:
            picture_key = 'Picture_{}'.format(form.RegistrationName)
            form_count[picture_key] = form_count.get(picture_key, 0) + 1

    observation_count = len(all_water_obs_nest)

    # Tally whole registrations per user nick.
    user_count = {}
    for reg in all_water_obs_nest:
        user_count[reg.NickName] = user_count.get(reg.NickName, 0) + 1

    # Write totals, form counts and user counts as a ;-separated text file.
    file_name = '{}water_users_and_numbers {}.txt'.format(env.output_folder, year)
    with open(file_name, 'w', encoding='utf-8') as f:
        f.write('Totalt antall observasjoner; {}\n'.format(observation_count))
        f.write('Totalt antall skjema; {}\n'.format(len(all_water_obs_list)))
        f.write('\n')
        for name, count in form_count.items():
            f.write('{};{}\n'.format(name, count))
        f.write('\n')
        for name, count in user_count.items():
            f.write('{};{}\n'.format(name, count))

    pass
def count_all_avalanches(year='2017-18'):
    """Count avalanches reported to regObs for one season and print a summary.

    Counts are split on danger-sign avalanches, avalanche events and
    avalanche activity.

    Background (jaas, 30-10-2018, translated): preparing a presentation for
    the IKAR follow-up meeting and needed the approximate number of
    avalanches reported to regObs last winter, split on danger-sign
    avalanches, accidents/events and avalanche activity (order of magnitude
    is enough: 100, 1000 or 10000).

    :param year: [string] season, eg '2017-18'
    """
    all_observations = gvp.get_all_observations(year=year, output='List')

    danger_signs_all = []
    danger_signs_count_activity = 0
    danger_signs_count_no_danger = 0
    avalanches_all = []
    avalanche_activities_all = []
    avalanche_activities_count_activity = 0
    avalanche_activities_count_no_activity = 0

    # I thought nobody used this form - it turned out it was used 2 times in 2017-18.
    old_avalanche_activity_forms = 0

    for o in all_observations:
        if isinstance(o, (go.AvalancheActivityObs2, go.AvalancheActivityObs)):
            avalanche_activities_all.append(o)
            if isinstance(o, go.AvalancheActivityObs):
                old_avalanche_activity_forms += 1
            if o.EstimatedNumTID == 1:  # no activity
                avalanche_activities_count_no_activity += 1
            else:
                avalanche_activities_count_activity += 1

        if isinstance(o, go.AvalancheObs):
            avalanches_all.append(o)

        if isinstance(o, go.DangerSign):
            danger_signs_all.append(o)
            if o.DangerSignTID == 1:  # no dangers
                danger_signs_count_no_danger += 1
            if o.DangerSignTID == 2:  # avalanche activity
                danger_signs_count_activity += 1

    # Bug fix: the heading previously hard-coded "2017-18" even when a
    # different season was requested via the year parameter.
    print("SKRED OBSERVERT VINTERN {}".format(year))
    print("")
    print("Skredaktivitet som faretegn: {}".format(danger_signs_count_activity))
    print("Ingen faretegn observert: {}".format(danger_signs_count_no_danger))
    print("")
    print("Observert skredaktivitet: {}".format(avalanche_activities_count_activity))
    print("Ingen skredaktivitet observert: {}".format(avalanche_activities_count_no_activity))
    print("")
    print("Antall skredhendelser observert: {}".format(len(avalanches_all)))
def _get_dl_prob_avindex(region_id, year='2018-19'):
    """Gets all the data needed for one region to make the plots.

    :param region_id: [int] Region ID is an int as given i ForecastRegionKDV
    :param year: [string]
    :return problems, dangers, aval_indexes:
    """
    all_observations = gvp.get_all_observations(year, output='FlatList', geohazard_tids=10)
    all_forecasts = gvp.get_all_forecasts(year)

    # Keep only data belonging to the requested region.
    observations = [o for o in all_observations if region_id == o.ForecastRegionTID]
    forecasts = [f for f in all_forecasts if region_id == f.region_id]

    aval_indexes = gm.get_avalanche_index(observations)

    # Dangers: forecasts with a real danger level plus observed evaluations.
    dangers_raw = [f for f in forecasts if f.danger_level > 0]
    dangers_raw += [o for o in observations
                    if isinstance(o, (go.AvalancheEvaluation,
                                      go.AvalancheEvaluation2,
                                      go.AvalancheEvaluation3))]
    dangers = gd.make_dangers_conform_from_list(dangers_raw)

    # Problems: same forecasts plus evaluation/problem forms.
    # NOTE(review): this list uses AvalancheEvalProblem2 where the dangers
    # list uses AvalancheEvaluation3 — presumably intentional; confirm
    # against the form type hierarchy.
    problems_raw = [f for f in forecasts if f.danger_level > 0]
    problems_raw += [o for o in observations
                     if isinstance(o, (go.AvalancheEvaluation,
                                       go.AvalancheEvaluation2,
                                       go.AvalancheEvalProblem2))]
    problems = gp.make_problems_conform_from_list(problems_raw)

    return problems, dangers, aval_indexes
def total_obs_and_users(year='2018-19'):
    """Print totals of observations and distinct observers (snow, ice and all)."""

    def _count_by_nick(observations):
        # Tally number of observations per observer NickName.
        counts = {}
        for obs in observations:
            counts[obs.NickName] = counts.get(obs.NickName, 0) + 1
        return counts

    all_snow_obs = gvp.get_all_observations(year, geohazard_tids=10, max_file_age=23)
    all_ice_obs = gvp.get_all_observations(year, geohazard_tids=70, max_file_age=23)
    all_obs = gvp.get_all_observations(year, max_file_age=23)

    snow_observers = _count_by_nick(all_snow_obs)
    ice_observers = _count_by_nick(all_ice_obs)
    observers = _count_by_nick(all_obs)

    print("For sesongen {} (1. sept til 31. aug)\n".format(year))
    print("Totalt snøobservasjoner: {}".format(len(all_snow_obs)))
    print("Antall snøobservatører: {}\n".format(len(snow_observers)))
    print("Totalt isobservasjoner: {}".format(len(all_ice_obs)))
    print("Antall isobservatører: {}\n".format(len(ice_observers)))
    print("Totalt observasjoner: {}".format(len(all_obs)))
    print("Antall observatører: {}".format(len(observers)))

    pass
def make_forecaster_data(year):
    """
    For one season, make the forecaster dictionary with all the necessary data.

    :param year: [string] Eg. season '2018-19'
    """
    import csv

    # The data
    all_warnings = gvp.get_all_forecasts(year, max_file_age=23)
    all_observation_forms = gvp.get_all_observations(year, geohazard_tids=10, max_file_age=23)

    # One Forecaster object per warning author, fed all their warnings.
    forecaster_data = {}
    for warning in all_warnings:
        if warning.author not in forecaster_data:
            forecaster_data[warning.author] = Forecaster(warning.author)
        forecaster_data[warning.author].add_warning(warning)

    # Attach observations made under a nick matching a forecaster name.
    for form in all_observation_forms:
        if form.NickName in forecaster_data:
            forecaster_data[form.NickName].add_observation(form)

    forecaster_list_of_dict = [forecaster.to_dict() for forecaster in forecaster_data.values()]

    # Write one row per forecaster, ;-separated with a header row.
    with open('{0}forecaster_followup.txt'.format(env.output_folder), 'w', encoding='utf8') as f:
        dict_writer = csv.DictWriter(
            f, delimiter=';', fieldnames=forecaster_list_of_dict[0].keys())
        dict_writer.writeheader()
        dict_writer.writerows(forecaster_list_of_dict)

    return
def write_to_file_all_obs():
    """Writes to file all dates and the number of observations on the dates.
    Both the total and the numbers pr geohazard."""
    years = [
        '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18',
        '2018-19'
    ]

    all_observations = []
    for y in years:
        all_observations += gvp.get_all_observations(y)

    num_at_date = _make_date_obscount_dict(
    )  # number of obs pr day pr geohazard

    for o in all_observations:
        date = o.DtObsTime.date()
        try:
            num_at_date[date].add_one_to_total()
            if o.GeoHazardTID == 10:
                num_at_date[date].add_one_to_snow()
            if o.GeoHazardTID in [20, 30, 40]:
                num_at_date[date].add_one_to_landslide()
            if o.GeoHazardTID == 60:
                num_at_date[date].add_one_to_water()
            if o.GeoHazardTID == 70:
                num_at_date[date].add_one_to_ice()
        except KeyError:
            # Bug fix: was a bare `except: pass` which silently swallowed
            # every error. Only a date missing from the pre-built date dict
            # is an expected, ignorable condition here.
            pass

    # Write observed dangers to file
    with open('{}number_off_obs_pr_date.txt'.format(env.output_folder),
              'w',
              encoding='utf-8') as f:
        f.write('Date;Water;Landslide;Ice;Snow;Total\n')
        for k, v in num_at_date.items():
            f.write('{};{};{};{};{};{}\n'.format(k, v.water, v.landslide,
                                                 v.ice, v.snow, v.total))
def total_2018_and_part_water():
    """Fetch observation counts for the year 2018 and recent seasons.

    NOTE(review): this function only gathers data into locals and returns
    nothing; a later definition with the same name in this module shadows it.
    """
    from_date = dt.date(2018, 1, 1)
    to_date = dt.date(2018, 12, 31)

    # Counts for calendar year 2018, total and per geohazard.
    all_observations = go.get_data(from_date, to_date, output='Count nest')
    all_water_observ = go.get_data(from_date, to_date, geohazard_tids=60, output='Count nest')
    all_snow_observ = go.get_data(from_date, to_date, geohazard_tids=10, output='Count nest')
    all_dirt_observ = go.get_data(from_date, to_date, geohazard_tids=[20, 30, 40], output='Count nest')
    all_ice_observ = go.get_data(from_date, to_date, geohazard_tids=70, output='Count nest')

    # Registrations and single forms for the three seasons up to 2017-18.
    seasonal_data = {}
    for season in ('2015-16', '2016-17', '2017-18'):
        seasonal_data[season] = (gvp.get_all_observations(season),
                                 gvp.get_all_observations(season, output='List'))

    pass
def plot_numbers_of_3_seasons(output_folder=env.plot_folder + 'regobsplots/'):
    """Plots the last tree seasons of regObs data to 4 subplots the daily total
    of observations, forms, forms pr observation and the seasonal total.

    :param output_folder: [string] folder the finished png is written to
    :return:
    """

    # Get data: 'FlatList' gives single forms, 'List' gives whole registrations.
    # Only the current season caps the cache age at 23 hours.
    all_obs_201920_list = gvp.get_all_observations('2019-20',
                                                   output='FlatList',
                                                   max_file_age=23)
    all_obs_201920_nest = gvp.get_all_observations('2019-20',
                                                   output='List',
                                                   max_file_age=23)
    all_obs_201819_list = gvp.get_all_observations('2018-19', output='FlatList')
    all_obs_201819_nest = gvp.get_all_observations('2018-19', output='List')
    all_obs_201718_list = gvp.get_all_observations('2017-18', output='FlatList')
    all_obs_201718_nest = gvp.get_all_observations('2017-18', output='List')

    # Make dict with all dates and a empty DailyNumbers object.
    # Months are listed in season order (Sept-Aug); keys are month+day strings.
    # NOTE(review): if _str does not zero-pad, keys like '1'+'12' and '11'+'2'
    # both become '112' and would collide — confirm _str's behavior.
    all_year = {}
    for m in [9, 10, 11, 12, 1, 2, 3, 4, 5, 6, 7, 8]:
        if m in [1, 3, 5, 7, 8, 10, 12]:  # 31-day months
            for d in range(1, 32, 1):
                all_year[_str(m) + _str(d)] = DailyNumbers(m, d)
        if m in [4, 6, 9, 11]:  # 30-day months
            for d in range(1, 31, 1):
                all_year[_str(m) + _str(d)] = DailyNumbers(m, d)
        if m in [2]:  # February, incl. leap day 29
            for d in range(1, 30, 1):
                all_year[_str(m) + _str(d)] = DailyNumbers(m, d)

    # Add data to the DailyNumbers: flat lists count forms ("obs"),
    # nested lists count registrations ("regs").
    for o in all_obs_201920_list:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_obs_this_season(o)
    for o in all_obs_201920_nest:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_regs_this_season(o)
    for o in all_obs_201819_list:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_obs_prev_season(o)
    for o in all_obs_201819_nest:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_regs_prev_season(o)
    for o in all_obs_201718_list:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_obs_two_seasons_ago(o)
    for o in all_obs_201718_nest:
        all_year[_str(o.DtObsTime.month) + _str(o.DtObsTime.day)].add_regs_two_seasons_ago(o)

    # Smooth the daily series; crop_for_season presumably truncates the
    # current season at todays date — TODO confirm against _smooth.
    obs_this_season, obs_this_season_smooth = _smooth(
        [v.obs_this_season_num for k, v in all_year.items()],
        crop_for_season=True)
    regs_this_season, regs_this_season_smooth = _smooth(
        [v.regs_this_season_num for k, v in all_year.items()],
        crop_for_season=True)
    numbs_this_season, numbs_this_season_smooth = _smooth(
        [v.numbs_this_season for k, v in all_year.items()],
        crop_for_season=True)
    obs_prev_season, obs_prev_season_smooth = _smooth(
        [v.obs_prev_season_num for k, v in all_year.items()])
    regs_prev_season, regs_prev_season_smooth = _smooth(
        [v.regs_prev_season_num for k, v in all_year.items()])
    numbs_prev_season, numbs_prev_season_smooth = _smooth(
        [v.numbs_prev_season for k, v in all_year.items()])
    obs_two_seasons_ago, obs_two_seasons_ago_smooth = _smooth(
        [v.obs_two_seasons_ago_num for k, v in all_year.items()])
    regs_two_seasons_ago, regs_two_seasons_ago_smooth = _smooth(
        [v.regs_two_seasons_ago_num for k, v in all_year.items()])
    numbs_two_seasons_ago, numbs_two_seasons_ago_smooth = _smooth(
        [v.numbs_two_seasons_ago for k, v in all_year.items()])

    # Running seasonal totals of registrations for the cumulative subplot.
    sum_regs_this_season = _sum_list(regs_this_season)
    sum_regs_prev_season = _sum_list(regs_prev_season)
    sum_regs_two_seasons_ago = _sum_list(regs_two_seasons_ago)

    # Turn off interactive mode
    plt.ioff()

    plt.figure(figsize=(15, 23))
    plt.clf()

    # Make legend: one colour per season across all four subplots.
    legend_handles = []
    legend_handles.append(mpatches.Patch(color='0.2', label="2019-20"))
    legend_handles.append(mpatches.Patch(color='blue', label="2018-19"))
    legend_handles.append(mpatches.Patch(color='red', label="2017-18"))

    # x-axis labels: tick on the 1st of every month.
    axis_dates, axis_positions = [], []
    i = -1
    for k, v in all_year.items():
        i += 1
        if v.day == 1:
            axis_dates.append(v.date_as_string)
            axis_positions.append(i)

    # Make plots: raw daily values as thin lines, smoothed series on top.
    plt.subplot2grid((4, 1), (0, 0), rowspan=1)
    plt.title("Antall observasjoner daglig")
    plt.plot(regs_this_season, color='0.1', linewidth=0.2)
    plt.plot(regs_this_season_smooth, color='0.1')
    plt.plot(regs_prev_season, color='blue', linewidth=0.2)
    plt.plot(regs_prev_season_smooth, color='blue')
    plt.plot(regs_two_seasons_ago, color='red', linewidth=0.2)
    plt.plot(regs_two_seasons_ago_smooth, color='red')
    plt.legend(handles=legend_handles)
    plt.xticks(axis_positions, axis_dates)

    plt.subplot2grid((4, 1), (1, 0), rowspan=1)
    plt.title("Sesong sum av observasjoner")
    plt.plot(sum_regs_this_season, color='0.1')
    plt.plot(sum_regs_prev_season, color='blue')
    plt.plot(sum_regs_two_seasons_ago, color='red')
    plt.legend(handles=legend_handles)
    plt.xticks(axis_positions, axis_dates)

    plt.subplot2grid((4, 1), (2, 0), rowspan=1)
    plt.title("Antall skjema daglig")
    plt.plot(obs_this_season, color='0.1', linewidth=0.2)
    plt.plot(obs_this_season_smooth, color='0.1')
    plt.plot(obs_prev_season, color='blue', linewidth=0.2)
    plt.plot(obs_prev_season_smooth, color='blue')
    plt.plot(obs_two_seasons_ago, color='red', linewidth=0.2)
    plt.plot(obs_two_seasons_ago_smooth, color='red')
    plt.legend(handles=legend_handles)
    plt.xticks(axis_positions, axis_dates)

    plt.subplot2grid((4, 1), (3, 0), rowspan=1)
    plt.title("Antall skjema pr observasjon daglig")
    plt.plot(numbs_this_season, color='0.1', linewidth=0.2)
    plt.plot(numbs_this_season_smooth, color='0.1')
    plt.plot(numbs_prev_season, color='blue', linewidth=0.2)
    plt.plot(numbs_prev_season_smooth, color='blue')
    plt.plot(numbs_two_seasons_ago, color='red', linewidth=0.2)
    plt.plot(numbs_two_seasons_ago_smooth, color='red')
    plt.legend(handles=legend_handles)
    plt.xticks(axis_positions, axis_dates)

    # Time stamp so it is clear when the figure was generated.
    plt.gcf().text(0.78,
                   0.06,
                   'Figur laget {0:%Y-%m-%d %H:%M}'.format(dt.datetime.now()),
                   color='0.5')
    # plt.grid(color='0.6', linestyle='--', linewidth=0.7, zorder=0)

    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    plt.savefig('{}numbersof3seasons.png'.format(output_folder))
    plt.close()
def data_from_the_beginning_of_time(output='File',
                                    plot_output_folder=env.plot_folder + 'regobsplots/',
                                    file_output_folder=env.output_folder):
    """Writes to file all dates and the number of observations on the dates.
    Both the total and the numbers pr geohazard.

    :param output: [string] 'Plot' or 'File' or 'File and Plot'. Not case sensitive.
    :param plot_output_folder: [string] folder the png plot is written to
    :param file_output_folder: [string] folder the txt file is written to
    """
    years = [
        '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18',
        '2018-19', '2019-20'
    ]

    all_observations = []
    for y in years:
        all_observations += gvp.get_all_observations(y)

    num_at_date = _make_date_obscount_dict(
    )  # number of obs pr day pr geohazard

    for o in all_observations:
        date = o.DtObsTime.date()
        try:
            num_at_date[date].add_one_to_total()
            if o.GeoHazardTID == 10:
                num_at_date[date].add_one_to_snow()
            if o.GeoHazardTID in [20, 30, 40]:
                num_at_date[date].add_one_to_landslide()
            if o.GeoHazardTID == 60:
                num_at_date[date].add_one_to_water()
            if o.GeoHazardTID == 70:
                num_at_date[date].add_one_to_ice()
        except Exception as e:
            # Bug fix: previously logged sys.exc_info()[0], which is only
            # the exception class; log the actual exception instead.
            lg.warning(
                "regobsstatistics.py -> data_from_the_beginning_of_time: "
                "Exception adding one data point on {0}. Msg: {1}".format(date, e))

    output = output.lower()

    # Write data to file
    if 'file' in output:
        with open('{}number_of_obs_pr_date.txt'.format(file_output_folder),
                  'w',
                  encoding='utf-8') as f:
            f.write('Date;Water;Landslide;Ice;Snow;Total\n')
            for k, v in num_at_date.items():
                f.write('{};{};{};{};{};{}\n'.format(k, v.water, v.landslide,
                                                     v.ice, v.snow, v.total))

    # Write data to plot
    if 'plot' in output:
        x = []
        y_total = []
        y_snow = []
        y_ice = []
        y_landslide = []
        y_water = []

        for k, v in num_at_date.items():
            x.append(k)
            y_total.append(v.total)
            y_snow.append(v.snow)
            y_ice.append(v.ice)
            y_landslide.append(v.landslide)
            y_water.append(v.water)

        # _smooth returns a pair; index [1] below is the smoothed series.
        y_total_smooth = _smooth(y_total, window_size=30)

        # Turn off interactive mode
        plt.ioff()

        fig, ax = plt.subplots(figsize=(14, 7))
        ax.plot(x, y_total, color='darkblue', linewidth=0.3, alpha=0.5)
        ax.plot(x, y_total_smooth[1], color='k')
        fig.suptitle("Daily observations", fontsize=20)
        ax.grid()

        # Change tick label size
        for tick in ax.xaxis.get_major_ticks():
            tick.label.set_fontsize(14)
        for tick in ax.yaxis.get_major_ticks():
            tick.label.set_fontsize(14)

        # Text box with totals pr geohazard (labels and numbers side by side).
        fig.text(0.2,
                 0.64,
                 "TOTAL: \n\nSnow:\nIce:\nDirt:\nWater:",
                 bbox={
                     'edgecolor': 'none',
                     'facecolor': 'lavender',
                     'alpha': 1.,
                     'pad': 8
                 })
        fig.text(
            0.26, 0.64,
            "{0:7,d}\n\n{1:7,d}\n{2:7,d}\n{3:7,d}\n {4:7d}".format(
                sum(y_total), sum(y_snow), sum(y_ice), sum(y_landslide),
                sum(y_water)))

        # When is the figure made?
        plt.gcf().text(0.77,
                       0.02,
                       'Figure made {0:%Y-%m-%d %H:%M}'.format(
                           dt.datetime.now()),
                       color='0.5')

        fig.savefig('{}number_of_obs_pr_date.png'.format(plot_output_folder))
        # plt.show()

    # If the words plot or file do not show up in the output request, log warning.
    if 'plot' not in output and 'file' not in output:
        lg.warning(
            "regobsstatistics.py -> data_from_the_beginning_of_time: Unknown output. Must be 'Plot', 'File' or both."
        )
def total_2018_and_part_water():
    """Print yearly totals of observations, forms and pictures per geohazard.

    NOTE(review): this definition shadows an earlier function of the same
    name in this module.
    """

    def _split_forms(forms, geohazard_tids):
        # Split forms for the given geohazard(s) into ordinary forms and pictures.
        if not isinstance(geohazard_tids, list):
            geohazard_tids = [geohazard_tids]
        plain = [f for f in forms
                 if f.GeoHazardTID in geohazard_tids and not isinstance(f, go.PictureObservation)]
        pictures = [f for f in forms
                    if f.GeoHazardTID in geohazard_tids and isinstance(f, go.PictureObservation)]
        return plain, pictures

    years = ['2013', '2014', '2015', '2016', '2017', '2018']

    for y in years:
        all_observations = gvp.get_all_observations(y)
        all_forms = gvp.get_all_observations(y, output='List')

        all_water_forms, all_water_pictures = _split_forms(all_forms, 60)
        all_snow_forms, all_snow_pictures = _split_forms(all_forms, 10)
        all_dirt_forms, all_dirt_pictures = _split_forms(all_forms, [20, 30, 40])
        all_ice_forms, all_ice_pictures = _split_forms(all_forms, 70)

        print(y)
        print("All observations:\t{0}".format(len(all_observations)))
        print("All forms:\t{0}".format(len(all_forms)))
        print("Snow forms:\t{0}".format(len(all_snow_forms)))
        print("Snow pictures:\t{0}".format(len(all_snow_pictures)))
        print("Water forms:\t{0}".format(len(all_water_forms)))
        print("Water pictures:\t{0}".format(len(all_water_pictures)))
        print("Dirt forms:\t{0}".format(len(all_dirt_forms)))
        print("Dirt pictures:\t{0}".format(len(all_dirt_pictures)))
        print("Ice forms:\t{0}".format(len(all_ice_forms)))
        print("Ice pictures:\t{0}".format(len(all_ice_pictures)))
        print()

    pass
if __name__ == "__main__":
    # Ad-hoc run: compute the avalanche index for all single snow
    # observation forms from the 2018-19 season.
    snow_obs_flat_list = gvp.get_all_observations('2018-19', output='FlatList', geohazard_tids=10)
    avalanche_indexes = get_avalanche_index(snow_obs_flat_list)

    # Earlier one-off runs kept for reference:
    # included_observers = get_observer_dict_for_2017_18_plotting(5)
    # id1, region1 = get_forecast_region_for_coordinate(499335, 7576105, '2014-15')  # Lofoten?
    # id2, region2 = get_forecast_region_for_coordinate(687380, 7672286, '2012-13')  # Tamok
    # a = get_observer_v()
    # from_date = dt.date(2018, 1, 21)
    # from_date = dt.date.today()-dt.timedelta(days=1)
    # to_date = dt.date.today()+dt.timedelta(days=1)
    # region_ids = get_forecast_regions(year='2017-18')
    # region_ids = [116, 117]
    # a = get_obs_location(from_date, to_date)
    # observer_list = [1090, 79, 43, 1084, 33, 119, 67, 101, 952, 41, 34, 125, 126, 8, 384, 955, 14, 841, 50, 175, 1123, 199, 1068, 1598, 1646, 637, 1664, 1307, 135, 307, 1212, 1279, 1310]
    # nicks = get_observer_nicks_given_ids(observer_list)
    # print(nicks)
    # observer_list = get_observer_dict_for_2015_16_ploting()
    # import makepickle as mp
    # mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))
    # observer_nicks = get_observer_v()
    # trips = get_trip(from_date, to_date, output='csv')
def get_tests_and_layer_info_to_gustav():
    """Extract stability tests (ECT/CT) with layer info for Gustav Pless.

    Request (Gustav Pless, 28th Oct 2019, translated): he compares sets of
    stability tests and wants all ECT and CT tests from regObs — together
    with the snow profile when one was observed — to study e.g. at which
    depth the tests give a response and whether propagation correlates with
    depth.

    :return:
    """
    years = ['2019-20', '2018-19', '2017-18', '2016-17']
    # years = ['2018-19']

    all_observations = []
    for y in years:
        all_observations += gvp.get_all_observations(y)

    tests = []
    for o in all_observations:
        for form in o.Observations:
            # Only column tests, from observers with competence *** or more,
            # where a propagation result was actually given.
            if not isinstance(form, (go.ColumnTest, go.ProfileColumnTest)):
                continue
            if form.CompetenceLevelTID < 120:
                continue
            if 'Ikke gitt' in form.PropagationName:
                continue

            # Attach the snow profile from the same registration, if flagged.
            profile = None
            if form.IncludeInSnowProfile:
                for sibling in o.Observations:
                    if isinstance(sibling, go.SnowProfile):
                        profile = sibling
            tests.append(TestsAndLayerInfo(form, profile))

    file_and_folder = f'{env.output_folder}tests_to_gustav_pless.csv'

    # Write to file: header from the first test, then one row per test.
    with open(file_and_folder, 'w', encoding='utf-8') as f:
        header_written = False
        for t in tests:
            if not header_written:
                f.write(';'.join([_str(d) for d in t.to_ord_dict().keys()]) + '\n')
                header_written = True
            f.write(';'.join([_str(d) for d in t.to_ord_dict().values()]) + '\n')
def make_season_calender_plots(year='2019-20',
                               plot_folder=env.plot_folder,
                               html_folder=env.output_folder + 'views/',
                               web_pickle_folder=env.output_folder + 'webpickles/'):
    """Makes observation calender plots for both observer and region for
    display on web page for the season 2018-19. Method includes a request for
    list of relevant observers.

    :param year: [string] season, eg '2019-20'
    :param plot_folder: [string] folder the plots are written to
    :param html_folder: [string] folder the html views are written to
    :param web_pickle_folder: [string] folder the observer-list pickle is written to
    """
    # The plotted season runs from 1. Nov to 30. June.
    from_year = int(year[0:4])
    to_year = int('20' + year[-2:])
    from_day = dt.date(from_year, 11, 1)
    to_day = dt.date(to_year, 6, 30)

    # if the seasons expected end is after todays date, set it to today.
    if to_day > dt.date.today():
        to_day = dt.date.today()

    # list of months to be plotted
    months = []
    month = from_day
    while month < to_day:
        months.append(month)
        # Adding 35 days always lands in the following month; snap to its 1st.
        almost_next = month + dt.timedelta(days=35)
        month = dt.date(almost_next.year, almost_next.month, 1)

    # Get all regions
    region_ids = gm.get_forecast_regions(year)

    # get a list of relevant observers to plot and make pickle for adding to the web-folder
    all_observations_nest = gvp.get_all_observations(year, output='List', geohazard_tids=10)
    all_observations_list = gvp.get_all_observations(year, output='FlatList', geohazard_tids=10)

    # Count registrations per observer id.
    observer_dict = {}
    for o in all_observations_nest:
        if o.ObserverID in observer_dict.keys():
            observer_dict[o.ObserverID].add_one_observation_count()
        else:
            observer_dict[o.ObserverID] = ObserverData(o.ObserverID,
                                                       o.NickName,
                                                       observation_count_inn=1)

    # Sort observers by observation count and keep those with 5 or more.
    observer_list = []
    observer_list_web = []
    ordered_observer_dict = col.OrderedDict(
        sorted(observer_dict.items(),
               key=lambda t: t[1].observation_count,
               reverse=True))

    for k, v in ordered_observer_dict.items():
        if v.observation_count > 4:
            observer_list.append(
                ObserverData(v.observer_id,
                             v.observer_nick,
                             observation_count_inn=v.observation_count))
            observer_list_web.append(
                [v.observer_id, v.observer_nick, v.observation_count])

    if not os.path.exists(web_pickle_folder):
        os.makedirs(web_pickle_folder)
    mp.pickle_anything(observer_list_web,
                       '{0}observerlist.pickle'.format(web_pickle_folder))

    # run the stuff
    make_observer_plots(all_observations_list,
                        observer_list,
                        months,
                        plot_folder=plot_folder,
                        html_folder=html_folder)
    make_region_plots(all_observations_list,
                      region_ids,
                      months,
                      plot_folder=plot_folder,
                      html_folder=html_folder)
    make_svv_plots(all_observations_list,
                   observer_dict,
                   region_ids,
                   months,
                   plot_folder=plot_folder,
                   html_folder=html_folder)
    # NOTE(review): this call appears to duplicate the make_svv_plots call at
    # the end of make_season_calender_plots — possibly a copy/paste or merge
    # artifact; confirm whether the SVV plots are meant to run twice.
    make_svv_plots(all_observations_list,
                   observer_dict,
                   region_ids,
                   months,
                   plot_folder=plot_folder,
                   html_folder=html_folder)


if __name__ == "__main__":
    # Ad-hoc run: full season calendar plots for 2018-19, plus individual
    # observer plots for two example observers (Jan-Apr 2019).
    make_season_calender_plots(year='2018-19')

    observer = [
        ObserverData(325, 'Siggen@obskorps'),
        ObserverData(10, 'Andreas@nve')
    ]
    all_observations = gvp.get_all_observations('2018-19',
                                                output='FlatList',
                                                geohazard_tids=10,
                                                max_file_age=1000)
    months = [
        dt.date(2019, 1, 1),
        dt.date(2019, 2, 1),
        dt.date(2019, 3, 1),
        dt.date(2019, 4, 1)
    ]
    make_observer_plots(all_observations, observer, months)

    pass
def pick_winners_varsom_friflyt_konk_2019():
    """Method for picking winners of the varsom/friflyt competition.

    Utvalgskriterier:
    • Premie for flest observasjoner av snøoverflate og faretegn
    • Premie til den som sender inn den grundigste observasjonen
    • Uttrekkspremier blant de som leverer flere enn 10 observasjoner
    • Uttrekkspremier blant de som leverer flere enn 5 isobservasjoner
    """
    year = '2018-19'
    all_snow_obs = gvp.get_all_observations(year, geohazard_tids=10, max_file_age=23)
    all_ice_obs = gvp.get_all_observations(year, geohazard_tids=70, max_file_age=23)

    def _is_professional(nick_name, competence_level):
        # Observations with **** or more competence, or nicks belonging to
        # professional/institutional observers, are excluded from the draw.
        if '****' in competence_level:
            return True
        keywords = ['obskorps', 'svv', 'vegvesen', 'nve', 'met',
                    'wyssen', 'forsvaret', 'kjus@nortind']
        return any(kw in nick_name for kw in keywords)

    voluntary_snow_obs = []
    users_and_numbers_snow = {}
    voluntary_snowcover_dangersign_obs = []
    users_and_numbers_sc_ds = {}
    voluntary_manyforms_obs = []
    users_and_numbers_manyforms = {}

    for o in all_snow_obs:
        nick_name = o.NickName.lower()
        observer_id_and_nick = '{};{}'.format(o.ObserverID, nick_name)

        if _is_professional(nick_name, o.CompetenceLevelName):
            continue

        voluntary_snow_obs.append(o)
        users_and_numbers_snow[observer_id_and_nick] = \
            users_and_numbers_snow.get(observer_id_and_nick, 0) + 1

        # Count once per snow-surface or danger-sign form on the registration.
        for oo in o.Observations:
            if isinstance(oo, (go.SnowSurfaceObservation, go.DangerSign)):
                voluntary_snowcover_dangersign_obs.append(o)
                users_and_numbers_sc_ds[observer_id_and_nick] = \
                    users_and_numbers_sc_ds.get(observer_id_and_nick, 0) + 1

        # Comprehensive registrations: more than 7 forms.
        if len(o.Observations) > 7:
            voluntary_manyforms_obs.append(o)
            users_and_numbers_manyforms[observer_id_and_nick] = \
                users_and_numbers_manyforms.get(observer_id_and_nick, 0) + 1

    voluntary_ice_obs = []
    users_and_numbers_ice = {}

    for o in all_ice_obs:
        nick_name = o.NickName.lower()
        observer_id_and_nick = '{};{}'.format(o.ObserverID, nick_name)

        # dont include ice obs with ***** competence
        if '*****' in o.CompetenceLevelName:
            continue

        voluntary_ice_obs.append(o)
        users_and_numbers_ice[observer_id_and_nick] = \
            users_and_numbers_ice.get(observer_id_and_nick, 0) + 1

    import operator
    from random import shuffle

    # NOTE: computed for manual inspection; not used further below.
    sorted_users_and_numbers_snow = sorted(users_and_numbers_snow.items(),
                                           key=operator.itemgetter(1),
                                           reverse=True)
    sorted_users_and_numbers_ice = sorted(users_and_numbers_ice.items(),
                                          key=operator.itemgetter(1),
                                          reverse=True)

    # Write lists of users and numbers to file. List can be sorted in excel. Need to lookup email and full name.
    # all snow obseravtions
    with open('{}snow_observations_pr_user {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for k, v in users_and_numbers_snow.items():
            f.write('{};{}\n'.format(k, v))

    # all users with 10 or more snowobs with danger signs and/or snow cover obs.
    with open('{}snow_cover_or_danger_sign_pr_user {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for k, v in users_and_numbers_sc_ds.items():
            if v > 9:  # only users with 10 or more obs
                f.write('{};{}\n'.format(k, v))

    # Comprehensive snow observations (many forms).
    # Bug fix: this file previously iterated users_and_numbers_snow, writing
    # the wrong counts.
    with open('{}many_forms_pr_users {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for k, v in users_and_numbers_manyforms.items():
            f.write('{};{}\n'.format(k, v))

    # Write list of users and numbers to file. List can be sorted in excel. Need to lookup email and full name.
    with open('{}ice_observations_pr_user {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for k, v in users_and_numbers_ice.items():
            if v > 4:  # only users with 5 or more obs
                f.write('{};{}\n'.format(k, v))

    # Shuffle the list of observations so winners can be drawn from the top.
    shuffle(voluntary_snow_obs)
    shuffle(voluntary_snowcover_dangersign_obs)
    shuffle(voluntary_manyforms_obs)
    shuffle(voluntary_ice_obs)

    # Write the random list of users and regid to file
    with open('{}random_snowobs {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for o in voluntary_snow_obs:
            f.write('{};{};{}\n'.format(o.ObserverID, o.NickName, o.RegID))

    with open('{}random_snowcover_dangersign {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for o in voluntary_snowcover_dangersign_obs:
            f.write('{};{};{}\n'.format(o.ObserverID, o.NickName, o.RegID))

    with open('{}random_manyforms {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for o in voluntary_manyforms_obs:
            f.write('{};{};{}\n'.format(o.ObserverID, o.NickName, o.RegID))

    with open('{}random_ice_obs {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for o in voluntary_ice_obs:
            f.write('{};{};{}\n'.format(o.ObserverID, o.NickName, o.RegID))

    pass
def pick_winners_varsom_friflyt_konk_2018():
    """Method for picking winners of the varsom/friflyt competition.

    Winter 2017-18 we encouraged voluntary observations. We pick som winners
    among snow obs. 1st, 2nd and 3rd to those who observed most. The rest on
    random pick in relevant obs.

    :return:
    """
    import operator
    from random import shuffle

    year = '2017-18'
    all_snow_obs = gvp.get_all_observations(year, output='Nest', geohazard_tids=10, max_file_age=23)

    # Nick fragments that mark professional/institutional observers.
    excluded_nick_parts = ['obskorps', 'svv', 'vegvesen', 'nve', 'met',
                           'wyssen', 'forsvaret', 'kjus@nortind']

    voluntary_obs = []
    users_and_numbers = {}

    for o in all_snow_obs:
        nick_name = o.NickName.lower()
        # Exclude observations with **** competence or more, and professionals.
        if '****' in o.CompetenceLevelName:
            continue
        if any(part in nick_name for part in excluded_nick_parts):
            continue

        observer_id_and_nick = '{};{}'.format(o.ObserverId, nick_name)
        voluntary_obs.append(o)
        users_and_numbers[observer_id_and_nick] = \
            users_and_numbers.get(observer_id_and_nick, 0) + 1

    # NOTE(review): computed but unused further down; kept for inspection.
    sorted_users_and_numbers = sorted(users_and_numbers.items(),
                                      key=operator.itemgetter(1),
                                      reverse=True)

    # Write list of users and numbers to file. List can be sorted in excel. Need to lookup email and full name.
    with open('{}users_and_numbers {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for k, v in users_and_numbers.items():
            f.write('{};{}\n'.format(k, v))

    # Shuffle the list of observations
    shuffle(voluntary_obs)

    # Write the random list of users and regid to file
    with open('{}random_users_and_obs {}.txt'.format(env.output_folder, year),
              'w', encoding='utf-8') as f:
        for o in voluntary_obs:
            f.write('{};{};{}\n'.format(o.ObserverId, o.NickName, o.RegID))

    pass