def pickle_M3(data_set, config_file_name, pickle_m3_file_name):
    """Build the list of m3-matrix elements from a data set and pickle it.

    Reads the matrix-element configuration and runs through all warnings in
    *data_set*, adding occurrences and the danger level used at each
    combination of the matrix.

    :param data_set: dict of parallel 'values' lists under the keys
        'level', 'size', 'trigger', 'probability' and 'distribution'
    :param config_file_name: configuration file name, resolved relative to
        env.input_folder
    :param pickle_m3_file_name: destination file for the pickled elements
    :return: None (the elements are written to pickle_m3_file_name)
    """
    # Use a separate local for the resolved path instead of shadowing the
    # config_file_name parameter.
    config_file_path = '{0}{1}'.format(env.input_folder, config_file_name)
    m3_elements = rf.read_configuration_file(config_file_path, M3Element)

    # Read out the data_set and add each occurrence to every matching element.
    for i in range(len(data_set['level']['values'])):
        size = data_set['size']['values'][i]
        if size is None:
            size = '0 - Ikke gitt'
            # Fixed typo in the diagnostic: the function is pickle_M3.
            print('matrix.py -> pickle_M3 -> Warning: Encountered occurrence where avalanche size is None. Set to 0 - Ikke gitt.')
        trigger = data_set['trigger']['values'][i]
        probability = data_set['probability']['values'][i]
        distribution = data_set['distribution']['values'][i]
        # NOTE(review): trigger/probability/distribution are assumed non-None
        # here (only size gets a None guard) — confirm upstream guarantees.

        for e in m3_elements:
            # Substring match of the observation values against the
            # configured matrix cell.
            if (size.strip() in e.avalanche_size
                    and trigger.strip() in e.trigger
                    and probability.strip() in e.probability
                    and distribution.strip() in e.distribution):
                level = data_set['level']['values'][i]
                e.add_danger_level(level)

    # Count all levels added (debug/control only) and compute stats.
    count = 0
    for e in m3_elements:
        count += len(e.danger_level_list)
        e.set_level_average()
        e.set_level_standard_dev()

    mp.pickle_anything(m3_elements, pickle_m3_file_name)
def step_2_find_most_valued(date_region):
    """Pick the observation with the highest index per region/date.

    "Most valued" means the observation carrying the highest avalanche index
    as defined in aval_dl_order_of_size_and_num.csv. Danger signs contribute
    fixed index values (2 for fresh avalanches, 1 for no danger signs).

    :param date_region: [list] of region/date objects, mutated in place
    :return: the same date_region list
    """
    index_definitions = rf.read_configuration_file(
        '{0}aval_dl_order_of_size_and_num.csv'.format(
            env.matrix_configurations), IndexOfSizeAndNumber)

    for region_date in date_region:
        for activity in region_date.avalanche_activity:
            # The last matching definition wins, mirroring config order.
            best = 0
            for definition in index_definitions:
                if (definition.estimated_num in activity.EstimatedNumName
                        and definition.destructive_size in activity.DestructiveSizeName):
                    best = int(definition.index)
            if region_date.highest_value < best:
                region_date.highest_value = best
                region_date.most_valued_observation = activity

        for avalanche in region_date.avalanche:
            # A single avalanche observation always counts as "Ett (1)".
            best = 0
            for definition in index_definitions:
                if (definition.estimated_num in 'Ett (1)'
                        and definition.destructive_size in avalanche.DestructiveSizeName):
                    best = int(definition.index)
            if region_date.highest_value < best:
                region_date.highest_value = best
                region_date.most_valued_observation = avalanche

        for sign in region_date.danger_sign:
            if 'Ferske skred' in sign.DangerSignName and region_date.highest_value < 2:
                region_date.highest_value = 2
                region_date.most_valued_observation = sign
            if 'Ingen faretegn observert' in sign.DangerSignName and region_date.highest_value < 1:
                region_date.highest_value = 1
                region_date.most_valued_observation = sign

    return date_region
def get_avalanche_index(observations):
    """Map observations with avalanche-activity information to avalanche indexes.

    All Regobs tables containing information on avalanche activity are
    mapped: AvalancheActivityObs, AvalancheActivityObs2, AvalancheObs and
    DangerObs. The index definition is found in the
    input/aval_dl_order_of_size_and_num.csv configuration file.
    Observations not containing info on avalanche activity are ignored.

    :param observations: [list] of observations
    :return avalanche_indexes: [list] of class AvalancheIndex
    """
    # Index definition used for all mappings below.
    index_definition = rf.read_configuration_file(
        '{0}aval_dl_order_of_size_and_num.csv'.format(
            env.matrix_configurations), AvalancheIndex)

    # Bucket the observations by type; anything else is ignored.
    activities = [o for o in observations if isinstance(o, go.AvalancheActivityObs)]
    activities_2 = [o for o in observations if isinstance(o, go.AvalancheActivityObs2)]
    single_avalanches = [o for o in observations if isinstance(o, go.AvalancheObs)]
    signs = [o for o in observations if isinstance(o, go.DangerSign)]

    avalanche_indexes = []

    for obs in activities:
        ai = AvalancheIndex()
        ai.set_num_and_size_and_index(obs.EstimatedNumName,
                                      obs.DestructiveSizeName,
                                      index_definition)
        ai.set_date_region_observation(obs.DtAvalancheTime,
                                       obs.ForecastRegionName, obs)
        avalanche_indexes.append(ai)

    for obs in activities_2:
        # Only observations with a complete time window are usable.
        if obs.DtStart and obs.DtEnd:
            ai = AvalancheIndex()
            ai.set_num_and_size_and_index(obs.EstimatedNumName,
                                          obs.DestructiveSizeName,
                                          index_definition)
            # Activity date is the midpoint of the DtStart-DtEnd window.
            midpoint = obs.DtStart + (obs.DtEnd - obs.DtStart) / 2
            ai.set_date_region_observation(midpoint.date(),
                                           obs.ForecastRegionName, obs)
            avalanche_indexes.append(ai)

    for obs in single_avalanches:
        ai = AvalancheIndex()
        # "Ett (1)" makes sure the number is never None for single avalanches.
        ai.set_num_and_size_and_index("Ett (1)", obs.DestructiveSizeName,
                                      index_definition)
        ai.set_date_region_observation(obs.DtAvalancheTime,
                                       obs.ForecastRegionName, obs)
        avalanche_indexes.append(ai)

    for sign in signs:
        ai = AvalancheIndex()
        if 'Ferske skred' in sign.DangerSignName:
            ai.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in sign.DangerSignName:
            ai.set_no_avalanche_activity()
        else:
            # Other danger signs carry no avalanche information.
            continue
        ai.set_date_region_observation(sign.DtObsTime,
                                       sign.ForecastRegionName, sign)
        avalanche_indexes.append(ai)

    return avalanche_indexes
def get_avalanche_index(from_date, to_date, region_ids=None, observer_ids=None):
    """Fetch regObs avalanche-activity data and map it to avalanche indexes.

    All regObs tables containing information on avalanche activity are
    mapped: AvalancheActivityObs, AvalancheActivityObs2, AvalancheObs and
    DangerObs. The index definition is found in the
    input/aval_dl_order_of_size_and_num.csv configuration file.

    NOTE(review): this def shares its name with an earlier get_avalanche_index
    in this module and shadows it at import time — confirm that is intended.

    :param from_date:
    :param to_date:
    :param region_ids:
    :param observer_ids:
    :return avalanche_indexes: [list] of class AvalancheIndex
    """
    # Fetch all relevant observation types for the requested window.
    activities = go.get_avalanche_activity(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    activities_2 = go.get_avalanche_activity_2(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    single_avalanches = go.get_avalanche(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    signs = go.get_danger_sign(
        from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)

    # Index definition used for all mappings below.
    index_definition = rf.read_configuration_file(
        '{0}aval_dl_order_of_size_and_num.csv'.format(
            env.matrix_configurations), AvalancheIndex)

    avalanche_indexes = []

    for obs in activities:
        ai = AvalancheIndex()
        ai.set_num_and_size_and_index(obs.EstimatedNumName,
                                      obs.DestructiveSizeName,
                                      index_definition)
        ai.set_date_region_observation(obs.DtAvalancheTime,
                                       obs.ForecastRegionName, obs)
        avalanche_indexes.append(ai)

    for obs in activities_2:
        # Only observations with a complete time window are usable.
        if obs.DtStart and obs.DtEnd:
            ai = AvalancheIndex()
            ai.set_num_and_size_and_index(obs.EstimatedNumName,
                                          obs.DestructiveSizeName,
                                          index_definition)
            # Activity date is the midpoint of the DtStart-DtEnd window.
            midpoint = obs.DtStart + (obs.DtEnd - obs.DtStart) / 2
            ai.set_date_region_observation(midpoint.date(),
                                           obs.ForecastRegionName, obs)
            avalanche_indexes.append(ai)

    for obs in single_avalanches:
        ai = AvalancheIndex()
        # "Ett (1)" makes sure the number is never None for single avalanches.
        ai.set_num_and_size_and_index("Ett (1)", obs.DestructiveSizeName,
                                      index_definition)
        ai.set_date_region_observation(obs.DtAvalancheTime,
                                       obs.ForecastRegionName, obs)
        avalanche_indexes.append(ai)

    for sign in signs:
        ai = AvalancheIndex()
        if 'Ferske skred' in sign.DangerSignName:
            ai.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in sign.DangerSignName:
            ai.set_no_avalanche_activity()
        else:
            # Other danger signs carry no avalanche information.
            continue
        ai.set_date_region_observation(sign.DtObsTime,
                                       sign.ForecastRegionName, sign)
        avalanche_indexes.append(ai)

    return avalanche_indexes
# NOTE(review): this fragment starts mid-statement — the call it closes begins
# before the visible chunk (presumably an mp.pickle_anything(...) for step 1;
# confirm against the full file).
        '{0}runforavalancheactivity_step_1.pickle'.format(
            env.local_storage))

    ### Find the observation of highest value per region per date
    date_region, forecasted_dangers = mp.unpickle_anything(
        '{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    date_region = step_2_find_most_valued(date_region)
    mp.pickle_anything([date_region, forecasted_dangers],
                       '{0}runforavalancheactivity_step_2.pickle'.format(
                           env.local_storage))

    ### Ready to add to count elements: tally occurrences against the
    ### activity/danger configuration matrix.
    date_region, forecasted_dangers = mp.unpickle_anything(
        '{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    elements = rf.read_configuration_file(
        '{0}aval_dl_configuration.csv'.format(env.matrix_configurations),
        ActivityAndDanger)
    elements = step_3_count_occurances(date_region, elements)
    mp.pickle_anything([date_region, forecasted_dangers, elements],
                       '{0}runforavalancheactivity_step_3.pickle'.format(
                           env.local_storage))

    ### Ready to plot: load the step-3 results and produce the season plot.
    date_region, forecasted_dangers, elements = mp.unpickle_anything(
        '{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))
    step_4_plot(
        date_region, forecasted_dangers, elements,
        '{0}Avalanches and dangers {1}'.format(env.plot_folder, season),
        season)

    # Do a count on observations..