def get_avalanche_index(from_date, to_date, region_ids=None, observer_ids=None):
    """Build AvalancheIndex objects for all avalanche related observations in a period.

    Four observation sources are combined: avalanche activity (old and v2 forms),
    single avalanche observations and danger signs. Each observation becomes one
    AvalancheIndex with its index value looked up in the configured size/number table.

    :param from_date:       [date] start of the period
    :param to_date:         [date] end of the period
    :param region_ids:      [int or list of ints] optional region filter
    :param observer_ids:    [int or list of ints] optional observer filter
    :return:                [list of AvalancheIndex]
    """

    # request all four observation types for the period
    activity_obs = go.get_avalanche_activity(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    activity_obs_2 = go.get_avalanche_activity_2(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    single_avalanches = go.get_avalanche(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)
    sign_obs = go.get_danger_sign(from_date, to_date, region_ids=region_ids, observer_ids=observer_ids)

    # the table mapping (estimated number, destructive size) -> index value
    index_definition = rf.read_configuration_file('{0}aval_dl_order_of_size_and_num.csv'.format(env.input_folder), AvalancheIndex)

    indexes = []

    # avalanche activity (old form): indexed on reported number and size
    # NOTE(review): DtAvalancheTime is passed as-is here (no .date() call as in
    # the v2 loop below) — presumably it already carries the wanted resolution; confirm.
    for obs in activity_obs:
        index = AvalancheIndex()
        index.set_num_and_size_and_index(obs.EstimatedNumName, obs.DestructiveSizeName, index_definition)
        index.set_date_region_observation(obs.DtAvalancheTime, obs.ForecastRegionName, obs)
        indexes.append(index)

    # avalanche activity v2: dated at the midpoint of the observation interval
    for obs in activity_obs_2:
        index = AvalancheIndex()
        index.set_num_and_size_and_index(obs.EstimatedNumName, obs.DestructiveSizeName, index_definition)
        # Activity date is the avarage of DtStart and DtEnd
        midpoint = obs.DtStart + (obs.DtEnd - obs.DtStart) / 2
        index.set_date_region_observation(midpoint.date(), obs.ForecastRegionName, obs)
        indexes.append(index)

    # single avalanches: a fixed count of one so the size lookup never gets None
    for obs in single_avalanches:
        index = AvalancheIndex()
        # Make sure size is not None
        index.set_num_and_size_and_index("Ett (1)", obs.DestructiveSizeName, index_definition)
        index.set_date_region_observation(obs.DtAvalancheTime, obs.ForecastRegionName, obs)
        indexes.append(index)

    # danger signs: only "fresh avalanches" and "no danger signs observed" matter
    for obs in sign_obs:
        index = AvalancheIndex()
        if 'Ferske skred' in obs.DangerSignName:
            index.set_avalanches_as_dangersign()
        elif 'Ingen faretegn observert' in obs.DangerSignName:
            index.set_no_avalanche_activity()
        else:
            # any other danger sign does not contribute to the avalanche index
            continue
        index.set_date_region_observation(obs.DtObsTime, obs.ForecastRegionName, obs)
        indexes.append(index)

    return indexes
def step_1_make_data_set(region_id, from_date, to_date): """Makes the dataset of all observed avalanche activity (inl signs and isingle avalanches obs) and mapps to forecasts for those days. :param region_id: [int or list of ints] :param from_date: [date] :param to_date: [date] :return: """ # get all data dangers = gd.get_all_dangers(region_id, from_date, to_date) avalanches = go.get_avalanche_activity(region_id, from_date, to_date) single_avalanches = go.get_avalanche(region_id, from_date, to_date) danger_signs = go.get_danger_sign(region_id, from_date, to_date) # List of only forecasts forecasted_dangers = [] for d in dangers: if 'Forecast' in d.data_table and d.danger_level != 0: forecasted_dangers.append(d) # List of only valid activity observations observed_activity = [] for a in avalanches: if not 'Ikke gitt' in a.EstimatedNumName: observed_activity.append(a) # list of relevant danger observations danger_sign_avalanches = [] for ds in danger_signs: if 'Ferske skred' in ds.DangerSignName or 'Ingen faretegn observert' in ds.DangerSignName: danger_sign_avalanches.append(ds) # list of relevant singel avalanches observed_avalanche = [] for sa in single_avalanches: if not 'Ikke gitt' in sa.DestructiveSizeName: observed_avalanche.append(sa) # Make list of all regions pr date and append forecasts and observations. 
data_date_region = [] for d in forecasted_dangers: danger_date = d.date print '{0}'.format(danger_date) danger_region_name = d.region_name data = DataOnDateInRegion(danger_date, danger_region_name) data.forecast.append(d) for a in observed_activity: aval_date = a.DtAvalancheTime.date() aval_region_name = a.ForecastRegionName if aval_date == danger_date and aval_region_name == danger_region_name: data.avalanche_activity.append(a) for da in danger_sign_avalanches: aval_date = da.DtObsTime.date() aval_region_name = da.ForecastRegionName if aval_date == danger_date and aval_region_name == danger_region_name: data.danger_sign.append(da) for oa in observed_avalanche: aval_date = oa.DtAvalancheTime.date() aval_region_name = oa.ForecastRegionName if aval_date == danger_date and aval_region_name == danger_region_name: data.avalanche.append(oa) data_date_region.append(data) # discard days and regions where no observations present date_region = [] for d in data_date_region: if not len(d.avalanche_activity) == 0 or not len(d.danger_sign) == 0 or not len(d.avalanche) == 0: date_region.append(d) return date_region, forecasted_dangers
for k, v in ForecastRegionKDV.iteritems(): if 99 < k < 150 and v.IsActive is True: region_id.append(v.ID) ## The output plot_file_name = 'Danger level and danger sign 2015-16.png' ##################################### End of configuration ################################### pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(env.local_storage) pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(env.local_storage) if get_new: # get all data and save to pickle all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date) all_danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_id, geohazard_tid=10) mp.pickle_anything([all_danger_levels, all_danger_signs], pickle_file_name_1) else: # load data from pickle all_danger_levels, all_danger_signs = mp.unpickle_anything(pickle_file_name_1) if make_new: # for counting days with danger levels level_count = [] data = {1:[], 2:[], 3:[], 4:[], 5:[]} for dl in all_danger_levels: if dl.source == 'Varsel' and dl.danger_level is not 0: level_count.append(dl.danger_level) for ds in all_danger_signs: if dl.date == ds.DtObsTime.date() and dl.region_name in ds.ForecastRegionName: print '{0}'.format(dl.date)
## The output plot_file_name = 'Danger level and danger sign 2015-16.png' ##################################### End of configuration ################################### pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format( env.local_storage) pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format( env.local_storage) if get_new: # get all data and save to pickle all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date) all_danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_id, geohazard_tid=10) mp.pickle_anything([all_danger_levels, all_danger_signs], pickle_file_name_1) else: # load data from pickle all_danger_levels, all_danger_signs = mp.unpickle_anything( pickle_file_name_1) if make_new: # for counting days with danger levels level_count = [] data = {1: [], 2: [], 3: [], 4: [], 5: []} for dl in all_danger_levels: if dl.source == 'Varsel' and dl.danger_level is not 0: level_count.append(dl.danger_level)