def calc_tc(expo_dict, tracks, data_dir):
    """Return the tropical cyclone hazard per island group, loading it from
    ``data_dir`` if cached there and computing it from ``tracks`` otherwise.

    Parameters
    ----------
    expo_dict : dict
        Exposure per island group (iso code -> BlackMarble exposure).
    tracks : TCTracks
        Tropical cyclone tracks used to build the hazard.
    data_dir : str
        Directory where the 'tc_isl.p' cache lives (or is written).

    Returns
    -------
    dict
        TropCyclone hazard per island group iso code.
    """
    cache_file = os.path.join(data_dir, 'tc_isl.p')
    try:
        with open(cache_file, 'rb') as cache:
            tc_dict = pickle.load(cache)
        print('Loaded tc_isl:', len(tc_dict))
    except FileNotFoundError:
        # Merge all island exposures into one set of centroids.
        all_isl = BlackMarble()
        for isl_exp in expo_dict.values():
            all_isl.append(isl_exp)

        centr = Centroids()
        centr.coord = all_isl.coord
        centr.id = np.arange(centr.lat.size) + 1
        centr.region_id = all_isl.region_id

        # Compute the wind fields once over all islands ...
        tc = TropCyclone()
        tc.set_from_tracks(tracks, centr)

        # ... then split the hazard back per island group via its region id.
        tc_dict = {}
        for isl_iso, isl_exp in expo_dict.items():
            reg_id = np.unique(isl_exp.region_id)[0]
            tc_dict[isl_iso] = tc.select(reg_id=reg_id)

        save(cache_file, tc_dict)
    return tc_dict
def unzip_tif_to_py(file_gz):
    """Unzip image file, read it, flip the x axis, save values as pickle
    and remove tif.

    Parameters:
        file_gz (str): file with .gz format to unzip

    Returns:
        str (file_name of unzipped file)
        sparse.csr_matrix (nightlight)
    """
    LOGGER.info("Unzipping file %s.", file_gz)
    # Unzip next to the source file (strip only the trailing '.gz').
    file_name = path.splitext(file_gz)[0]
    with gzip.open(file_gz, 'rb') as f_in:
        with open(file_name, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
    # Use the public csc_matrix name: submodule access via sparse.csc is
    # deprecated in recent scipy releases.
    nightlight = sparse.csc_matrix(plt.imread(file_name))
    # flip X axis
    nightlight.indices = -nightlight.indices + nightlight.shape[0] - 1
    nightlight = nightlight.tocsr()
    # The raw tif is no longer needed once the matrix is in memory.
    remove(file_name)
    file_name = path.splitext(file_name)[0] + ".p"
    save(file_name, nightlight)
    return file_name, nightlight
def calc_exposure(data_dir):
    """Return the exposure per island group, loading it from ``data_dir``
    if cached there and computing it from night-light data otherwise.

    Parameters
    ----------
    data_dir : str
        Directory where the 'exp_irma.p' cache lives (or is written).

    Returns
    -------
    dict
        BlackMarble exposure per country iso code.
    """
    cache_file = os.path.join(data_dir, 'exp_irma.p')
    try:
        with open(cache_file, 'rb') as cache:
            expo_dict = pickle.load(cache)
        print('Loaded exp_irma:', len(expo_dict))
    except FileNotFoundError:
        expo_dict = dict()
        for cntry, cntry_iso in zip(CNTRIES, CNTRIES_ISO):
            ent = BlackMarble()
            if cntry == 'Netherlands':
                # Only the two Dutch Caribbean islands; their value is scaled
                # to the islands' GDP times the income-group factor.
                ent.set_countries({cntry: ['St. Eustatius', 'Saba']}, YEAR,
                                  res_km=RESOL, poly_val=POLY_VAL)
                ent.value = ent.value / ent.value.sum() * GDP_NLD_ISL * (
                    INC_GRP['NLD'] + 1)
            else:
                ent.set_countries([cntry], YEAR, res_km=RESOL,
                                  poly_val=POLY_VAL,
                                  **{'gdp': GDP, 'inc_grp': INC_GRP})
            expo_dict[cntry_iso] = ent
        save(cache_file, expo_dict)
    return expo_dict
def unzip_tif_to_py(file_gz):
    """Unzip image file, read it, flip the x axis, save values as pickle
    and remove tif.

    Parameters
    ----------
    file_gz : str
        file with .gz format to unzip

    Returns
    -------
    fname : str
        file_name of unzipped file
    nightlight : sparse.csr_matrix
    """
    LOGGER.info("Unzipping file %s.", file_gz)
    # NOTE(review): Path(file_gz).stem drops the directory part, so the
    # intermediate tif is written to the current working directory even when
    # file_gz lives elsewhere -- presumably callers pass a bare filename or
    # chdir beforehand; confirm before changing.
    file_name = Path(Path(file_gz).stem)
    with gzip.open(file_gz, 'rb') as f_in:
        with file_name.open('wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
    # Use the public csc_matrix name: submodule access via sparse.csc is
    # deprecated in recent scipy releases.
    nightlight = sparse.csc_matrix(plt.imread(file_name))
    # flip X axis
    nightlight.indices = -nightlight.indices + nightlight.shape[0] - 1
    nightlight = nightlight.tocsr()
    # The raw tif is no longer needed once the matrix is in memory.
    file_name.unlink()
    file_path = SYSTEM_DIR.joinpath(file_name.stem + ".p")
    save(file_path, nightlight)
    return file_name, nightlight
def calc_tc(expo_dict, tracks, data_dir, pool):
    """Return the tropical cyclone hazard per island group, loading it from
    ``data_dir`` if cached there and computing it from ``tracks`` otherwise.

    Parameters
    ----------
    expo_dict : dict
        Exposure per island group (iso code -> BlackMarble exposure).
    tracks : TCTracks
        Tropical cyclone tracks used to build the hazard.
    data_dir : str
        Directory where the 'tc_isl.p' cache lives (or is written).
    pool : object
        Process pool handed to TropCyclone for parallel computation.

    Returns
    -------
    dict
        TropCyclone hazard per island group iso code.
    """
    cache_file = os.path.join(data_dir, 'tc_isl.p')
    try:
        with open(cache_file, 'rb') as cache:
            tc_dict = pickle.load(cache)
        print('Loaded tc_isl:', len(tc_dict))
    except FileNotFoundError:
        # Stack all island exposures and derive one centroid set from them.
        all_isl = BlackMarble(pd.concat(list(expo_dict.values())))
        centr = Centroids()
        centr.set_lat_lon(all_isl.latitude.values, all_isl.longitude.values)
        centr.region_id = all_isl.region_id.values
        centr.check()

        # Compute the wind fields once over all islands ...
        tc = TropCyclone(pool)
        tc.set_from_tracks(tracks, centr)

        # ... then split the hazard back per island group via its region id.
        tc_dict = {}
        for isl_iso, isl_exp in expo_dict.items():
            reg_id = np.unique(isl_exp.region_id)[0]
            tc_dict[isl_iso] = tc.select(reg_id=reg_id)

        save(cache_file, tc_dict)
    return tc_dict
def test_entity_in_save_dir(self):
    """Saving a variable logs a message mentioning the target file name.

    (The previous docstring was copy-pasted from an unrelated test.)
    """
    ent = {'value': [1, 2, 3]}
    with self.assertLogs('climada.util.save', level='INFO') as cm:
        save('save_test.pkl', ent)
    # Scan the first records with any() instead of indexing cm.output[1]
    # directly, which would raise IndexError if only one record was emitted.
    self.assertTrue(any('save_test.pkl' in out for out in cm.output[:2]))
def test_load_pass(self):
    """A variable written with save() can be read back with load()."""
    file_name = 'save_test.pkl'
    ent = {'value': [1, 2, 3]}
    save(file_name, ent)
    res = load(file_name)
    # assertIn/assertEqual produce informative failure messages, unlike
    # assertTrue on an already-evaluated boolean expression.
    self.assertIn('value', res)
    self.assertEqual(res['value'], ent['value'])
def test_entity_in_save_dir(self):
    """save() writes the pickle into the configured save dir and logs the
    file name.

    (The previous docstring was copy-pasted from an unrelated test.)
    """
    file_name = 'save_test.pkl'
    ent = {'value': [1, 2, 3]}
    with self.assertLogs('climada.util.save', level='INFO') as cm:
        save(file_name, ent)
    self.assertTrue(
        CONFIG.local_data.save_dir.dir().joinpath(file_name).is_file())
    # Scan the first records with any() instead of indexing cm.output[1]
    # directly, which would raise IndexError if only one record was emitted.
    self.assertTrue(any(file_name in out for out in cm.output[:2]))
def test_entity_in_save_dir(self):
    """save() writes the pickle into DATA_DIR and logs the file name.

    (The previous docstring was copy-pasted from an unrelated test.)
    """
    file_name = 'save_test.pkl'
    ent = {'value': [1, 2, 3]}
    with self.assertLogs('climada.util.save', level='INFO') as cm:
        save(file_name, ent)
    self.assertTrue(os.path.isfile(os.path.join(DATA_DIR, file_name)))
    # Scan the first records with any() instead of indexing cm.output[1]
    # directly, which would raise IndexError if only one record was emitted.
    self.assertTrue(any(file_name in out for out in cm.output[:2]))
def calc_tracks(data_dir, pool):
    """Return the historical + synthetic 1-hourly track set, loading it
    from ``data_dir`` if cached there and building it from ibtracs data
    otherwise. This is the longest step to execute.

    Parameters
    ----------
    data_dir : str
        Directory where the 'sel_hist_syn_1h.p' cache lives (or is written).
    pool : object
        Process pool handed to TCTracks for parallel computing.

    Returns
    -------
    TCTracks
    """
    cache_file = os.path.join(data_dir, 'sel_hist_syn_1h.p')
    try:
        with open(cache_file, 'rb') as cache:
            sel_ibtracs = pickle.load(cache)
        print('Loaded sel_hist_syn_1h:', sel_ibtracs.size)
    except FileNotFoundError:
        # Read the historical tracks (with parallel computing via pool),
        # add synthetic ones, and resample everything to 1-hour steps.
        sel_ibtracs = TCTracks(pool)
        sel_ibtracs.read_ibtracs_netcdf(provider='usa',
                                        storm_id=C_TRACKS_NAME,
                                        correct_pres=True)
        print('num tracks hist:', sel_ibtracs.size)

        sel_ibtracs.calc_random_walk(49)
        print('num tracks hist+syn:', sel_ibtracs.size)

        sel_ibtracs.equal_timestep(1)
        save(cache_file, sel_ibtracs)
    return sel_ibtracs
def calc_tracks(data_dir):
    """Return the historical + synthetic 1-hourly track set from
    ``data_dir``, deriving it from the cached historical selection
    ('sel_hist.p') when the final pickle is missing. This is the longest
    step to execute.

    Parameters
    ----------
    data_dir : str
        Directory holding the track pickles.

    Returns
    -------
    TCTracks
    """
    try:
        with open(os.path.join(data_dir, 'sel_hist_syn_1h.p'), 'rb') as cache:
            sel_ibtracs = pickle.load(cache)
        print('Loaded sel_hist_syn_1h:', sel_ibtracs.size)
    except FileNotFoundError:
        # Fall back to the historical selection and derive the rest;
        # 'sel_hist.p' is expected to exist at this point.
        with open(os.path.join(data_dir, 'sel_hist.p'), 'rb') as cache:
            sel_ibtracs = pickle.load(cache)
        print('Loaded sel_hist:', sel_ibtracs.size)

        sel_ibtracs.calc_random_walk(49)
        print('num tracks hist+syn:', sel_ibtracs.size)
        # Cache the intermediate (pre-resampling) result as well.
        save(os.path.join(data_dir, 'sel_hist_syn.p'), sel_ibtracs)

        sel_ibtracs.equal_timestep(1)
        save(os.path.join(data_dir, 'sel_hist_syn_1h.p'), sel_ibtracs)
    return sel_ibtracs
def calc_imp(expo_dict, tc_dict, data_dir):
    """Return the impact of the TC hazard on every island group, loading
    cached results from ``data_dir`` when available.

    Parameters
    ----------
    expo_dict : dict
        Exposure per island group iso code.
    tc_dict : dict
        TropCyclone hazard per island group iso code.
    data_dir : str
        Directory where the 'imp_isl.p' cache lives (or is written).

    Returns
    -------
    dict
        Impact per island group iso code.
    """
    cache_file = os.path.join(data_dir, 'imp_isl.p')
    try:
        with open(cache_file, 'rb') as cache:
            imp_dict = pickle.load(cache)
        print('Loaded imp_isl:', len(imp_dict))
    except FileNotFoundError:
        # One Emanuel-type impact function shared by all island groups.
        if_exp = ImpactFuncSet()
        if_em = IFTropCyclone()
        if_em.set_emanuel_usa()
        if_exp.add_func(if_em)

        imp_dict = {}
        for isl_iso in expo_dict:
            imp = Impact()
            imp.calc(expo_dict[isl_iso], if_exp, tc_dict[isl_iso])
            imp_dict[isl_iso] = imp

        save(cache_file, imp_dict)
    return imp_dict