def load_inputs(cfg: dict) -> Tuple[GeoDataFrame, ...]:
    """
    Loads all of the inputs specified by the `cfg` and returns a tuple of
    :class:`GeoDataFrame` objects: the spatial-magnitude bins, the observed
    earthquake catalog, and — when a prospective catalog is configured —
    the prospective earthquake catalog.

    :param cfg:
        Configuration for the evaluations, such as that parsed from the
        YAML config file.

    :returns:
        ``(bin_gdf, eq_gdf)`` or, when ``cfg["input"]["prospective_catalog"]``
        is present, ``(bin_gdf, eq_gdf, pro_gdf)``.
    """
    rupture_gdf = load_ruptures_from_ssm(cfg)

    # Hoist the repeatedly-used bin configuration section.
    bin_cfg = cfg["input"]["bins"]
    bin_gdf = make_bin_gdf_from_rupture_gdf(
        rupture_gdf,
        h3_res=bin_cfg["h3_res"],
        min_mag=bin_cfg["mfd_bin_min"],
        max_mag=bin_cfg["mfd_bin_max"],
        bin_width=bin_cfg["mfd_bin_width"],
    )

    # Lazy %-style args avoid formatting when the level is disabled.
    logger.info("bin_gdf shape: %s", bin_gdf.shape)
    logger.info("rupture_gdf shape: %s", rupture_gdf.shape)
    logger.debug(
        "rupture_gdf memory: %s GB",
        sum(rupture_gdf.memory_usage(index=True, deep=True)) * 1e-9,
    )

    logger.info("adding ruptures to bins")
    add_ruptures_to_bins(rupture_gdf, bin_gdf)

    # Optionally restrict the bins to a geographic subset. Tolerate a config
    # that omits the "subset" section entirely (the original indexing raised
    # KeyError in that case, unlike the "prospective_catalog" check below).
    subset_cfg = cfg["input"].get("subset", {})
    if subset_cfg.get("file") is not None:
        logger.info(" Subsetting bin_gdf")
        bin_gdf = subset_source(
            bin_gdf,
            subset_file=subset_cfg["file"],
            buffer=subset_cfg["buffer"],
        )

    # The per-rupture frame can be very large; release it before loading
    # the earthquake catalogs to keep peak memory down.
    del rupture_gdf

    logger.debug(
        "bin_gdf memory: %s GB",
        sum(bin_gdf.memory_usage(index=True, deep=True)) * 1e-9,
    )

    eq_gdf = load_obs_eq_catalog(cfg)

    logger.info("adding earthquakes to bins")
    add_earthquakes_to_bins(eq_gdf, bin_gdf, h3_res=bin_cfg["h3_res"])

    if "prospective_catalog" in cfg["input"]:
        logger.info("adding prospective earthquakes to bins")
        pro_gdf = load_pro_eq_catalog(cfg)
        add_earthquakes_to_bins(
            pro_gdf,
            bin_gdf,
            h3_res=bin_cfg["h3_res"],
            category="prospective",
        )
        return bin_gdf, eq_gdf, pro_gdf

    return bin_gdf, eq_gdf
def test_add_earthquakes_to_bins(self):
    """Binning the Philippine test catalog puts two M~6 events in the
    expected H3 cell's SpacemagBin."""
    catalog_csv = self.test_dir + "data/phl_eqs.csv"
    self.eq_df = make_earthquake_gdf_from_csv(catalog_csv)

    add_earthquakes_to_bins(self.eq_df, self.bin_gdf)

    spacemag_bin = self.bin_gdf.loc["836860fffffffff"].SpacemagBin
    observed = spacemag_bin.mag_bins[6.0].observed_earthquakes
    self.assertEqual(len(observed), 2)