from collections import namedtuple

import numpy as np
import numpy.testing as npt
from obspy import UTCDateTime
from pyadjoint import AdjointSource

# `sa` is the module under test (provides dump_adjsrc / load_to_adjsrc) and
# `adjoint_equal` is a local test helper; their import paths are
# project-specific and assumed here.


def test_dump_adjsrc():
    array = np.array([1., 2., 3., 4., 5.])
    adj = AdjointSource(
        "cc_traveltime_misfit", 2.0, 1.0, 17, 40, "BHZ",
        adjoint_source=array, network="II", station="AAK",
        location="", starttime=UTCDateTime(1990, 1, 1))
    station_info = {"latitude": 1.0, "longitude": 2.0,
                    "depth_in_m": 3.0, "elevation_in_m": 4.0}

    adj_array, adj_path, parameters = sa.dump_adjsrc(adj, station_info)

    npt.assert_array_almost_equal(adj_array, array)
    for key in station_info:
        npt.assert_almost_equal(station_info[key], parameters[key])

    assert adj_path == "II_AAK_BHZ"
    npt.assert_almost_equal(parameters["misfit"], 2.0)
    npt.assert_almost_equal(parameters["dt"], 1.0)
    npt.assert_almost_equal(parameters["min_period"], 17.0)
    npt.assert_almost_equal(parameters["max_period"], 40.0)
    assert parameters["adjoint_source_type"] == "cc_traveltime_misfit"
    assert parameters["station_id"] == "II.AAK"
    assert parameters["component"] == "BHZ"
    assert UTCDateTime(parameters["starttime"]) == UTCDateTime(1990, 1, 1)
    assert parameters["units"] == "m"
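# For orientation only: a minimal sketch of the dump_adjsrc contract,
# reconstructed purely from the assertions in test_dump_adjsrc above.
# Attribute names follow pyadjoint's AdjointSource; the project's real
# dump_adjsrc may differ (e.g. in how `starttime` is serialized).
def _dump_adjsrc_sketch(adj, station_info):
    parameters = {"misfit": adj.misfit, "dt": adj.dt,
                  "min_period": adj.min_period, "max_period": adj.max_period,
                  "adjoint_source_type": adj.adj_src_type,
                  "station_id": "%s.%s" % (adj.network, adj.station),
                  "component": adj.component,
                  "starttime": str(adj.starttime),
                  "units": "m"}
    parameters.update(station_info)
    adj_path = "%s_%s_%s" % (adj.network, adj.station, adj.component)
    return adj.adjoint_source, adj_path, parameters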
# Module-level imports assumed for the function below; `dump_adjsrc` lives in
# the same module (exercised above as sa.dump_adjsrc).
import os

from pyasdf import ASDFDataSet


def save_adjoint_to_asdf(outputfile, events, adjoint_sources, stations):
    """
    Save events (obspy.Catalog) and adjoint sources, together with
    station information, to an ASDF file on disk.
    """
    print("=" * 15 + "\nWrite to file: %s" % outputfile)
    outputdir = os.path.dirname(outputfile)
    if outputdir and not os.path.exists(outputdir):
        os.makedirs(outputdir)
    if os.path.exists(outputfile):
        print("Output file exists and removed: %s" % outputfile)
        os.remove(outputfile)

    ds = ASDFDataSet(outputfile, mode='a', compression=None)
    ds.add_quakeml(events)
    for adj_id in sorted(adjoint_sources):
        adj = adjoint_sources[adj_id]
        sta_tag = "%s_%s" % (adj.network, adj.station)
        sta_info = stations[sta_tag]
        adj_array, adj_path, parameters = dump_adjsrc(adj, sta_info)
        ds.add_auxiliary_data(adj_array, data_type="AdjointSources",
                              path=adj_path, parameters=parameters)
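# Usage sketch for save_adjoint_to_asdf (illustrative names only): `cat` is an
# obspy Catalog and `adj` a pyadjoint AdjointSource like the one built in the
# tests; station keys must follow the "NET_STA" convention expected above.
def _example_save(cat, adj):
    adjoint_sources = {"II.AAK..BHZ": adj}
    stations = {"II_AAK": {"latitude": 1.0, "longitude": 2.0,
                           "depth_in_m": 3.0, "elevation_in_m": 4.0}}
    save_adjoint_to_asdf("outputdir/adjoint.h5", cat, adjoint_sources, stations)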
def test_load_to_adjsrc():
    array = np.array([1., 2., 3., 4., 5.])
    adj = AdjointSource(
        "cc_traveltime_misfit", 2.0, 1.0, 17, 40, "BHZ",
        adjoint_source=array, network="II", station="AAK",
        location="", starttime=UTCDateTime(1990, 1, 1))
    station_info = {"latitude": 1.0, "longitude": 2.0,
                    "depth_in_m": 3.0, "elevation_in_m": 4.0}

    adj_array, adj_path, parameters = sa.dump_adjsrc(adj, station_info)

    # assemble a fake adjoint source mimicking an hdf5 auxiliary-data entry
    HDF5Adj = namedtuple("HDF5Adj", ["data", "parameters"])
    hdf5_adj = HDF5Adj(data=array, parameters=parameters)

    # load and check
    loaded_adj, loaded_station_info = sa.load_to_adjsrc(hdf5_adj)
    adjoint_equal(loaded_adj, adj)

    for k in station_info:
        npt.assert_almost_equal(station_info[k], loaded_station_info[k])
    assert loaded_station_info["station"] == "AAK"
    assert loaded_station_info["network"] == "II"
    assert loaded_station_info["location"] == ""
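# Round-trip sketch: read the auxiliary data written by save_adjoint_to_asdf
# back from disk and rebuild AdjointSource objects with load_to_adjsrc.
# Uses pyasdf's auxiliary_data accessor, whose entries expose `.data` and
# `.parameters` just like the namedtuple fake in test_load_to_adjsrc.
def _example_load(asdf_file):
    ds = ASDFDataSet(asdf_file, mode="r")
    adjoint_sources = {}
    for adj_path in ds.auxiliary_data.AdjointSources.list():
        hdf5_adj = ds.auxiliary_data.AdjointSources[adj_path]
        adj, sta_info = sa.load_to_adjsrc(hdf5_adj)
        adjoint_sources[adj_path] = adj
    return adjoint_sources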