Example #1
@pytest.fixture
def example_data_set(tmpdir):
    """
    Fixture creating a small example file.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording",
                               event_id=data_set.events[0])

    # Flush and finish writing.
    del data_set

    # Return the filename and the path to the temporary directory so the same
    # data set can be reused instead of being recreated every time.
    return Namespace(filename=asdf_filename, tmpdir=tmpdir.strpath)
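For context, a fixture like this is consumed by naming it as a test argument. Below is a minimal usage sketch, assuming the fixture is available to the test module (e.g. via a conftest.py); the test name and assertions are illustrative, not from the original source.

from pyasdf import ASDFDataSet


def test_example_data_set_fixture(example_data_set):
    # Reopen the file written by the fixture and check that the event and the
    # waveforms made it into the ASDF volume.
    data_set = ASDFDataSet(example_data_set.filename)
    assert len(data_set.events) == 1
    assert len(data_set.waveforms.list()) > 0
    del data_set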
Example #2
def combine_asdf(base_asdf_path, append_asdf_path, output_asdf_path):
    """
    combine_asdf: merge the waveforms in append_asdf to base_asdf, and generate a new asdf.
    """
    base_asdf = ASDFDataSet(base_asdf_path, mode="r", mpi=False)
    append_asdf = ASDFDataSet(append_asdf_path, mode="r", mpi=False)
    output_asdf = ASDFDataSet(output_asdf_path, mpi=False)
    # * add events
    events = base_asdf.events
    event = events[0]
    output_asdf.add_quakeml(events)
    # * add waveforms and stationxml
    # First copy everything from the base ASDF file. All stations are expected
    # to share a single waveform tag; remember the tag of the first station so
    # the loops below can verify this.
    rep_net_sta = base_asdf.waveforms.list()[0]
    tag_default = base_asdf.waveforms[rep_net_sta].get_waveform_tags()[0]
    for each_net_sta in base_asdf.waveforms.list():
        tag = base_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = base_asdf.waveforms[each_net_sta][tag]
        inv = base_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    # Then copy the waveforms and StationXML from the ASDF file being appended.
    for each_net_sta in append_asdf.waveforms.list():
        tag = append_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = append_asdf.waveforms[each_net_sta][tag]
        inv = append_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    del base_asdf
    del append_asdf
    del output_asdf
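A short invocation sketch for the helper above; the file names are placeholders, not from the original source.

if __name__ == "__main__":
    # Merge two single-event ASDF volumes into one combined output file.
    combine_asdf("event_base.h5", "event_extra.h5", "event_combined.h5")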
Example #3
def test_data_set_creation(tmpdir):
    """
    Test data set creation with a small test dataset.

    It checks that the data that goes in is correctly saved and can be
    retrieved again.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording")

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    # Flush and finish writing.
    del data_set

    # Open once again
    data_set = ASDFDataSet(asdf_filename)

    # ObsPy is tested enough to make this comparison meaningful.
    for station in (("AE", "113A"), ("TA", "POKR")):
        # Test the waveforms
        stream_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).raw_recording
        stream_file = obspy.read(os.path.join(
            data_path, "%s.%s.*.mseed" % station))
        # Delete the file format specific stats attributes. These are
        # meaningless inside ASDF data sets.
        for trace in stream_file:
            del trace.stats.mseed
            del trace.stats._format
        for trace in stream_asdf:
            del trace.stats.asdf
            del trace.stats._format
        assert stream_asdf == stream_file

        # Test the inventory data.
        inv_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).StationXML
        inv_file = obspy.read_inventory(
            os.path.join(data_path, "%s.%s..BH*.xml" % station))
        assert inv_file == inv_asdf
    # Test the event.
    cat_file = obspy.read_events(os.path.join(data_path, "quake.xml"))
    cat_asdf = data_set.events
    assert cat_file == cat_asdf
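The attribute-style access used above (e.g. data_set.waveforms.AE_113A.raw_recording) also has a dictionary-style equivalent. A brief sketch, reopening the same kind of file; the file name is a placeholder.

from pyasdf import ASDFDataSet

ds = ASDFDataSet("test.h5", mode="r")
# "NET.STA" keys and waveform tags work as item lookups too.
st = ds.waveforms["AE.113A"]["raw_recording"]
inv = ds.waveforms["AE.113A"]["StationXML"]
del ds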
Example #4
def convert_to_asdf(filelist,
                    asdf_fn,
                    quakemlfile,
                    staxml_filelist=None,
                    tag=None):
    """
    Convert files (SAC or miniSEED) to an ASDF file.
    """

    nfiles = len(filelist)
    if nfiles == 0:
        print "No file specified. Return..."
        return

    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn)

    # Add event
    if quakemlfile is not None and os.path.exists(quakemlfile):
        print "Event info added"
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
    else:
        raise ValueError("No Event file")

    # Add waveforms.
    print "Adding Waveform data"
    for _i, filename in enumerate(filelist):
        if os.path.exists(filename):
            #print("Adding file %i of %i: %s" % (_i + 1,
            #       len(filelist), os.path.basename(filename)))
            ds.add_waveforms(filename, tag=tag, event_id=event)
        else:
            print("File not exist %i of %i")

    # Add StationXML files.
    if staxml_filelist is not None and len(staxml_filelist) > 0:
        for _i, filename in enumerate(staxml_filelist):
            if os.path.exists(filename):
                #print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
                ds.add_stationxml(filename)
    else:
        print("No stationxml added")
Example #5
def convert_to_asdf(filelist, asdf_fn, quakemlfile, staxml_filelist=None, tag=None):
    """
    Convert files (SAC or miniSEED) to an ASDF file.
    """

    nfiles = len(filelist)
    if nfiles == 0:
        print "No file specified. Return..."
        return

    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn)

    # Add event
    if quakemlfile is not None and os.path.exists(quakemlfile):
        print "Event info added"
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
    else:
        raise ValueError("No Event file")

    # Add waveforms.
    print "Adding Waveform data"
    for _i, filename in enumerate(filelist):
        if os.path.exists(filename):
            #print("Adding file %i of %i: %s" % (_i + 1, 
            #       len(filelist), os.path.basename(filename)))
            ds.add_waveforms(filename, tag=tag, event_id=event)
        else:
            print("File not exist %i of %i")

    # Add StationXML files.
    if staxml_filelist is not None and len(staxml_filelist) > 0: 
        for _i, filename in enumerate(staxml_filelist):
            if os.path.exists(filename):
                #print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
                ds.add_stationxml(filename)
    else:
        print("No stationxml added")
Example #6
import glob
import os

from pyasdf import ASDFDataSet

filename = "synthetic.h5"

if os.path.exists(filename):
    raise Exception("File '%s' exists." % filename)

ds = ASDFDataSet(filename)

# Add event
ds.add_quakeml("./GCMT_event_SOUTH_SANDWICH_ISLANDS_REGION_Mag_5.6_2010-3-11-6.xml")
event = ds.events[0]

# Add waveforms.
filenames = glob.glob("./SYNTHETIC_SAC/*.sem")
for _i, filename in enumerate(filenames):
    print("Adding SAC file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_waveforms(filename, tag="synthetic", event_id=event)

# Add StationXML files.
filenames = glob.glob("./StationXML/*.xml")
for _i, filename in enumerate(filenames):
    print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_stationxml(filename)
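Once the script above has run, the synthetic traces can be read back by station and tag. A minimal sketch, assuming the file was written as above; which stations exist depends on the input data.

from pyasdf import ASDFDataSet

ds = ASDFDataSet("synthetic.h5", mode="r")
print(ds.waveforms.list())           # available NET.STA entries
first_station = ds.waveforms[ds.waveforms.list()[0]]
st = first_station.synthetic         # ObsPy Stream stored under the "synthetic" tag
print(st)
del ds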
Example #7
            # Note: this excerpt begins mid-call; the enclosing assignment and
            # the Inventory/Network constructors are reconstructed here as an
            # assumption based on the closing brackets below. Variables such as
            # network, station, and group2 come from the omitted outer loop.
            inventory = obspy.core.inventory.Inventory(
                networks=[
                    obspy.core.inventory.Network(
                        code=network,
                        stations=[
                            obspy.core.inventory.Station(
                                code=station,
                                latitude=group2[0][2],
                                longitude=group2[0][3],
                                elevation=group2[0][4],
                                start_date=starttime,
                                end_date=endtime,
                                creation_date=obspy.UTCDateTime(),
                                site=obspy.core.inventory.Site(name=""),
                                channels=[
                                    obspy.core.inventory.Channel(
                                        code=channel,
                                        location_code=location,
                                        latitude=latitude,
                                        longitude=longitude,
                                        elevation=elevation,
                                        depth=depth,
                                        start_date=starttime,
                                        end_date=endtime)
                                ])
                        ])
                ],
                source="pyasdf sac2asdf converter")
            ds.add_stationxml(inventory)

    # add sac_headers as auxiliary data
    ds.add_auxiliary_data_file(dumpjson(headers), path='SacHeaders')
    del ds
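The JSON blob written with add_auxiliary_data_file can be retrieved later from the auxiliary data section. A rough sketch, assuming pyasdf's behaviour of storing such files under the "Files" auxiliary-data type with a file-like .file accessor; the output file name is a placeholder.

import json

from pyasdf import ASDFDataSet

ds = ASDFDataSet("observed.h5", mode="r")
# Assumed access path: auxiliary_data.Files.<path> with a file-like .file attribute.
raw = ds.auxiliary_data.Files.SacHeaders.file.read()
headers = json.loads(raw.decode())
del ds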