Example #1
def example_data_set(tmpdir):
    """
    Fixture creating a small example file.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording",
                               event_id=data_set.events[0])

    # Flush and finish writing.
    del data_set

    # Return filename and path to tempdir, no need to always create a
    # new one.
    return Namespace(filename=asdf_filename, tmpdir=tmpdir.strpath)
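A minimal usage sketch of the fixture above, assuming it is registered via pytest's @pytest.fixture decorator; the test name and assertions below are illustrative and not part of the original source:

from pyasdf import ASDFDataSet


def test_example_data_set_roundtrip(example_data_set):
    # Reopen the file written by the fixture and check its contents.
    ds = ASDFDataSet(example_data_set.filename)
    assert len(ds.events) == 1
    # Every station should carry the "raw_recording" tag used above.
    for net_sta in ds.waveforms.list():
        assert "raw_recording" in ds.waveforms[net_sta].get_waveform_tags()
    del ds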
Example #2
    def dump_to_asdf(self, outputfile):
        """
        Dump self.adjoint_sources into an adjoint-source ASDF file.
        """
        print("=" * 15 + "\nWrite to file: %s" % outputfile)
        if os.path.exists(outputfile):
            print("Output file exists and removed:%s" % outputfile)
            os.remove(outputfile)

        ds = ASDFDataSet(outputfile, mode='a', compression=None)
        ds.add_quakeml(self.events)
        event = self.events[0]
        origin = event.preferred_origin()
        event_time = origin.time

        for adj_id in sorted(self.adjoint_sources):
            adj = self.adjoint_sources[adj_id]
            sta_tag = "%s_%s" % (adj.network, adj.station)
            sta_info = self.stations[sta_tag]
            adj_array, adj_path, parameters = \
                dump_adjsrc(adj, sta_info, event_time)
            ds.add_auxiliary_data(adj_array,
                                  data_type="AdjointSources",
                                  path=adj_path,
                                  parameters=parameters)
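For context, the adjoint sources written above could be read back roughly as follows. This is a sketch: the output file name is a placeholder and the exact auxiliary-data paths depend on dump_adjsrc, which is not shown here.

from pyasdf import ASDFDataSet

ds = ASDFDataSet("adjoint_sources.h5", mode="r")
# Auxiliary data is addressed first by data_type, then by path.
for path in ds.auxiliary_data.AdjointSources.list():
    adj = ds.auxiliary_data.AdjointSources[path]
    print(path, adj.data[:].shape, adj.parameters)
del ds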
Example #3
def combine_asdf(base_asdf_path, append_asdf_path, output_asdf_path):
    """
    Merge the waveforms in append_asdf into base_asdf and write the
    result to a new ASDF file at output_asdf_path.
    """
    base_asdf = ASDFDataSet(base_asdf_path, mode="r", mpi=False)
    append_asdf = ASDFDataSet(append_asdf_path, mode="r", mpi=False)
    output_asdf = ASDFDataSet(output_asdf_path, mpi=False)
    # * add events
    events = base_asdf.events
    event = events[0]
    output_asdf.add_quakeml(events)
    # * add waveforms and stationxml
    # First, add the waveforms and StationXML from the base ASDF file.
    rep_net_sta = base_asdf.waveforms.list()[0]
    tag_default = base_asdf.waveforms[rep_net_sta].get_waveform_tags()[0]
    for each_net_sta in base_asdf.waveforms.list():
        tag = base_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = base_asdf.waveforms[each_net_sta][tag]
        inv = base_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    # Then, add the waveforms and StationXML from the appended ASDF file.
    for each_net_sta in append_asdf.waveforms.list():
        tag = append_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = append_asdf.waveforms[each_net_sta][tag]
        inv = append_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    del base_asdf
    del append_asdf
    del output_asdf
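A hypothetical invocation with placeholder file names. Both input files are expected to share the same waveform tag (the function asserts this), the event catalog is taken from the base file, and the output file must not already exist, since a new ASDFDataSet would otherwise append to it:

combine_asdf("event_base.h5", "event_extra.h5", "event_combined.h5")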
Example #4
def save_adjoint_to_asdf(outputfile, events, adjoint_sources, stations):
    """
    Save events (obspy.Catalog) and adjoint sources, together with
    station information, to an ASDF file on disk.
    """
    print("=" * 15 + "\nWrite to file: %s" % outputfile)
    outputdir = os.path.dirname(outputfile)
    if not os.path.exists(outputdir):
        os.makedirs(outputdir)

    if os.path.exists(outputfile):
        print("Output file exists and removed:%s" % outputfile)
        os.remove(outputfile)

    ds = ASDFDataSet(outputfile, mode='a', compression=None)
    ds.add_quakeml(events)
    for adj_id in sorted(adjoint_sources):
        adj = adjoint_sources[adj_id]
        sta_tag = "%s_%s" % (adj.network, adj.station)
        sta_info = stations[sta_tag]
        adj_array, adj_path, parameters = \
            dump_adjsrc(adj, sta_info)
        ds.add_auxiliary_data(adj_array,
                              data_type="AdjointSources",
                              path=adj_path,
                              parameters=parameters)
Example #5
def test_adding_event_in_various_manners(tmpdir):
    """
    Events can be added either as filenames, open files, BytesIOs, or ObsPy
    objects. In any case, the result should be the same.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")
    event_filename = os.path.join(data_path, "quake.xml")

    ref_cat = obspy.read_events(event_filename)

    # Add as filename
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(event_filename)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as open file.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        data_set.add_quakeml(fh)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as BytesIO.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        temp = io.BytesIO(fh.read())
    temp.seek(0, 0)
    data_set.add_quakeml(temp)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as ObsPy Catalog.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy())
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as an ObsPy event.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy()[0])
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)
Example #6
def test_data_set_creation(tmpdir):
    """
    Test data set creation with a small test dataset.

    It tests that the data that goes in is correctly saved and
    can be retrieved again.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording")

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    # Flush and finish writing.
    del data_set

    # Open once again
    data_set = ASDFDataSet(asdf_filename)

    # ObsPy is tested enough to make this comparison meaningful.
    for station in (("AE", "113A"), ("TA", "POKR")):
        # Test the waveforms
        stream_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).raw_recording
        stream_file = obspy.read(os.path.join(
            data_path, "%s.%s.*.mseed" % station))
        # Delete the file format specific stats attributes. These are
        # meaningless inside ASDF data sets.
        for trace in stream_file:
            del trace.stats.mseed
            del trace.stats._format
        for trace in stream_asdf:
            del trace.stats.asdf
            del trace.stats._format
        assert stream_asdf == stream_file

        # Test the inventory data.
        inv_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).StationXML
        inv_file = obspy.read_inventory(
            os.path.join(data_path, "%s.%s..BH*.xml" % station))
        assert inv_file == inv_asdf
    # Test the event.
    cat_file = obspy.read_events(os.path.join(data_path, "quake.xml"))
    cat_asdf = data_set.events
    assert cat_file == cat_asdf
Example #7
def convert_to_asdf(asdf_fn,
                    waveform_filelist,
                    tag,
                    quakemlfile=None,
                    staxml_filelist=None,
                    verbose=False,
                    status_bar=False,
                    create_simple_inv=False):
    """
    Convert files (SAC or MiniSEED) to ASDF.
    """

    if verbose:
        print("*" * 10 + " ASDF Converter " + "*" * 10)

    nwaveform = len(waveform_filelist)
    if nwaveform == 0:
        print("No file specified. Return...")
        return
    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn, mode='a')

    # Add event
    if quakemlfile:
        if not os.path.exists(quakemlfile):
            raise ValueError("Quakeml file not exists:%s" % quakemlfile)
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
        if status_bar:
            drawProgressBar(1.0, "Adding Quakeml data")
    else:
        raise ValueError("No Event file")

    sta_dict = add_waveform_to_asdf(ds,
                                    waveform_filelist,
                                    tag,
                                    event=event,
                                    create_simple_inv=create_simple_inv,
                                    status_bar=status_bar)

    add_stationxml_to_asdf(ds,
                           staxml_filelist,
                           event=event,
                           create_simple_inv=create_simple_inv,
                           sta_dict=sta_dict,
                           status_bar=status_bar)

    if verbose:
        print("ASDF filesize: %s" % ds.pretty_filesize)
    del ds
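A hypothetical call to the converter above; paths and tag are placeholders, and add_waveform_to_asdf / add_stationxml_to_asdf are helpers defined elsewhere in the same module:

import glob

convert_to_asdf("observed.h5",
                glob.glob("waveforms/*.mseed"),
                "observed",
                quakemlfile="quake.xml",
                staxml_filelist=glob.glob("stations/*.xml"),
                verbose=True,
                status_bar=False)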
Example #8
def test_adding_same_event_twice_raises(tmpdir):
    """
    Adding the same event twice raises.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    # Add once, all good.
    data_set.add_quakeml(os.path.join(data_path, "quake.xml"))
    assert len(data_set.events) == 1

    # Adding again should raise an error.
    with pytest.raises(ValueError):
        data_set.add_quakeml(os.path.join(data_path, "quake.xml"))
Example #9
def convert_to_asdf(filelist,
                    asdf_fn,
                    quakemlfile,
                    staxml_filelist=None,
                    tag=None):
    """
    Convert files (SAC or MiniSEED) to ASDF.
    """

    nfiles = len(filelist)
    if nfiles == 0:
        print "No file specified. Return..."
        return

    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn)

    # Add event
    if quakemlfile is not None and os.path.exists(quakemlfile):
        print "Event info added"
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
    else:
        raise ValueError("No Event file")

    # Add waveforms.
    print "Adding Waveform data"
    for _i, filename in enumerate(filelist):
        if os.path.exists(filename):
            #print("Adding file %i of %i: %s" % (_i + 1,
            #       len(filelist), os.path.basename(filename)))
            ds.add_waveforms(filename, tag=tag, event_id=event)
        else:
            print("File not exist %i of %i")

    # Add StationXML files.
    if staxml_filelist is not None and len(staxml_filelist) > 0:
        for _i, filename in enumerate(staxml_filelist):
            if os.path.exists(filename):
                #print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
                ds.add_stationxml(filename)
    else:
        print("No stationxml added")
Example #10
def convert_to_asdf(asdf_fn, waveform_filelist, tag, quakemlfile=None,
                    staxml_filelist=None, verbose=False, status_bar=False,
                    create_simple_inv=False):
    """
    Convert files (SAC or MiniSEED) to ASDF.
    """

    if verbose:
        print("*"*10 + " ASDF Converter " + "*"*10)

    nwaveform = len(waveform_filelist)
    if nwaveform == 0:
        print("No file specified. Return...")
        return
    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn, mode='a')

    # Add event
    if quakemlfile:
        if not os.path.exists(quakemlfile):
            raise ValueError("Quakeml file not exists:%s" % quakemlfile)
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
        if status_bar:
            drawProgressBar(1.0, "Adding Quakeml data")
    else:
        raise ValueError("No Event file")

    sta_dict = add_waveform_to_asdf(ds, waveform_filelist, tag, event=event,
                                    create_simple_inv=create_simple_inv,
                                    status_bar=status_bar)

    add_stationxml_to_asdf(ds, staxml_filelist, event=event,
                           create_simple_inv=create_simple_inv,
                           sta_dict=sta_dict,
                           status_bar=status_bar)

    if verbose:
        print("ASDF filesize: %s" % ds.pretty_filesize)
    del ds
Example #11
def convert_to_asdf(filelist, asdf_fn, quakemlfile, staxml_filelist=None, tag=None):
    """
    Convert files (SAC or MiniSEED) to ASDF.
    """

    nfiles = len(filelist)
    if nfiles == 0:
        print "No file specified. Return..."
        return

    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn)

    # Add event
    if quakemlfile is not None and os.path.exists(quakemlfile):
        print "Event info added"
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
    else:
        raise ValueError("No Event file")

    # Add waveforms.
    print "Adding Waveform data"
    for _i, filename in enumerate(filelist):
        if os.path.exists(filename):
            #print("Adding file %i of %i: %s" % (_i + 1, 
            #       len(filelist), os.path.basename(filename)))
            ds.add_waveforms(filename, tag=tag, event_id=event)
        else:
            print("File not exist %i of %i")

    # Add StationXML files.
    if staxml_filelist is not None and len(staxml_filelist) > 0: 
        for _i, filename in enumerate(staxml_filelist):
            if os.path.exists(filename):
                #print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
                ds.add_stationxml(filename)
    else:
        print("No stationxml added")
Example #12
def save_adjoint_to_asdf(outputfile, events, adjoint_sources, stations):
    """
    Save events (obspy.Catalog) and adjoint sources, together with
    station information, to an ASDF file on disk.
    """
    print("="*15 + "\nWrite to file: %s" % outputfile)
    outputdir = os.path.dirname(outputfile)
    if not os.path.exists(outputdir):
        os.makedirs(outputdir)

    if os.path.exists(outputfile):
        print("Output file exists and removed:%s" % outputfile)
        os.remove(outputfile)

    ds = ASDFDataSet(outputfile, mode='a', compression=None)
    ds.add_quakeml(events)
    for adj_id in sorted(adjoint_sources):
        adj = adjoint_sources[adj_id]
        sta_tag = "%s_%s" % (adj.network, adj.station)
        sta_info = stations[sta_tag]
        adj_array, adj_path, parameters = \
            dump_adjsrc(adj, sta_info)
        ds.add_auxiliary_data(adj_array, data_type="AdjointSources",
                              path=adj_path, parameters=parameters)
Example #13
def test_saving_event_id(tmpdir):
    """
    Tests that the event_id can be saved and retrieved automatically.
    """
    data_path = os.path.join(data_dir, "small_sample_data_set")
    filename = os.path.join(tmpdir.strpath, "example.h5")
    event = obspy.read_events(os.path.join(data_path, "quake.xml"))[0]

    # Add the event object, and associate the waveform with it.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording", event_id=event)
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.get_referred_object() == event
    del data_set
    os.remove(filename)

    # Add as a string.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording",
                           event_id=str(event.resource_id.id))
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.get_referred_object() == event
    del data_set
    os.remove(filename)

    # Add as a resource identifier object.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording",
                           event_id=event.resource_id)
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.get_referred_object() == event
    del data_set
    os.remove(filename)
Example #14
import glob
import os

from pyasdf import ASDFDataSet

filename = "synthetic.h5"

if os.path.exists(filename):
    raise Exception("File '%s' exists." % filename)

ds = ASDFDataSet(filename)

# Add event
ds.add_quakeml("./GCMT_event_SOUTH_SANDWICH_ISLANDS_REGION_Mag_5.6_2010-3-11-6.xml")
event = ds.events[0]

# Add waveforms.
filenames = glob.glob("./SYNTHETIC_SAC/*.sem")
for _i, filename in enumerate(filenames):
    print("Adding SAC file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_waveforms(filename, tag="synthetic", event_id=event)

# Add StationXML files.
filenames = glob.glob("./StationXML/*.xml")
for _i, filename in enumerate(filenames):
    print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_stationxml(filename)
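To verify the result, the file can be reopened and inspected. A minimal sketch; printing the data set gives a summary of its events, stations, and waveform tags:

from pyasdf import ASDFDataSet

ds = ASDFDataSet("synthetic.h5", mode="r")
print(ds)                    # summary of the file contents
print(ds.waveforms.list())   # NET.STA codes available in the file
del ds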
Example #15
        if e.timestamp > endtime:
            endtime = e

        ds.add_waveforms(trace, args.tag, event_id, labels=[sac_filename])

    # add events
    catalog = obspy.core.event.Catalog()
    for event_coords, event_id in events.items():
        latitude, longitude, depth, origin_time = event_coords
        origin = obspy.core.event.Origin(time=origin_time,
                                         longitude=longitude,
                                         latitude=latitude,
                                         depth=depth)
        catalog.append(
            obspy.core.event.Event(resource_id=event_id, origins=[origin]))
    ds.add_quakeml(catalog)

    # add stations
    for group1, group2 in sort_by_station(channels):
        network, station = group1
        for location, channel, latitude, longitude, depth, elevation in group2:
            inventory = obspy.core.inventory.Inventory(
                networks=[
                    obspy.core.inventory.Network(
                        code=network,
                        stations=[
                            obspy.core.inventory.Station(
                                code=station,
                                latitude=group2[0][2],
                                longitude=group2[0][3],
                                elevation=group2[0][4],