def write_new_synt_asdf(self, file_prefix):
        new_synt_dict = self._sort_new_synt()

        for tag, win_array in new_synt_dict.items():
            filename = "%s.%s.h5" % (file_prefix, tag)
            if os.path.exists(filename):
                os.remove(filename)
                logger.info("Output file exists, removed: %s" % filename)
            else:
                logger.info("Output new synt asdf: %s" % filename)

            ds = ASDFDataSet(filename, mode='w')
            added_list = []
            for window in win_array:
                synt_id = window.datalist['new_synt'].id
                # Skip duplicate location ids. For example, II.AAK.00.BHZ
                # and II.AAK.10.BHZ are treated as different observed
                # traces, but their synt and new synt are the same, so we
                # only add one.
                if synt_id in added_list:
                    continue
                else:
                    added_list.append(synt_id)
                ds.add_waveforms(window.datalist['new_synt'], tag=tag)
            # add stationxml
            _staxml_asdf = self._asdf_file_dict['synt']
            ds_sta = ASDFDataSet(_staxml_asdf)
            self.__add_staxml_from_other_asdf(ds, ds_sta)
            ds.flush()
Example #2
def combine_asdf(base_asdf_path, append_asdf_path, output_asdf_path):
    """
    combine_asdf: merge the waveforms of append_asdf into base_asdf and
    write the result to a new asdf file.
    """
    base_asdf = ASDFDataSet(base_asdf_path, mode="r", mpi=False)
    append_asdf = ASDFDataSet(append_asdf_path, mode="r", mpi=False)
    output_asdf = ASDFDataSet(output_asdf_path, mpi=False)
    # * add events
    events = base_asdf.events
    event = events[0]
    output_asdf.add_quakeml(events)
    # * add waveforms and stationxml
    # first, add the base asdf
    rep_net_sta = base_asdf.waveforms.list()[0]
    tag_default = base_asdf.waveforms[rep_net_sta].get_waveform_tags()[0]
    for each_net_sta in base_asdf.waveforms.list():
        tag = base_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = base_asdf.waveforms[each_net_sta][tag]
        inv = base_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    # second, add the append asdf
    for each_net_sta in append_asdf.waveforms.list():
        tag = append_asdf.waveforms[each_net_sta].get_waveform_tags()[0]
        assert tag == tag_default
        st = append_asdf.waveforms[each_net_sta][tag]
        inv = append_asdf.waveforms[each_net_sta]["StationXML"]
        output_asdf.add_waveforms(st, tag=tag, event_id=event)
        output_asdf.add_stationxml(inv)
    del base_asdf
    del append_asdf
    del output_asdf
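A minimal usage sketch for the function above; the file names are hypothetical, and both input files must already exist and share a single waveform tag:

# Hypothetical file names, for illustration only.
combine_asdf(base_asdf_path="event_A.base.h5",
             append_asdf_path="event_A.extra.h5",
             output_asdf_path="event_A.merged.h5")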
Example #3
    def load_asdf(self, filename, mode="a"):
        """
        Load asdf file

        :param filename:
        :param mode:
        :return:
        """
        if self.mpi_mode:
            return ASDFDataSet(filename, compression=None, debug=self._debug,
                               mode=mode, mpi=self.mpi_mode)
        else:
            return ASDFDataSet(filename, mode=mode)
Example #4
def writeraw(event: obspy.core.event.event.Event, rawfolder: str, statloc: str,
             verbose: bool):
    """
    Write the downloaded miniseed, event, and stationxmls to a single asdf
    file.

    :param event: Event that all the waveforms are associated with.
    :type event: obspy.core.event.event.Event
    :param rawfolder: Folder to save the .h5 file to.
    :type rawfolder: str
    :param statloc: Folder, in which the station xmls can be found
    :type statloc: str
    :param verbose: show warnings?
    :type verbose: bool
    """
    # Folder to save asdf to
    outfolder = os.path.join(rawfolder, os.pardir)

    # Start out by adding the event, which will later be associated with
    # each of the waveforms
    with ASDFDataSet(os.path.join(outfolder, 'raw.h5')) as ds:
        # Retrieve event id - not the most elegant way, but works
        evtid = event.resource_id
        try:
            ds.add_quakeml(event)
        except ValueError:
            if verbose:
                warn(
                    'Event with event-id %s already in DB, skipping...' %
                    str(evtid), UserWarning)

    # Read all the waveforms associated with this event
    try:
        st = read(os.path.join(rawfolder, '*.mseed'))
        # Write the waveforms to the asdf
        with ASDFDataSet(os.path.join(outfolder, 'raw.h5')) as ds:
            ds.add_waveforms(st, tag='raw_recording', event_id=evtid)

        # Lastly, we will want to save the stationxmls
        statxml = read_inventory(os.path.join(statloc, '*.xml'))
        with ASDFDataSet(os.path.join(outfolder, 'raw.h5')) as ds:
            ds.add_stationxml(statxml)
    except Exception:
        # In some cases, there will be events without
        # waveforms associated with them
        pass
Example #5
def convert_adjsrcs_from_asdf(asdf_fn, outputdir, _verbose=True):
    """
    Convert adjoint sources from ASDF to ASCII files (for specfem3d_globe use)
    """
    if not os.path.exists(asdf_fn):
        raise ValueError("No asdf file: %s" % asdf_fn)
    if not os.path.exists(outputdir):
        os.makedirs(outputdir)

    print("Input ASDF: %s" % asdf_fn)
    print("Output dir: %s" % outputdir)

    ds = ASDFDataSet(asdf_fn, mode='r')
    if "AdjointSources" not in ds.auxiliary_data:
        print("No adjoint source exists in asdf file: %s" % asdf_fn)
        return
    adjsrcs = ds.auxiliary_data.AdjointSources
    nadj = len(adjsrcs)
    print("Number of adjoint sources: %d" % nadj)

    for idx, adj in enumerate(adjsrcs):
        if _verbose:
            print("Adjoint sources(%d/%d) from: %s" % (idx, nadj, adj.path))
        time_offset = adj.parameters["time_offset"]
        dt = adj.parameters['dt']
        npts = len(adj.data)
        times = np.array([time_offset + i * dt for i in range(npts)])
        _data = np.zeros([npts, 2])
        _data[:, 0] = times[:]
        _data[:, 1] = adj.data[:]
        adj_path = adj.path.replace("_", ".")
        filename = os.path.join(outputdir, "%s.adj" % adj_path)
        np.savetxt(filename, _data)
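Each file written above is a plain two-column ASCII table (time, amplitude), so it can be read back with NumPy; the file name below is hypothetical:

import numpy as np

# Hypothetical converter output, named "<net>.<sta>.<loc>.<cha>.adj"
data = np.loadtxt("II.AAK.00.BHZ.adj")
times, amplitudes = data[:, 0], data[:, 1]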
Example #6
def test_pyaflowa_setup(source_name, PAR, PATH):
    """
    Test the one-time setup of Pyaflowa which creates the IO object
    """
    pyaflowa = Pyaflowa(structure="seisflows", sfpaths=PATH, sfpar=PAR)

    # Requirement that a STATIONS file exists for Pyaflowa to run setup
    with pytest.raises(FileNotFoundError):
        pyaflowa.path_structure.format(source_name=source_name)
    open(os.path.join(PATH.SOLVER, source_name, "DATA", "STATIONS"),
         "w").close()

    # SeisFlows usually takes care of placing source files into the data
    # directory, so we need to do it manually here
    src = os.path.join("test_data", "test_CMTSOLUTION_2018p130600")
    dst = os.path.join(PATH.SOLVER, source_name, "DATA", "CMTSOLUTION")
    shutil.copy(src, dst)

    # Initiate Pyaflowa which will create directory structure, read in source
    # file and create an ASDFDataSet
    io = pyaflowa.setup(source_name=source_name)

    # Simple check to make sure event id is set correctly and event reading
    # machinery is working
    assert (io.config.event_id == source_name)
    assert (os.path.exists(io.paths.ds_file))
    with ASDFDataSet(io.paths.ds_file) as ds:
        assert (source_name in ds.events[0].resource_id.id)
Example #7
def write_st(st: Stream,
             event: Event,
             outfolder: str,
             statxml: Inventory,
             resample: bool = True):
    """
    Write raw waveform data to an asdf file. This includes the corresponding
    (teleseismic) event and the station inventory (i.e., response information).

    :param st: The stream holding the raw waveform data.
    :type st: Stream
    :param event: The seismic event associated to the recorded data.
    :type event: Event
    :param outfolder: Output folder to write the asdf file to.
    :type outfolder: str
    :param statxml: The station inventory
    :type statxml: Inventory
    :param resample: Resample the data to 10 Hz sampling rate? Defaults to True.
    :type resample: bool, optional
    """
    fname = '%s.%s.h5' % (st[0].stats.network, st[0].stats.station)
    if resample:
        st.filter('lowpass_cheby_2', freq=4, maxorder=12)
        st = resample_or_decimate(st, 10, filter=False)
    with ASDFDataSet(os.path.join(outfolder, fname)) as ds:
        # Events should not be added because it will read the whole
        # catalogue every single time!
        ds.add_waveforms(st, tag='raw_recording')
        ds.add_stationxml(statxml)  # If there are still problems, we will have
Example #8
def empty_dataset(tmpdir):
    """Re-used test data pointing to STATIONS file"""
    fid = os.path.join(tmpdir, "empty_dataset.h5")
    # Make sure the dataset is actually empty
    if os.path.exists(fid):
        os.remove(fid)
    return ASDFDataSet(fid)
Example #9
    def dump_to_asdf(self, outputfile):
        """
        Dump self.adjoint_sources to an adjoint ASDF file
        """
        print("=" * 15 + "\nWrite to file: %s" % outputfile)
        if os.path.exists(outputfile):
            print("Output file exists and removed:%s" % outputfile)
            os.remove(outputfile)

        ds = ASDFDataSet(outputfile, mode='a', compression=None)
        ds.add_quakeml(self.events)
        event = self.events[0]
        origin = event.preferred_origin()
        event_time = origin.time

        for adj_id in sorted(self.adjoint_sources):
            adj = self.adjoint_sources[adj_id]
            sta_tag = "%s_%s" % (adj.network, adj.station)
            sta_info = self.stations[sta_tag]
            adj_array, adj_path, parameters = \
                dump_adjsrc(adj, sta_info, event_time)
            ds.add_auxiliary_data(adj_array,
                                  data_type="AdjointSources",
                                  path=adj_path,
                                  parameters=parameters)
Example #10
def save_adjoint_to_asdf(outputfile, events, adjoint_sources, stations):
    """
    Save events (obspy.Catalog) and adjoint sources, together with
    station information, to an asdf file on disk.
    """
    print("=" * 15 + "\nWrite to file: %s" % outputfile)
    outputdir = os.path.dirname(outputfile)
    if outputdir and not os.path.exists(outputdir):
        os.makedirs(outputdir)

    if os.path.exists(outputfile):
        print("Output file exists and removed:%s" % outputfile)
        os.remove(outputfile)

    ds = ASDFDataSet(outputfile, mode='a', compression=None)
    ds.add_quakeml(events)
    for adj_id in sorted(adjoint_sources):
        adj = adjoint_sources[adj_id]
        sta_tag = "%s_%s" % (adj.network, adj.station)
        sta_info = stations[sta_tag]
        adj_array, adj_path, parameters = \
            dump_adjsrc(adj, sta_info)
        ds.add_auxiliary_data(adj_array,
                              data_type="AdjointSources",
                              path=adj_path,
                              parameters=parameters)
Example #11
def extract_stations(d: Directory, dst: str):
    """Extract STATIONS from ASDFDataSet."""
    from os.path import join

    from pyasdf import ASDFDataSet

    for src in d.ls():
        event = src.split('.')[0]
        lines = {}
        out = join(dst, f'STATIONS.{event}')

        if d.has(out):
            continue

        with ASDFDataSet(src, mode='r', mpi=False) as ds:
            for station in ds.waveforms.list():
                if not hasattr(ds.waveforms[station], 'StationXML'):
                    # Report stations that lack StationXML metadata
                    print('  ' + station)
                    continue

                sta = (ds.waveforms[station]
                       .StationXML.networks[0].stations[0])  # type: ignore

                ll = station.split('.')
                ll.reverse()
                ll.append(f'{sta.latitude:.4f}')
                ll.append(f'{sta.longitude:.4f}')
                ll.append(f'{sta.elevation:.1f}')
                ll.append(f'{sta.channels[0].depth:.1f}')

                _format_station(lines, ll)

        d.writelines(lines.values(), out)
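The helper _format_station is not shown in this example; a minimal sketch under the assumption that it only joins the fields into one STATIONS line, keyed so duplicate stations collapse:

def _format_station(lines: dict, ll: list):
    # ll holds [station, network, latitude, longitude, elevation,
    # burial depth] as strings; key by "network.station".
    lines[f'{ll[1]}.{ll[0]}'] = ' '.join(ll) + '\n'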
Example #12
def test_pyaflowa_process_station(tmpdir, seisflows_workdir, seed_data,
                                  source_name, station_name, PAR, PATH):
    """
    Test the single station processing function 
    """
    # Turn off client to avoid searching FDSN, force local data search
    PAR.CLIENT = None
    PATH.DATA = tmpdir.strpath
    pyaflowa = Pyaflowa(structure="seisflows",
                        sfpaths=PATH,
                        sfpar=PAR,
                        iteration=1,
                        step_count=0)

    # Copy working directory to tmpdir to avoid creating unnecessary files
    shutil.copytree(src=seisflows_workdir, dst=os.path.join(tmpdir, "scratch"))
    shutil.copytree(src=seed_data, dst=os.path.join(tmpdir, "seed"))

    # Set up the same machinery as process_event()
    io = pyaflowa.setup(source_name)
    with ASDFDataSet(io.paths.ds_file) as ds:
        mgmt = Manager(ds=ds, config=io.config)
        mgmt, io = pyaflowa.process_station(mgmt=mgmt,
                                            code="NZ.BFZ.??.???",
                                            io=io)

    assert (io.nwin == mgmt.stats.nwin == 3)
    assert (io.misfit == pytest.approx(65.39037, .001))
Example #13
def test_asdf_event_fetch(internal_fetcher, dataset_fid):
    """
    Get event from an ASDFDataSet.
    """
    with ASDFDataSet(dataset_fid) as ds:
        internal_fetcher.ds = ds
        internal_fetcher.asdf_event_fetch()
Example #14
def test_asdf_station_fetch(internal_fetcher, dataset_fid, code):
    """
    Get station from an ASDFDataSet
    """
    with ASDFDataSet(dataset_fid) as ds:
        internal_fetcher.ds = ds
        inv = internal_fetcher.asdf_station_fetch(code)
        assert len(inv[0][0]) == 3
Example #15
def load_asdf_info(asdf_fname):
    # asdf file; we don't use parallel I/O here
    with ASDFDataSet(asdf_fname, mode="r", mpi=False) as asdf_file:
        origin = asdf_file.events[0].preferred_origin()
        lat = origin.latitude
        lon = origin.longitude
        dep = origin.depth
        time = origin.time

    return lat, lon, dep, time
Example #16
def test_save_adjsrcs(tmpdir, mgmt_post):
    """
    Checks that adjoint sources can be written to dataset and will match the 
    formatting required by Specfem3D
    """
    with ASDFDataSet(os.path.join(tmpdir, "test_dataset.h5")) as ds:
        mgmt_post.ds = ds
        mgmt_post.save_adjsrcs()
        assert(hasattr(ds.auxiliary_data.AdjointSources.default, "NZ_BFZ_BXN"))
Example #17
    def __init__(self, ds_fid):
        """
        The class contains a PyASDF Dataset that will be used to load observed
        and synthetic data, based on user input
        """
        self.ds = ASDFDataSet(ds_fid)
        # Initiate an empty Manager to get access to its config
        self.mgmt = Manager()
        self.info = Info()
Example #18
def load_asdf_info(asdf_fname):
    # asdf file
    with ASDFDataSet(asdf_fname, mode="r") as asdf_file:
        origin = asdf_file.events[0].preferred_origin()
        lat = origin.latitude
        lon = origin.longitude
        dep = origin.depth
        time = origin.time

    return lat, lon, dep, time
Example #19
def test_asdf_waveform_fetch(internal_fetcher, dataset_fid, code, config):
    """
    Get waveforms from an ASDFDataSet
    """
    with ASDFDataSet(dataset_fid) as ds:
        internal_fetcher.ds = ds
        for tag in [config.observed_tag, config.synthetic_tag]:
            st = internal_fetcher.asdf_waveform_fetch(code, tag)
            assert len(st) == 3
Example #20
    def _core(self, path, param):
        """
        Core function that handles one pair of asdf files (observed and
        synthetic), windows and configuration for the adjoint source

        :param path: path information, path of observed asdf, synthetic
            asdf, windows files, observed tag, synthetic tag, output adjoint
            file, figure mode and figure directory
        :type path: dict
        :param param: parameter information for constructing adjoint source
        :type param: dict
        :return:
        """
        adjoint_param = param["adjoint_config"]

        obsd_file = path["obsd_asdf"]
        synt_file = path["synt_asdf"]
        obsd_tag = path["obsd_tag"]
        synt_tag = path["synt_tag"]
        window_file = path["window_file"]
        output_filename = path["output_file"]

        self.check_input_file(obsd_file)
        self.check_input_file(synt_file)
        self.check_input_file(window_file)
        self.check_output_file(output_filename)

        obsd_ds = self.load_asdf(obsd_file, mode="r")
        synt_ds = self.load_asdf(synt_file, mode="r")

        windows = self.load_windows(window_file)

        adj_src_type = adjoint_param["adj_src_type"]
        adjoint_param.pop("adj_src_type", None)

        config = load_adjoint_config(adjoint_param, adj_src_type)

        if self.mpi_mode and self.rank == 0:
            output_ds = ASDFDataSet(output_filename, mpi=False)
            # copy event information over from the observed dataset
            if obsd_ds.events:
                output_ds.events = obsd_ds.events
            del output_ds
        if self.mpi_mode:
            self.comm.barrier()

        measure_adj_func = \
            partial(measure_adjoint_wrapper, config=config,
                    obsd_tag=obsd_tag, synt_tag=synt_tag,
                    windows=windows,
                    adj_src_type=adj_src_type)

        results = obsd_ds.process_two_files(synt_ds, measure_adj_func)

        if self.rank == 0:
            print("output filename: %s" % output_filename)
            write_measurements(results, output_filename)
Example #21
def test_gather_event(gatherer, dataset_fid):
    """
    Ensure gatherer can get an event from the correct sources
    """
    assert gatherer.gather_event(try_fm=False) is not None

    with ASDFDataSet(dataset_fid) as ds:
        gatherer.ds = ds
        gatherer.Client = None
        assert gatherer.gather_event(try_fm=False) is not None
Example #22
def test_station_fetch(internal_fetcher, dataset_fid, code):
    """
    Test the mid level fetching function which chooses whether to search via
    ASDFDataSet or directory structure
    """
    internal_fetcher.config.paths["responses"] = "./test_data/test_seed"
    assert internal_fetcher.station_fetch(code) is not None
    internal_fetcher.config.paths["responses"] = None

    with ASDFDataSet(dataset_fid) as ds:
        internal_fetcher.ds = ds
        assert internal_fetcher.station_fetch(code) is not None
Example #23
def convert_to_asdf(asdf_fn,
                    waveform_filelist,
                    tag,
                    quakemlfile=None,
                    staxml_filelist=None,
                    verbose=False,
                    status_bar=False,
                    create_simple_inv=False):
    """
    Convert files (SAC or miniSEED) to asdf
    """

    if verbose:
        print("*" * 10 + " ASDF Converter " + "*" * 10)

    nwaveform = len(waveform_filelist)
    if nwaveform == 0:
        print("No file specified. Return...")
        return
    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn, mode='a')

    # Add event
    if quakemlfile:
        if not os.path.exists(quakemlfile):
            raise ValueError("Quakeml file does not exist: %s" % quakemlfile)
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
        if status_bar:
            drawProgressBar(1.0, "Adding Quakeml data")
    else:
        raise ValueError("No event file provided")

    sta_dict = add_waveform_to_asdf(ds,
                                    waveform_filelist,
                                    tag,
                                    event=event,
                                    create_simple_inv=create_simple_inv,
                                    status_bar=status_bar)

    add_stationxml_to_asdf(ds,
                           staxml_filelist,
                           event=event,
                           create_simple_inv=create_simple_inv,
                           sta_dict=sta_dict,
                           status_bar=status_bar)

    if verbose:
        print("ASDF filesize: %s" % ds.pretty_filesize)
    del ds
Example #24
def test_event_fetch(internal_fetcher, dataset_fid, event_id):
    """
    Test the mid level fetching function which chooses whether to search via
    ASDFDataSet or directory structure for event information
    """
    internal_fetcher.config.paths["events"] = "./test_data"
    assert internal_fetcher.event_fetch(event_id,
                                        prefix="test_CMTSOLUTION_") is not None
    internal_fetcher.config.paths["events"] = None

    with ASDFDataSet(dataset_fid) as ds:
        internal_fetcher.ds = ds
        assert internal_fetcher.event_fetch(event_id) is not None
Example #25
File: misfit.py Project: icui/pyper
def read_misfit(src: str) -> float:
    """Get output misfit value."""
    from pyasdf import ASDFDataSet

    mf = 0.0

    with ASDFDataSet(Directory(src).abs('adjoint.h5'), mode='r',
                     mpi=False) as ds:
        group = ds.auxiliary_data.AdjointSources

        for sta in group.list():
            mf += group[sta].parameters['misfit']

    return mf
Example #26
    def process_event(self, source_name, codes=None, loc="*", cha="*",
                      **kwargs):
        """
        The main processing function for Pyaflowa misfit quantification.

        Processes waveform data for all stations related to a given event,
        produces waveform and map plots during the processing step, saves data
        to an ASDFDataSet and writes adjoint sources and STATIONS_ADJOINT file,
        required by SPECFEM3D's adjoint simulations, to disk.

        Kwargs passed to pyatoa.Manager.flow() function.

        :type source_name: str
        :param source_name: event id to be used for data gathering, processing
        :type codes: list of str
        :param codes: list of station codes to be used for processing. If None,
            will read station codes from the provided STATIONS file
        :type loc: str
        :param loc: if codes is None, Pyatoa will generate station codes based
            on the SPECFEM STATIONS file, which does not contain location info.
            This allows the user to set the location values manually when
            building the list of station codes. Defaults to the wildcard '*',
            which is usually acceptable
        :type cha: str
        :param cha: if codes is None, Pyatoa will generate station codes based
            on the SPECFEM STATIONS file, which does not contain channel info.
            This allows the user to set channel searching manually, wildcards
            okay. Defaults to the wildcard '*', but the appropriate choice
            (e.g., 'HH?' for high-gain broadband seismometers) depends on the
            available data.
        :rtype: float
        :return: the total scaled misfit collected during the processing chain
        """
        # Create the event specific configurations and attribute container (io)
        io = self.setup(source_name, **kwargs)
       
        # Allow user to provide a list of codes, else read from station file 
        if codes is None:
            codes = read_station_codes(io.paths.stations_file, 
                                       loc=loc, cha=cha)

        # Open the dataset as a context manager and process all stations
        # in serial
        with ASDFDataSet(io.paths.ds_file) as ds:
            mgmt = pyatoa.Manager(ds=ds, config=io.config)
            for code in codes:
                mgmt_out, io = self.process_station(mgmt=mgmt, code=code,
                                                    io=io, **kwargs)

        scaled_misfit = self.finalize(io)

        return scaled_misfit
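A hedged usage sketch for process_event; the Pyaflowa construction mirrors the test examples above, and the event id is hypothetical:

# Assumed setup, following the earlier test examples.
pyaflowa = Pyaflowa(structure="seisflows", sfpaths=PATH, sfpar=PAR)
misfit = pyaflowa.process_event(source_name="2018p130600", cha="HH?")
print("scaled event misfit: %.3f" % misfit)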
Example #27
File: ortho.py Project: icui/pyper
    def _diff(self):
        import numpy as np
        from pyasdf import ASDFDataSet

        with ASDFDataSet(self.path_synthetic, mode='r', mpi=False) as syn_ds, \
            ASDFDataSet(self.path_observed, mode='r', mpi=False) as obs_ds:
            fellows = {}

            syn_aux = syn_ds.auxiliary_data.FT
            obs_aux = obs_ds.auxiliary_data.FT

            syn_keys = syn_aux.list()
            obs_keys = obs_aux.list()

            for key in syn_keys:
                if key not in obs_keys:
                    continue

                keypath = key.split('_')
                cha = keypath[-1]
                station = '.'.join(keypath[:-1])

                if cha not in fellows:
                    fellows[cha] = {}

                # phase and amplitude difference
                syn = np.array(syn_aux[key].data)
                obs = np.array(obs_aux[key].data)

                phase_diff = np.angle(syn / obs)
                amp_diff = np.abs(syn) / np.abs(obs)

                # store differences plus the indices where either is NaN
                fellows[cha][station] = phase_diff, amp_diff, np.squeeze(
                    np.where(np.isnan(syn) | np.isnan(obs)))

            self.dump(fellows, 'fellows.pickle')
Example #28
def check_event_information_in_asdf_files(asdf_files):
    if len(asdf_files) == 0:
        raise ValueError("Number of input asdf files is 0")

    asdf_events = {}
    # extract event information from asdf file
    for asdf_fn in asdf_files:
        ds = ASDFDataSet(asdf_fn, mode='r')
        asdf_events[asdf_fn] = ds.events

    check_events_consistent(asdf_events)

    # all files were checked for consistency, so take events from the first
    event_base = asdf_events[asdf_files[0]]
    origin = event_base[0].preferred_origin()
    return event_base, origin
Example #29
def extract_adjoint_misfit(asdf_file, verbose):

    print("Input asdf file: %s" % asdf_file)

    if not os.path.exists(asdf_file):
        raise ValueError("ASDF file does not exist: %s" % asdf_file)
    ds = ASDFDataSet(asdf_file, mode='r')
    try:
        adjsrc_group = ds.auxiliary_data.AdjointSources
    except Exception as err:
        raise ValueError("Can not get adjoint misfit information(due to %s). "
                         "Check if the adjoint source group exists in the "
                         "file" % err)

    nadj = 0
    nadj_cat = {}
    misfit_cat = {}
    misfit_dict = {}
    for adj in adjsrc_group:
        nadj += 1

        nw = adj.parameters["station_id"].split(".")[0]
        sta = adj.parameters["station_id"].split(".")[1]
        comp = adj.parameters["component"]
        loc = adj.parameters["location"]
        station_id = "%s.%s.%s.%s" % (nw, sta, loc, comp)
        misfit = adj.parameters["misfit"]

        misfit_dict[station_id] = misfit
        if comp not in misfit_cat:
            misfit_cat[comp] = 0
            nadj_cat[comp] = 0
        misfit_cat[comp] += misfit
        nadj_cat[comp] += 1

    content = {
        "asdf_file": asdf_file,
        "misfit": misfit_dict,
        "misfit_category": misfit_cat,
        "nadj_total": nadj,
        "nadj_category": nadj_cat
    }

    if verbose:
        print("Number of adjoint sources:", nadj)

    return content
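A small usage sketch for the function above; the file name is hypothetical:

# Hypothetical ASDF file holding an AdjointSources auxiliary data group.
content = extract_adjoint_misfit("adjoint_sources.h5", verbose=True)
print("misfit per component:", content["misfit_category"])
print("total adjoint sources:", content["nadj_total"])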
Example #30
    def check_and_load_asdf_file(self, asdf_file_dict):

        if not isinstance(asdf_file_dict, dict):
            raise TypeError("asdf_file_dict should be a dictionary keyed by "
                            "entries of parlist, with asdf file names as "
                            "values")

        necessary_keys = ["obsd", "synt"] + list(self.parlist)
        for key in necessary_keys:
            if key not in asdf_file_dict.keys():
                raise ValueError("key (%s) from parlist is missing in "
                                 "asdf_file_dict (%s)" %
                                 (key, asdf_file_dict.keys()))

        dataset = dict()
        for key in necessary_keys:
            dataset[key] = ASDFDataSet(asdf_file_dict[key])
        return dataset