Example #1
    @staticmethod
    def windows_from_obspy_traces(
            traces, model_name, phase_names,
            components, t_before=10., t_after=40.):
        """Compute time windows for the given phases from obspy Traces."""
        events = set()
        stations = defaultdict(list)
        for trace in traces:
            # the moment tensor, centroid time and source time function
            # are not available from the SAC headers, hence the Nones
            event = Event(
                trace.stats.sac.kevnm,
                trace.stats.sac.evla,
                trace.stats.sac.evlo,
                trace.stats.sac.evdp,
                None, None, None)
            station = Station(
                trace.stats.station,
                trace.stats.network,
                trace.stats.sac.stla,
                trace.stats.sac.stlo)
            events.add(event)
            stations[event].append(station)

        windows = []
        for event in events:
            tmp_windows = WindowMaker.compute(
                event, stations[event], model_name, phase_names,
                components, t_before, t_after)
            windows += tmp_windows
        return windows
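
A minimal usage sketch for the method above, assuming it is exposed as a
static method of WindowMaker (as its indentation suggests); the SAC file
paths are hypothetical, and the model, phases and components echo the
__main__ example further below:

from glob import glob
from obspy import read

traces = [read(f)[0] for f in glob('sac/*.T')]  # hypothetical paths
windows = WindowMaker.windows_from_obspy_traces(
    traces, 'prem', ['S', 'ScS'], [Component.T])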
Example #2
def get_dataset(model,
                tlen=1638.4,
                nspc=64,
                sampling_hz=20,
                mode=0,
                add_noise=False,
                noise_normalized_std=1.):
    """Compute a Dataset of synthetic waveforms for a reference event,
    recorded along a line of synthetic 'DSM' stations.

    Returns:
        tuple of (Dataset, PyDSMOutput): the dataset holding the
            synthetic waveforms, and the output of the synthetic
            computation.

    """
    # TODO: fix outputs.us being NaN when event.latitude == station.latitude
    event = get_ref_event()
    events = [event]
    stations = [
        Station('{:03d}'.format(i), 'DSM', event.latitude + 5 + 0.5 * i,
                event.longitude + 0.1) for i in range(61)
    ]
    dataset = Dataset.dataset_from_arrays(events, [stations],
                                          sampling_hz=sampling_hz)

    pydsm_input = PyDSMInput.input_from_arrays(event, stations, model, tlen,
                                               nspc, sampling_hz)
    pydsm_output = compute(pydsm_input, mode=mode)
    pydsm_output.to_time_domain()
    dataset.data = np.zeros((1, ) + pydsm_output.us.shape, dtype=np.float64)
    dataset.data[0] = pydsm_output.us

    if add_noise:
        noise_arr = white_noise(noise_normalized_std, dataset.data.shape)
        # normalize the noise using the largest absolute amplitude over
        # the first 90% of the time samples (axis 3 is the time axis)
        npts_cut = int(dataset.data.shape[3] * 0.9)
        norm = np.abs(dataset.data[..., :npts_cut]).max(
            axis=3, keepdims=True)
        noise_arr *= norm
        dataset.data += noise_arr

    return dataset, pydsm_output
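
A hedged usage sketch for get_dataset, using the ak135 model referenced in
get_output below; the keyword values simply echo the defaults:

model = SeismicModel.ak135()
dataset, output = get_dataset(model, nspc=64, mode=0, add_noise=True)
print(dataset.data.shape)  # expected (1, 3, n_stations, npts)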
Example #3
def get_output(tlen=1638.4, nspc=64, sampling_hz=20, mode=0):
    """Compute synthetic waveforms for event 200707211534A with the
    ak135 model, recorded along a line of synthetic 'DSM' stations."""
    catalog = read_catalog()
    event = Event.event_from_catalog(catalog, '200707211534A')
    stations = [
        Station('{:03d}'.format(i), 'DSM', event.latitude, event.longitude + i)
        for i in range(12, 36)
    ]

    model = SeismicModel.ak135()
    pydsm_input = PyDSMInput.input_from_arrays(event, stations, model, tlen,
                                               nspc, sampling_hz)
    pydsm_output = compute(pydsm_input, mode=mode)
    pydsm_output.to_time_domain()

    return pydsm_output
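
get_output can be exercised the same way; a quick sketch, with nspc and
mode echoing the defaults:

output = get_output(nspc=64, mode=0)
print(output.us.shape)  # time-domain synthetics per component and station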
Example #4
def get_station(trace):
    """Return Station object from obspy Trace.

    Args:
        trace (Trace): obspy Trace object

    Returns:
        Station: station

    """
    sta_nm = trace.stats.sac.kstnm
    sta_net = trace.stats.sac.knetwk
    sta_la = trace.stats.sac.stla
    sta_lo = trace.stats.sac.stlo
    return Station(sta_nm, sta_net, sta_la, sta_lo)
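
A short usage sketch for get_station, reusing the SAC file name from the
dataset_from_sac docstring below (the path is hypothetical):

from obspy import read

trace = read('FCC.CN.201205280204A.T')[0]
station = get_station(trace)
print(station)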
Example #5
    @staticmethod
    def windows_from_obspy_trace(
            trace, model_name, phase_names,
            t_before=10., t_after=40.):
        """Compute time windows for the given phases from a single
        obspy Trace. The component is parsed from the kcmpnm header."""
        # the moment tensor, centroid time and source time function
        # are not available from the SAC headers, hence the Nones
        event = Event(
            trace.stats.sac.kevnm,
            trace.stats.sac.evla,
            trace.stats.sac.evlo,
            trace.stats.sac.evdp,
            None, None, None)
        station = Station(
            trace.stats.station,
            trace.stats.network,
            trace.stats.sac.stla,
            trace.stats.sac.stlo)
        component = Component.parse_component(trace.stats.sac.kcmpnm)

        windows = WindowMaker.compute(
            event, [station], model_name, phase_names,
            [component], t_before, t_after)
        return windows
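
The single-trace variant can be called without a components argument, since
the component is read from the trace itself; the path is hypothetical:

from obspy import read

trace = read('FCC.CN.201205280204A.T')[0]
windows = WindowMaker.windows_from_obspy_trace(trace, 'prem', ['S', 'ScS'])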
Example #6
    @staticmethod
    def load(path):
        """Read path into list of Window using pickle.load().

        Args:
            path (str): path to the file that contains time windows

        Returns:
            windows (list of Window): time windows

        """
        with open(path, 'rb') as f:
            output = pickle.load(f)
        return output
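
A usage sketch for load; the pickle path is hypothetical and should point
to a file written earlier from a list of Window objects:

windows = WindowMaker.load('windows.pkl')
for window in windows:
    print(window)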


if __name__ == '__main__':
    from dsmpy.utils.cmtcatalog import read_catalog
    catalog = read_catalog()
    event = Event.event_from_catalog(catalog, '200707211534A')
    stations = [
        Station(name='FCC', network='CN',
                latitude=58.7592, longitude=-94.0884),
    ]
    model = 'prem'
    phases = ['S', 'ScS']
    components = [Component.T]
    windows = WindowMaker.compute(event, stations, model, phases, components)
    print(windows)
Example #7
    @classmethod
    def dataset_from_sac(cls,
                         sac_files,
                         verbose=0,
                         headonly=True,
                         broadcast_data=False):
        """Creates a dataset from a list of sac files.
        With headonly=False, time series data from the sac_files
        will be stored in self.data.

        For parallel applications using MPI, headonly=False (i.e.,
        reading the data from sac files) only applies to rank 0, so
        as not to saturate the memory.

        Args:
            sac_files (list of str): list of paths to sac files.
            verbose (int): 0: quiet, 1: debug.
            headonly (bool): if True, read only the metadata.
                If False, includes data.
            broadcast_data (bool): default is False

        Returns:
            Dataset: dataset

        Examples:
            >>> sac_files = ['FCC.CN.201205280204A.T']
            >>> dataset = Dataset.dataset_from_sac(
            ...        sac_files, headonly=False)

        """

        if MPI.COMM_WORLD.Get_rank() == 0 or broadcast_data:
            traces = [
                read(sac_file, headonly=headonly)[0] for sac_file in sac_files
            ]
        else:
            traces = [
                read(sac_file, headonly=True)[0] for sac_file in sac_files
            ]

        cat = read_catalog()
        # keep only the traces whose event is in the catalog
        traces = [tr for tr in traces if (cat == tr.stats.sac.kevnm).any()]

        sampling_hz = int(traces[0].stats.sampling_rate)

        lats_ = []
        lons_ = []
        names_ = []
        nets_ = []
        eqlats_ = []
        eqlons_ = []
        eqdeps_ = []
        evids_ = []
        data_ = []
        components_ = []
        indices_ = list(range(len(traces)))

        for tr in traces:
            lats_.append(tr.stats.sac.stla)
            lons_.append(tr.stats.sac.stlo)
            names_.append(tr.stats.sac.kstnm)
            nets_.append(tr.stats.sac.knetwk)
            eqlats_.append(tr.stats.sac.evla)
            eqlons_.append(tr.stats.sac.evlo)
            eqdeps_.append(tr.stats.sac.evdp)
            evids_.append(tr.stats.sac.kevnm)
            if MPI.COMM_WORLD.Get_rank() == 0 or broadcast_data:
                data_.append(tr.data)
            components_.append(tr.stats.sac.kcmpnm)

        theta_phi = [
            _calthetaphi(stalat, stalon, eqlat,
                         eqlon) for stalat, stalon, eqlat, eqlon in zip(
                             lats_, lons_, eqlats_, eqlons_)
        ]
        thetas_ = np.array([x[0] for x in theta_phi], dtype=np.float64)
        phis_ = np.array([x[1] for x in theta_phi], dtype=np.float64)

        dataset_info = pd.DataFrame(
            dict(lats=lats_,
                 lons=lons_,
                 names=names_,
                 nets=nets_,
                 thetas=thetas_,
                 phis=phis_,
                 eqlats=eqlats_,
                 eqlons=eqlons_,
                 eqdeps=eqdeps_,
                 evids=evids_,
                 indices=indices_))
        # drop duplicate sac files with identical source/receiver
        # pairs, which occur when there are multiple seismic components
        n_before = len(traces)
        dataset_info.drop_duplicates(subset=['names', 'nets', 'evids'],
                                     inplace=True)
        n_after = len(dataset_info)
        if verbose >= 1:
            print('Dropped {} sac files'.format(n_before - n_after))

        dataset_info.sort_values(by='evids', inplace=True)

        dataset_info.index = list(range(n_after))

        nr = len(dataset_info)
        nrs = dataset_info.groupby('evids').count().lats.values
        dataset_event_info = dataset_info.drop_duplicates(['evids'])
        evids = dataset_event_info.evids.values
        eqlats = dataset_event_info.eqlats.values.astype(np.float64)
        eqlons = dataset_event_info.eqlons.values.astype(np.float64)
        eqdeps = dataset_event_info.eqdeps.values.astype(np.float64)
        r0s = 6371. - eqdeps

        # read event metadata from the catalog
        # TODO: handle the case where an event_id is not in the catalog
        events_ = cat[np.isin(cat, evids)]
        if len(events_) != len(evids):
            raise RuntimeError('Some events not in the catalog')
        mts = np.array([e.mt for e in events_])
        source_time_functions = np.array(
            [e.source_time_function for e in events_])
        centroid_times = np.array([e.centroid_time for e in events_])

        events = np.array([
            Event(id, lat, lon, depth, mt, ctime, source_time_function)
            for id, lat, lon, depth, mt, ctime, source_time_function in zip(
                evids, eqlats, eqlons, eqdeps, mts, centroid_times,
                source_time_functions)
        ])
        stations = dataset_info.apply(
            lambda x: Station(x.names, x.nets, x.lats, x.lons), axis=1).values

        phis = dataset_info.phis.values
        thetas = dataset_info.thetas.values
        lons = dataset_info.lons.values
        lats = dataset_info.lats.values

        if MPI.COMM_WORLD.Get_rank() == 0 or broadcast_data:
            npts = np.array([len(d) for d in data_], dtype=int).max()
            data_arr = np.zeros((1, 3, nr, npts), dtype=DATA_FLOAT_PREC)
            for ista, idx in enumerate(dataset_info.indices.values):
                icomp = Component.parse_component(components_[idx]).value
                trace_data = data_[idx]
                try:
                    data_arr[0, icomp, ista] = trace_data
                except ValueError:
                    # pad or truncate traces whose length differs
                    # from npts
                    n_tmp = len(trace_data)
                    if n_tmp < npts:
                        data_arr[0, icomp, ista] = np.pad(
                            trace_data, (0, npts - n_tmp),
                            mode='constant', constant_values=(0, 0))
                    else:
                        data_arr[0, icomp, ista] = trace_data[:npts]

            # fill in the components that were dropped as duplicates
            # above, matching them to their row in dataset_info
            remaining_traces_indices = (set(indices_) -
                                        set(dataset_info.indices.values))

            for i in remaining_traces_indices:
                dataset_filt = dataset_info[
                    (dataset_info.evids == evids_[i])
                    & (dataset_info.names == names_[i])
                    & (dataset_info.nets == nets_[i])]
                j = dataset_filt.index.values[0]
                icomp = Component.parse_component(components_[i]).value
                try:
                    data_arr[0, icomp, j] = data_[i]
                except ValueError:
                    n_tmp = len(data_[i])
                    if n_tmp < npts:
                        data_arr[0, icomp, j] = np.pad(
                            data_[i], (0, npts - n_tmp),
                            mode='constant', constant_values=(0, 0))
                    else:
                        data_arr[0, icomp, j] = data_[i][:npts]
        else:
            data_arr = None

        return cls(lats, lons, phis, thetas, eqlats, eqlons, r0s, mts, nrs,
                   stations, events, data_arr, sampling_hz)
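
A hedged MPI usage sketch for dataset_from_sac, reflecting the rank-0
behavior described in the docstring; the script name and file paths are
hypothetical:

# run with e.g.: mpiexec -n 2 python read_dataset.py
from glob import glob

sac_files = glob('event_dir/*.T')  # hypothetical paths
dataset = Dataset.dataset_from_sac(sac_files, headonly=False)
# with broadcast_data=False, only rank 0 holds the waveform data;
# the other ranks get data_arr=None (see the else branch above)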