Example #1
class Items(Object):
    item_list = List.T(Item.T())
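
A minimal usage sketch, assuming Item is a guts Object defined elsewhere
with no required attributes:

items = Items(item_list=[Item(), Item()])
print(items.dump())  # guts objects serialize to YAML via dump()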
Example #2
class QSeisConfigFull(QSeisConfig):

    time_start = Float.T(default=0.0)
    time_reduction_velocity = Float.T(default=0.0)
    time_window = Float.T(default=900.0)

    source_depth = Float.T(default=10.0)
    source_mech = QSeisSourceMech.T(optional=True,
                                    default=QSeisSourceMechMT.D())

    receiver_depth = Float.T(default=0.0)
    receiver_distances = List.T(Float.T())
    nsamples = Int.T(default=256)

    gf_sw_source_types = Tuple.T(6, Int.T(), default=(1, 1, 1, 1, 0, 0))

    gf_filenames = Tuple.T(6, String.T(), default=qseis_greenf_names)

    seismogram_filename = String.T(default='seis')

    receiver_azimuths = List.T(Float.T())

    earthmodel_1d = gf.meta.Earthmodel1D.T(optional=True)
    earthmodel_receiver_1d = gf.meta.Earthmodel1D.T(optional=True)

    @staticmethod
    def example():
        conf = QSeisConfigFull()
        conf.receiver_distances = [2000.]
        conf.receiver_azimuths = [0.]
        conf.time_start = -10.0
        conf.time_reduction_velocity = 15.0
        conf.earthmodel_1d = cake.load_model().extract(depth_max='cmb')
        conf.earthmodel_receiver_1d = None
        conf.sw_flat_earth_transform = 1
        return conf

    def get_output_filenames(self, rundir):
        return [
            pjoin(rundir, self.seismogram_filename + '.t' + c)
            for c in qseis_components
        ]

    def get_output_filenames_gf(self, rundir):
        return [
            pjoin(rundir, fn + '.t' + c) for fn in self.gf_filenames
            for c in qseis_components
        ]

    def string_for_config(self):
        def aggregate(xx):
            return len(xx), '\n'.join([''] +
                                      [x.string_for_config() for x in xx])

        assert len(self.receiver_distances) > 0
        assert len(self.receiver_distances) == len(self.receiver_azimuths)
        assert self.earthmodel_1d is not None

        d = self.__dict__.copy()

        # fixing these switches here to reduce the amount of wrapper code
        d['sw_distance_unit'] = 1  # always give distances in [km]
        d['sw_t_reduce'] = 1  # time reduction always as velocity [km/s]
        d['sw_equidistant'] = 0  # always give all distances and azimuths
        d['sw_irregular_azimuths'] = 1

        d['n_distances'] = len(self.receiver_distances)
        d['str_distances'] = str_float_vals(self.receiver_distances)
        d['str_azimuths'] = str_float_vals(self.receiver_azimuths)

        model_str, nlines = cake_model_to_config(self.earthmodel_1d)
        d['n_model_lines'] = nlines
        d['model_lines'] = model_str

        if self.earthmodel_receiver_1d:
            model_str, nlines = cake_model_to_config(
                self.earthmodel_receiver_1d)
        else:
            model_str = "# no receiver side model"
            nlines = 0

        d['n_model_receiver_lines'] = nlines
        d['model_receiver_lines'] = model_str

        d['str_slowness_window'] = str_float_vals(self.slowness_window)
        d['n_depth_ranges'], d['str_depth_ranges'] = \
            aggregate(self.propagation_filters)

        if self.wavelet_type == 0:  # user wavelet
            d['str_w_samples'] = '\n' \
                + '%i\n' % len(self.user_wavelet_samples) \
                + str_float_vals(self.user_wavelet_samples)
        else:
            d['str_w_samples'] = ''

        if self.receiver_filter:
            d['str_receiver_filter'] = self.receiver_filter.string_for_config(
                self.qseis_version)
        else:
            if self.qseis_version == '2006a':
                d['str_receiver_filter'] = '(1.0,0.0)\n0\n#\n0'
            else:
                d['str_receiver_filter'] = '1.0\n0\n#\n0'

        d['str_gf_sw_source_types'] = str_int_vals(self.gf_sw_source_types)
        d['str_gf_filenames'] = str_str_vals(self.gf_filenames)

        if self.source_mech:
            d['str_source'] = '%s \'%s\'' % (
                self.source_mech.string_for_config(), self.seismogram_filename)
        else:
            d['str_source'] = '0'

        template = '''# autogenerated QSEIS input by qseis.py
#
# This is the input file of FORTRAN77 program "qseis06" for calculation of
# synthetic seismograms based on a layered halfspace earth model.
#
# by
# Rongjiang  Wang <*****@*****.**>
# GeoForschungsZentrum Potsdam
# Telegrafenberg, D-14473 Potsdam, Germany
#
# Last modified: Potsdam, Nov., 2006
#
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
# If not specified, SI Unit System is used overall!
#
# Coordinate systems:
# cylindrical (z,r,t) with z = downward,
#                          r = from source outward,
#                          t = azimuth angle from north to east;
# cartesian (x,y,z) with   x = north,
#                          y = east,
#                          z = downward;
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
#
#	SOURCE PARAMETERS
#	=================
# 1. source depth [km]
#------------------------------------------------------------------------------
 %(source_depth)e                    |dble: source_depth;
#------------------------------------------------------------------------------
#
#	RECEIVER PARAMETERS
#	===================
# 1. receiver depth [km]
# 2. switch for distance sampling role (1/0 = equidistant/irregular); switch
#    for unit used (1/0 = km/deg)
# 3. number of distance samples
# 4. if equidistant, then start and end trace distance (> 0); else distance
#    list (please order the receiver distances from small to large)
# 5. (reduced) time begin [sec] & length of time window [sec], number of time
#    samples (<= 2*nfmax in qsglobal.h)
# 6. switch for unit of the following time reduction parameter: 1 = velocity
#    [km/sec], 0 = slowness [sec/deg]; time reduction parameter
#------------------------------------------------------------------------------
 %(receiver_depth)e                         |dble: receiver_depth;
 %(sw_equidistant)i  %(sw_distance_unit)i   |int: sw_equidistant, sw_d_unit;
 %(n_distances)i                            |int: no_distances;
 %(str_distances)s                          |dble: d_1,d_n; or d_1,d_2, ...(no comments in between!);
 %(time_start)e %(time_window)e %(nsamples)i  |dble: t_start,t_window; int: no_t_samples;
 %(sw_t_reduce)i %(time_reduction_velocity)e  |int: sw_t_reduce; dble: t_reduce;
#------------------------------------------------------------------------------
#
#	WAVENUMBER INTEGRATION PARAMETERS
#	=================================
# 1. select slowness integration algorithm (0 = suggested for full wave-field
#    modelling; 1 or 2 = suggested when using a slowness window with narrow
#    taper range - a technique for suppressing space-domain aliasing);
# 2. 4 parameters for low and high slowness (Note 1) cut-offs [s/km] with
#    tapering: 0 < slw1 < slw2 defining cosine taper at the lower end, and 0 <
#    slw3 < slw4 defining the cosine taper at the higher end. default values
#    will be used in case of inconsistent input of the cut-offs (possibly with
#    much more computational effort);
# 3. parameter for sampling rate of the wavenumber integration (1 = sampled
#    with the spatial Nyquist frequency, 2 = sampled with twice higher than
#    the Nyquist, and so on: the larger this parameter, the smaller the space-
#    domain aliasing effect, but also the more computation effort);
# 4. the factor for suppressing time domain aliasing (> 0 and <= 1) (Note 2).
#------------------------------------------------------------------------------
 %(sw_algorithm)i                    |int: sw_algorithm;
 %(str_slowness_window)s             |dble: slw(1-4);
 %(wavenumber_sampling)e             |dble: sample_rate;
 %(aliasing_suppression_factor)e     |dble: supp_factor;
#------------------------------------------------------------------------------
#
#	        OPTIONS FOR PARTIAL SOLUTIONS
#       (only applied to the source-site structure)
#	    ===========================================
#
# 1. switch for filtering free surface effects (0 = with free surface, i.e.,
#    do not select this filter; 1 = without free surface; 2 = without free
#    surface but with correction on amplitude and wave form. Note switch 2
#    can only be used for receivers at the surface)
# 2. switch for filtering waves with a shallow penetration depth (concerning
#    their whole trace from source to receiver), penetration depth limit [km]
#
#    if this option is selected, waves whose travel path never exceeds the
#    given depth limit will be filtered ("seismic muting"). The condition for
#    selecting this filter is that the given shallow path depth limit should
#    be larger than both source and receiver depth.
#
# 3. number of depth ranges where the following selected up/down-going P or
#    SV waves should be filtered
# 4. the 1. depth range: upper and lower depth [km], switch for filtering P
#    or SV wave in this depth range:
#
#    switch no:              1      2        3       4         other
#    filtered phase:         P(up)  P(down)  SV(up)  SV(down)  Error
#
# 5. the 2. ...
#
#    The partial solution options are useful tools to increase the numerical
#    significance of desired wave phases. Especially when the desired phases
#    are smaller than the undesired phases, these options should be selected
#    and carefully combined.
#------------------------------------------------------------------------------
 %(filter_surface_effects)i                  |int: isurf;
 %(filter_shallow_paths)i %(filter_shallow_paths_depth)e  |int: sw_path_filter; dble:shallow_depth_limit;
 %(n_depth_ranges)i %(str_depth_ranges)s
#------------------------------------------------------------------------------
#
#	SOURCE TIME FUNCTION (WAVELET) PARAMETERS (Note 3)
#	==================================================
# 1. wavelet duration [unit = time sample rather than sec!], that is about
#    equal to the half-amplitude cut-off period of the wavelet (> 0. if <= 0,
#    then default value = 2 time samples will be used), and switch for the
#    wavelet form (0 = user's own wavelet; 1 = default wavelet: normalized
#    square half-sinusoid for simulating a physical delta impulse; 2 = tapered
#    Heaviside wavelet, i.e. integral of wavelet 1)
# 2. IF user's own wavelet is selected, then number of the wavelet time samples
#    (<= 1024), and followed by
# 3. equidistant wavelet time samples
# 4. ...(continue) (no comment lines allowed within the time sample list!)
#    IF default, delete line 2, 3, 4 ... or comment them out!
#------------------------------------------------------------------------------
 %(wavelet_duration_samples)e %(wavelet_type)i%(str_w_samples)s
#------------------------------------------------------------------------------
#
#	 FILTER PARAMETERS OF RECEIVERS (SEISMOMETERS OR HYDROPHONES)
#	 ============================================================
# 1. constant coefficient (normalization factor)
# 2. number of roots (<= nrootmax in qsglobal.h)
# 3. list of the root positions in the complex format (Re,Im). If no roots,
#    comment out this line
# 4. number of poles (<= npolemax in qsglobal.h)
# 5. list of the pole positions in the complex format (Re,Im). If no poles,
#    comment out this line
#------------------------------------------------------------------------------
 %(str_receiver_filter)s
#------------------------------------------------------------------------------
#
#	OUTPUT FILES FOR GREEN'S FUNCTIONS (Note 4)
#	===========================================
# 1. selections of source types (yes/no = 1/0)
# 2. file names of Green's functions (please give the names without extensions,
#    which will be appended by the program automatically: *.tz, *.tr, *.tt
#    and *.tv are for the vertical, radial, tangential, and volume change (for
#    hydrophones) components, respectively)
#------------------------------------------------------------------------------
#  explosion   strike-slip dip-slip   clvd       single_f_v  single_f_h
#------------------------------------------------------------------------------
 %(str_gf_sw_source_types)s
 %(str_gf_filenames)s
#------------------------------------------------------------------------------
#	OUTPUT FILES FOR AN ARBITRARY POINT DISLOCATION SOURCE
#               (for applications to earthquakes)
#	======================================================
# 1. selection (0 = not selected; 1 or 2 = selected), if (selection = 1), then
#    the 6 moment tensor elements [N*m]: Mxx, Myy, Mzz, Mxy, Myz, Mzx (x is
#    northward, y is eastward and z is downward); else if (selection = 2), then
#    Mis [N*m] = isotropic moment part = (MT+MN+MP)/3, Mcl = CLVD moment part
#    = (2/3)(MT+MP-2*MN), Mdc = double-couple moment part = MT-MN, Strike [deg],
#    Dip [deg] and Rake [deg].
#
#    Note: to use this option, the Green's functions above should be computed
#          (selection = 1) if they do not exist already.
#
#                 north(x)
#                  /
#                 /\ strike
#                *----------------------->  east(y)
#                |\                       \
#                |-\                       \
#                |  \     fault plane       \
#                |90 \                       \
#                |-dip\                       \
#                |     \                       \
#                |      \                       \
#           downward(z)  \-----------------------\\
#
# 2. switch for azimuth distribution of the stations (0 = uniform azimuth,
#    else = irregular azimuth angles)
# 3. list of the azimuth angles [deg] for all stations given above (if the
#    uniform azimuth is selected, then only one azimuth angle is required)
#
#------------------------------------------------------------------------------
#     Mis        Mcl        Mdc        Strike     Dip        Rake      File
#------------------------------------------------------------------------------
#  2   0.00       1.00       6.0E+19    120.0      30.0       25.0      'seis'
#------------------------------------------------------------------------------
#     Mxx        Myy        Mzz        Mxy        Myz        Mzx       File
#------------------------------------------------------------------------------
%(str_source)s
%(sw_irregular_azimuths)i
%(str_azimuths)s
#------------------------------------------------------------------------------
#
#	GLOBAL MODEL PARAMETERS (Note 5)
#	================================
# 1. switch for flat-earth-transform
# 2. gradient resolution [%%] of vp, vs, and ro (density), if <= 0, then default
#    values (depending on wave length at cut-off frequency) will be used
#------------------------------------------------------------------------------
 %(sw_flat_earth_transform)i     |int: sw_flat_earth_transform;
 %(gradient_resolution_vp)e %(gradient_resolution_vs)e %(gradient_resolution_density)e   |dble: vp_res, vs_res, ro_res;
#------------------------------------------------------------------------------
#
#	                LAYERED EARTH MODEL
#       (SHALLOW SOURCE + UNIFORM DEEP SOURCE/RECEIVER STRUCTURE)
#	=========================================================
# 1. number of data lines of the layered model (source site)
#------------------------------------------------------------------------------
 %(n_model_lines)i                   |int: no_model_lines;
#------------------------------------------------------------------------------
#
#	MULTILAYERED MODEL PARAMETERS (source site)
#	===========================================
# no  depth[km]  vp[km/s]  vs[km/s]  ro[g/cm^3] qp      qs
#------------------------------------------------------------------------------
%(model_lines)s
#------------------------------------------------------------------------------
#
#	          LAYERED EARTH MODEL
#       (ONLY THE SHALLOW RECEIVER STRUCTURE)
#       =====================================
# 1. number of data lines of the layered model
#
#    Note: if the number = 0, then the receiver site is the same as the
#          source site, else different receiver-site structure is considered.
#          please be sure that the lowest interface of the receiver-site
#          structure given below can be found within the source-site
#          structure, too.
#
#------------------------------------------------------------------------------
 %(n_model_receiver_lines)i                               |int: no_model_lines;
#------------------------------------------------------------------------------
#
#	MULTILAYERED MODEL PARAMETERS (shallow receiver-site structure)
#	===============================================================
# no  depth[km]    vp[km/s]    vs[km/s]   ro[g/cm^3]   qp      qs
#------------------------------------------------------------------------------
%(model_receiver_lines)s
#---------------------------------end of all inputs----------------------------


Note 1:

The slowness is defined as the inverse of the apparent wave velocity, i.e.
p = sin(i)/v, with i = incidence angle and v = true wave velocity.

Note 2:

The suppression of the time domain aliasing is achieved by using the complex
frequency technique. The suppression factor should be a value between 0 and 1.
If this factor is set to 0.1, for example, the aliasing phase at the reduced
time begin is suppressed to 10%%.

Note 3:

The default basic wavelet function (option 1) is (2/tau)*sin^2(pi*t/tau),
for 0 < t < tau, simulating a physical delta impulse. Its half-amplitude cut-off
frequency is 1/tau. To avoid high-frequency noise, tau should not be smaller
than 4-5 time samples.

Note 4:

  Double-Couple   m11/ m22/ m33/ m12/ m23/ m31  Azimuth_Factor_(tz,tr,tv)/(tt)
  ============================================================================
  explosion       1.0/ 1.0/ 1.0/ -- / -- / --       1.0         /   0.0
  strike-slip     -- / -- / -- / 1.0/ -- / --       sin(2*azi)  /   cos(2*azi)
                  1.0/-1.0/ -- / -- / -- / --       cos(2*azi)  /  -sin(2*azi)
  dip-slip        -- / -- / -- / -- / -- / 1.0      cos(azi)    /   sin(azi)
                  -- / -- / -- / -- / 1.0/ --       sin(azi)    /  -cos(azi)
  clvd           -0.5/-0.5/ 1.0/ -- / -- / --       1.0         /   0.0
  ============================================================================
  Single-Force    fx / fy / fz                  Azimuth_Factor_(tz,tr,tv)/(tt)
  ============================================================================
  fz              -- / -- / 1.0                        1.0      /   0.0
  fx              1.0/ -- / --                         cos(azi) /   sin(azi)
  fy              -- / 1.0/ --                         sin(azi) /  -cos(azi)
  ============================================================================

Note 5:

Layers with a constant gradient are discretized into a number of homogeneous
sublayers. The gradient resolutions are then used to determine the maximum
allowed thickness of the sublayers. If the resolutions of Vp, Vs and Rho
(density) require different thicknesses, the smallest one is chosen. If it is
smaller than 1%% of the characteristic wavelength, 1%% of the wavelength is
used as the sublayer thickness instead.
'''  # noqa

        return (template % d).encode('ascii')
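
A driving sketch for this class; the file name is hypothetical, and the
remaining switches (slowness_window, wavelet_type, propagation_filters, ...)
are assumed to carry defaults from the QSeisConfig base class:

conf = QSeisConfigFull.example()
with open('qseis.inp', 'w') as f:  # hypothetical input file name
    f.write(conf.string_for_config().decode('ascii'))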
Example #3
class Optimizer(Object):

    n_calls = Int.T(default=50, help='number of parameter sets to evaluate')
    path_out = String.T(
        default='optimizer-results',
        help='base path where to store results, plots and logs')

    params = List.T(Param.T(),
                    default=[
                        PReal(name='learning_rate',
                              low=1e-6,
                              high=1e-2,
                              default=1e-4)
                    ])

    def __init__(self, **kwargs):

        super().__init__(**kwargs)
        self.model = None
        self.result = None
        self.best_model_dir = self.extend_path('best-model')
        self.fn_result = self.extend_path('result.optmz')
        self.best_loss = 9e99
        self.param_keys = [p.name for p in self.params]
        self.params_dict = OrderedDict()
        for p in self.params:
            self.params_dict[p.name] = p.make_parameter()

        self._config_operations = [
            p.name for p in self.params if p.target_attribute == 'config'
        ]

        self.optimizer_defaults = [(p.name, p.default) for p in self.params]

        self._ncalls = 0

        signal.signal(signal.SIGINT, self.plot_results)

    def clear(self):
        '''Delete results of former runs.'''
        shutil.rmtree(self.path_out)

    @property
    def dimensions(self):
        return list(self.params_dict.values())

    @property
    def optimizer_keys(self):
        return [k for (k, default) in self.optimizer_defaults]

    @property
    def optimizer_values(self):
        return [default for (k, default) in self.optimizer_defaults]

    @property
    def non_categorical_dimensions(self):
        '''Returns a list of non-categorical dimension names.'''
        return [dim.name for dim in self.dimensions
                if not isinstance(dim, Categorical)]

    def announce_test(self, params):
        '''Log a parameter test set. '''
        logging.info('+' * 20)
        logging.info('evaluating next set of parameters:')
        base = '   {}: {}\n'
        for kv in params.items():
            logging.info(base.format(*kv))

    def update_model(self, model, kwargs):
        '''Set config and model attributes by kwargs.
        Rather sloppy...
        '''
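        # Expected kwargs shape (hypothetical example):
        #   {'config.learning_rate': 1e-4, 'n_filters': 32}
        # Keys containing 'config' are applied to model.config, all other
        # keys directly to the model.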
        new_config = copy.deepcopy(model.config)
        self._ncalls += 1
        model.name = 'opt_%i-%s' % (self._ncalls, self.base_name)
        for key, arg in kwargs.items():

            # choose which object to modify (model or model.config)
            if 'config' in key:
                key = key.split('.')[-1]
                want_modify = new_config
            else:
                want_modify = model

            # If arg names a ChunkOperation subclass, instantiate an object
            # of that class
            attribute = name_to_class.get(arg, False)
            if attribute:
                # chunk operation found
                attribute = attribute()
            else:
                attribute = arg

            if not hasattr(want_modify, key):
                raise Exception('No such parameter: %s' % key)

            setattr(want_modify, key, attribute)

        model.config = new_config

    def save_model(self, model):
        '''copy the `model` to the `best_model` directory.'''
        shutil.rmtree(self.best_model_dir)
        shutil.copytree(model.outdir, self.best_model_dir)

    def evaluate(self, args):
        '''Map a gp_minimize argument list onto model.train_and_evaluate.'''
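        # `args` is a plain list ordered like self.optimizer_keys, e.g.
        # [1e-4] when the only parameter is 'learning_rate'.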
        kwargs = dict(zip(self.optimizer_keys, args))
        self.announce_test(kwargs)
        self.update_model(self.model, kwargs)
        try:
            loss = self.model.train_and_evaluate()[0]['loss']
            if loss < self.best_loss:
                logging.info('found a better loss at %s' % loss)
                logging.info('kwargs: %s' % kwargs)
                self.save_model(self.model)
                self.best_loss = loss
            else:
                self.model.clear_model()
            return loss

        except tf.errors.ResourceExhaustedError as e:
            logging.warning(e)
            logging.warning('Skipping this test, loss = 9e9')
            return 9e9

    def set_model(self, model):
        logging.info('prefixing model output path with %s' % self.path_out)
        model.prefix = self.path_out
        self.model = model
        self.base_name = self.model.name

    def optimize(self, model):
        '''Optimize a :py:class:`pinky.model.Model` instance.'''
        self.set_model(model)
        ensure_dir(self.best_model_dir)

        self.result = gp_minimize(func=self.evaluate,
                                  dimensions=self.dimensions,
                                  acq_func='EI',
                                  n_calls=self.n_calls,
                                  x0=self.optimizer_values)

        self.evaluate_result()
        self.plot_results()

    def ensure_result(self):
        '''Load and set minimizer result.'''
        if self.result is None:
            if self.fn_result is None:
                logging.warning('Cannot load results from filename: %s' %
                                self.fn_result)
                return
            self.result = load_result(self.fn_result)

    def extend_path(self, *path):
        '''Prepend `self.path_out` to `path`.'''
        return os.path.join(self.path_out, *path)

    def evaluate_result(self):
        self.ensure_result()

        # best = self.result.space.point_to_dict(self.result.x)
        best = self.result.x
        logging.info('Best parameter set:')
        logging.info(best)

        logging.info('Best parameter loss:')
        logging.info(self.result.fun)

    def plot_results(self, *args):
        '''Produce and save result plots. '''
        # self.ensure_result()
        ensure_dir(self.extend_path('plots'))

        if _plot_histogram_error:
            logging.warning(_plot_histogram_error)
        else:
            for dim_name in self.optimizer_keys:
                fig, ax = plot_histogram(
                    result=self.result)  #, dimension_name=dim_name)
                fig.savefig(
                    self.extend_path('plots/histogram_%s.pdf' % dim_name))

        # ax = plot_objective(result=self.result,)
        # dimension_names=self.non_categorical_dimensions)
        # fig = plt.gcf()
        # fig.savefig(self.extend_path('plots/objectives.pdf'))

        ax = plot_evaluations(result=self.result)
        # dimension_names=self.non_categorical_dimensions)
        fig = plt.gcf()
        fig.savefig(self.extend_path('plots/evaluations.pdf'))

    @classmethod
    def get_example(cls):
        '''Get an example instance of this class.'''
        return cls()
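
A hedged driving sketch; model is assumed to be a configured
pinky.model.Model instance:

optimizer = Optimizer(n_calls=25)
optimizer.optimize(model)  # runs gp_minimize, then logs and plots results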
Example #4
class Map(Object):
    lat = Float.T(optional=True)
    lon = Float.T(optional=True)
    radius = Float.T(optional=True)
    width = Float.T(default=20.)
    height = Float.T(default=14.)
    margins = List.T(Float.T())
    illuminate = Bool.T(default=True)
    skip_feature_factor = Float.T(default=0.02)
    show_grid = Bool.T(default=False)
    show_topo = Bool.T(default=True)
    show_scale = Bool.T(default=False)
    show_topo_scale = Bool.T(default=False)
    show_center_mark = Bool.T(default=False)
    show_rivers = Bool.T(default=True)
    show_plates = Bool.T(default=False)
    show_boundaries = Bool.T(default=False)
    illuminate_factor_land = Float.T(default=0.5)
    illuminate_factor_ocean = Float.T(default=0.25)
    color_wet = Tuple.T(3, Int.T(), default=(216, 242, 254))
    color_dry = Tuple.T(3, Int.T(), default=(172, 208, 165))
    color_boundaries = Tuple.T(3, Int.T(), default=(1, 1, 1))
    topo_resolution_min = Float.T(
        default=40., help='minimum resolution of topography [dpi]')
    topo_resolution_max = Float.T(
        default=200., help='maximum resolution of topography [dpi]')
    replace_topo_color_only = FloatTile.T(
        optional=True,
        help='replace topo color while keeping topographic shading')
    topo_cpt_wet = String.T(default='light_sea')
    topo_cpt_dry = String.T(default='light_land')
    axes_layout = String.T(optional=True)
    custom_cities = List.T(City.T())
    gmt_config = Dict.T(String.T(), String.T())
    comment = String.T(optional=True)

    def __init__(self, gmtversion='newest', **kwargs):
        Object.__init__(self, **kwargs)
        self._gmt = None
        self._scaler = None
        self._widget = None
        self._corners = None
        self._wesn = None
        self._minarea = None
        self._coastline_resolution = None
        self._rivers = None
        self._dems = None
        self._have_topo_land = None
        self._have_topo_ocean = None
        self._jxyr = None
        self._pxyr = None
        self._layout = None
        self._prep_topo_have = None
        self._labels = []
        self._area_labels = []
        self._gmtversion = gmtversion

    def save(self,
             outpath,
             resolution=75.,
             oversample=2.,
             size=None,
             width=None,
             height=None,
             psconvert=False):
        '''
        Save the image.

        Save the image to ``outpath``. The format is determined by the filename
        extension. Formats are handled as follows: ``'.eps'`` and ``'.ps'``
        produce EPS and PS, respectively, directly with GMT. If the file name
        ends with ``'.pdf'``, GMT output is fed through ``gmtpy-epstopdf`` to
        create a PDF file. For any other filename extension, output is first
        converted to PDF with ``gmtpy-epstopdf``, then with ``pdftocairo`` to
        PNG with a resolution oversampled by the factor ``oversample`` and
        finally the PNG is downsampled and converted to the target format with
        ``convert``. The resolution of rasterized target image can be
        controlled either by ``resolution`` in DPI or by specifying ``width``
        or ``height`` or ``size``, where the latter fits the image into a
        square with given side length. To save transparency use
        ``psconvert=True``.
        '''
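        # Example calls (hypothetical paths): write a PDF directly, or a PNG
        # fitted into a 1024 px square:
        #   m.save('map.pdf')
        #   m.save('map.png', size=1024)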

        gmt = self.gmt
        self.draw_labels()
        self.draw_axes()
        if self.show_topo and self.show_topo_scale:
            self._draw_topo_scale()

        gmt.save(outpath,
                 resolution=resolution,
                 oversample=oversample,
                 size=size,
                 width=width,
                 height=height,
                 psconvert=psconvert)

    @property
    def scaler(self):
        if self._scaler is None:
            self._setup_geometry()

        return self._scaler

    @property
    def wesn(self):
        if self._wesn is None:
            self._setup_geometry()

        return self._wesn

    @property
    def widget(self):
        if self._widget is None:
            self._setup()

        return self._widget

    @property
    def layout(self):
        if self._layout is None:
            self._setup()

        return self._layout

    @property
    def jxyr(self):
        if self._jxyr is None:
            self._setup()

        return self._jxyr

    @property
    def pxyr(self):
        if self._pxyr is None:
            self._setup()

        return self._pxyr

    @property
    def gmt(self):
        if self._gmt is None:
            self._setup()

        if self._have_topo_ocean is None:
            self._draw_background()

        return self._gmt

    def _setup(self):
        if not self._widget:
            self._setup_geometry()

        self._setup_lod()
        self._setup_gmt()

    def _setup_geometry(self):
        wpage, hpage = self.width, self.height
        ml, mr, mt, mb = self._expand_margins()
        wpage -= ml + mr
        hpage -= mt + mb

        wreg = self.radius * 2.0
        hreg = self.radius * 2.0
        if wpage >= hpage:
            wreg *= wpage / hpage
        else:
            hreg *= hpage / wpage

        self._wreg = wreg
        self._hreg = hreg

        self._corners = corners(self.lon, self.lat, wreg, hreg)
        west, east, south, north = extent(self.lon, self.lat, wreg, hreg, 10)

        x, y, z = ((west, east), (south, north), (-6000., 4500.))

        xax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
        yax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
        zax = gmtpy.Ax(mode='min-max',
                       inc=1000.,
                       label='Height',
                       scaled_unit='km',
                       scaled_unit_factor=0.001)

        scaler = gmtpy.ScaleGuru(data_tuples=[(x, y, z)], axes=(xax, yax, zax))

        par = scaler.get_params()

        west = par['xmin']
        east = par['xmax']
        south = par['ymin']
        north = par['ymax']

        self._wesn = west, east, south, north
        self._scaler = scaler

    def _setup_lod(self):
        w, e, s, n = self._wesn
        if self.radius > 1500. * km:
            coastline_resolution = 'i'
            rivers = False
        else:
            coastline_resolution = 'f'
            rivers = True

        self._minarea = (self.skip_feature_factor * self.radius / km)**2

        self._coastline_resolution = coastline_resolution
        self._rivers = rivers

        self._prep_topo_have = {}
        self._dems = {}

        cm2inch = gmtpy.cm / gmtpy.inch

        dmin = 2.0 * self.radius * m2d / (self.topo_resolution_max *
                                          (self.height * cm2inch))
        dmax = 2.0 * self.radius * m2d / (self.topo_resolution_min *
                                          (self.height * cm2inch))

        for k in ['ocean', 'land']:
            self._dems[k] = topo.select_dem_names(k, dmin, dmax, self._wesn)
            if self._dems[k]:
                logger.debug('using topography dataset %s for %s' %
                             (','.join(self._dems[k]), k))

    def _expand_margins(self):
        if len(self.margins) not in (1, 2, 4):
            ml = mr = mt = mb = 2.0
        elif len(self.margins) == 1:
            ml = mr = mt = mb = self.margins[0]
        elif len(self.margins) == 2:
            ml = mr = self.margins[0]
            mt = mb = self.margins[1]
        elif len(self.margins) == 4:
            ml, mr, mt, mb = self.margins

        return ml, mr, mt, mb

    def _setup_gmt(self):
        w, h = self.width, self.height
        scaler = self._scaler

        if gmtpy.is_gmt5(self._gmtversion):
            gmtconf = dict(MAP_TICK_PEN_PRIMARY='1.25p',
                           MAP_TICK_PEN_SECONDARY='1.25p',
                           MAP_TICK_LENGTH_PRIMARY='0.2c',
                           MAP_TICK_LENGTH_SECONDARY='0.6c',
                           FONT_ANNOT_PRIMARY='12p,1,black',
                           FONT_LABEL='12p,1,black',
                           PS_CHAR_ENCODING='ISOLatin1+',
                           MAP_FRAME_TYPE='fancy',
                           FORMAT_GEO_MAP='D',
                           PS_MEDIA='Custom_%ix%i' %
                           (w * gmtpy.cm, h * gmtpy.cm),
                           PS_PAGE_ORIENTATION='portrait',
                           MAP_GRID_PEN_PRIMARY='thinnest,0/50/0',
                           MAP_ANNOT_OBLIQUE='6')
        else:
            gmtconf = dict(TICK_PEN='1.25p',
                           TICK_LENGTH='0.2c',
                           ANNOT_FONT_PRIMARY='1',
                           ANNOT_FONT_SIZE_PRIMARY='12p',
                           LABEL_FONT='1',
                           LABEL_FONT_SIZE='12p',
                           CHAR_ENCODING='ISOLatin1+',
                           BASEMAP_TYPE='fancy',
                           PLOT_DEGREE_FORMAT='D',
                           PAPER_MEDIA='Custom_%ix%i' %
                           (w * gmtpy.cm, h * gmtpy.cm),
                           GRID_PEN_PRIMARY='thinnest/0/50/0',
                           DOTS_PR_INCH='1200',
                           OBLIQUE_ANNOTATION='6')

        gmtconf.update((k.upper(), v) for (k, v) in self.gmt_config.items())

        gmt = gmtpy.GMT(config=gmtconf, version=self._gmtversion)

        layout = gmt.default_layout()

        layout.set_fixed_margins(*[x * cm for x in self._expand_margins()])

        widget = layout.get_widget()
        widget['P'] = widget['J']
        widget['J'] = ('-JA%g/%g' % (self.lon, self.lat)) + '/%(width)gp'
        scaler['R'] = '-R%g/%g/%g/%gr' % self._corners

        # aspect = gmtpy.aspect_for_projection(
        #     gmt.installation['version'], *(widget.J() + scaler.R()))

        aspect = self._map_aspect(jr=widget.J() + scaler.R())
        widget.set_aspect(aspect)

        self._gmt = gmt
        self._layout = layout
        self._widget = widget
        self._jxyr = self._widget.JXY() + self._scaler.R()
        self._pxyr = self._widget.PXY() + [
            '-R%g/%g/%g/%g' % (0, widget.width(), 0, widget.height())
        ]
        self._have_drawn_axes = False
        self._have_drawn_labels = False

    def _draw_background(self):
        self._have_topo_land = False
        self._have_topo_ocean = False
        if self.show_topo:
            # sets self._have_topo_land and self._have_topo_ocean
            self._draw_topo()

        self._draw_basefeatures()

    def _get_topo_tile(self, k):
        t = None
        demname = None
        for dem in self._dems[k]:
            t = topo.get(dem, self._wesn)
            demname = dem
            if t is not None:
                break

        if t is None:
            raise NoTopo()

        return t, demname

    def _prep_topo(self, k):
        gmt = self._gmt
        t, demname = self._get_topo_tile(k)

        if demname not in self._prep_topo_have:

            grdfile = gmt.tempfilename()

            is_flat = num.all(t.data[0] == t.data)

            gmtpy.savegrd(t.x(),
                          t.y(),
                          t.data,
                          filename=grdfile,
                          naming='lonlat')

            if self.illuminate and not is_flat:
                if k == 'ocean':
                    factor = self.illuminate_factor_ocean
                else:
                    factor = self.illuminate_factor_land

                ilumfn = gmt.tempfilename()
                gmt.grdgradient(grdfile,
                                N='e%g' % factor,
                                A=-45,
                                G=ilumfn,
                                out_discard=True)

                ilumargs = ['-I%s' % ilumfn]
            else:
                ilumargs = []

            if self.replace_topo_color_only:
                t2 = self.replace_topo_color_only
                grdfile2 = gmt.tempfilename()

                gmtpy.savegrd(t2.x(),
                              t2.y(),
                              t2.data,
                              filename=grdfile2,
                              naming='lonlat')

                if gmt.is_gmt5():
                    gmt.grdsample(
                        grdfile2,
                        G=grdfile,
                        n='l',
                        I='%g/%g' % (t.dx, t.dy),  # noqa
                        R=grdfile,
                        out_discard=True)
                else:
                    gmt.grdsample(
                        grdfile2,
                        G=grdfile,
                        Q='l',
                        I='%g/%g' % (t.dx, t.dy),  # noqa
                        R=grdfile,
                        out_discard=True)

                gmt.grdmath(grdfile,
                            '0.0',
                            'AND',
                            '=',
                            grdfile2,
                            out_discard=True)

                grdfile = grdfile2

            self._prep_topo_have[demname] = grdfile, ilumargs

        return self._prep_topo_have[demname]

    def _draw_topo(self):
        widget = self._widget
        scaler = self._scaler
        gmt = self._gmt
        cres = self._coastline_resolution
        minarea = self._minarea

        JXY = widget.JXY()
        R = scaler.R()

        try:
            grdfile, ilumargs = self._prep_topo('ocean')
            gmt.pscoast(D=cres, S='c', A=minarea, *(JXY + R))
            gmt.grdimage(grdfile,
                         C=topo.cpt(self.topo_cpt_wet),
                         *(ilumargs + JXY + R))
            gmt.pscoast(Q=True, *(JXY + R))
            self._have_topo_ocean = True
        except NoTopo:
            self._have_topo_ocean = False

        try:
            grdfile, ilumargs = self._prep_topo('land')
            gmt.pscoast(D=cres, G='c', A=minarea, *(JXY + R))
            gmt.grdimage(grdfile,
                         C=topo.cpt(self.topo_cpt_dry),
                         *(ilumargs + JXY + R))
            gmt.pscoast(Q=True, *(JXY + R))
            self._have_topo_land = True
        except NoTopo:
            self._have_topo_land = False

    def _draw_topo_scale(self, label='Elevation [km]'):
        dry = read_cpt(topo.cpt(self.topo_cpt_dry))
        wet = read_cpt(topo.cpt(self.topo_cpt_wet))
        combi = cpt_merge_wet_dry(wet, dry)
        for level in combi.levels:
            level.vmin /= km
            level.vmax /= km

        topo_cpt = self.gmt.tempfilename() + '.cpt'
        write_cpt(combi, topo_cpt)

        (w, h), (xo, yo) = self.widget.get_size()
        self.gmt.psscale(
            D='%gp/%gp/%gp/%gph' %
            (xo + 0.5 * w, yo - 2.0 * gmtpy.cm, w, 0.5 * gmtpy.cm),
            C=topo_cpt,
            B='1:%s:' % label)

    def _draw_basefeatures(self):
        gmt = self._gmt
        cres = self._coastline_resolution
        rivers = self._rivers
        minarea = self._minarea

        color_wet = self.color_wet
        color_dry = self.color_dry

        if self.show_rivers and rivers:
            rivers = ['-Ir/0.25p,%s' % gmtpy.color(self.color_wet)]
        else:
            rivers = []

        fill = {}
        if not self._have_topo_land:
            fill['G'] = color_dry

        if not self._have_topo_ocean:
            fill['S'] = color_wet

        if self.show_boundaries:
            fill['N'] = '1/1p,%s,%s' % (gmtpy.color(
                self.color_boundaries), 'solid')

        gmt.pscoast(D=cres,
                    W='thinnest,%s' %
                    gmtpy.color(darken(gmtpy.color_tup(color_dry))),
                    A=minarea,
                    *(rivers + self._jxyr),
                    **fill)

        if self.show_plates:
            self.draw_plates()

    def _draw_axes(self):
        gmt = self._gmt
        scaler = self._scaler
        widget = self._widget

        if self.axes_layout is None:
            if self.lat > 0.0:
                axes_layout = 'WSen'
            else:
                axes_layout = 'WseN'
        else:
            axes_layout = self.axes_layout

        scale_km = gmtpy.nice_value(self.radius / 5.) / 1000.

        if self.show_center_mark:
            gmt.psxy(in_rows=[[self.lon, self.lat]],
                     S='c20p',
                     W='2p,black',
                     *self._jxyr)

        if self.show_grid:
            btmpl = ('%(xinc)gg%(xinc)g:%(xlabel)s:/'
                     '%(yinc)gg%(yinc)g:%(ylabel)s:')
        else:
            btmpl = '%(xinc)g:%(xlabel)s:/%(yinc)g:%(ylabel)s:'

        if self.show_scale:
            scale = 'x%gp/%gp/%g/%g/%gk' % (6. / 7 * widget.width(),
                                            widget.height() / 7., self.lon,
                                            self.lat, scale_km)
        else:
            scale = False

        gmt.psbasemap(B=(btmpl % scaler.get_params()) + axes_layout,
                      L=scale,
                      *self._jxyr)

        if self.comment:
            font_size = self.gmt.label_font_size()

            _, east, south, _ = self._wesn
            if gmt.is_gmt5():
                row = [
                    1, 0,
                    '%gp,%s,%s' % (font_size, 0, 'black'), 'BR', self.comment
                ]

                farg = ['-F+f+j']
            else:
                row = [1, 0, font_size, 0, 0, 'BR', self.comment]
                farg = []

            gmt.pstext(in_rows=[row],
                       N=True,
                       R=(0, 1, 0, 1),
                       D='%gp/%gp' % (-font_size * 0.2, font_size * 0.3),
                       *(widget.PXY() + farg))

    def draw_axes(self):
        if not self._have_drawn_axes:
            self._draw_axes()
            self._have_drawn_axes = True

    def _have_coastlines(self):
        gmt = self._gmt
        cres = self._coastline_resolution
        minarea = self._minarea

        checkfile = gmt.tempfilename()

        gmt.pscoast(M=True,
                    D=cres,
                    W='thinnest,black',
                    A=minarea,
                    out_filename=checkfile,
                    *self._jxyr)

        points = []
        with open(checkfile, 'r') as f:
            for line in f:
                ls = line.strip()
                if ls.startswith('#') or ls.startswith('>') or ls == '':
                    continue
                plon, plat = [float(x) for x in ls.split()]
                points.append((plat, plon))

        points = num.array(points, dtype=float)
        return num.any(points_in_region(points, self._wesn))

    def have_coastlines(self):
        self.gmt  # trigger lazy setup and background drawing
        return self._have_coastlines()

    def project(self, lats, lons, jr=None):
        onepoint = False
        if isinstance(lats, float) and isinstance(lons, float):
            lats = [lats]
            lons = [lons]
            onepoint = True

        if jr is not None:
            j, r = jr
            gmt = gmtpy.GMT(version=self._gmtversion)
        else:
            j, _, _, r = self.jxyr
            gmt = self.gmt

        f = BytesIO()
        gmt.mapproject(j, r, in_columns=(lons, lats), out_stream=f, D='p')
        f.seek(0)
        data = num.loadtxt(f, ndmin=2)
        xs, ys = data.T
        if onepoint:
            xs = xs[0]
            ys = ys[0]
        return xs, ys

    def _map_box(self, jr=None):
        ll_lon, ll_lat, ur_lon, ur_lat = self._corners

        xs_corner, ys_corner = self.project((ll_lat, ur_lat), (ll_lon, ur_lon),
                                            jr=jr)

        w = xs_corner[1] - xs_corner[0]
        h = ys_corner[1] - ys_corner[0]

        return w, h

    def _map_aspect(self, jr=None):
        w, h = self._map_box(jr=jr)
        return h / w

    def _draw_labels(self):
        points_taken = []
        regions_taken = []

        la = num.logical_and

        def no_points_in_rect(xs, ys, xmin, ymin, xmax, ymax):
            return not num.any(
                la(la(xmin < xs, xs < xmax), la(ymin < ys, ys < ymax)))

        def roverlaps(a, b):
            return (a[0] < b[2] and b[0] < a[2] and a[1] < b[3]
                    and b[1] < a[3])

        w, h = self._map_box()

        label_font_size = self.gmt.label_font_size()

        if self._labels:

            n = len(self._labels)

            lons, lats, texts, sx, sy, colors, fonts, font_sizes, \
                angles, styles = list(zip(*self._labels))

            font_sizes = [(font_size or label_font_size)
                          for font_size in font_sizes]

            sx = num.array(sx, dtype=float)
            sy = num.array(sy, dtype=float)

            xs, ys = self.project(lats, lons)

            points_taken.append((xs, ys))

            dxs = num.zeros(n)
            dys = num.zeros(n)

            for i in range(n):
                dx, dy = gmtpy.text_box(texts[i],
                                        font=fonts[i],
                                        font_size=font_sizes[i],
                                        **styles[i])

                dxs[i] = dx
                dys[i] = dy

            anchors_ok = (
                la(xs + sx + dxs < w, ys + sy + dys < h),
                la(xs - sx - dxs > 0., ys - sy - dys > 0.),
                la(xs + sx + dxs < w, ys - sy - dys > 0.),
                la(xs - sx - dxs > 0., ys + sy + dys < h),
            )

            arects = [(xs, ys, xs + sx + dxs, ys + sy + dys),
                      (xs - sx - dxs, ys - sy - dys, xs, ys),
                      (xs, ys - sy - dys, xs + sx + dxs, ys),
                      (xs - sx - dxs, ys, xs, ys + sy + dys)]

            for i in range(n):
                for ianch in range(4):
                    anchors_ok[ianch][i] &= no_points_in_rect(
                        xs, ys, *[xxx[i] for xxx in arects[ianch]])

            anchor_choices = []
            anchor_take = []
            for i in range(n):
                choices = [ianch for ianch in range(4) if anchors_ok[ianch][i]]
                anchor_choices.append(choices)
                if choices:
                    anchor_take.append(choices[0])
                else:
                    anchor_take.append(None)

            def cost(anchor_take):
                noverlaps = 0
                for i in range(n):
                    for j in range(n):
                        if i != j:
                            i_take = anchor_take[i]
                            j_take = anchor_take[j]
                            if i_take is None or j_take is None:
                                continue
                            r_i = [xxx[i] for xxx in arects[i_take]]
                            r_j = [xxx[j] for xxx in arects[j_take]]
                            if roverlaps(r_i, r_j):
                                noverlaps += 1

                return noverlaps

            cur_cost = cost(anchor_take)
            imax = 30
            while cur_cost != 0 and imax > 0:
                for i in range(n):
                    for t in anchor_choices[i]:
                        anchor_take_new = list(anchor_take)
                        anchor_take_new[i] = t
                        new_cost = cost(anchor_take_new)
                        if new_cost < cur_cost:
                            anchor_take = anchor_take_new
                            cur_cost = new_cost

                imax -= 1

            while cur_cost != 0:
                for i in range(n):
                    anchor_take_new = list(anchor_take)
                    anchor_take_new[i] = None
                    new_cost = cost(anchor_take_new)
                    if new_cost < cur_cost:
                        anchor_take = anchor_take_new
                        cur_cost = new_cost
                        break

            anchor_strs = ['BL', 'TR', 'TL', 'BR']

            for i in range(n):
                ianchor = anchor_take[i]
                color = colors[i]
                if color is None:
                    color = 'black'

                if ianchor is not None:
                    regions_taken.append([xxx[i] for xxx in arects[ianchor]])

                    anchor = anchor_strs[ianchor]

                    # offset sign follows the anchor code: 'B'/'T' selects
                    # the vertical direction, 'L'/'R' the horizontal
                    yoff = [-sy[i], sy[i]][anchor[0] == 'B']
                    xoff = [-sx[i], sx[i]][anchor[1] == 'L']
                    if self.gmt.is_gmt5():
                        row = (lons[i], lats[i],
                               '%i,%s,%s' % (font_sizes[i], fonts[i], color),
                               anchor, texts[i])

                        farg = ['-F+f+j+a%g' % angles[i]]
                    else:
                        row = (lons[i], lats[i], font_sizes[i], angles[i],
                               fonts[i], anchor, texts[i])
                        farg = ['-G%s' % color]

                    self.gmt.pstext(in_rows=[row],
                                    D='%gp/%gp' % (xoff, yoff),
                                    *(self.jxyr + farg),
                                    **styles[i])

        if self._area_labels:

            for lons, lats, text, color, font, font_size, style in \
                    self._area_labels:

                if font_size is None:
                    font_size = label_font_size

                if color is None:
                    color = 'black'

                if self.gmt.is_gmt5():
                    farg = ['-F+f+j']
                else:
                    farg = ['-G%s' % color]

                xs, ys = self.project(lats, lons)
                dx, dy = gmtpy.text_box(text,
                                        font=font,
                                        font_size=font_size,
                                        **style)

                rects = [
                    xs - 0.5 * dx, ys - 0.5 * dy, xs + 0.5 * dx, ys + 0.5 * dy
                ]

                locs_ok = num.ones(xs.size, dtype=bool)

                for iloc in range(xs.size):
                    rcandi = [xxx[iloc] for xxx in rects]

                    # candidate box must lie fully inside the map frame
                    locs_ok[iloc] = (0 < rcandi[0] and rcandi[2] < w
                                     and 0 < rcandi[1] and rcandi[3] < h)

                    overlap = False
                    for r in regions_taken:
                        if roverlaps(r, rcandi):
                            overlap = True
                            break

                    locs_ok[iloc] &= not overlap

                    for xs_taken, ys_taken in points_taken:
                        locs_ok[iloc] &= no_points_in_rect(
                            xs_taken, ys_taken, *rcandi)

                        if not locs_ok[iloc]:
                            break

                rows = []
                for iloc, (lon, lat) in enumerate(zip(lons, lats)):
                    if not locs_ok[iloc]:
                        continue

                    if self.gmt.is_gmt5():
                        row = (lon, lat, '%i,%s,%s' % (font_size, font, color),
                               'MC', text)

                    else:
                        row = (lon, lat, font_size, 0, font, 'MC', text)

                    rows.append(row)

                    regions_taken.append([xxx[iloc] for xxx in rects])
                    break

                self.gmt.pstext(in_rows=rows, *(self.jxyr + farg), **style)

    def draw_labels(self):
        self.gmt  # trigger lazy setup and background drawing
        if not self._have_drawn_labels:
            self._draw_labels()
            self._have_drawn_labels = True

    def add_label(self,
                  lat,
                  lon,
                  text,
                  offset_x=5.,
                  offset_y=5.,
                  color=None,
                  font='1',
                  font_size=None,
                  angle=0,
                  style=None):

        style = dict(style) if style else {}

        if 'G' in style:
            color = style.pop('G')

        self._labels.append((lon, lat, text, offset_x, offset_y, color, font,
                             font_size, angle, style))

    def add_area_label(self,
                       lat,
                       lon,
                       text,
                       color=None,
                       font='3',
                       font_size=None,
                       style=None):

        self._area_labels.append(
            (lon, lat, text, color, font, font_size, style or {}))

    def cities_in_region(self):
        from pyrocko.dataset import geonames
        cities = geonames.get_cities_region(region=self.wesn, minpop=0)
        cities.extend(self.custom_cities)
        cities.sort(key=lambda x: x.population)
        return cities

    def draw_cities(self,
                    exact=None,
                    include=[],
                    exclude=[],
                    nmax_soft=10,
                    psxy_style=dict(S='s5p', G='black')):

        cities = self.cities_in_region()

        if exact is not None:
            cities = [c for c in cities if c.name in exact]
            minpop = None
        else:
            cities = [c for c in cities if c.name not in exclude]
            minpop = 10**3
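            # Progressively raise the population threshold until at most
            # nmax_soft cities remain, but avoid thinning the list to fewer
            # than 3 entries when it is already short.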
            for minpop_new in [1e3, 3e3, 1e4, 3e4, 1e5, 3e5, 1e6, 3e6, 1e7]:
                cities_new = [
                    c for c in cities
                    if c.population > minpop_new or c.name in include
                ]

                if len(cities_new) == 0 or (len(cities_new) < 3
                                            and len(cities) < nmax_soft * 2):
                    break

                cities = cities_new
                minpop = minpop_new
                if len(cities) <= nmax_soft:
                    break

        if cities:
            lats = [c.lat for c in cities]
            lons = [c.lon for c in cities]

            self.gmt.psxy(in_columns=(lons, lats), *self.jxyr, **psxy_style)

            for c in cities:
                try:
                    # keep the name if it survives ISO-8859-1 encoding (the
                    # PostScript character encoding configured above)
                    text = c.name.encode('iso-8859-1').decode('iso-8859-1')
                except UnicodeEncodeError:
                    text = c.asciiname

                self.add_label(c.lat, c.lon, text)

        self._cities_minpop = minpop

    def add_stations(self, stations, psxy_style=dict()):

        default_psxy_style = {'S': 't8p', 'G': 'black'}
        default_psxy_style.update(psxy_style)

        lats, lons = zip(*[s.effective_latlon for s in stations])

        self.gmt.psxy(in_columns=(lons, lats),
                      *self.jxyr,
                      **default_psxy_style)

        for station in stations:
            self.add_label(
                station.effective_lat, station.effective_lon,
                '.'.join(x for x in (station.network, station.station) if x))

    def add_kite_scene(self, scene):
        tile = FloatTile(scene.frame.llLon, scene.frame.llLat,
                         scene.frame.dLon, scene.frame.dLat,
                         scene.displacement)

        return tile

    def add_gnss_campaign(self,
                          campaign,
                          psxy_style=None,
                          offset_scale=None,
                          labels=True,
                          vertical=False,
                          fontsize=10):

        stations = campaign.stations

        if offset_scale is None:
            offset_scale = num.zeros(campaign.nstations)
            for ista, sta in enumerate(stations):
                for comp in sta.components.values():
                    offset_scale[ista] += comp.shift**2
            # scale by the largest station offset magnitude
            offset_scale = num.sqrt(offset_scale).max()

        size = math.sqrt(self.height**2 + self.width**2)
        scale = (size / 10.) / offset_scale
        logger.debug('GNSS: Using offset scale %f, map scale %f', offset_scale,
                     scale)

        lats, lons = zip(*[s.effective_latlon for s in stations])

        if vertical:
            rows = [[
                lons[ista], lats[ista], 0., s.up.shift,
                (s.east.sigma + s.north.sigma) if s.east.sigma else 0.,
                s.up.sigma, 0., s.code if labels else None
            ] for ista, s in enumerate(stations) if s.up is not None]

        else:
            rows = [[
                lons[ista], lats[ista], s.east.shift, s.north.shift,
                s.east.sigma, s.north.sigma, s.correlation_ne,
                s.code if labels else None
            ] for ista, s in enumerate(stations)
                    if s.east is not None or s.north is not None]

        default_psxy_style = {
            'h': 0,
            'W': '2p,black',
            'A': '+p2p,black+e+a40',
            'G': 'black',
            'L': True,
            'S': 'e%dc/0.95/%d' % (scale, fontsize),
        }

        if not labels:
            for row in rows:
                row.pop(-1)

        if psxy_style is not None:
            default_psxy_style.update(psxy_style)

        self.gmt.psvelo(in_rows=rows, *self.jxyr, **default_psxy_style)

    def draw_plates(self):
        from pyrocko.dataset import tectonics

        neast = 20
        nnorth = max(1, int(round(self._hreg / self._wreg * neast)))
        norths = num.linspace(-self._hreg * 0.5, self._hreg * 0.5, nnorth)
        easts = num.linspace(-self._wreg * 0.5, self._wreg * 0.5, neast)
        norths2 = num.repeat(norths, neast)
        easts2 = num.tile(easts, nnorth)
        lats, lons = od.ne_to_latlon(self.lat, self.lon, norths2, easts2)

        bird = tectonics.PeterBird2003()
        plates = bird.get_plates()

        color_plates = gmtpy.color('aluminium5')
        color_velocities = gmtpy.color('skyblue1')
        color_velocities_lab = gmtpy.color(darken(gmtpy.color_tup('skyblue1')))

        points = num.vstack((lats, lons)).T
        used = []
        for plate in plates:
            mask = plate.contains_points(points)
            if num.any(mask):
                used.append((plate, mask))

        if len(used) > 1:

            candi_fixed = {}

            label_data = []
            for plate, mask in used:

                mean_north = num.mean(norths2[mask])
                mean_east = num.mean(easts2[mask])
                iorder = num.argsort(
                    num.sqrt((norths2[mask] - mean_north)**2 +
                             (easts2[mask] - mean_east)**2))

                lat_candis = lats[mask][iorder]
                lon_candis = lons[mask][iorder]

                candi_fixed[plate.name] = lat_candis.size

                label_data.append(
                    (lat_candis, lon_candis, plate, color_plates))

            boundaries = bird.get_boundaries()

            size = 2

            psxy_kwargs = []

            for boundary in boundaries:
                if num.any(points_in_region(boundary.points, self._wesn)):
                    for typ, part in boundary.split_types(
                        [['SUB'], ['OSR', 'OTF', 'OCB', 'CTF', 'CCB', 'CRB']]):

                        lats, lons = part.T

                        kwargs = {}
                        if typ[0] == 'SUB':
                            if boundary.kind == '\\':
                                kwargs['S'] = 'f%g/%gp+t+r' % (0.45 * size,
                                                               3. * size)
                            elif boundary.kind == '/':
                                kwargs['S'] = 'f%g/%gp+t+l' % (0.45 * size,
                                                               3. * size)

                            kwargs['G'] = color_plates

                        kwargs['in_columns'] = (lons, lats)
                        kwargs['W'] = '%gp,%s' % (size, color_plates)

                        psxy_kwargs.append(kwargs)

                        if boundary.kind == '\\':
                            if boundary.name2 in candi_fixed:
                                candi_fixed[boundary.name2] += neast * nnorth

                        elif boundary.kind == '/':
                            if boundary.name1 in candi_fixed:
                                candi_fixed[boundary.name1] += neast * nnorth

            candi_fixed = [
                name for name in sorted(list(candi_fixed.keys()),
                                        key=lambda name: -candi_fixed[name])
            ]

            candi_fixed.append(None)

            gsrm = tectonics.GSRM1()

            fixed_plate_name = None
            for name in candi_fixed:
                if name not in gsrm.plate_names() \
                        and name not in gsrm.plate_alt_names():

                    continue

                lats, lons, vnorth, veast, vnorth_err, veast_err, corr = \
                    gsrm.get_velocities(name, region=self._wesn)

                fixed_plate_name = name

                self.gmt.psvelo(in_columns=(lons, lats, veast, vnorth,
                                            veast_err, vnorth_err, corr),
                                W='0.25p,%s' % color_velocities,
                                A='9p+e+g%s' % color_velocities,
                                S='e0.2p/0.95/10',
                                *self.jxyr)

                for _ in range(len(lons) // 50 + 1):
                    ii = random.randint(0, len(lons) - 1)
                    v = math.sqrt(vnorth[ii]**2 + veast[ii]**2)
                    self.add_label(lats[ii],
                                   lons[ii],
                                   '%.0f' % v,
                                   font_size=0.7 * self.gmt.label_font_size(),
                                   style=dict(G=color_velocities_lab))

                break

            for (lat_candis, lon_candis, plate, color) in label_data:
                full_name = bird.full_name(plate.name)
                if plate.name == fixed_plate_name:
                    full_name = '@_' + full_name + '@_'

                self.add_area_label(lat_candis,
                                    lon_candis,
                                    full_name,
                                    color=color,
                                    font='3')

            for kwargs in psxy_kwargs:
                self.gmt.psxy(*self.jxyr, **kwargs)
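# Usage sketch (assumption, not part of the original source): the methods
# above belong to a pyrocko automap-style Map. Typical use could look like:
#
#     m = Map(lat=46., lon=8., radius=200e3, width=20., height=20.)
#     m.draw_cities(nmax_soft=8)
#     m.add_label(46.2, 8.1, 'site A')
#     m.draw_plates()
#     m.save('map.pdf')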
Example #5
0
class OkadaPath(SandboxSource):

    __implements__ = 'disloc'

    depth = None
    nu = Float.T(default=0.25, help='Poisson\'s ratio, typically 0.25')
    nodes = List.T(
        default=[],
        optional=True,
        help='Nodes of the segments as (easting, northing) tuple of [m]')
    segments__ = List.T(default=[],
                        optional=True,
                        help='List of all segments.')

    def __init__(self, *args, **kwargs):
        SandboxSource.__init__(self, *args, **kwargs)

        self._segments = []

        if not self.nodes:
            self.nodes.append([self.easting, self.northing])

    @property
    def segments(self):
        return self._segments

    @segments.setter
    def segments(self, segments):
        self._segments = segments

    @staticmethod
    def _newSegment(e1, n1, e2, n2, **kwargs):
        dE = e2 - e1
        dN = n2 - n1
        length = (dN**2 + dE**2)**.5

        # Width scaling relation after
        # Leonard, M. (2010). Earthquake fault scaling: Relating rupture
        #     length, width, average displacement, and moment release,
        #     Bull. Seismol. Soc. Am. 100, no. 5, 1971-1988.
        segment = {
            'northing': n1 + dN / 2,
            'easting': e1 + dE / 2,
            'depth': 0.,
            'length': length,
            'width': 15. * length**.66,
            'strike': num.arccos(dN / length) * r2d,
            'slip': 45.,
            'rake': 90.,
        }
        segment.update(kwargs)
        return OkadaSegment(**segment)

    def _moveSegment(self, pos, e1, n1, e2, n2):
        dE = e2 - e1
        dN = n2 - n1
        length = (dN**2 + dE**2)**.5

        segment_update = {
            'northing': n1 + dN / 2,
            'easting': e1 + dE / 2,
            'length': length,
            'width': 15. * length**.66,
            'strike': num.arccos(dN / length) * r2d,
        }

        segment = self.segments[pos]
        for attr, val in segment_update.items():
            segment.__setattr__(attr, val)

    def addNode(self, easting, northing):
        self.nodes.append([easting, northing])
        self.segments.append(
            self._newSegment(e1=self.nodes[-2][0],
                             n1=self.nodes[-2][1],
                             e2=self.nodes[-1][0],
                             n2=self.nodes[-1][1]))

    def insertNode(self, pos, easting, northing):
        self.nodes.insert(pos, [easting, northing])
        self.segments.append(
            self._newSegment(e1=self.nodes[pos][0],
                             n1=self.nodes[pos][1],
                             e2=self.nodes[pos + 1][0],
                             n2=self.nodes[pos + 1][1]))
        self._moveSegment(
            pos - 1,
            e1=self.nodes[pos - 1][0],
            n1=self.nodes[pos - 1][1],
            e2=self.nodes[pos][0],
            n2=self.nodes[pos][1],
        )

    def moveNode(self, pos, easting, northing):
        self.nodes[pos] = [easting, northing]
        if pos < len(self):
            self._moveSegment(pos,
                              e1=self.nodes[pos][0],
                              n1=self.nodes[pos][1],
                              e2=self.nodes[pos + 1][0],
                              n2=self.nodes[pos + 1][1])
        if pos != 0:
            # the segment ending at the moved node is the one before it
            self._moveSegment(pos - 1,
                              e1=self.nodes[pos - 1][0],
                              n1=self.nodes[pos - 1][1],
                              e2=self.nodes[pos][0],
                              n2=self.nodes[pos][1])

    def __len__(self):
        return len(self.segments)

    def dislocSource(self):
        return num.array(
            [seg.dislocSource() for seg in self.segments if seg.enabled])
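# Usage sketch (assumption): grow a dislocation path node by node; each new
# node spawns a segment whose width follows the Leonard (2010) scaling
# hard-coded in _newSegment(). Coordinates below are hypothetical meters.
#
#     path = OkadaPath(easting=0., northing=0.)
#     path.addNode(5000., 2000.)
#     path.addNode(12000., 3500.)
#     sources = path.dislocSource()  # stacked parameters of enabled segments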
Example #6
0
class SatelliteTargetGroup(TargetGroup):
    r"""Handles maps of static ground motion from satellite observations (InSAR)

    The InSAR displacement maps post-processed by the `pyrocko` module `kite`
    are usually `Quadtree` downsampled (Jonsson, 2002). Each data point has a
    latitude, longitude, Line-of-sight displacement value [m] as well as an
    orientation and elevation angle, which define the Line-of-Sight. The data
    are associated with a weight matrix, which is the inverse of a full
    variance-covariance matrix (Sudhaus \& Jonsson, 2009). In principle, these
    data sets could stem from pixel offset maps. See also the documentation of
    the `kite` module.
    """
    kite_scenes = List.T(optional=True,
                         help='List of InSAR data files prepared '
                              'by the ``pyrocko`` module ``kite``')
    misfit_config = SatelliteMisfitConfig.T(
        help='Settings for the objective function of these targets')

    def get_targets(self, ds, event, default_path='none'):
        logger.debug('Selecting satellite targets...')
        targets = []

        for scene in ds.get_kite_scenes():
            if scene.meta.scene_id not in self.kite_scenes and\
               '*all' not in self.kite_scenes:
                continue

            qt = scene.quadtree

            lats = num.empty(qt.nleaves)
            lons = num.empty(qt.nleaves)
            lats.fill(qt.frame.llLat)
            lons.fill(qt.frame.llLon)

            if qt.frame.isDegree():
                logger.debug('Target "%s" is referenced in degree.' %
                             scene.meta.scene_id)
                lons += qt.leaf_focal_points[:, 0]
                lats += qt.leaf_focal_points[:, 1]
                east_shifts = num.zeros_like(lats)
                north_shifts = num.zeros_like(lats)
            elif qt.frame.isMeter():
                logger.debug('Target "%s" is referenced in meter.' %
                             scene.meta.scene_id)
                east_shifts = qt.leaf_focal_points[:, 0]
                north_shifts = qt.leaf_focal_points[:, 1]
            else:
                assert False, 'unexpected scene frame reference'

            sat_target = SatelliteMisfitTarget(
                quantity='displacement',
                scene_id=scene.meta.scene_id,
                lats=lats,
                lons=lons,
                east_shifts=east_shifts,
                north_shifts=north_shifts,
                theta=qt.leaf_thetas,
                phi=qt.leaf_phis,
                tsnapshot=None,
                interpolation=self.interpolation,
                store_id=self.store_id,
                normalisation_family=self.normalisation_family,
                path=self.path or default_path,
                misfit_config=self.misfit_config)

            sat_target.set_dataset(ds)
            targets.append(sat_target)

        return targets
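# Configuration sketch (assumption): in a grond-style setup the group is
# configured with scene ids (or '*all') and a misfit config; targets are
# built from a dataset `ds` providing get_kite_scenes(). Names and values
# here are illustrative only.
#
#     group = SatelliteTargetGroup(
#         kite_scenes=['*all'],
#         misfit_config=SatelliteMisfitConfig(),
#         interpolation='multilinear',
#         store_id='static_store',
#         path='insar')
#     targets = group.get_targets(ds, event)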
Example #7
0
class PlotSettings(Object):
    trace_filename = String.T(help='filename of beam or trace to use for '
                              'plotting, incl. path.',
                              optional=True)
    station_filename = String.T(help='filename containing station meta '
                                'information related to *trace_filename*.',
                                optional=True)
    event_filename = String.T(help='filename containing event information '
                              'including the expected moment tensor.',
                              default='event.pf')
    store_id = String.T(help='Store ID to use for generating the synthetic '
                        'traces.',
                        optional=True)
    store_superdirs = List.T(String.T(), optional=True)
    depth = Float.T(help='Depth [km] where to put the trace.', default=10.)
    depths = String.T(help='Synthetic source depths [km]. start:stop:delta. '
                      'default: 0:15:1',
                      optional=True,
                      default='0:15:1')
    filters = List.T(
        FrequencyResponse.T(help='List of filters used to filter '
                            'the traces'))
    zoom = List.T(Float.T(),
                  help='Window to visualize with reference to the P '
                  'phase onset [s].',
                  default=[-7, 15])
    correction = Float.T(help='time shift, to move beam trace.', default=0.)
    normalize = Bool.T(help='normalize by maximum amplitude', default=True)
    save_as = String.T(default='depth_%(array-id)s.png',
                       help='filename of output figure')
    force_nearest_neighbor = Bool.T(help='Handles OutOfBounds exceptions. '
                                    'applies only laterally!',
                                    default=False)
    auto_caption = Bool.T(
        help='Add a caption giving basic information to the figure.',
        default=False)
    title = String.T(default='%(array-id)s - %(event_name)s',
                     help='Add default title.')
    quantity = String.T(default='velocity',
                        help='velocity-> differentiate synthetic.'
                        'displacement-> integrate recorded')
    gain = Float.T(default=1., help='Gain factor')
    gain_record = Float.T(default=1., help='Gain factor')
    color = String.T(help='Trace color', default='blue')

    def update_from_args(self, args):
        kwargs = {}
        try:
            hp, lp = args.filter.split(':')
            filters = [
                ButterworthResponse(corner=float(lp), order=4, type='low'),
                ButterworthResponse(corner=float(hp), order=4, type='high')
            ]
            kwargs.update({'filters': filters})
        except (AttributeError, ValueError):
            # no or malformed filter argument given
            pass

        kwargs.update(self.process_arglist(args))
        for arg, v in kwargs.items():
            setattr(self, arg, v)

    @classmethod
    def from_argument_parser(cls, args):
        try:
            hp, lp = args.filter.split(':')
        except AttributeError:
            hp, lp = (0.7, 4.5)
        filters = [
            ButterworthResponse(corner=float(lp), order=4, type='low'),
            ButterworthResponse(corner=float(hp), order=4, type='high')
        ]
        kwargs = cls.process_arglist(args)
        return cls(filters=filters, **kwargs)

    def do_filter(self, tr):
        for f in self.filters:
            tr = tr.transfer(transfer_function=f,
                             tfade=10,
                             cut_off_fading=False)
        return tr

    @staticmethod
    def process_arglist(args):
        # *arglist* is expected to be defined at module level
        kwargs = {}
        for arg in arglist:
            try:
                val = getattr(args, arg)
                if arg == 'zoom' and val:
                    val = val.split(':')
                    # map() is lazy in Python 3; materialize the floats
                    val = list(map(float, val))
                if val:
                    kwargs.update({arg: val})
            except AttributeError:
                logger.debug('%s not defined' % arg)
                continue

        return kwargs
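# Usage sketch (assumption): the 'hp:lp' filter string consumed by
# update_from_args()/from_argument_parser() maps onto a Butterworth band;
# ButterworthResponse is assumed to be imported as in the methods above.
#
#     hp, lp = '0.7:4.5'.split(':')
#     settings = PlotSettings(
#         filters=[
#             ButterworthResponse(corner=float(lp), order=4, type='low'),
#             ButterworthResponse(corner=float(hp), order=4, type='high')],
#         depth=12.,
#         zoom=[-5., 20.])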
Example #8
0
class FDSNStationXML(Object):
    '''Top-level type for Station XML. Required fields are Source
    (network ID of the institution sending the message) and one or
    more Network containers or one or more Station containers.'''

    schema_version = Float.T(default=1.0, xmlstyle='attribute')
    source = String.T(xmltagname='Source')
    sender = String.T(optional=True, xmltagname='Sender')
    module = String.T(optional=True, xmltagname='Module')
    module_uri = String.T(optional=True, xmltagname='ModuleURI')
    created = Timestamp.T(xmltagname='Created')
    network_list = List.T(Network.T(xmltagname='Network'))

    xmltagname = 'FDSNStationXML'

    def get_pyrocko_stations(self,
                             nslcs=None,
                             nsls=None,
                             time=None,
                             timespan=None,
                             inconsistencies='warn'):

        assert inconsistencies in ('raise', 'warn')

        if nslcs is not None:
            nslcs = set(nslcs)

        if nsls is not None:
            nsls = set(nsls)

        tt = ()
        if time is not None:
            tt = (time, )
        elif timespan is not None:
            tt = timespan

        pstations = []
        for network in self.network_list:
            if not network.spans(*tt):
                continue

            for station in network.station_list:
                if not station.spans(*tt):
                    continue

                if station.channel_list:
                    loc_to_channels = {}
                    for channel in station.channel_list:
                        if not channel.spans(*tt):
                            continue

                        loc = channel.location_code.strip()
                        if loc not in loc_to_channels:
                            loc_to_channels[loc] = []

                        loc_to_channels[loc].append(channel)

                    for loc in sorted(loc_to_channels.keys()):
                        channels = loc_to_channels[loc]
                        if nslcs is not None:
                            channels = [
                                channel for channel in channels
                                if (network.code, station.code, loc,
                                    channel.code) in nslcs
                            ]

                        if not channels:
                            continue

                        nsl = network.code, station.code, loc
                        if nsls is not None and nsl not in nsls:
                            continue

                        pstations.append(
                            pyrocko_station_from_channels(
                                nsl, channels,
                                inconsistencies=inconsistencies))
                else:
                    pstations.append(
                        pyrocko.model.Station(network.code,
                                              station.code,
                                              '*',
                                              lat=station.latitude.value,
                                              lon=station.longitude.value,
                                              elevation=value_or_none(
                                                  station.elevation),
                                              name=station.description or ''))

        return pstations

    @classmethod
    def from_pyrocko_stations(cls, pyrocko_stations):
        ''' Generate :py:class:`FDSNStationXML` from list of
        :py:class:`pyrocko.model.Station` instances.

        :param pyrocko_stations: list of
            :py:class:`pyrocko.model.Station` instances.
        '''
        from collections import defaultdict
        network_dict = defaultdict(list)
        for s in pyrocko_stations:
            network, station, location = s.nsl()
            channel_list = []
            for c in s.channels:
                channel_list.append(
                    Channel(location_code=location,
                            code=c.name,
                            latitude=Latitude(value=s.lat),
                            longitude=Longitude(value=s.lon),
                            elevation=Distance(value=s.elevation),
                            depth=Distance(value=s.depth),
                            azimuth=Azimuth(value=c.azimuth),
                            dip=Dip(value=c.dip)))

            network_dict[network].append(
                Station(code=station,
                        latitude=Latitude(value=s.lat),
                        longitude=Longitude(value=s.lon),
                        elevation=Distance(value=s.elevation),
                        channel_list=channel_list))

        timestamp = time.time()
        network_list = []
        for k, station_list in network_dict.items():

            network_list.append(
                Network(code=k,
                        station_list=station_list,
                        total_number_stations=len(station_list)))

        sxml = FDSNStationXML(source='from pyrocko stations list',
                              created=timestamp,
                              network_list=network_list)

        sxml.validate()
        return sxml

    def iter_network_stations(self,
                              net=None,
                              sta=None,
                              time=None,
                              timespan=None):

        tt = ()
        if time is not None:
            tt = (time, )
        elif timespan is not None:
            tt = timespan

        for network in self.network_list:
            if not network.spans(*tt) or (net is not None
                                          and network.code != net):
                continue

            for station in network.station_list:
                if not station.spans(*tt) or (sta is not None
                                              and station.code != sta):
                    continue

                yield (network, station)

    def iter_network_station_channels(self,
                                      net=None,
                                      sta=None,
                                      loc=None,
                                      cha=None,
                                      time=None,
                                      timespan=None):

        if loc is not None:
            loc = loc.strip()

        tt = ()
        if time is not None:
            tt = (time, )
        elif timespan is not None:
            tt = timespan

        for network in self.network_list:
            if not network.spans(*tt) or (net is not None
                                          and network.code != net):
                continue

            for station in network.station_list:
                if not station.spans(*tt) or (sta is not None
                                              and station.code != sta):
                    continue

                if station.channel_list:
                    for channel in station.channel_list:
                        if (not channel.spans(*tt)
                                or (cha is not None and channel.code != cha)
                                or (loc is not None
                                    and channel.location_code.strip() != loc)):
                            continue

                        yield (network, station, channel)

    def get_channel_groups(self,
                           net=None,
                           sta=None,
                           loc=None,
                           cha=None,
                           time=None,
                           timespan=None):

        groups = {}
        for network, station, channel in self.iter_network_station_channels(
                net, sta, loc, cha, time=time, timespan=timespan):

            net = network.code
            sta = station.code
            cha = channel.code
            loc = channel.location_code.strip()
            if len(cha) == 3:
                bic = cha[:2]  # band and instrument code according to SEED
            elif len(cha) == 1:
                bic = ''
            else:
                bic = cha

            if channel.response and \
                    channel.response.instrument_sensitivity and \
                    channel.response.instrument_sensitivity.input_units:

                unit = channel.response.instrument_sensitivity.input_units.name
            else:
                unit = None

            bic = (bic, unit)

            k = net, sta, loc
            if k not in groups:
                groups[k] = {}

            if bic not in groups[k]:
                groups[k][bic] = []

            groups[k][bic].append(channel)

        for nsl, bic_to_channels in groups.items():
            bad_bics = []
            for bic, channels in bic_to_channels.items():
                sample_rates = []
                for channel in channels:
                    sample_rates.append(channel.sample_rate.value)

                if not same(sample_rates):
                    scs = ','.join(channel.code for channel in channels)
                    srs = ', '.join('%e' % x for x in sample_rates)
                    err = 'ignoring channels with inconsistent sampling ' + \
                          'rates (%s.%s.%s.%s: %s)' % (nsl + (scs, srs))

                    logger.warning(err)
                    bad_bics.append(bic)

            for bic in bad_bics:
                del bic_to_channels[bic]

        return groups

    def choose_channels(self,
                        target_sample_rate=None,
                        priority_band_code=['H', 'B', 'M', 'L', 'V', 'E', 'S'],
                        priority_units=['M/S', 'M/S**2'],
                        priority_instrument_code=['H', 'L'],
                        time=None,
                        timespan=None):

        nslcs = {}
        for nsl, bic_to_channels in self.get_channel_groups(
                time=time, timespan=timespan).items():

            useful_bics = []
            for bic, channels in bic_to_channels.items():
                rate = channels[0].sample_rate.value

                if target_sample_rate is not None and \
                        rate < target_sample_rate*0.99999:
                    continue

                if len(bic[0]) == 2:
                    if bic[0][0] not in priority_band_code:
                        continue

                    if bic[0][1] not in priority_instrument_code:
                        continue

                unit = bic[1]

                prio_unit = len(priority_units)
                try:
                    prio_unit = priority_units.index(unit)
                except ValueError:
                    pass

                prio_inst = len(priority_instrument_code)
                prio_band = len(priority_band_code)
                if len(channels[0].code) == 3:
                    try:
                        prio_inst = priority_instrument_code.index(
                            channels[0].code[1])
                    except ValueError:
                        pass

                    try:
                        prio_band = priority_band_code.index(
                            channels[0].code[0])
                    except ValueError:
                        pass

                if target_sample_rate is None:
                    rate = -rate

                useful_bics.append((-len(channels), prio_band, rate, prio_unit,
                                    prio_inst, bic))

            useful_bics.sort()

            for _, _, rate, _, _, bic in useful_bics:
                channels = sorted(bic_to_channels[bic])
                if channels:
                    for channel in channels:
                        nslcs[nsl + (channel.code, )] = channel

                    break

        return nslcs

    def get_pyrocko_response(self,
                             nslc,
                             time=None,
                             timespan=None,
                             fake_input_units=None):

        net, sta, loc, cha = nslc
        resps = []
        for _, _, channel in self.iter_network_station_channels(
                net, sta, loc, cha, time=time, timespan=timespan):
            resp = channel.response
            if resp:
                resps.append(
                    resp.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

        if not resps:
            raise NoResponseInformation('%s.%s.%s.%s' % nslc)
        elif len(resps) > 1:
            raise MultipleResponseInformation('%s.%s.%s.%s' % nslc)

        return resps[0]

    @property
    def n_code_list(self):
        return sorted(set(x.code for x in self.network_list))

    @property
    def ns_code_list(self):
        nss = set()
        for network in self.network_list:
            for station in network.station_list:
                nss.add((network.code, station.code))

        return sorted(nss)

    @property
    def nsl_code_list(self):
        nsls = set()
        for network in self.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nsls.add(
                        (network.code, station.code, channel.location_code))

        return sorted(nsls)

    @property
    def nslc_code_list(self):
        nslcs = set()
        for network in self.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslcs.add((network.code, station.code,
                               channel.location_code, channel.code))

        return sorted(nslcs)

    def summary(self):
        lst = [
            'number of n codes: %i' % len(self.n_code_list),
            'number of ns codes: %i' % len(self.ns_code_list),
            'number of nsl codes: %i' % len(self.nsl_code_list),
            'number of nslc codes: %i' % len(self.nslc_code_list)
        ]

        return '\n'.join(lst)
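# Usage sketch (assumption): `load_xml` is a hypothetical StationXML loader;
# choose_channels() picks one channel group per (net, sta, loc), preferring
# high-rate broadband velocity channels.
#
#     sxml = load_xml(filename='stations.xml')
#     stations = sxml.get_pyrocko_stations(timespan=(tmin, tmax))
#     nslc_to_channel = sxml.choose_channels(
#         target_sample_rate=20.,
#         priority_band_code=['H', 'B'],
#         timespan=(tmin, tmax))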
Example #9
0
class VelocityProfile(Object):
    layer_count = ValueInt.T(optional=True,
                             xmltagname='layerCount')
    velocity_profile_data = List.T(VelocityProfileData.T(),
                                   xmltagname='velocityProfileData')
Example #10
0
class Log(Object):
    '''Container for log entries.'''

    entry_list = List.T(Comment.T(xmltagname='Entry'))
Example #11
0
class Response(Object):
    resource_id = String.T(optional=True, xmlstyle='attribute')
    instrument_sensitivity = Sensitivity.T(optional=True,
                                           xmltagname='InstrumentSensitivity')
    instrument_polynomial = Polynomial.T(optional=True,
                                         xmltagname='InstrumentPolynomial')
    stage_list = List.T(ResponseStage.T(xmltagname='Stage'))

    def get_pyrocko_response(self, nslc, fake_input_units=None):
        responses = []
        for stage in self.stage_list:
            responses.extend(stage.get_pyrocko_response(nslc))

        if not self.stage_list and self.instrument_sensitivity:
            responses.append(
                trace.PoleZeroResponse(
                    constant=self.instrument_sensitivity.value))

        if fake_input_units is not None:
            if not self.instrument_sensitivity or \
                    self.instrument_sensitivity.input_units is None:

                raise NoResponseInformation('no input units given')

            input_units = self.instrument_sensitivity.input_units.name

            try:
                conresp = conversion[fake_input_units.upper(),
                                     input_units.upper()]

            except KeyError:
                raise NoResponseInformation(
                    'cannot convert between units: %s, %s' %
                    (fake_input_units, input_units))

            if conresp is not None:
                responses.append(conresp)

        return trace.MultiplyResponse(responses)

    @classmethod
    def from_pyrocko_pz_response(cls,
                                 presponse,
                                 input_unit,
                                 output_unit,
                                 normalization_frequency=1.0):

        norm_factor = 1.0 / float(
            abs(
                presponse.evaluate(num.array([normalization_frequency]))[0] /
                presponse.constant))

        pzs = PolesZeros(
            pz_transfer_function_type='LAPLACE (RADIANS/SECOND)',
            normalization_factor=norm_factor,
            normalization_frequency=Frequency(normalization_frequency),
            zero_list=[
                PoleZero(real=FloatNoUnit(z.real),
                         imaginary=FloatNoUnit(z.imag))
                for z in presponse.zeros
            ],
            pole_list=[
                PoleZero(real=FloatNoUnit(z.real),
                         imaginary=FloatNoUnit(z.imag))
                for z in presponse.poles
            ])

        pzs.validate()

        stage = ResponseStage(
            number=1,
            poles_zeros_list=[pzs],
            stage_gain=Gain(float(abs(presponse.constant)) / norm_factor))

        resp = Response(instrument_sensitivity=Sensitivity(
            value=stage.stage_gain.value,
            input_units=Units(input_unit),
            output_units=Units(output_unit)),
                        stage_list=[stage])

        return resp
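# Usage sketch (assumption, made-up values): convert a pyrocko pole-zero
# response into a StationXML Response; the stage gain is split off so that
# the pole-zero stage is normalized at `normalization_frequency`.
#
#     presp = trace.PoleZeroResponse(
#         zeros=[0j, 0j],
#         poles=[-0.037 + 0.037j, -0.037 - 0.037j],
#         constant=6.0e7)
#     resp = Response.from_pyrocko_pz_response(
#         presp, input_unit='M/S', output_unit='COUNTS',
#         normalization_frequency=1.0)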
Example #12
0
class ResponseList(BaseFilter):
    '''Response: list of frequency, amplitude and phase values.
    Corresponds to SEED blockette 55.'''

    response_list_element_list = List.T(
        ResponseListElement.T(xmltagname='ResponseListElement'))
Example #13
0
class Operator(Object):
    agency_list = List.T(Unicode.T(xmltagname='Agency'))
    contact_list = List.T(Person.T(xmltagname='Contact'))
    web_site = String.T(optional=True, xmltagname='WebSite')
Example #14
0
class Event(Location):
    '''Seismic event representation

    :param lat: latitude of hypocenter (default 0.0)
    :param lon: longitude of hypocenter (default 0.0)
    :param time: origin time as float in seconds after '1970-01-01 00:00:00'
    :param name: event identifier as string (optional)
    :param depth: source depth (optional)
    :param magnitude: magnitude of event (optional)
    :param region: source region (optional)
    :param catalog: name of catalog that lists this event (optional)
    :param moment_tensor: moment tensor as
        :py:class:`moment_tensor.MomentTensor` instance (optional)
    :param duration: source duration as float (optional)
    '''

    time = Timestamp.T(default=util.str_to_time('1970-01-01 00:00:00'))
    name = String.T(default='', optional=True)
    magnitude = Float.T(optional=True)
    magnitude_type = String.T(optional=True)
    region = Unicode.T(optional=True)
    catalog = String.T(optional=True)
    moment_tensor = moment_tensor.MomentTensor.T(optional=True)
    duration = Float.T(optional=True)
    tags = List.T(Tag.T(optional=True))

    def __init__(self,
                 lat=0.,
                 lon=0.,
                 time=0.,
                 name='',
                 depth=None,
                 elevation=None,
                 magnitude=None,
                 magnitude_type=None,
                 region=None,
                 load=None,
                 loadf=None,
                 catalog=None,
                 moment_tensor=None,
                 duration=None,
                 tags=None):

        # avoid a shared mutable default for *tags*
        tags = tags if tags is not None else []

        vals = None
        if load is not None:
            vals = Event.oldload(load)
        elif loadf is not None:
            vals = Event.oldloadf(loadf)

        if vals:
            lat, lon, time, name, depth, magnitude, magnitude_type, region, \
                catalog, moment_tensor, duration, tags = vals

        Location.__init__(self,
                          lat=lat,
                          lon=lon,
                          time=time,
                          name=name,
                          depth=depth,
                          elevation=elevation,
                          magnitude=magnitude,
                          magnitude_type=magnitude_type,
                          region=region,
                          catalog=catalog,
                          moment_tensor=moment_tensor,
                          duration=duration,
                          tags=tags)

    def time_as_string(self):
        return util.time_to_str(self.time)

    def set_name(self, name):
        self.name = name

    def olddump(self, filename):
        with open(filename, 'w') as file:
            self.olddumpf(file)

    def olddumpf(self, file):
        file.write('name = %s\n' % self.name)
        file.write('time = %s\n' % util.time_to_str(self.time))
        if self.lat is not None:
            file.write('latitude = %.12g\n' % self.lat)
        if self.lon is not None:
            file.write('longitude = %.12g\n' % self.lon)
        if self.magnitude is not None:
            file.write('magnitude = %g\n' % self.magnitude)
            file.write('moment = %g\n' %
                       moment_tensor.magnitude_to_moment(self.magnitude))
        if self.magnitude_type is not None:
            file.write('magnitude_type = %s\n' % self.magnitude_type)
        if self.depth is not None:
            file.write('depth = %.10g\n' % self.depth)
        if self.region is not None:
            file.write('region = %s\n' % self.region)
        if self.catalog is not None:
            file.write('catalog = %s\n' % self.catalog)
        if self.moment_tensor is not None:
            m = self.moment_tensor.m()
            sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
            file.write(
                ('mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
                 'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
                 'strike2 = %g\ndip2 = %g\nrake2 = %g\n') %
                ((m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2]) +
                 sdr1 + sdr2))

        if self.duration is not None:
            file.write('duration = %g\n' % self.duration)

        if self.tags:
            file.write('tags = %s\n' % ', '.join(self.tags))

    @staticmethod
    def unique(events,
               deltat=10.,
               group_key=(lambda a: a.catalog)):
        # Python 3 sorts with a key function; the cmp() builtin is gone
        groups = Event.grouped(events, deltat)

        events = []
        for group in groups:
            if group:
                group.sort(key=group_key)
                events.append(group[-1])

        return events

    @staticmethod
    def grouped(events, deltat=10.):
        events = list(events)
        groups = []
        for ia, a in enumerate(events):
            groups.append([])
            haveit = False
            for ib, b in enumerate(events[:ia]):
                if abs(b.time - a.time) < deltat:
                    groups[ib].append(a)
                    haveit = True
                    break

            if not haveit:
                groups[ia].append(a)

        groups = [g for g in groups if g]
        groups.sort(key=lambda g: sum(e.time for e in g) // len(g))
        return groups

    @staticmethod
    def dump_catalog(events, filename=None, stream=None):
        if filename is not None:
            file = open(filename, 'w')
        else:
            file = stream
        try:
            for ev in events:
                ev.olddumpf(file)
                file.write('--------------------------------------------\n')

        finally:
            if filename is not None:
                file.close()

    @staticmethod
    def oldload(filename):
        with open(filename, 'r') as file:
            return Event.oldloadf(file)

    @staticmethod
    def oldloadf(file):
        d = {}
        try:
            for line in file:
                if line.lstrip().startswith('#'):
                    continue

                toks = line.split(' = ', 1)
                if len(toks) == 2:
                    k, v = toks[0].strip(), toks[1].strip()
                    if k in ('name', 'region', 'catalog', 'magnitude_type'):
                        d[k] = v
                    if k in (('latitude longitude magnitude depth duration '
                              'mnn mee mdd mne mnd med strike1 dip1 rake1 '
                              'strike2 dip2 rake2').split()):
                        d[k] = float(v)
                    if k == 'time':
                        d[k] = util.str_to_time(v)
                    if k == 'tags':
                        d[k] = [x.strip() for x in v.split(',')]

                if line.startswith('---'):
                    d['have_separator'] = True
                    break

        except Exception as e:
            raise FileParseError(e)

        if not d:
            raise EOF()

        if 'have_separator' in d and len(d) == 1:
            raise EmptyEvent()

        mt = None
        m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
        if len(m6) == 6:
            mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
        else:
            sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
            if len(sdr) == 3:
                moment = 1.0
                if 'moment' in d:
                    moment = d['moment']
                elif 'magnitude' in d:
                    moment = moment_tensor.magnitude_to_moment(d['magnitude'])

                mt = moment_tensor.MomentTensor(strike=sdr[0],
                                                dip=sdr[1],
                                                rake=sdr[2],
                                                scalar_moment=moment)

        return (d.get('latitude', 0.0), d.get('longitude', 0.0),
                d.get('time', 0.0), d.get('name', ''),
                d.get('depth', None), d.get('magnitude', None),
                d.get('magnitude_type', None), d.get('region', None),
                d.get('catalog', None), mt,
                d.get('duration', None), d.get('tags', []))

    @staticmethod
    def load_catalog(filename):
        # context manager ensures the file is closed even if the generator
        # is abandoned before exhaustion
        with open(filename, 'r') as file:
            try:
                while True:
                    try:
                        ev = Event(loadf=file)
                        yield ev
                    except EmptyEvent:
                        pass

            except EOF:
                pass

    def get_hash(self):
        e = self
        if isinstance(e.time, util.hpfloat):
            stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.6FRAC')
        else:
            stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.3FRAC')

        s = float_or_none_to_str

        return ehash(', '.join(
            (stime, s(e.lat), s(e.lon), s(e.depth), s(e.magnitude),
             str(e.catalog), str(e.name), str(e.region))))

    def human_str(self):
        s = [
            'Latitude [deg]: %g' % self.lat,
            'Longitude [deg]: %g' % self.lon,
            'Time [UTC]: %s' % util.time_to_str(self.time)
        ]

        if self.name:
            s.append('Name: %s' % self.name)

        if self.depth is not None:
            s.append('Depth [km]: %g' % (self.depth / 1000.))

        if self.magnitude is not None:
            s.append('Magnitude [%s]: %3.1f' %
                     (self.magnitude_type or 'M?', self.magnitude))

        if self.region:
            s.append('Region: %s' % self.region)

        if self.catalog:
            s.append('Catalog: %s' % self.catalog)

        if self.moment_tensor:
            s.append(str(self.moment_tensor))

        return '\n'.join(s)
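# Usage sketch (assumption): construct an Event with a double-couple moment
# tensor; olddumpf() then writes the 'key = value' text format which
# oldloadf() parses back.
#
#     ev = Event(
#         lat=50.2, lon=12.4, depth=8000.,
#         time=util.str_to_time('2018-05-10 12:00:00'),
#         name='example', magnitude=4.1,
#         moment_tensor=moment_tensor.MomentTensor(
#             strike=120., dip=80., rake=-5.,
#             scalar_moment=moment_tensor.magnitude_to_moment(4.1)))
#     ev.olddump('event.txt')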
Example #15
0
class AhfullKiwiTestSetup(Object):
    setups = List.T(AhfullKiwiTestSetupEntry.T())
Example #16
0
class FileResource(Object):
    description = String.T()
    url = List.T(String.T())
Example #17
0
class DatasetConfig(HasPaths):

    stations_path = Path.T(optional=True)
    stations_stationxml_paths = List.T(Path.T(), optional=True)
    events_path = Path.T(optional=True)
    waveform_paths = List.T(Path.T(), optional=True)
    clippings_path = Path.T(optional=True)
    responses_sacpz_path = Path.T(optional=True)
    responses_stationxml_paths = List.T(Path.T(), optional=True)
    station_corrections_path = Path.T(optional=True)
    apply_correction_factors = Bool.T(optional=True,
                                      default=True)
    apply_correction_delays = Bool.T(optional=True,
                                     default=True)
    extend_incomplete = Bool.T(default=False)
    picks_paths = List.T(Path.T())
    blacklist_paths = List.T(Path.T())
    blacklist = List.T(
        String.T(),
        help='stations/components to be excluded according to their STA, '
             'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.')
    whitelist_paths = List.T(Path.T())
    whitelist = List.T(
        String.T(),
        optional=True,
        help='if not None, list of stations/components to include according '
             'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes. '
             'Note: when whitelisting on channel level, both the raw and '
             'the processed channel codes have to be listed.')

    def __init__(self, *args, **kwargs):
        HasPaths.__init__(self, *args, **kwargs)
        self._ds = {}

    def get_event_names(self):
        def extra(path):
            return expand_template(path, dict(
                event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        events = []
        for fn in glob.glob(fp(self.events_path)):
            events.extend(cached_load_events(fn))

        event_names = [ev.name for ev in events]
        return event_names

    def get_dataset(self, event_name):
        if event_name not in self._ds:
            def extra(path):
                return expand_template(path, dict(
                    event_name=event_name))

            def fp(path):
                return self.expand_path(path, extra=extra)

            ds = Dataset(event_name)
            ds.add_stations(
                pyrocko_stations_filename=fp(self.stations_path),
                stationxml_filenames=fp(self.stations_stationxml_paths))

            ds.add_events(filename=fp(self.events_path))

            if self.waveform_paths:
                ds.add_waveforms(paths=fp(self.waveform_paths))

            if self.clippings_path:
                ds.add_clippings(markers_filename=fp(self.clippings_path))

            if self.responses_sacpz_path:
                ds.add_responses(
                    sacpz_dirname=fp(self.responses_sacpz_path))

            if self.responses_stationxml_paths:
                ds.add_responses(
                    stationxml_filenames=fp(self.responses_stationxml_paths))

            if self.station_corrections_path:
                ds.add_station_corrections(
                    filename=fp(self.station_corrections_path))

            ds.apply_correction_factors = self.apply_correction_factors
            ds.apply_correction_delays = self.apply_correction_delays
            ds.extend_incomplete = self.extend_incomplete

            for picks_path in self.picks_paths:
                ds.add_picks(
                    filename=fp(picks_path))

            ds.add_blacklist(self.blacklist)
            ds.add_blacklist(filenames=fp(self.blacklist_paths))
            if self.whitelist:
                ds.add_whitelist(self.whitelist)
            if self.whitelist_paths:
                ds.add_whitelist(filenames=fp(self.whitelist_paths))

            self._ds[event_name] = ds

        return self._ds[event_name]
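# Configuration sketch (assumption): paths may contain an ${event_name}
# placeholder which expand_template() fills in when the per-event dataset is
# requested; file names below are illustrative.
#
#     config = DatasetConfig(
#         events_path='events/${event_name}/event.txt',
#         stations_path='meta/stations.txt',
#         waveform_paths=['data/${event_name}/'],
#         blacklist=['XX.BAD..HHZ'])
#     ds = config.get_dataset('ev_0001')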
Example #18
0
class DataProvider(Object):
    use = List.T(String.T())
    timings = Dict.T(String.T(), Timings.T())

    def __init__(self, channels='SHZ', use=None, timings=None):
        self.use = use or []
        self.timings = timings or {}
        iris_arrays = {
            'YKA': ('CN', 'YKA*', '', channels),
            #'ESK': [('IM', 'EKB?', '', channels),
            #        ('IM', 'EKR*', '', channels)],
            #'ESK1': ('IM', 'EKA?', '', channels),
            'ILAR': ('IM', 'IL*', '', channels),
            'IMAR': ('IM', 'IM0?', '', channels),
            #'NIA': ('IM', 'I56H?', '', channels),
            #'PFIA': [('IM', 'I57H?', '', channels),
            #         ('IM', 'I57L?', '', channels)],
            'BMA': ('IM', 'BM0?', '', channels),
            'BCA': ('IM', 'BC0?', '', channels),
            #'HIA': ('IM', 'I59H?', '', channels),
            'NVAR': ('IM', 'NV*', '', channels),
            'PDAR': [('IM', 'PD0*', '', channels),
                     ('IM', 'PD1*', '', channels)],
            'TXAR': ('IM', 'TX*', '', channels),
            #'Pilbara': ('AU', 'PSA*', '', channels),
            'AliceSprings': ('AU', 'AS*', '', channels),
            #'GERES': [('IM', 'GEA?', '', channels),
            #         ('IM', 'GEB?', '', channels),
            #         ('IM', 'GEC?', '', channels),
            #         ('IM', 'GED?', '', channels)],
            # Diego Garcia Hydroacoustic array noqa
            'DGHAland': ('IM', 'I52H?', '', channels),
            'DGHAS': ('IM', 'H08S?', '', channels),
            'DGHAN': ('IM', 'H08N?', '', channels),
            # Tristan da Cunha. channels: BDF. noqa
            #'TDC': [('IM', 'H09N?', '', channels),
            #        ('IM', 'I49H?', '', channels)],
            #'NarroginIA': ('IM', 'I04H?', '', channels),
            #'CocosIslands': ('IM', 'I06H?', '', channels),
            'Warramunga': ('IM', 'I07H?', '', channels),
            #'BermudaIA': ('IM', 'I51H?', '', channels),
            #'FairbanksIA': ('IM', 'I53H?', '', channels)
        }

        geofon_arrays = {
            'ROHRBACH': ('6A', 'V*', '', channels),
            'AntaOffshore': ('GR', 'I27L?', '*', channels),
            'AntaOnshore': ('AW', 'VNA*', '*', channels),
            #'NORES': [('NO', 'NA*', '*', channels),
            #('NO', 'NB*', '*', channels),
            #('NO', 'NC*', '*', channels)]}
        }
        bgr_arrays = {
            'GERES': [('GR', 'GEA?', '*', channels),
                      ('GR', 'GEB?', '*', channels),
                      ('GR', 'GEC?', '*', channels),
                      ('GR', 'GED?', '*', channels)],
        }

        self.providers = {
            'iris': iris_arrays,
            'geofon': geofon_arrays,
            'bgr': bgr_arrays
        }

    def download(self,
                 event,
                 directory='array_data',
                 timing=None,
                 length=None,
                 want='all',
                 force=False,
                 prefix=False,
                 dump_config=False,
                 get_responses=False):
        """:param want: either 'all' or ID as string or list of IDs as strings
        """
        use = []
        #ts = {}
        unit = 'M'
        if timing is None and length is None:
            raise Exception('Define one of "timing" and "length"')
        prefix = prefix or ''
        directory = pjoin(prefix, directory)
        if not os.path.isdir(directory):
            os.mkdir(directory)
        pzresponses = {}
        logger.info('download data: %s at %sN %sE' %
                    (event.name, event.lat, event.lon))
        for site, array_data_provider in self.providers.items():
            logger.info('requesting data from site %s' % site)
            for array_id, codes in array_data_provider.items():
                # *want* may be 'all', a single ID or a list of IDs
                if want != 'all' and want != ['all'] \
                        and array_id not in want:
                    continue
                sub_directory = pjoin(directory, array_id)
                logger.info("%s" % array_id)
                if not isinstance(codes, list):
                    codes = [codes]
                selection = [
                    c + tuple((event.time, event.time + 1000.)) for c in codes
                ]
                logger.debug('selection: %s' % selection)
                try:
                    #    if site=='bgr':
                    #        st = ws.station(url='http://eida.bgr.de/', selection=selection)
                    #    else:
                    #        st = ws.station(site=site, selection=selection)
                    st = ws.station(site=site, selection=selection)
                except ws.EmptyResult as e:
                    logger.error('No results: %s %s. skip' % (e, array_id))
                    continue
                except ValueError as e:
                    logger.error(e)
                    logger.error('...skipping...')
                    continue

                stations = st.get_pyrocko_stations()
                min_dist = min(
                    [ortho.distance_accurate50m(s, event) for s in stations])
                max_dist = max(
                    [ortho.distance_accurate50m(s, event) for s in stations])

                mod = cake.load_model(crust2_profile=(event.lat, event.lon))
                if length:
                    tstart = 0.
                    tend = length
                elif timing:
                    tstart = timing[0].t(mod, (event.depth, min_dist))
                    tend = timing[1].t(mod, (event.depth, max_dist))
                selection = [
                    c + tuple((event.time + tstart, event.time + tend))
                    for c in codes
                ]
                try:
                    d = ws.dataselect(site=site, selection=selection)
                    store.remake_dir(sub_directory, force)
                    store.remake_dir(pjoin(sub_directory, 'responses'), force)
                    fn = pjoin(sub_directory, 'traces.mseed')
                    with open(fn, 'wb') as f:
                        # mseed is binary; the context manager closes the file
                        f.write(d.read())
                    if get_responses:
                        trs = io.load(fn, getdata=False)
                        logger.info('Request responses from %s' % site)
                        if progressbar:
                            pb = progressbar.ProgressBar(
                                maxval=len(trs)).start()
                        for i_tr, tr in enumerate(trs):
                            try:
                                st = ws.station(site=site,
                                                selection=selection,
                                                level='response')
                                pzresponse = st.get_pyrocko_response(
                                    nslc=tr.nslc_id,
                                    timespan=(tr.tmin, tr.tmax),
                                    fake_input_units=unit)
                                pzresponse.regularize()
                            except fdsnstation.NoResponseInformation as e:
                                logger.warning(
                                    'no response information: %s' % e)
                                pzresponse = None
                            except fdsnstation.MultipleResponseInformation as e:
                                logger.warning(
                                    'MultipleResponseInformation: %s' % e)
                                pzresponse = None
                            pzresponses[tr.nslc_id] = pzresponse
                            if pzresponse is not None:
                                pzresponse.dump(filename=pjoin(
                                    sub_directory, 'responses',
                                    'resp_%s.yaml' % '.'.join(tr.nslc_id)))
                            if progressbar:
                                pb.update(i_tr)
                        if progressbar:
                            pb.finish()
                    model.dump_stations(stations,
                                        pjoin(sub_directory, 'stations.pf'))

                    if timing:
                        t = Timings(list(timing))
                        self.timings[array_id] = t
                    if array_id not in use and array_id not in self.use:
                        use.append(array_id)
                except ws.EmptyResult as e:
                    logger.error('%s on %s' % (e, array_id))

        self.use.extend(use)
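
A note on the selection idiom above: each request entry is an NSLC code
tuple with a time window appended. A minimal standalone sketch (codes and
times here are made up):

tmin = 1234567890.0  # hypothetical event time in epoch seconds
codes = [('GE', 'APE', '', 'BHZ'), ('GE', 'APE', '', 'BHN')]
selection = [c + (tmin, tmin + 1000.) for c in codes]
# -> [('GE', 'APE', '', 'BHZ', 1234567890.0, 1234568890.0), ...]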
Example #19
class PinkyConfig(Object):
    '''Configuration of data IO and data preprocessing'''

    blacklist = List.T(
        String.T(),
        help='List of blacklist patterns (may contain wildcards).')

    stack_channels = Bool.T(
        default=False,
        help='If *True* stack abs. amplitudes of all channels of a station')

    sample_length = Float.T(
        optional=True,
        help='Length in seconds. Not needed when using TFRecordData.')

    data_generator = DataGeneratorBase.T()
    evaluation_data_generator = DataGeneratorBase.T(optional=True)
    prediction_data_generator = DataGeneratorBase.T(optional=True)

    normalization = Normalization.T(default=Normalization(), optional=True)

    absolute = Bool.T(help='Use absolute amplitudes', default=False)

    imputation = Imputation.T(optional=True, help='How to mask and fill gaps')

    reference_station = String.T(
        optional=True,
        help='Station to use as reference location, given as a NSL string.')

    fn_stations = String.T(
        optional=True, help='File with station information in Pyrocko format.')

    reference_target = Target.T(optional=True)

    n_classes = Int.T(default=3)

    # Not implemented for DataGeneratorBase
    highpass = Float.T(optional=True, help='Highpass filter corner frequency')
    lowpass = Float.T(optional=True, help='Lowpass filter corner frequency')

    highpass_order = Int.T(default=4, optional=True)
    lowpass_order = Int.T(default=4, optional=True)

    normalize_labels = Bool.T(default=True, help='Normalize labels by std')

    tpad = Float.T(default=0.,
                   help='Padding between P phase onset and data chunk start.')

    t_translation_max = Float.T(
        default=0.,
        help='Augment data by uniformly shifting examples in time, limited '
        'by this parameter. This will increase *tpad*.')

    deltat_want = Float.T(
        optional=True,
        help='If set, down or upsample traces to this sampling rate.')

    # These values are not meant to be modified. If they appear in a
    # configuration file, they were set automatically to port values across
    # configurations.
    # _label_scale = num.ones(3, dtype=num.float32, help='(Don\'t modify)')
    # _label_median = num.ones(3, dtype=num.float32, help='(Don\'t modify)')
    _channels = List.T(Tuple.T(4, String.T()),
                       optional=True,
                       help='(Don\'t modify)')
    _n_samples = Int.T(optional=True, help='(Don\'t modify)')

    def __init__(self, *args, **kwargs):
        super(PinkyConfig, self).__init__(*args, **kwargs)
        stations = load_stations(self.fn_stations)
        self.targets = stations_to_targets(stations)

        if not self.reference_target:
            targets_by_code = {'.'.join(t.codes[:3]): t for t in self.targets}
            self.reference_target = targets_by_code[self.reference_station]

    def setup(self):

        self.data_generator.set_config(self)
        if self.normalize_labels:
            # To keep label normalization consistent, the data_generator used
            # for training is currently required in any case. It would be
            # better to store the normalization data during training and
            # recycle it at prediction time.
            labels = num.array(list(self.data_generator.iter_labels()))
            self._label_median = num.median(labels, axis=0)
            self._label_scale = num.mean(num.std(labels, axis=0))

        if self.evaluation_data_generator:
            self.evaluation_data_generator.set_config(self)

        if self.prediction_data_generator:
            self.prediction_data_generator.set_config(self)

        self.set_n_samples()

        if self.stack_channels:

            self.data_generator = ChannelStackGenerator.from_generator(
                generator=self.data_generator)
            if self.evaluation_data_generator:
                self.evaluation_data_generator = ChannelStackGenerator.from_generator(
                    generator=self.evaluation_data_generator)
            if self.prediction_data_generator:
                self.prediction_data_generator = ChannelStackGenerator.from_generator(
                    generator=self.prediction_data_generator)

        # self.data_generator.setup()
        # self.evaluation_data_generator.setup()
        # if self.prediction_data_generator:
        #     self.prediction_data_generator.setup()

    def set_n_samples(self):
        '''Set the number of samples (*n_samples*) from the first example of
        the data generator. Note that this assumes that the evaluation data
        generator yields identically shaped examples.'''
        example, _ = next(self.data_generator.generate())
        self._n_samples = example.shape[1]
        assert example.shape == self.tensor_shape

    @property
    def effective_deltat(self):
        if self.deltat_want is None:
            return (self.sample_length + self.tpad) / self._n_samples
        else:
            return self.deltat_want

    @property
    def effective_tpad(self):
        tpad = self.tpad + self.t_translation_max
        if self.highpass is not None:
            tpad += 0.5 / self.highpass

        return tpad

    def normalize_label(self, label):
        '''label has to be a numpy array'''
        return (label - self._label_median) / self._label_scale

    def denormalize_label(self, label):
        '''label has to be a numpy array'''
        return (label * self._label_scale) + self._label_median

    @property
    def channels(self):
        return self._channels

    @channels.setter
    def channels(self, v):
        if self._channels:
            logger.warning('Setting channels although channels have been '
                           'assigned before')
        self._channels = v

    @property
    def n_channels(self):
        return len(self._channels)

    @property
    def output_shapes(self):
        '''Return a tuple containing the shape of feature arrays and number of
        labels.
        '''
        return (self.tensor_shape, self.n_classes)

    @property
    def tensor_shape(self):
        return (self.n_channels, self._n_samples)
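
The normalize_label/denormalize_label pair above is a plain affine
transform (shift by the median, scale by the mean standard deviation), so
denormalization exactly inverts normalization. A minimal round-trip sketch
with made-up label values:

import numpy as num

labels = num.array([[0., 1., 2.], [2., 3., 4.], [4., 5., 6.]])
median = num.median(labels, axis=0)
scale = num.mean(num.std(labels, axis=0))

normalized = (labels - median) / scale
recovered = normalized * scale + median
assert num.allclose(recovered, labels)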
Example #20
class ScenarioGenerator(LocationGenerator):

    target_generators = List.T(
        TargetGenerator.T(),
        default=[],
        help='Targets to spawn in the scenario.')

    source_generator = SourceGenerator.T(
        default=DCSourceGenerator.D(),
        help='Sources to spawn in the scenario.')

    def __init__(self, **kwargs):
        LocationGenerator.__init__(self, **kwargs)

        for gen in self.target_generators:
            gen.update_hierarchy(self)

        for itry in range(self.ntries):

            try:
                self.get_stations()
                self.get_sources()
                return

            except ScenarioError:
                self.retry()

        raise ScenarioError(
            'could not generate scenario within %i tries' % self.ntries)

    def init_modelling(self, engine):
        self._engine = engine

    def get_engine(self):
        return self._engine

    def get_sources(self):
        return self.source_generator.get_sources()

    def get_events(self):
        return [s.pyrocko_event() for s in self.get_sources()]

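    # `collect` is a decorator factory: each decorated method returns a
    # per-generator callable, and the generated wrapper applies that
    # callable to every target generator, concatenating the results.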
    def collect(collector):
        if not callable(collector):
            raise AttributeError('This method should not be called directly.')

        def method(self, *args, **kwargs):
            result = []
            for gen in self.target_generators:
                result.extend(
                    collector(self, *args, **kwargs)(gen, *args, **kwargs))
            return result

        return method

    @collect
    def get_stations(self):
        return lambda gen: gen.get_stations()

    @collect
    def get_waveforms(self, tmin=None, tmax=None):
        return lambda gen, *a, **kw: gen.get_waveforms(
            self._engine, self.get_sources(), *a, **kw)

    @collect
    def get_insar_scenes(self, tmin=None, tmax=None):
        return lambda gen, *a, **kw: gen.get_insar_scenes(
            self._engine, self.get_sources(), *a, **kw)

    @collect
    def get_gnss_campaigns(self, tmin=None, tmax=None):
        return lambda gen, *a, **kw: gen.get_gnss_campaign(
            self._engine, self.get_sources(), *a, **kw)

    @collect
    def dump_data(self, path, tmin=None, tmax=None, overwrite=False):
        self.source_generator.dump_data(path)

        meta_dir = op.join(path, 'meta')
        util.ensuredir(meta_dir)

        model.station.dump_stations(
            self.get_stations(), op.join(meta_dir, 'stations.txt'))
        model.station.dump_kml(
            self.get_stations(), op.join(meta_dir, 'stations.kml'))

        dump_readme(path)

        def dump_data(gen, *a, **kw):
            logger.info('Creating files from %s...' % gen.__class__.__name__)
            return gen.dump_data(self._engine, self.get_sources(), *a, **kw)

        return dump_data

    @collect
    def _get_time_ranges(self):
        return lambda gen: [gen.get_time_range(self.get_sources())]

    def get_time_range(self):
        ranges = num.array(self._get_time_ranges())
        return ranges.min(), ranges.max()

    def get_pile(self, tmin=None, tmax=None):
        p = pile.Pile()

        trf = pile.MemTracesFile(None, self.get_waveforms(tmin, tmax))
        p.add_file(trf)
        return p

    def make_map(self, filename):
        logger.info('Plotting scenario\'s map...')
        try:
            draw_scenario_gmt(self, filename)
        except gmtpy.GMTError:
            logger.warning('GMT threw an error, could not plot map')

    def draw_map(self, fn):
        from pyrocko.plot import automap

        lat, lon = self.get_center_latlon()
        radius = self.get_radius()

        m = automap.Map(
            width=30.,
            height=30.,
            lat=lat,
            lon=lon,
            radius=radius,
            show_topo=True,
            show_grid=True,
            show_rivers=True,
            color_wet=(216, 242, 254),
            color_dry=(238, 236, 230)
            )

        self.source_generator.add_map_artists(m)

        sources = self.get_sources()
        for gen in self.target_generators:
            gen.add_map_artists(self.get_engine(), sources, m)

        # for patch in self.get_insar_patches():
        #     symbol_size = 50.
        #     coords = num.array(patch.get_corner_coordinates())
        #     m.gmt.psxy(in_rows=num.fliplr(coords),
        #                L=True,
        #                *m.jxyr)

        m.save(fn)

    @property
    def stores_wanted(self):
        return set([gen.store_id for gen in self.target_generators
                    if hasattr(gen, 'store_id')])

    @property
    def stores_missing(self):
        return self.stores_wanted - set(self.get_engine().get_store_ids())

    def ensure_gfstores(self, interactive=False):
        if not self.stores_missing:
            return

        from pyrocko.gf import ws

        cfg = config.config()
        if len(cfg.gf_store_superdirs) == 0:
            store_dir = op.expanduser(
                op.join(config.pyrocko_dir_tmpl, 'gf_stores'))
            logger.debug('Creating default gf_store_superdirs: %s' % store_dir)

            util.ensuredir(store_dir)
            cfg.gf_store_superdirs = [store_dir]
            config.write_config(cfg)

        if interactive:
            print('We could not find the following Green\'s function stores:\n'
                  ' %s\n'
                  'We can try to download the stores from'
                  ' http://kinherd.org:8080 into Pyrocko\'s global GF cache.'
                  % '\n'.join(self.stores_missing))
            for idr, dr in enumerate(cfg.gf_store_superdirs):
                print(' %d. %s' % ((idr+1), dr))
            s = input('\nIn which cache directory shall the GF store'
                      ' be downloaded to?\n'
                      'Default 1, (C)ancel: ')
            if s in ['c', 'C']:
                print('Canceled!')
                sys.exit(1)
            elif s == '':
                s = 1
            try:
                s = int(s)
                if not (1 <= s <= len(cfg.gf_store_superdirs)):
                    raise ValueError
            except ValueError:
                print('Invalid selection: %s' % s)
                sys.exit(1)
        else:
            s = 1

        download_dir = cfg.gf_store_superdirs[s-1]
        logger.info('Downloading Green\'s functions stores to %s'
                    % download_dir)

        for store in self.stores_missing:
            os.chdir(download_dir)
            ws.download_gf_store(site='kinherd', store_id=store)

    @classmethod
    def initialize(
            cls, path,
            center_lat=None, center_lon=None, radius=None,
            targets=AVAILABLE_TARGETS, force=False):
        """Initialize a Scenario and create a ``scenario.yml``

        :param path: Path to create the scenario in
        :type path: str
        :param center_lat: Center latitude, defaults to None
        :type center_lat: float, optional
        :param center_lon: Center longitude, defaults to None
        :type center_lon: float, optional
        :param radius: Scenario's radius in [m], defaults to None
        :type radius: float, optional
        :param targets: Targets to throw into scenario,
            defaults to AVAILABLE_TARGETS
        :type targets: list of :class:`pyrocko.scenario.ScenarioTargets`,
            optional
        :param force: Overwrite directory, defaults to False
        :type force: bool, optional
        :returns: Scenario
        :rtype: :class:`pyrocko.scenario.ScenarioGenerator`
        """
        import os.path as op

        if op.exists(path) and not force:
            raise CannotCreate('Directory %s already exists! Use force to'
                               ' overwrite.' % path)

        util.ensuredir(path)
        fn = op.join(path, 'scenario.yml')
        logger.debug('Writing new scenario to %s' % fn)

        scenario = cls()
        scenario.target_generators.extend([t() for t in targets])

        for gen in scenario.target_generators:
            gen.update_hierarchy(scenario)

        scenario.center_lat = center_lat
        scenario.center_lon = center_lon
        scenario.radius = radius

        scenario.dump(filename=fn)

        return scenario
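
The `collect` decorator above is the dispatch trick at the heart of
ScenarioGenerator. A self-contained sketch of the same pattern (class and
method names here are illustrative, not part of the library):

class Multi(object):
    # Stand-in for ScenarioGenerator: fans one call out to sub-generators.
    def __init__(self, generators):
        self.generators = generators

    def collect(collector):
        def method(self, *args, **kwargs):
            result = []
            for gen in self.generators:
                result.extend(collector(self, *args, **kwargs)(gen))
            return result
        return method

    @collect
    def get_items(self):
        return lambda gen: gen.get_items()

class Gen(object):
    def __init__(self, items):
        self.items = items

    def get_items(self):
        return self.items

multi = Multi([Gen([1, 2]), Gen([3])])
assert multi.get_items() == [1, 2, 3]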
Example #21
class WaveformGenerator(TargetGenerator):

    station_generator = StationGenerator.T(
        default=RandomStationGenerator.D(),
        help='The StationGenerator for creating the stations.')

    noise_generator = WaveformNoiseGenerator.T(
        default=WhiteNoiseGenerator.D(),
        help='Add synthetic noise to the waveforms.')

    store_id = gf.StringID.T(
        default=DEFAULT_STORE_ID,
        help='The GF store to use for forward-calculations.')

    seismogram_quantity = StringChoice.T(
        choices=['displacement', 'velocity', 'acceleration', 'counts'],
        default='displacement')

    vmin_cut = Float.T(
        default=2000.,
        help='Minimum seismic velocity to consider in the model.')
    vmax_cut = Float.T(
        default=8000.,
        help='Maximum seismic velocity to consider in the model.')

    fmin = Float.T(
        default=0.01,
        help='Minimum frequency/wavelength to resolve in the'
             ' synthetic waveforms.')

    tabulated_phases = List.T(
        gf.meta.TPDef.T(), optional=True,
        help='Define seismic phases to be calculated.')

    tabulated_phases_from_store = Bool.T(
        default=False,
        help='Calculate seismic phase arrivals for all travel-time tables '
             'defined in GF store.')

    tabulated_phases_noise_scale = Float.T(
        default=0.0,
        help='Standard deviation of normally distributed noise added to '
             'calculated phase arrivals.')

    taper = trace.Taper.T(
        optional=True,
        help='Time domain taper applied to synthetic waveforms.')

    compensate_synthetic_offsets = Bool.T(
        default=False,
        help='Center synthetic trace amplitudes using mean of waveform tips.')

    tinc = Float.T(
        optional=True,
        help='Time increment of waveforms.')

    continuous = Bool.T(
        default=True,
        help='Produce continuous traces. If disabled, traces are only '
             'produced where they intersect with events.')

    def __init__(self, *args, **kwargs):
        super(WaveformGenerator, self).__init__(*args, **kwargs)
        self._targets = []
        self._piles = {}

    def _get_pile(self, path):
        apath = op.abspath(path)
        assert op.isdir(apath)

        if apath not in self._piles:
            fns = util.select_files(
                [apath], show_progress=False)

            p = pile.Pile()
            if fns:
                p.load_files(fns, fileformat='mseed', show_progress=False)

            self._piles[apath] = p

        return self._piles[apath]

    def get_stations(self):
        return self.station_generator.get_stations()

    def get_targets(self):
        if self._targets:
            return self._targets

        for station in self.get_stations():
            channel_data = []
            channels = station.get_channels()
            if channels:
                for channel in channels:
                    channel_data.append([
                        channel.name,
                        channel.azimuth,
                        channel.dip])

            else:
                for c_name in ['BHZ', 'BHE', 'BHN']:
                    channel_data.append([
                        c_name,
                        model.guess_azimuth_from_name(c_name),
                        model.guess_dip_from_name(c_name)])

            for c_name, c_azi, c_dip in channel_data:

                target = gf.Target(
                    codes=(
                        station.network,
                        station.station,
                        station.location,
                        c_name),
                    quantity='displacement',
                    lat=station.lat,
                    lon=station.lon,
                    north_shift=station.north_shift,
                    east_shift=station.east_shift,
                    depth=station.depth,
                    store_id=self.store_id,
                    optimization='enable',
                    interpolation='nearest_neighbor',
                    azimuth=c_azi,
                    dip=c_dip)

                self._targets.append(target)

        return self._targets

    def get_time_range(self, sources):
        dmin, dmax = self.station_generator.get_distance_range(sources)

        times = num.array([source.time for source in sources],
                          dtype=float)

        tmin_events = num.min(times)
        tmax_events = num.max(times)

        tmin = tmin_events + dmin / self.vmax_cut - 10.0 / self.fmin
        tmax = tmax_events + dmax / self.vmin_cut + 10.0 / self.fmin

        return tmin, tmax

    def get_codes_to_deltat(self, engine, sources):
        deltats = {}
        for target in self.get_targets():
            deltats[target.codes] = engine.get_store(
                target.store_id).config.deltat

        return deltats

    def get_useful_time_increment(self, engine, sources):
        _, dmax = self.station_generator.get_distance_range(sources)
        tinc = dmax / self.vmin_cut + 2.0 / self.fmin

        deltats = set(self.get_codes_to_deltat(engine, sources).values())
        deltat = reduce(util.lcm, deltats)
        tinc = int(round(tinc / deltat)) * deltat
        return tinc

    def get_relevant_sources(self, sources, tmin, tmax):
        dmin, dmax = self.station_generator.get_distance_range(sources)
        trange = tmax - tmin
        tmax_pad = trange + tmax + dmin / self.vmax_cut
        tmin_pad = tmin - (dmax / self.vmin_cut + trange)

        return [s for s in sources if s.time < tmax_pad and s.time > tmin_pad]

    def get_waveforms(self, engine, sources, tmin, tmax):

        sources_relevant = self.get_relevant_sources(sources, tmin, tmax)
        if not (self.continuous or sources_relevant):
            return []

        trs = {}
        tts = util.time_to_str

        for nslc, deltat in self.get_codes_to_deltat(engine, sources).items():
            tr_tmin = int(round(tmin / deltat)) * deltat
            tr_tmax = (int(round(tmax / deltat))-1) * deltat
            nsamples = int(round((tr_tmax - tr_tmin) / deltat)) + 1

            tr = trace.Trace(
                *nslc,
                tmin=tr_tmin,
                ydata=num.zeros(nsamples),
                deltat=deltat)

            self.noise_generator.add_noise(tr)

            trs[nslc] = tr

        logger.debug('Forward modelling waveforms between %s - %s...'
                     % (tts(tmin, format='%Y-%m-%d_%H-%M-%S'),
                        tts(tmax, format='%Y-%m-%d_%H-%M-%S')))

        if not sources_relevant:
            return list(trs.values())

        targets = self.get_targets()
        response = engine.process(sources_relevant, targets)
        for source, target, res in response.iter_results(
                get='results'):

            if isinstance(res, gf.SeismosizerError):
                logger.warning(
                    'Out of bounds! \nTarget: %s\nSource: %s\n' % (
                        '.'.join(target.codes), source))
                continue

            tr = res.trace.pyrocko_trace()

            candidate = trs[target.codes]
            if not candidate.overlaps(tr.tmin, tr.tmax):
                continue

            if self.compensate_synthetic_offsets:
                tr.ydata -= (num.mean(tr.ydata[-3:-1]) +
                             num.mean(tr.ydata[1:3])) / 2.

            if self.taper:
                tr.taper(self.taper)

            resp = self.get_transfer_function(target.codes)
            if resp:
                tr = tr.transfer(transfer_function=resp)

            candidate.add(tr)
            trs[target.codes] = candidate

        return list(trs.values())

    def get_onsets(self, engine, sources, *args, **kwargs):

        targets = {t.codes[:3]: t for t in self.get_targets()}

        markers = []
        for source in sources:
            ev = source.pyrocko_event()
            markers.append(EventMarker(ev))
            for nsl, target in targets.items():
                store = engine.get_store(target.store_id)
                if self.tabulated_phases:
                    tabulated_phases = self.tabulated_phases

                elif self.tabulated_phases_from_store:
                    tabulated_phases = store.config.tabulated_phases
                else:
                    tabulated_phases = []

                for phase in tabulated_phases:
                    t = store.t(phase.id, source, target)
                    if not t:
                        continue

                    noise_scale = self.tabulated_phases_noise_scale
                    if noise_scale != 0.0:
                        t += num.random.normal(scale=noise_scale)

                    t += source.time
                    markers.append(
                        PhaseMarker(
                            phasename=phase.id,
                            tmin=t,
                            tmax=t,
                            event=source.pyrocko_event(),
                            nslc_ids=(nsl+('*',),)
                            )
                        )
        return markers

    def get_transfer_function(self, codes):
        if self.seismogram_quantity == 'displacement':
            return None
        elif self.seismogram_quantity == 'velocity':
            return trace.DifferentiationResponse(1)
        elif self.seismogram_quantity == 'acceleration':
            return trace.DifferentiationResponse(2)
        elif self.seismogram_quantity == 'counts':
            raise NotImplementedError()

    def ensure_data(self, engine, sources, path, tmin=None, tmax=None):
        self.ensure_waveforms(engine, sources, path, tmin, tmax)
        self.ensure_responses(path)

    def ensure_waveforms(self, engine, sources, path, tmin=None, tmax=None):

        path_waveforms = op.join(path, 'waveforms')
        util.ensuredir(path_waveforms)

        p = self._get_pile(path_waveforms)

        nslc_ids = set(target.codes for target in self.get_targets())

        def have_waveforms(tmin, tmax):
            trs_have = p.all(
                tmin=tmin, tmax=tmax,
                load_data=False, degap=False,
                trace_selector=lambda tr: tr.nslc_id in nslc_ids)

            return any(tr.data_len() > 0 for tr in trs_have)

        def add_files(paths):
            p.load_files(paths, fileformat='mseed', show_progress=False)

        path_traces = op.join(
            path_waveforms,
            '%(wmin_year)s',
            '%(wmin_month)s',
            '%(wmin_day)s',
            'waveform_%(network)s_%(station)s_' +
            '%(location)s_%(channel)s_%(tmin)s_%(tmax)s.mseed')

        tmin_all, tmax_all = self.get_time_range(sources)
        tmin = tmin if tmin is not None else tmin_all
        tmax = tmax if tmax is not None else tmax_all
        tts = util.time_to_str

        tinc = self.tinc or self.get_useful_time_increment(engine, sources)
        tmin = math.floor(tmin / tinc) * tinc
        tmax = math.ceil(tmax / tinc) * tinc

        nwin = int(round((tmax - tmin) / tinc))

        pbar = None
        for iwin in range(nwin):
            tmin_win = tmin + iwin*tinc
            tmax_win = tmin + (iwin+1)*tinc

            if have_waveforms(tmin_win, tmax_win):
                continue

            if pbar is None:
                pbar = util.progressbar('Generating waveforms', (nwin-iwin))

            pbar.update(iwin)

            trs = self.get_waveforms(engine, sources, tmin_win, tmax_win)

            try:
                wpaths = io.save(
                    trs, path_traces,
                    additional=dict(
                        wmin_year=tts(tmin_win, format='%Y'),
                        wmin_month=tts(tmin_win, format='%m'),
                        wmin_day=tts(tmin_win, format='%d'),
                        wmin=tts(tmin_win, format='%Y-%m-%d_%H-%M-%S'),
                        wmax_year=tts(tmax_win, format='%Y'),
                        wmax_month=tts(tmax_win, format='%m'),
                        wmax_day=tts(tmax_win, format='%d'),
                        wmax=tts(tmax_win, format='%Y-%m-%d_%H-%M-%S')))

                for wpath in wpaths:
                    logger.debug('Generated file: %s' % wpath)

                add_files(wpaths)

            except FileSaveError as e:
                raise ScenarioError(str(e))

        if pbar is not None:
            pbar.finish()

    def ensure_responses(self, path):
        from pyrocko.io import stationxml

        path_responses = op.join(path, 'meta')
        util.ensuredir(path_responses)

        fn_stationxml = op.join(path_responses, 'stations.xml')
        if op.exists(fn_stationxml):
            return

        logger.debug('Writing waveform meta information to StationXML...')

        stations = self.station_generator.get_stations()
        sxml = stationxml.FDSNStationXML.from_pyrocko_stations(stations)

        sunit = {
            'displacement': 'M',
            'velocity': 'M/S',
            'acceleration': 'M/S**2',
            'counts': 'COUNTS'}[self.seismogram_quantity]

        response = stationxml.Response(
            instrument_sensitivity=stationxml.Sensitivity(
                value=1.,
                frequency=1.,
                input_units=stationxml.Units(sunit),
                output_units=stationxml.Units('COUNTS')),
            stage_list=[])

        for net, station, channel in sxml.iter_network_station_channels():
            channel.response = response

        sxml.dump_xml(filename=fn_stationxml)

    def add_map_artists(self, engine, sources, automap):
        automap.add_stations(self.get_stations())
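
The window arithmetic in get_waveforms above snaps the requested time span
onto the sampling grid before allocating trace buffers. A small sketch of
that alignment (deltat and window values are arbitrary):

deltat = 0.5                    # sampling interval [s]
tmin, tmax = 10.2, 20.1         # requested window [s]

tr_tmin = int(round(tmin / deltat)) * deltat             # -> 10.0
tr_tmax = (int(round(tmax / deltat)) - 1) * deltat       # -> 19.5
nsamples = int(round((tr_tmax - tr_tmin) / deltat)) + 1  # -> 20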
Example #22
class Timings(Object):
    timings = List.T(CakeTiming.T())

    def __init__(self, timings):
        Object.__init__(self, timings=timings)
Example #23
class DatasetConfig(HasPaths):
    ''' Configuration for a Grond `Dataset` object. '''

    stations_path = Path.T(
        optional=True,
        help='File with station coordinates in Pyrocko format.')
    stations_stationxml_paths = List.T(
        Path.T(),
        optional=True,
        help='List of files with station coordinates in StationXML format.')
    events_path = Path.T(optional=True,
                         help='File with hypocenter information and possibly'
                         ' reference solution')
    waveform_paths = List.T(Path.T(),
                            optional=True,
                            help='List of directories with raw waveform data')
    clippings_path = Path.T(optional=True)
    responses_sacpz_path = Path.T(
        optional=True,
        help='Directory with SACPZ response files for restitution of'
        ' the raw waveform data.')
    responses_stationxml_paths = List.T(
        Path.T(),
        optional=True,
        help='List of StationXML response files for restitution of'
        ' the raw waveform data.')
    station_corrections_path = Path.T(
        optional=True, help='File containing station correction information.')
    apply_correction_factors = Bool.T(
        optional=True,
        default=True,
        help='Apply correction factors from station corrections.')
    apply_correction_delays = Bool.T(
        optional=True,
        default=True,
        help='Apply correction delays from station corrections.')
    apply_displaced_sampling_workaround = Bool.T(
        optional=True,
        default=False,
        help='Work around displaced sampling issues.')
    extend_incomplete = Bool.T(default=False,
                               help='Extend incomplete seismic traces.')
    picks_paths = List.T(Path.T())
    blacklist_paths = List.T(
        Path.T(), help='List of text files with blacklisted stations.')
    blacklist = List.T(
        String.T(),
        help='Stations/components to be excluded according to their STA, '
        'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.')
    whitelist_paths = List.T(
        Path.T(), help='List of text files with whitelisted stations.')
    whitelist = List.T(
        String.T(),
        optional=True,
        help='If not None, list of stations/components to include according '
        'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes. '
        'Note: '
        'when whitelisting on channel level, both the raw and '
        'the processed channel codes have to be listed.')
    synthetic_test = SyntheticTest.T(optional=True)

    kite_scene_paths = List.T(Path.T(), optional=True)

    gnss_campaign_paths = List.T(Path.T(), optional=True)

    def __init__(self, *args, **kwargs):
        HasPaths.__init__(self, *args, **kwargs)
        self._ds = {}

    def get_event_names(self):
        logger.info('Loading events ...')

        def extra(path):
            return expand_template(path, dict(event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        def check_events(events, fn):
            for ev in events:
                if not ev.name:
                    logger.warning('Event in %s has no name!', fn)
                    return
                if ev.lat is None or ev.lon is None:
                    logger.warning('Event %s has incomplete coordinates!',
                                   ev.name)
                if ev.depth is None:
                    logger.warning('Event %s has no depth!', ev.name)
                if ev.time is None:
                    logger.warning('Event %s has no time!', ev.name)

        events = []
        events_path = fp(self.events_path)
        fns = glob.glob(events_path)
        if not fns:
            raise DatasetError('No event files matching "%s".' % events_path)

        for fn in fns:
            logger.debug('Loading from file %s' % fn)
            ev = model.load_events(filename=fn)
            check_events(ev, fn)

            events.extend(ev)

        event_names = [ev.name for ev in events]
        event_names.sort()
        return event_names

    def get_dataset(self, event_name):
        if event_name not in self._ds:

            def extra(path):
                return expand_template(path, dict(event_name=event_name))

            def fp(path):
                p = self.expand_path(path, extra=extra)
                if p is None:
                    return None

                if isinstance(p, list):
                    for path in p:
                        if not op.exists(path):
                            logger.warning('Path %s does not exist.' % path)
                else:
                    if not op.exists(p):
                        logger.warning('Path %s does not exist.' % p)

                return p

            ds = Dataset(event_name)
            try:
                ds.add_events(filename=fp(self.events_path))

                ds.add_stations(
                    pyrocko_stations_filename=fp(self.stations_path),
                    stationxml_filenames=fp(self.stations_stationxml_paths))

                if self.waveform_paths:
                    ds.add_waveforms(paths=fp(self.waveform_paths))

                if self.kite_scene_paths:
                    ds.add_kite_scenes(paths=fp(self.kite_scene_paths))

                if self.gnss_campaign_paths:
                    ds.add_gnss_campaigns(paths=fp(self.gnss_campaign_paths))

                if self.clippings_path:
                    ds.add_clippings(markers_filename=fp(self.clippings_path))

                if self.responses_sacpz_path:
                    ds.add_responses(
                        sacpz_dirname=fp(self.responses_sacpz_path))

                if self.responses_stationxml_paths:
                    ds.add_responses(stationxml_filenames=fp(
                        self.responses_stationxml_paths))

                if self.station_corrections_path:
                    ds.add_station_corrections(
                        filename=fp(self.station_corrections_path))

                ds.apply_correction_factors = self.apply_correction_factors
                ds.apply_correction_delays = self.apply_correction_delays
                ds.apply_displaced_sampling_workaround = \
                    self.apply_displaced_sampling_workaround
                ds.extend_incomplete = self.extend_incomplete

                for picks_path in self.picks_paths:
                    ds.add_picks(filename=fp(picks_path))

                ds.add_blacklist(self.blacklist)
                ds.add_blacklist(filenames=fp(self.blacklist_paths))
                if self.whitelist:
                    ds.add_whitelist(self.whitelist)
                if self.whitelist_paths:
                    ds.add_whitelist(filenames=fp(self.whitelist_paths))

                ds.set_synthetic_test(copy.deepcopy(self.synthetic_test))
                self._ds[event_name] = ds
            except (FileLoadError, OSError) as e:
                raise DatasetError(str(e))

        return self._ds[event_name]
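
get_dataset above expands an event-name placeholder in every configured
path. A hedged sketch of that substitution, using string.Template as a
stand-in for grond's expand_template (path and event name are made up):

from string import Template

path_template = 'data/$event_name/waveforms'
path = Template(path_template).substitute(event_name='ev_2011_tohoku')
# -> 'data/ev_2011_tohoku/waveforms'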
Example #24
class EventMarkerList(Object):
    xmltagname = 'eventmarkerlist'
    events = List.T(XMLEventMarker.T())
Example #25
class ToyProblem(Problem):
    problem_parameters = [
        Parameter('north', 'm', label='North'),
        Parameter('east', 'm', label='East'),
        Parameter('depth', 'm', label='Depth')
    ]

    ranges = Dict.T(String.T(), gf.Range.T())

    targets = List.T(ToyTarget.T())
    base_source = ToySource.T()

    def __init__(self, **kwargs):
        Problem.__init__(self, **kwargs)
        self._xtargets = None
        self._obs_distances = None

    def pack(self, source):
        return num.array([source.north, source.east, source.depth],
                         dtype=float)

    def _setup_modelling(self):
        if self._xtargets is None:
            self._xtargets = num.array([(t.north, t.east, t.depth)
                                        for t in self.targets],
                                       dtype=float)

            self._obs_distances = num.array(
                [t.obs_distance for t in self.targets], dtype=float)

    def misfits(self, x, mask=None):
        self._setup_modelling()
        distances = num.sqrt(
            num.sum((x[num.newaxis, :] - self._xtargets)**2, axis=1))

        misfits = num.zeros((self.ntargets, 2))
        misfits[:, 0] = num.abs(distances - self._obs_distances)
        misfits[:, 1] = num.ones(self.ntargets) \
            * num.mean(num.abs(self._obs_distances))
        return misfits

    def misfits_many(self, xs):
        self._setup_modelling()
        distances = num.sqrt(
            num.sum(
                (xs[:, num.newaxis, :] - self._xtargets[num.newaxis, :])**2,
                axis=2))

        misfits = num.zeros((xs.shape[0], self.ntargets, 2))

        misfits[:, :,
                0] = num.abs(distances - self._obs_distances[num.newaxis, :])

        misfits[:, :, 1] = num.mean(num.abs(self._obs_distances))

        return misfits

    def xref(self):
        base_source = self.base_source
        return num.array(
            [base_source.north, base_source.east, base_source.depth])

    def extract(self, xs, i):
        if xs.ndim == 1:
            return self.extract(xs[num.newaxis, :], i)[0]

        if i < self.nparameters:
            return xs[:, i]
        else:
            return self.make_dependant(
                xs, self.dependants[i - self.nparameters].name)
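
misfits_many above relies on numpy broadcasting to evaluate many candidate
models against all targets in one shot. A minimal sketch with made-up
coordinates:

import numpy as num

xs = num.array([[0., 0., 0.], [1., 0., 0.]])        # 2 models: north, east, depth
xtargets = num.array([[0., 3., 4.], [0., 0., 1.]])  # 2 targets

# (nmodels, 1, 3) - (1, ntargets, 3) broadcasts to (nmodels, ntargets, 3)
distances = num.sqrt(
    num.sum((xs[:, num.newaxis, :] - xtargets[num.newaxis, :])**2, axis=2))
# distances[0] -> [5., 1.]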
Example #26
class StationMarkerList(Object):
    xmltagname = 'stationmarkerlist'
    stations = List.T(XMLStationMarker.T())
Example #27
class dict_stats(Object):
    """
    Dict for all stations + their flat freq ranges
    """
    FlatFreqRanges = Dict.T(String.T(), List.T(Tuple.T(2, Float.T())))
    MeanMedianR_FlatRanges = Dict.T(String.T(), List.T(Float.T()))
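
Assuming dict_stats and its guts imports are in scope, filling the nested
container types might look like this (station code and numbers made up):

stats = dict_stats(
    FlatFreqRanges={'GE.APE..BHZ': [(0.1, 1.0), (2.0, 8.0)]},
    MeanMedianR_FlatRanges={'GE.APE..BHZ': [0.95, 0.97]})
stats.validate()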
Example #28
class Event(Object):
    public_id = ResourceReference.T(
        xmlstyle='attribute', xmltagname='publicID')
    description_list = List.T(EventDescription.T())
    comment_list = List.T(Comment.T())
    focal_mechanism_list = List.T(FocalMechanism.T())
    amplitude_list = List.T(Amplitude.T())
    magnitude_list = List.T(Magnitude.T())
    station_magnitude_list = List.T(StationMagnitude.T())
    origin_list = List.T(Origin.T())
    pick_list = List.T(Pick.T())
    preferred_origin_id = ResourceReference.T(
        optional=True, xmltagname='preferredOriginID')
    preferred_magnitude_id = ResourceReference.T(
        optional=True, xmltagname='preferredMagnitudeID')
    preferred_focal_mechanism_id = ResourceReference.T(
        optional=True, xmltagname='preferredFocalMechanismID')
    type = EventType.T(
        optional=True)
    type_certainty = EventTypeCertainty.T(
        optional=True)
    creation_info = CreationInfo.T(
        optional=True)
    region = Region.T(
        optional=True)

    def pyrocko_phase_markers(self):
        event = self.pyrocko_event()
        return [p.pyrocko_phase_marker(event=event) for p in self.pick_list]

    def pyrocko_event(self):
        '''
        Convert into Pyrocko event object.

        Considers only the *preferred* origin, magnitude, and moment tensor.
        '''

        if not self.preferred_origin:
            raise NoPreferredOriginSet()

        ev = self.preferred_origin.pyrocko_event()

        foc_mech = self.preferred_focal_mechanism
        if not foc_mech and self.focal_mechanism_list:
            foc_mech = self.focal_mechanism_list[0]
            if len(self.focal_mechanism_list) > 1:
                logger.warning(
                    'Event %s: No preferred focal mechanism set, '
                    'more than one available, using first' % ev.name)

        if foc_mech and foc_mech.moment_tensor_list:
            ev.moment_tensor = \
                foc_mech.moment_tensor_list[0].pyrocko_moment_tensor()

            if len(foc_mech.moment_tensor_list) > 1:
                logger.warning(
                    'more than one moment tensor available, using first')

        mag = None
        pref_mag = self.preferred_magnitude
        if pref_mag:
            mag = pref_mag
        elif self.magnitude_list:
            mag = self.magnitude_list[0]
            if len(self.magnitude_list) > 1:
                logger.warning(
                    'Event %s: No preferred magnitude set, '
                    'more than one available, using first' % ev.name)

        if mag:
            ev.magnitude = mag.mag.value
            ev.magnitude_type = mag.type

        ev.region = self.get_effective_region()

        return ev

    def get_effective_region(self):
        if self.region:
            return self.region

        for desc in self.description_list:
            if desc.type in ('Flinn-Engdahl region', 'region name'):
                return desc.text

        return None

    @property
    def preferred_origin(self):
        return one_element_or_none(
            [x for x in self.origin_list
             if x.public_id == self.preferred_origin_id])

    @property
    def preferred_magnitude(self):
        return one_element_or_none(
            [x for x in self.magnitude_list
             if x.public_id == self.preferred_magnitude_id])

    @property
    def preferred_focal_mechanism(self):
        return one_element_or_none(
            [x for x in self.focal_mechanism_list
             if x.public_id == self.preferred_focal_mechanism_id])
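
The preferred_* properties above use a one_element_or_none helper that is
not shown in this example. A plausible minimal implementation (assumed,
not the library's verbatim code):

def one_element_or_none(lst):
    # Return the single element of lst; None if it is empty or ambiguous.
    if len(lst) == 1:
        return lst[0]
    return None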
Example #29
class B(Object):
    a_list = List.T(A.T())
    a_tuple = Tuple.T(3, A.T())
    a_dict = Dict.T(Int.T(), A.T())
    b = Float.T()
Example #30
    def testOptionalDefault(self):

        from pyrocko.guts_array import Array, array_equal
        import numpy as num
        assert_ae = num.testing.assert_almost_equal

        def array_equal_noneaware(a, b):
            if a is None:
                return b is None
            elif b is None:
                return a is None
            else:
                return array_equal(a, b)

        data = [
            ('a', Int.T(), [None, 0, 1, 2], ['aerr', 0, 1, 2]),
            ('b', Int.T(optional=True), [None, 0, 1, 2], [None, 0, 1, 2]),
            ('c', Int.T(default=1), [None, 0, 1, 2], [1, 0, 1, 2]),
            ('d', Int.T(default=1, optional=True),
             [None, 0, 1, 2], [1, 0, 1, 2]),
            ('e', List.T(Int.T()), [None, [], [1], [2]], [[], [], [1], [2]]),
            ('f', List.T(Int.T(), optional=True),
             [None, [], [1], [2]], [None, [], [1], [2]]),
            ('g', List.T(Int.T(), default=[1]),
             [None, [], [1], [2]], [[1], [], [1], [2]]),
            ('h', List.T(Int.T(), default=[1], optional=True),
             [None, [], [1], [2]], [[1], [], [1], [2]]),
            ('i', Tuple.T(2, Int.T()), [None, (1, 2)], ['err', (1, 2)]),
            ('j', Tuple.T(2, Int.T(), optional=True),
             [None, (1, 2)], [None, (1, 2)]),
            ('k', Tuple.T(2, Int.T(), default=(1, 2)),
             [None, (1, 2), (3, 4)], [(1, 2), (1, 2), (3, 4)]),
            ('l', Tuple.T(2, Int.T(), default=(1, 2), optional=True),
             [None, (1, 2), (3, 4)], [(1, 2), (1, 2), (3, 4)]),
            ('i2', Tuple.T(None, Int.T()), [None, (1, 2)], [(), (1, 2)]),
            ('j2', Tuple.T(None, Int.T(), optional=True),
             [None, (), (3, 4)], [None, (), (3, 4)]),
            ('k2', Tuple.T(None, Int.T(), default=(1,)),
             [None, (), (3, 4)], [(1,), (), (3, 4)]),
            ('l2', Tuple.T(None, Int.T(), default=(1,), optional=True),
             [None, (), (3, 4)], [(1,), (), (3, 4)]),
            ('m', Array.T(shape=(None,), dtype=int, serialize_as='list'),
             [num.arange(0), num.arange(2)],
             [num.arange(0), num.arange(2)]),
            ('n', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          optional=True),
             [None, num.arange(0), num.arange(2)],
             [None, num.arange(0), num.arange(2)]),
            ('o', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          default=num.arange(2)),
             [None, num.arange(0), num.arange(2), num.arange(3)],
             [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
            ('p', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          default=num.arange(2), optional=True),
             [None, num.arange(0), num.arange(2), num.arange(3)],
             [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
            ('q', Dict.T(String.T(), Int.T()),
             [None, {}, {'a': 1}], [{}, {}, {'a': 1}]),
            ('r', Dict.T(String.T(), Int.T(), optional=True),
             [None, {}, {'a': 1}], [None, {}, {'a': 1}]),
            ('s', Dict.T(String.T(), Int.T(), default={'a': 1}),
             [None, {}, {'a': 1}], [{'a': 1}, {}, {'a': 1}]),
            ('t', Dict.T(String.T(), Int.T(), default={'a': 1},
                         optional=True),
             [None, {}, {'a': 1}], [{'a': 1}, {}, {'a': 1}]),
        ]

        for k, t, vals, exp in data:
            last = [None]

            class A(Object):
                def __init__(self, **kwargs):
                    last[0] = len(kwargs)
                    Object.__init__(self, **kwargs)

                v = t

            A.T.class_signature()

            for v, e in zip(vals, exp):
                if isinstance(e, str) and e == 'aerr':
                    with self.assertRaises(ArgumentError):
                        if v is not None:
                            a1 = A(v=v)
                        else:
                            a1 = A()

                    continue
                else:
                    if v is not None:
                        a1 = A(v=v)
                    else:
                        a1 = A()

                if isinstance(e, str) and e == 'err':
                    with self.assertRaises(ValidationError):
                        a1.validate()
                else:
                    a1.validate()
                    a2 = load_string(dump(a1))
                    if isinstance(e, num.ndarray):
                        assert last[0] == int(not (array_equal_noneaware(
                            t.default(), a1.v) and t.optional))
                        assert_ae(a1.v, e)
                        assert_ae(a2.v, e)
                    else:
                        assert last[0] == int(not (
                            t.default() == a1.v and t.optional))
                        self.assertEqual(a1.v, e)
                        self.assertEqual(a2.v, e)
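
The expectation table in the test above encodes the guts rules for default
and optional: a missing value becomes the default when one is given, stays
None only when the attribute is optional and has no default, and is an
error otherwise. A tiny standalone illustration of those rules:

from pyrocko.guts import Object, Int

class P(Object):
    a = Int.T(default=1)                  # missing -> 1
    b = Int.T(optional=True)              # missing -> None
    c = Int.T(default=2, optional=True)   # missing -> 2

p = P()
assert (p.a, p.b, p.c) == (1, None, 2)
p.validate()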