Example #1
class BaseProbabilisticRupture(with_metaclass(abc.ABCMeta, Rupture)):
    """
    Base class for a probabilistic rupture, that is a :class:`Rupture`
    associated with a temporal occurrence model defining probability of
    rupture occurrence in a certain time span.
    """
    @abc.abstractmethod
    def get_probability_no_exceedance(self, poes):
        """
        Compute and return the probability that in the time span for which the
        rupture is defined, the rupture itself never generates a ground motion
        value higher than a given level at a given site.

        Such calculation is performed starting from the conditional
        probability that an occurrence of the current rupture produces
        a ground motion value higher than the level of interest at the
        site of interest.

        The actual formula used for such calculation depends on the temporal
        occurrence model the rupture is associated with.

        The calculation can be performed for multiple intensity measure levels
        and multiple sites in a vectorized fashion.

        :param poes:
            2D numpy array containing the conditional probabilities that a
            rupture occurrence causes a ground shaking value exceeding a
            ground motion level at a site. The first dimension represents
            sites, the second intensity measure levels. ``poes`` can be
            obtained by calling the :meth:`method
            <openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes>`.
        """

    @abc.abstractmethod
    def sample_number_of_occurrences(self):
        """
Example #2
class _IMT(with_metaclass(IMTMeta, tuple)):
    """
    Base class for intensity measure type.

    Subclasses may define class attribute ``_fields`` as a tuple with names
    of parameters the specific intensity measure type requires (if there
    are any).
    """
    _fields = ()

    def __new__(cls, sa_period=None, sa_damping=None):
        return tuple.__new__(cls, (cls.__name__, sa_period, sa_damping))

    def __getnewargs__(self):
        return tuple(getattr(self, field) for field in self._fields)

    def __str__(self):
        if self[0] == 'SA':
            return 'SA(%s)' % self[1]
        return self[0]

    def __lt__(self, other):
        return str(self) < str(other)

    def __repr__(self):
        return '%s(%s)' % (type(self).__name__, ', '.join(
            '%s=%s' % (field, getattr(self, field))
            for field in type(self)._fields))
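As a usage sketch, a concrete intensity measure type declares its _fields and exposes them as properties. The class below mirrors the spectral-acceleration type for illustration only; the real definitions live in openquake.hazardlib.imt and the metaclass may already wire up the field properties.

from operator import itemgetter

class SA(_IMT):  # illustrative re-declaration of a spectral acceleration IMT
    _fields = ('period', 'damping')
    period = property(itemgetter(1))
    damping = property(itemgetter(2))

    def __new__(cls, period, damping=5.0):
        return _IMT.__new__(cls, period, damping)

print(str(SA(0.2)))       # SA(0.2)
print(repr(SA(0.2)))      # SA(period=0.2, damping=5.0)
print(SA(0.2) < SA(1.0))  # True -- ordering is by string representation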
Example #3
class BaseMSR(with_metaclass(abc.ABCMeta)):
    """
    A base class for Magnitude-Area Scaling Relationship.
    Allows calculation of rupture area from magnitude.
    """

    @abc.abstractmethod
    def get_median_area(self, mag, rake):
        """
        Return median area (in square km) from magnitude ``mag`` and ``rake``.

        To be overridden by subclasses.

        :param mag:
            Moment magnitude (Mw).
        :param rake:
            Rake angle (the rupture propagation direction) in degrees,
            from -180 to 180.
        """

    def __eq__(self, other):
        """
        Two instances of the same class are considered equal
        """
        return self.__class__ is other.__class__

    def __repr__(self):
        """
        Returns the name of the class
        """
        return "<%s>" % self.__class__.__name__
Example #4
class SMTimeSeriesReader(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for a reader of a ground motion time series
    """
    def __init__(self, input_files, folder_name=None, units="cm/s/s"):
        """
        Instantiate and conduct folder checks
        """
        self.input_files = []
        for fname in input_files:
            if folder_name:
                filename = os.path.join(folder_name, fname)
                if os.path.exists(filename):
                    self.input_files.append(filename)
            else:
                if os.path.exists(fname):
                    self.input_files.append(fname)
        self.time_step = None
        self.number_steps = None
        self.units = units
        self.metadata = None

    @abc.abstractmethod
    def parse_records(self, record=None):
        """
Example #5
class LiteralNode(with_metaclass(MetaLiteralNode, Node)):
    """
    Subclasses should define a non-empty dictionary of validators.
    """
    validators = {}  # to be overridden in subclasses

    def __init__(self,
                 fulltag,
                 attrib=None,
                 text=None,
                 nodes=None,
                 lineno=None):
        validators = self.__class__.validators
        tag = striptag(fulltag)
        if tag in validators:
            # try to cast the node, if the tag is known
            assert not nodes, 'You cannot cast a composite node: %s' % nodes
            try:
                text = validators[tag](text, **attrib)
                assert text is not None
            except Exception as exc:
                raise ValueError('Could not convert %s->%s: %s, line %s' %
                                 (tag, validators[tag].__name__, exc, lineno))
        elif attrib:
            # cast the attributes
            for n, v in attrib.items():
                if n in validators:
                    try:
                        attrib[n] = validators[n](v)
                    except Exception as exc:
                        raise ValueError(
                            'Could not convert %s->%s: %s, line %s' %
                            (n, validators[n].__name__, exc, lineno))
        super(LiteralNode, self).__init__(fulltag, attrib, text, nodes, lineno)
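A sketch of what declaring validators means at instantiation time. The tag name is invented, and the behaviour of the Node base class and MetaLiteralNode metaclass is assumed for illustration:

class MyNode(LiteralNode):  # illustrative subclass
    validators = {'magnitude': float}

node = MyNode('magnitude', attrib={}, text='6.5')  # text is cast to the float 6.5
# MyNode('magnitude', attrib={}, text='bad', lineno=3) would instead raise
# ValueError: Could not convert magnitude->float: ..., line 3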
Example #6
class BaseMSRSigma(with_metaclass(abc.ABCMeta, BaseMSR)):
    """
    Extends :class:`BaseMSR`, allowing uncertainties (sigma) to be included
    in the rupture area estimation.
    """
    @abc.abstractmethod
    def get_std_dev_area(self, mag, rake):
        """
Example #7
class BaseCorrelationModel(with_metaclass(abc.ABCMeta)):
    """
    Base class for correlation models for spatially-distributed ground-shaking
    intensities.
    """

    @abc.abstractmethod
    def get_lower_triangle_correlation_matrix(self, sites, imt):
        """
        Get lower-triangle matrix as a result of Cholesky-decomposition
        of correlation matrix.

        The resulting matrix should have zeros above the main diagonal.

        The actual implementations of :class:`BaseCorrelationModel` interface
        might calculate the matrix considering site collection and IMT (like
        :class:`JB2009CorrelationModel` does) or might have it pre-constructed
        for a specific site collection and IMT, in which case they will need
        to make sure that parameters to this function match parameters that
        were used to pre-calculate decomposed correlation matrix.

        :param sites:
            :class:`~openquake.hazardlib.site.SiteCollection` to create
            correlation matrix for.
        :param imt:
            Intensity measure type object, see :mod:`openquake.hazardlib.imt`.
        """

    def apply_correlation(self, sites, imt, residuals):
        """
        Apply correlation to randomly sampled residuals.

        :param sites:
            :class:`~openquake.hazardlib.site.SiteCollection` residuals were
            sampled for.
        :param imt:
            Intensity measure type object, see :mod:`openquake.hazardlib.imt`.
        :param residuals:
            2d numpy array of sampled residuals, where the first dimension
            represents sites (same length as the ``sites`` parameter) and
            the second represents different realizations (samples).
        :returns:
            Array of the same structure and semantics as ``residuals``
            but with correlations applied.
        """
        # intra-event residual for a single realization is a product
        # of lower-triangle decomposed correlation matrix and vector
        # of N random numbers (where N is equal to number of sites).
        # we need to do that multiplication once per realization
        # with the same matrix and different vectors.
        corma = self.get_lower_triangle_correlation_matrix(sites, imt)
        return numpy.dot(corma, residuals)
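The effect of apply_correlation can be checked numerically with a hard-coded two-site correlation matrix; a real model would derive it from inter-site distances and the IMT:

import numpy

corr = numpy.array([[1.0, 0.6],
                    [0.6, 1.0]])        # target correlation between two sites
lower = numpy.linalg.cholesky(corr)     # what get_lower_triangle_correlation_matrix returns
residuals = numpy.random.standard_normal((2, 100000))  # 2 sites x 100000 realizations
correlated = lower.dot(residuals)
print(numpy.corrcoef(correlated)[0, 1])  # close to 0.6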
Example #8
class BaseContext(with_metaclass(abc.ABCMeta)):
    """
    Base class for context object.
    """
    def __eq__(self, other):
        """
        Return True if ``other`` has same attributes with same values.
        """
        if isinstance(other, self.__class__):
            if self._slots_ == other._slots_:
                self_other = [
                    numpy.all(
                        getattr(self, s, None) == getattr(other, s, None))
                    for s in self._slots_
                ]
                return numpy.all(self_other)
        return False
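The equality check compares every slot element-wise, which works for scalars and numpy arrays alike. A sketch with a hypothetical context class:

import numpy

class PointContext(BaseContext):  # hypothetical context with two slots
    _slots_ = ('mag', 'vs30')

a, b = PointContext(), PointContext()
a.mag = b.mag = 6.0
a.vs30 = numpy.array([760.0, 500.0])
b.vs30 = numpy.array([760.0, 500.0])
print(a == b)      # True: same slots, element-wise equal values
b.vs30[1] = 300.0
print(a == b)      # False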
Example #9
class SMDatabaseReader(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for strong motion database parser
    """
    def __init__(self, db_id, db_name, filename, record_folder=None):
        """
        Instantiate and conduct folder checks
        """
        self.id = db_id
        self.name = db_name
        self.filename = filename
        self.database = None
        if record_folder:
            self.record_folder = record_folder
        else:
            self.record_folder = self.filename

    @abc.abstractmethod
    def parse(self):
        """
Example #10
class SMSpectraReader(with_metaclass(abc.ABCMeta)):
    """
    Abstract Base Class for a reader of a ground motion spectra record
    """
    def __init__(self, input_files, folder_name=None):
        """
        Instantiate with basic file checks
        """
        self.input_files = []
        for fname in input_files:
            if folder_name:
                filename = os.path.join(folder_name, fname)
                if os.path.exists(filename):
                    self.input_files.append(filename)
            else:
                if os.path.exists(fname):
                    self.input_files.append(fname)

    @abc.abstractmethod
    def parse_spectra(self):
        """
Example #11
class BaseASR(with_metaclass(abc.ABCMeta)):
    """
    A base class for Area-Magnitude Scaling Relationship.
    Allows calculation of rupture magnitude from area.
    """
    @abc.abstractmethod
    def get_median_mag(self, area, rake):
        """
        Return median magnitude (Mw) given the area and rake.

        :param area:
            Area in square km.
        :param rake:
            Rake angle (the rupture propagation direction) in degrees,
            from -180 to 180.
        """

    def __repr__(self):
        """
        Returns the name of the class
        """
        return "<%s>" % self.__class__.__name__
Example #12
class ParametricSeismicSource(with_metaclass(abc.ABCMeta, BaseSeismicSource)):
    """
    Parametric Seismic Source generates earthquake ruptures from source
    parameters, and associated probabilities of occurrence are defined through
    a magnitude frequency distribution and a temporal occurrence model.

    :param mfd:
        Magnitude-Frequency distribution for the source.
        See :mod:`openquake.hazardlib.mfd`.
    :param rupture_mesh_spacing:
        The desired distance between two adjacent points in the source's
        ruptures' mesh, in km. Mainly this parameter allows balancing the
        trade-off between the time needed to compute the :meth:`distance
        <openquake.hazardlib.geo.surface.base.BaseSurface.get_min_distance>`
        between the rupture surface and a site and the precision of that
        computation.
    :param magnitude_scaling_relationship:
        Instance of subclass of
        :class:`openquake.hazardlib.scalerel.base.BaseMSR` describing
        how the area of the rupture depends on magnitude and rake.
    :param rupture_aspect_ratio:
        Float number representing how much wider the source's ruptures are
        than they are tall. An aspect ratio of 1 means ruptures have a square
        shape, a value below 1 means ruptures stretch vertically more than
        horizontally, and vice versa.
    :param temporal_occurrence_model:
        Instance of
        :class:`openquake.hazardlib.tom.PoissonTOM` defining the temporal
        occurrence model used to calculate rupture occurrence probabilities.

    :raises ValueError:
        If either rupture aspect ratio or rupture mesh spacing is not positive
        (if not None).
    """

    _slots_ = BaseSeismicSource._slots_ + '''mfd rupture_mesh_spacing
    magnitude_scaling_relationship rupture_aspect_ratio
    temporal_occurrence_model'''.split()

    def __init__(self, source_id, name, tectonic_region_type, mfd,
                 rupture_mesh_spacing, magnitude_scaling_relationship,
                 rupture_aspect_ratio, temporal_occurrence_model):
        super().__init__(source_id, name, tectonic_region_type)

        if rupture_mesh_spacing is not None and not rupture_mesh_spacing > 0:
            raise ValueError('rupture mesh spacing must be positive')

        if rupture_aspect_ratio is not None and not rupture_aspect_ratio > 0:
            raise ValueError('rupture aspect ratio must be positive')

        self.mfd = mfd
        self.rupture_mesh_spacing = rupture_mesh_spacing
        self.magnitude_scaling_relationship = magnitude_scaling_relationship
        self.rupture_aspect_ratio = rupture_aspect_ratio
        self.temporal_occurrence_model = temporal_occurrence_model

    def get_annual_occurrence_rates(self, min_rate=0):
        """
        Get a list of pairs "magnitude -- annual occurrence rate".

        The list is taken from assigned MFD object
        (see :meth:`openquake.hazardlib.mfd.base.BaseMFD.get_annual_occurrence_rates`)
        with simple filtering by rate applied.

        :param min_rate:
            A non-negative value to filter magnitudes by minimum annual
            occurrence rate. Only magnitudes with rates greater than that
            are included in the result list.
        :returns:
            A list of two-item tuples -- magnitudes and occurrence rates.
        """
        return [(mag, occ_rate)
                for (mag, occ_rate) in self.mfd.get_annual_occurrence_rates()
                if min_rate is None or occ_rate > min_rate]

    def get_min_max_mag(self):
        """
        Get the minimum and maximum magnitudes of the ruptures generated
        by the source from the underlying MFD.
        """
        return self.mfd.get_min_max_mag()

    def __repr__(self):
        """
        String representation of a source, displaying the source class name
        and the source id.
        """
        return '<%s %s>' % (self.__class__.__name__, self.source_id)
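The rate filtering performed by get_annual_occurrence_rates is easy to reproduce on its own; the MFD rates below are invented numbers used only to show the behaviour:

mfd_rates = [(5.0, 1e-2), (6.0, 1e-3), (7.0, 1e-5)]   # (magnitude, annual rate)
min_rate = 1e-4
filtered = [(mag, rate) for mag, rate in mfd_rates
            if min_rate is None or rate > min_rate]
print(filtered)  # [(5.0, 0.01), (6.0, 0.001)] -- the 1e-5 rate is dropped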
Example #13
class BaseLogicTree(with_metaclass(abc.ABCMeta)):
    """
    Common code for logic tree readers, parsers and verifiers --
    :class:`GMPELogicTree` and :class:`SourceModelLogicTree`.

    :param filename:
        Full pathname of logic tree file
    :param validate:
        Boolean indicating whether or not the tree should be validated
        while parsed. This should be set to ``True`` on initial load
        of the logic tree (before importing it to the database) and
        to ``False`` on workers side (when loaded from the database).
    :raises ParsingError:
        If the logic tree file or any of the referenced files cannot be read
        or parsed.
    :raises ValidationError:
        If the logic tree file has a logic error that cannot be prevented
        by XML schema rules (like referencing a source with a missing id).
    """
    NRML = nrml.NAMESPACE
    FILTERS = ('applyToTectonicRegionType', 'applyToSources',
               'applyToSourceType')

    _xmlschema = None

    def __init__(self, filename, validate=True, seed=0, num_samples=0):
        self.filename = filename
        self.basepath = os.path.dirname(filename)
        self.seed = seed
        self.num_samples = num_samples
        self.branches = {}
        self.open_ends = set()
        try:
            tree = parse(filename)
        except etree.ParseError as exc:
            # Wrap etree parsing exception to :exc:`ParsingError`.
            raise ParsingError(self.filename, str(exc))
        [tree] = tree.findall('{%s}logicTree' % self.NRML)
        self.root_branchset = None
        self.parse_tree(tree, validate)

    def skip_branchset_condition(self, attrs):
        """
        Override in subclasses to skip a branchset depending on a
        condition on its attributes.

        :param attrs: a dictionary with the attributes of the branchset
        """
        return False

    def parse_tree(self, tree_node, validate):
        """
        Parse the whole tree and point ``root_branchset`` attribute
        to the tree's root. If ``validate`` is set to ``True``, calls
        :meth:`validate_tree` when done. Also passes that value
        to :meth:`parse_branchinglevel`.
        """
        levels = tree_node.findall('{%s}logicTreeBranchingLevel' % self.NRML)
        for depth, branchinglevel_node in enumerate(levels):
            self.parse_branchinglevel(branchinglevel_node, depth, validate)
        if validate:
            self.validate_tree(tree_node, self.root_branchset)

    def parse_branchinglevel(self, branchinglevel_node, depth, validate):
        """
        Parse one branching level.

        :param branchinglevel_node:
            ``etree.Element`` object with tag "logicTreeBranchingLevel".
        :param depth:
            The sequential number of this branching level, based on 0.
        :param validate:
            Whether or not the branching level, its branchsets and their
            branches should be validated.

        Enumerates the child branchsets and calls :meth:`parse_branchset`,
        :meth:`validate_branchset`, :meth:`parse_branches` and finally
        :meth:`apply_branchset` for each.

        Keeps track of "open ends" -- the set of branches that don't have
        any child branchset on this step of execution. After processing
        of every branching level only those branches that are listed in it
        can have child branchsets (if there is one on the next level).
        """
        new_open_ends = set()
        branchsets = branchinglevel_node.findall('{%s}logicTreeBranchSet' %
                                                 self.NRML)
        for number, branchset_node in enumerate(branchsets):
            if self.skip_branchset_condition(branchset_node.attrib):
                continue
            branchset = self.parse_branchset(branchset_node, depth, number,
                                             validate)
            self.parse_branches(branchset_node, branchset, validate)
            if self.root_branchset is None:  # not set yet
                self.root_branchset = branchset
            else:
                self.apply_branchset(branchset_node, branchset)
            for branch in branchset.branches:
                new_open_ends.add(branch)
        self.open_ends.clear()
        self.open_ends.update(new_open_ends)

    def parse_branchset(self, branchset_node, depth, number, validate):
        """
        Create :class:`BranchSet` object using data in ``branchset_node``.

        :param branchset_node:
            ``etree.Element`` object with tag "logicTreeBranchSet".
        :param depth:
            The sequential number of branchset's branching level, based on 0.
        :param number:
            Index number of this branchset inside branching level, based on 0.
        :param validate:
            Whether or not filters defined in branchset and the branchset
            itself should be validated.
        :returns:
            An instance of :class:`BranchSet` with filters applied but with
            no branches (they're attached in :meth:`parse_branches`).
        """
        uncertainty_type = branchset_node.get('uncertaintyType')
        filters = dict((filtername, branchset_node.get(filtername))
                       for filtername in self.FILTERS
                       if filtername in branchset_node.attrib)
        if validate:
            self.validate_filters(branchset_node, uncertainty_type, filters)
        filters = self.parse_filters(branchset_node, uncertainty_type, filters)
        branchset = BranchSet(uncertainty_type, filters)
        if validate:
            self.validate_branchset(branchset_node, depth, number, branchset)
        return branchset

    def parse_branches(self, branchset_node, branchset, validate):
        """
        Create and attach branches at ``branchset_node`` to ``branchset``.

        :param branchset_node:
            Same as for :meth:`parse_branchset`.
        :param branchset:
            An instance of :class:`BranchSet`.
        :param validate:
            Whether or not branches' uncertainty values should be validated.

        Checks that each branch has a :meth:`valid <validate_uncertainty_value>`
        value and a unique id, and that all branches have a total weight of 1.0.

        :return:
            ``None``, all branches are attached to provided branchset.
        """
        weight_sum = 0
        branches = branchset_node.findall('{%s}logicTreeBranch' % self.NRML)
        for branchnode in branches:
            weight = branchnode.find('{%s}uncertaintyWeight' % self.NRML).text
            weight = Decimal(weight.strip())
            weight_sum += weight
            value_node = branchnode.find('{%s}uncertaintyModel' % self.NRML)
            if validate:
                self.validate_uncertainty_value(value_node, branchset,
                                                value_node.text.strip())
            value = self.parse_uncertainty_value(value_node, branchset,
                                                 value_node.text.strip())
            branch_id = branchnode.get('branchID')
            branch = Branch(branch_id, weight, value)
            if branch_id in self.branches:
                raise ValidationError(branchnode, self.filename,
                                      "branchID %r is not unique" % branch_id)
            self.branches[branch_id] = branch
            branchset.branches.append(branch)
        if weight_sum != 1.0:
            raise ValidationError(branchset_node, self.filename,
                                  "branchset weights don't sum up to 1.0")

    def apply_branchset(self, branchset_node, branchset):
        # pylint: disable=W0613
        """
        Apply ``branchset`` to all "open end" branches.
        See :meth:`parse_branchinglevel`.

        :param branchset_node:
            Same as for :meth:`parse_branchset`.
        :param branchset:
            An instance of :class:`BranchSet` to make it child
            for "open-end" branches.

        Can be overridden by subclasses if they want to apply branchsets
        to branches selectively.
        """
        for branch in self.open_ends:
            branch.child_branchset = branchset

    def validate_tree(self, tree_node, root_branchset):
        """
        Check the whole parsed tree for consistency and sanity.

        Can be overridden by subclasses. Base class implementation does nothing.

        :param tree_node:
            ``etree.Element`` object with tag "logicTree".
        :param root_branchset:
            An instance of :class:`BranchSet` which is about to become
            the root branchset for this tree.
        """

    def sample_path(self, rnd):
        """
        Return the model name and a list of branch ids.

        :param rnd: a ``random.Random`` instance used for the sampling
        """
        branchset = self.root_branchset
        branch_ids = []
        while branchset is not None:
            [branch] = sample(branchset.branches, 1, rnd)
            branch_ids.append(branch.branch_id)
            branchset = branch.child_branchset
        modelname = self.root_branchset.get_branch_by_id(branch_ids[0]).value
        return modelname, branch_ids

    def __iter__(self):
        """
        Yield Realization tuples. If ``num_samples`` is nonzero, a random
        sampling of the logic tree is performed and each realization gets
        an equal weight of 1 / num_samples; otherwise a full enumeration
        is performed and the weights come from the branch weights.
        """
        if self.num_samples:
            # random sampling of the logic tree
            rnd = random.Random(self.seed)
            weight = 1. / self.num_samples
            for _ in range(self.num_samples):
                name, sm_lt_path = self.sample_path(rnd)
                yield Realization(name, weight, tuple(sm_lt_path), None,
                                  tuple(sm_lt_path))
        else:  # full enumeration
            for weight, smlt_path in self.root_branchset.enumerate_paths():
                name = smlt_path[0].value
                smlt_branch_ids = [branch.branch_id for branch in smlt_path]
                yield Realization(name, weight, tuple(smlt_branch_ids), None,
                                  tuple(smlt_branch_ids))

    @abc.abstractmethod
    def parse_uncertainty_value(self, node, branchset, value):
        """
        Do any kind of type conversion or adaptation on the uncertainty value.

        Abstract method, must be overridden by subclasses.

        Parameters are the same as for :meth:`validate_uncertainty_value`.

        :return:
            Something to replace ``value`` as the uncertainty value.
        """

    @abc.abstractmethod
    def validate_uncertainty_value(self, node, branchset, value):
        """
        Check the value ``value`` for correctness to be set for one
        of branchset's branches.

        Abstract method, must be overridden by subclasses.

        :param node:
            ``etree.Element`` object with tag "uncertaintyModel" (the one
            that contains the subject value).
        :param branchset:
            An instance of :class:`BranchSet` which will have the branch
            with provided value attached once it's validated.
        :param value:
            The actual value to be checked. Type depends on branchset's
            uncertainty type.
        """

    @abc.abstractmethod
    def parse_filters(self, branchset_node, uncertainty_type, filters):
        """
        Do any kind of type conversion or adaptation on the filters.

        Abstract method, must be overridden by subclasses.

        Parameters are the same as for :meth:`validate_filters`.

        :return:
            The filters dictionary to replace the original.
        """

    @abc.abstractmethod
    def validate_filters(self, node, uncertainty_type, filters):
        """
        Check that filters ``filters`` are valid for given uncertainty type.

        Abstract method, must be overridden by subclasses.

        :param node:
            ``etree.Element`` object with tag "logicTreeBranchSet".
        :param uncertainty_type:
            String specifying the uncertainty type.
            See the list in :class:`BranchSet`.
        :param filters:
            Filters dictionary.
        """

    @abc.abstractmethod
    def validate_branchset(self, branchset_node, depth, number, branchset):
        """
Example #14
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """
    sitemesh = datastore.persistent_attribute('sitemesh')
    sitecol = datastore.persistent_attribute('sitecol')
    rlzs_assoc = datastore.persistent_attribute('rlzs_assoc')
    realizations = datastore.persistent_attribute('realizations')
    assets_by_site = datastore.persistent_attribute('assets_by_site')
    assetcol = datastore.persistent_attribute('assetcol')
    cost_types = datastore.persistent_attribute('cost_types')
    taxonomies = datastore.persistent_attribute('taxonomies')
    job_info = datastore.persistent_attribute('job_info')
    source_chunks = datastore.persistent_attribute('source_chunks')
    source_pre_info = datastore.persistent_attribute('source_pre_info')
    performance = datastore.persistent_attribute('performance')
    csm = datastore.persistent_attribute('composite_source_model')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    def __init__(self, oqparam, monitor=DummyMonitor(), calc_id=None,
                 persistent=True):
        self.monitor = monitor
        if persistent:
            self.datastore = datastore.DataStore(calc_id)
        else:
            self.datastore = general.AccumDict()
            self.datastore.hdf5 = {}
            self.datastore.attrs = {}
        self.datastore.export_dir = oqparam.export_dir
        self.oqparam = oqparam
        self.persistent = persistent

    def save_params(self, **kw):
        """
        Update the current calculation parameters
        """
        vars(self.oqparam).update(kw)
        for name, val in self.oqparam.to_params():
            self.datastore.attrs[name] = val
        self.datastore.attrs['oqlite_version'] = repr(__version__)
        self.datastore.hdf5.flush()

    def run(self, pre_execute=True, clean_up=True, concurrent_tasks=None,
            **kw):
        """
        Run the calculation and return the exported outputs.
        """
        if concurrent_tasks is not None:
            self.oqparam.concurrent_tasks = concurrent_tasks
        self.save_params(**kw)
        exported = {}
        try:
            if pre_execute:
                with self.monitor('pre_execute', autoflush=True):
                    self.pre_execute()
            with self.monitor('execute', autoflush=True):
                result = self.execute()
            with self.monitor('post_execute', autoflush=True):
                self.post_execute(result)
            with self.monitor('export', autoflush=True):
                exported = self.export()
        except:
            if kw.get('pdb'):  # post-mortem debug
                tb = sys.exc_info()[2]
                traceback.print_exc(tb)
                pdb.post_mortem(tb)
            else:
                logging.critical('', exc_info=True)
                raise
        # don't cleanup if there is a critical error, otherwise
        # there will likely be a cleanup error covering the real one
        if clean_up:
            self.clean_up()
        return exported

    def core_func(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually runs the core function in parallel
        and returns a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.

        :returns: dictionary output_key -> sorted list of exported paths
        """
        exported = {}
        individual_curves = self.oqparam.individual_curves
        fmts = exports.split(',') if exports else self.oqparam.exports
        for fmt in fmts:
            if not fmt:
                continue
            for key in self.datastore:  # top level keys
                if 'rlzs' in key and not individual_curves:
                    continue  # skip individual curves
                ekey = (key, fmt)
                if ekey not in export.export:  # non-exportable output
                    continue
                exported[ekey] = export.export(ekey, self.datastore)
                logging.info('exported %s: %s', key, exported[ekey])
        return exported

    def clean_up(self):
        """
        Collect the realizations and the monitoring information,
        then close the datastore.
        """
        if 'rlzs_assoc' in self.datastore:
            self.realizations = numpy.array(
                [(r.uid, r.weight) for r in self.rlzs_assoc.realizations],
                rlz_dt)
        performance = self.monitor.collect_performance()
        if performance is not None:
            self.performance = performance
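A do-nothing calculator satisfying the abstract interface might look like the sketch below; it assumes a valid oqparam object and is meant only to show which methods the run() template above calls and in which order:

class HelloCalculator(BaseCalculator):  # illustrative only
    def pre_execute(self):
        # initialization phase: read inputs, build the site collection, etc.
        self.message = 'hello'

    def execute(self):
        # execution phase: normally distributed to workers via core_func
        return {'msg': self.message + ' world'}

    def post_execute(self, result):
        # post-processing phase: store the aggregated result for export
        self.result = result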
Example #15
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """
    sitemesh = datastore.persistent_attribute('sitemesh')
    sitecol = datastore.persistent_attribute('sitecol')
    etags = datastore.persistent_attribute('etags')
    assetcol = datastore.persistent_attribute('assetcol')
    cost_types = datastore.persistent_attribute('cost_types')
    job_info = datastore.persistent_attribute('job_info')
    performance = datastore.persistent_attribute('performance')
    csm = datastore.persistent_attribute('composite_source_model')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    @property
    def taxonomies(self):
        return self.datastore['assetcol/taxonomies'].value

    def __init__(self, oqparam, monitor=Monitor(), calc_id=None):
        self.monitor = monitor
        self.datastore = datastore.DataStore(calc_id)
        self.monitor.calc_id = self.datastore.calc_id
        self.monitor.hdf5path = self.datastore.hdf5path
        self.datastore.export_dir = oqparam.export_dir
        self.oqparam = oqparam

    def save_params(self, **kw):
        """
        Update the current calculation parameters and save engine_version
        """
        vars(self.oqparam).update(engine_version=__version__, **kw)
        self.datastore['oqparam'] = self.oqparam  # save the updated oqparam
        self.datastore.flush()

    def set_log_format(self):
        """Set the format of the root logger"""
        fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(
            self.datastore.calc_id)
        for handler in logging.root.handlers:
            handler.setFormatter(logging.Formatter(fmt))

    def run(self, pre_execute=True, concurrent_tasks=None, close=True, **kw):
        """
        Run the calculation and return the exported outputs.
        """
        self.close = close
        self.set_log_format()
        if logversion:  # make sure this is logged only once
            logging.info('Using engine version %s', __version__)
            logversion.pop()
        if (concurrent_tasks is not None
                and concurrent_tasks != OqParam.concurrent_tasks.default):
            self.oqparam.concurrent_tasks = concurrent_tasks
        self.save_params(**kw)
        exported = {}
        try:
            if pre_execute:
                self.pre_execute()
            result = self.execute()
            self.post_execute(result)
            exported = self.export(kw.get('exports', ''))
        except KeyboardInterrupt:
            pids = ' '.join(str(p.pid) for p in executor._processes)
            sys.stderr.write(
                'You can manually kill the workers with kill %s\n' % pids)
            raise
        except:
            if kw.get('pdb'):  # post-mortem debug
                tb = sys.exc_info()[2]
                traceback.print_exc(tb)
                pdb.post_mortem(tb)
            else:
                logging.critical('', exc_info=True)
                raise
        self.clean_up()
        return exported

    def core_task(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually runs the core function in parallel
        and returns a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.

        :returns: dictionary output_key -> sorted list of exported paths
        """
        # avoid circular imports
        from openquake.commonlib.export import export as exp
        exported = {}
        individual_curves = self.oqparam.individual_curves
        if exports and isinstance(exports, tuple):
            fmts = exports
        elif exports:  # is a string
            fmts = exports.split(',')
        else:  # fall back to the export formats in oqparam
            fmts = self.oqparam.exports
        for fmt in fmts:
            if not fmt:
                continue
            keys = set(self.datastore)
            if (self.oqparam.uniform_hazard_spectra
                    and not self.oqparam.hazard_maps):
                # do not export the hazard maps, even if they are there
                keys.remove('hmaps')
            for key in sorted(keys):  # top level keys
                if 'rlzs' in key and not individual_curves:
                    continue  # skip individual curves
                ekey = (key, fmt)
                if ekey not in exp:  # non-exportable output
                    continue
                with self.monitor('export'):
                    exported[ekey] = exp(ekey, self.datastore)
                logging.info('exported %s: %s', key, exported[ekey])
            # special case for uhs which is a view
            if (self.oqparam.uniform_hazard_spectra
                    and 'hmaps' in self.datastore):
                ekey = ('uhs', fmt)
                exported[ekey] = exp(ekey, self.datastore)
                logging.info('exported %s: %s', ekey[0], exported[ekey])
        return exported

    def clean_up(self):
        """
        Collect the realizations and the monitoring information,
        then close the datastore.
        """
        if 'hcurves' in self.datastore:
            self.datastore.set_nbytes('hcurves')
        if 'hmaps' in self.datastore:
            self.datastore.set_nbytes('hmaps')
        self.datastore.flush()
        if self.close:  # in the engine we close later
            try:
                self.datastore.close()
            except RuntimeError:  # there could be a mysterious HDF5 error
                logging.warn('', exc_info=True)
Example #16
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """
    sitemesh = datastore.persistent_attribute('sitemesh')
    sitecol = datastore.persistent_attribute('sitecol')
    rlzs_assoc = datastore.persistent_attribute('rlzs_assoc')
    realizations = datastore.persistent_attribute('realizations')
    assets_by_site = datastore.persistent_attribute('assets_by_site')
    assetcol = datastore.persistent_attribute('assetcol')
    cost_types = datastore.persistent_attribute('cost_types')
    taxonomies = datastore.persistent_attribute('taxonomies')
    job_info = datastore.persistent_attribute('job_info')
    source_chunks = datastore.persistent_attribute('source_chunks')
    performance = datastore.persistent_attribute('performance')
    csm = datastore.persistent_attribute('composite_source_model')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    def __init__(self, oqparam, monitor=DummyMonitor(), calc_id=None):
        self.monitor = monitor
        self.datastore = datastore.DataStore(calc_id)
        self.monitor.hdf5path = self.datastore.hdf5path
        self.datastore.export_dir = oqparam.export_dir
        self.oqparam = oqparam

    def save_params(self, **kw):
        """
        Update the current calculation parameters
        """
        vars(self.oqparam).update(kw)
        for name, val in self.oqparam.to_params():
            self.datastore.attrs[name] = val
        self.datastore.attrs['oqlite_version'] = repr(__version__)
        self.datastore.hdf5.flush()

    def set_log_format(self):
        """Set the format of the root logger"""
        fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(
            self.datastore.calc_id)
        for handler in logging.root.handlers:
            handler.setFormatter(logging.Formatter(fmt))

    def run(self, pre_execute=True, concurrent_tasks=None, **kw):
        """
        Run the calculation and return the exported outputs.
        """
        self.set_log_format()
        if (concurrent_tasks is not None
                and concurrent_tasks != OqParam.concurrent_tasks.default):
            self.oqparam.concurrent_tasks = concurrent_tasks
        self.save_params(**kw)
        exported = {}
        try:
            if pre_execute:
                self.pre_execute()
            result = self.execute()
            self.post_execute(result)
            exported = self.export(kw.get('exports', ''))
        except KeyboardInterrupt:
            pids = ' '.join(str(p.pid) for p in executor._processes)
            sys.stderr.write(
                'You can manually kill the workers with kill %s\n' % pids)
            raise
        except:
            if kw.get('pdb'):  # post-mortem debug
                tb = sys.exc_info()[2]
                traceback.print_exc(tb)
                pdb.post_mortem(tb)
            else:
                logging.critical('', exc_info=True)
                raise
        self.clean_up()
        return exported

    def core_func(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually runs the core function in parallel
        and returns a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.

        :returns: dictionary output_key -> sorted list of exported paths
        """
        # avoid circular imports
        from openquake.commonlib.export import export as exp
        exported = {}
        individual_curves = self.oqparam.individual_curves
        if exports and isinstance(exports, tuple):
            fmts = exports
        elif exports:  # is a string
            fmts = exports.split(',')
        else:  # fall back to the export formats in oqparam
            fmts = self.oqparam.exports
        for fmt in fmts:
            if not fmt:
                continue
            for key in self.datastore:  # top level keys
                if 'rlzs' in key and not individual_curves:
                    continue  # skip individual curves
                ekey = (key, fmt)
                if ekey not in exp:  # non-exportable output
                    continue
                exported[ekey] = exp(ekey, self.datastore)
                logging.info('exported %s: %s', key, exported[ekey])
        return exported

    def clean_up(self):
        """
        Collect the realizations and the monitoring information,
        then close the datastore.
        """
        if 'hcurves' in self.datastore:
            _set_nbytes('hcurves', self.datastore)
        if 'hmaps' in self.datastore:
            _set_nbytes('hmaps', self.datastore)
        if 'rlzs_assoc' in self.datastore:
            rlzs = self.rlzs_assoc.realizations
            self.realizations = numpy.array([(r.uid, r.weight) for r in rlzs],
                                            rlz_dt)
Example #17
class GroundShakingIntensityModel(with_metaclass(MetaGSIM)):
    """
    Base class for all the ground shaking intensity models.

    A Ground Shaking Intensity Model (GSIM) defines a set of equations
    for computing mean and standard deviation of a Normal distribution
    representing the variability of an intensity measure (or of its logarithm)
    at a site given an earthquake rupture.

    This class is not intended to be subclassed directly, instead
    the actual GSIMs should subclass either :class:`GMPE` or :class:`IPE`.

    Subclasses of both must implement :meth:`get_mean_and_stddevs`
    and all the class attributes with names starting from ``DEFINED_FOR``
    and ``REQUIRES``.
    """

    #: Reference to a
    #: :class:`tectonic region type <openquake.hazardlib.const.TRT>` this GSIM
    #: is defined for. One GSIM can implement only one tectonic region type.
    DEFINED_FOR_TECTONIC_REGION_TYPE = abc.abstractproperty()

    #: Set of :mod:`intensity measure types <openquake.hazardlib.imt>`
    #: this GSIM can
    #: calculate. A set should contain classes from module
    #: :mod:`openquake.hazardlib.imt`.
    DEFINED_FOR_INTENSITY_MEASURE_TYPES = abc.abstractproperty()

    #: Reference to a :class:`intensity measure component type
    #: <openquake.hazardlib.const.IMC>` this GSIM can calculate mean
    #: and standard
    #: deviation for.
    DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = abc.abstractproperty()

    #: Set of
    #: :class:`standard deviation types <openquake.hazardlib.const.StdDev>`
    #: this GSIM can calculate.
    DEFINED_FOR_STANDARD_DEVIATION_TYPES = abc.abstractproperty()

    #: Set of site parameters names this GSIM needs. The set should include
    #: strings that match names of the attributes of a :class:`site
    #: <openquake.hazardlib.site.Site>` object.
    #: Those attributes are then available in the
    #: :class:`SitesContext` object with the same names.
    REQUIRES_SITES_PARAMETERS = abc.abstractproperty()

    #: Set of rupture parameters (excluding distance information) required
    #: by GSIM. Supported parameters are:
    #:
    #: ``mag``
    #:     Magnitude of the rupture.
    #: ``dip``
    #:     Rupture's surface dip angle in decimal degrees.
    #: ``rake``
    #:     Angle describing the slip propagation on the rupture surface,
    #:     in decimal degrees. See :mod:`~openquake.hazardlib.geo.nodalplane`
    #:     for more detailed description of dip and rake.
    #: ``ztor``
    #:     Depth of rupture's top edge in km. See
    #:     :meth:`~openquake.hazardlib.geo.surface.base.BaseSurface.get_top_edge_depth`.
    #:
    #: These parameters are available from the :class:`RuptureContext` object
    #: attributes with same names.
    REQUIRES_RUPTURE_PARAMETERS = abc.abstractproperty()

    #: Set of types of distance measures between rupture and sites. Possible
    #: values are:
    #:
    #: ``rrup``
    #:     Closest distance to rupture surface.  See
    #:     :meth:`~openquake.hazardlib.geo.surface.base.BaseSurface.get_min_distance`.
    #: ``rjb``
    #:     Distance to rupture's surface projection. See
    #:     :meth:`~openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`.
    #: ``rx``
    #:     Perpendicular distance to rupture top edge projection.
    #:     See :meth:`~openquake.hazardlib.geo.surface.base.BaseSurface.get_rx_distance`.
    #: ``ry0``
    #:     Horizontal distance off the end of the rupture measured parallel
    #:     to strike. See
    #:     :meth:`~openquake.hazardlib.geo.surface.base.BaseSurface.get_ry0_distance`.
    #: ``rcdpp``
    #:     Direct point parameter for directivity effect centered on the
    #:     site- and earthquake-specific average DPP used. See
    #:     :meth:`~openquake.hazardlib.source.rupture.ParametricProbabilisticRupture.get_dppvalue`.
    #: ``rvolc``
    #:     Source to site distance passing through surface projection of volcanic zone
    #:
    #: All the distances are available from the :class:`DistancesContext`
    #: object attributes with same names. Values are in kilometers.
    REQUIRES_DISTANCES = abc.abstractproperty()

    minimum_distance = 0  # can be set by the engine

    @abc.abstractmethod
    def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        """
        Calculate and return the mean value of the intensity distribution and
        its standard deviation.

        Method must be implemented by subclasses.

        :param sites:
            Instance of :class:`openquake.hazardlib.site.SiteCollection`
            with parameters of sites
            collection assigned to respective values as numpy arrays.
            Only those attributes that are listed in class'
            :attr:`REQUIRES_SITES_PARAMETERS` set are available.
        :param rup:
            Instance of :class:`openquake.hazardlib.source.rupture.BaseRupture`
            with parameters of a rupture
            assigned to respective values. Only those attributes that are
            listed in class' :attr:`REQUIRES_RUPTURE_PARAMETERS` set are
            available.
        :param dists:
            Instance of :class:`DistancesContext` with values of distance
            measures between the rupture and each site of the collection
            assigned to respective values as numpy arrays. Only those
            attributes that are listed in class' :attr:`REQUIRES_DISTANCES`
            set are available.
        :param imt:
            An instance (not a class) of intensity measure type.
            See :mod:`openquake.hazardlib.imt`.
        :param stddev_types:
            List of standard deviation types, constants from
            :class:`openquake.hazardlib.const.StdDev`.
            The method result should include standard deviation values
            for each of the types in this list.

        :returns:
            Method should return a tuple of two items. First item should be
            a numpy array of floats -- mean values of respective component
            of a chosen intensity measure type, and the second should be
            a list of numpy arrays of standard deviation values for the same
            single component of the same single intensity measure type, one
            array for each type in ``stddev_types`` parameter, preserving
            the order.

        Combining the interfaces to mean and standard deviation values in a
        single method makes it possible to avoid redoing intermediate
        calculations that are shared between the stddev and mean formulae,
        without resorting to keeping any sort of internal state (which would
        effectively make the GSIM non-reentrant).

        However, it is advisable to split the calculation of mean and stddev
        values and have ``get_mean_and_stddevs()`` just combine both (and
        possibly compute interim steps).
        """

    def get_poes(self, sctx, rctx, dctx, imt, imls, truncation_level):
        """
        Calculate and return probabilities of exceedance (PoEs) of one or more
        intensity measure levels (IMLs) of one intensity measure type (IMT)
        for one or more pairs "site -- rupture".

        :param sctx:
            An instance of :class:`SitesContext` with sites information
            to calculate PoEs on.
        :param rctx:
            An instance of :class:`RuptureContext` with a single rupture
            information.
        :param dctx:
            An instance of :class:`DistancesContext` with information about
            the distances between sites and a rupture.

            All three contexts (``sctx``, ``rctx`` and ``dctx``) must conform
            to each other. The easiest way to get them is to call
            ContextMaker.make_contexts.
        :param imt:
            An intensity measure type object (that is, an instance of one
            of classes from :mod:`openquake.hazardlib.imt`).
        :param imls:
            List of intensity measure levels of interest (of type ``imt``).
        :param truncation_level:
            Can be ``None``, which means that the distribution of intensity
            is treated as Gaussian distribution with possible values ranging
            from minus infinity to plus infinity.

            When set to zero, the mean intensity is treated as an exact
            value (the standard deviation is not even computed in that case)
            and the resulting array contains 0 where the mean value of
            intensity is strictly lower than the IML and 1.0 where it is
            equal or greater.

            When truncation level is positive number, the intensity
            distribution is processed as symmetric truncated Gaussian with
            range borders being ``mean - truncation_level * stddev`` and
            ``mean + truncation_level * stddev``. That is, the truncation
            level expresses how far the range borders are from the mean
            value and is defined in units of sigmas. The resulting PoEs
            for that mode are values of complementary cumulative distribution
            function of that truncated Gaussian applied to IMLs.

        :returns:
            A 2d numpy array of PoEs: the first dimension represents sites,
            the second the intensity measure levels from ``imls``.

        :raises ValueError:
            If the truncation level is neither ``None`` nor a non-negative
            float, or if ``imt`` is not supported by the GSIM (see
            :attr:`DEFINED_FOR_INTENSITY_MEASURE_TYPES`).
        """
        if truncation_level is not None and truncation_level < 0:
            raise ValueError('truncation level must be zero, positive number '
                             'or None')
        self._check_imt(imt)

        if truncation_level == 0:
            # zero truncation mode, just compare imls to mean
            imls = self.to_distribution_values(imls)
            mean, _ = self.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
            mean = mean.reshape(mean.shape + (1, ))
            return (imls <= mean).astype(float)
        else:
            # use real normal distribution
            assert (const.StdDev.TOTAL
                    in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
            imls = self.to_distribution_values(imls)
            mean, [stddev] = self.get_mean_and_stddevs(sctx, rctx, dctx, imt,
                                                       [const.StdDev.TOTAL])
            mean = mean.reshape(mean.shape + (1, ))
            stddev = stddev.reshape(stddev.shape + (1, ))
            values = (imls - mean) / stddev
            if truncation_level is None:
                return _norm_sf(values)
            else:
                return _truncnorm_sf(truncation_level, values)

    def disaggregate_pne(self, rupture, sctx, dctx, imt, iml, truncnorm,
                         epsilons):
        """
        Disaggregate (separate) PoE of ``iml`` in different contributions
        each coming from ``epsilons`` distribution bins.

        Other parameters are the same as for :meth:`get_poes`, with the
        difference that ``truncation_level`` is required to be positive.

        :returns:
            Contribution to probability of exceedance of ``iml`` coming
            from different sigma bands in the form of a 2d numpy array of
            probabilities with shape (n_sites, n_epsilons)
        """
        # compute mean and standard deviations
        mean, [stddev] = self.get_mean_and_stddevs(sctx, rupture, dctx, imt,
                                                   [const.StdDev.TOTAL])

        # compute iml value with respect to standard (mean=0, std=1)
        # normal distributions
        standard_imls = (self.to_distribution_values(iml) - mean) / stddev

        # compute epsilon bins contributions
        contribution_by_bands = (truncnorm.cdf(epsilons[1:]) -
                                 truncnorm.cdf(epsilons[:-1]))

        # take the minimum epsilon larger than standard_iml
        bins = numpy.searchsorted(epsilons, standard_imls)
        poe_by_site = []
        n_epsilons = len(epsilons) - 1
        for lvl, bin in zip(standard_imls, bins):  # one per site
            if bin == 0:
                poe_by_site.append(contribution_by_bands)
            elif bin > n_epsilons:
                poe_by_site.append(numpy.zeros(n_epsilons))
            else:
                # for other cases (when ``lvl`` falls somewhere in the
                # histogram):
                poe = numpy.concatenate([
                    # take zeros for bins that are on the left hand side
                    # from the bin ``lvl`` falls into,
                    numpy.zeros(bin - 1),
                    # ... area of the portion of the bin containing ``lvl``
                    # (the portion is limited on the left hand side by
                    # ``lvl`` and on the right hand side by the bin edge),
                    [truncnorm.sf(lvl) - contribution_by_bands[bin:].sum()],
                    # ... and all bins on the right go unchanged.
                    contribution_by_bands[bin:]
                ])
                poe_by_site.append(poe)
        poes = numpy.array(poe_by_site)  # shape (n_sites, n_epsilons)
        return rupture.get_probability_no_exceedance(poes)

    @abc.abstractmethod
    def to_distribution_values(self, values):
        """
        Convert a list or array of values in units of IMT to a numpy array
        of values of intensity measure distribution (like taking the natural
        logarithm for :class:`GMPE`).

        This method is implemented by both :class:`GMPE` and :class:`IPE`
        so there is no need to override it in actual GSIM implementations.
        """

    @abc.abstractmethod
    def to_imt_unit_values(self, values):
        """
        Convert a list or array of values of intensity measure distribution
        (like ones returned from :meth:`get_mean_and_stddevs`) to values
        in units of IMT. This is the opposite operation
        to :meth:`to_distribution_values`.

        This method is implemented by both :class:`GMPE` and :class:`IPE`
        so there is no need to override it in actual GSIM implementations.
        """

    def _check_imt(self, imt):
        """
        Make sure that ``imt`` is valid and is supported by this GSIM.
        """
        if not isinstance(imt, imt_module._IMT):
            raise ValueError('imt must be an instance of IMT subclass')
        if type(imt) not in self.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            raise ValueError('imt %s is not supported by %s' %
                             (type(imt).__name__, type(self).__name__))

    def __lt__(self, other):
        """
        The GSIMs are ordered according to string representation
        """
        return str(self) < str(other)

    def __eq__(self, other):
        """
        The GSIMs are equal if their string representations are equal
        """
        return str(self) == str(other)

    def __hash__(self):
        """
        We use the __str__ representation as hash: it means that GSIM
        instances and strings can be used interchangeably as dictionary keys.
        """
        return hash(str(self))

    def __str__(self):
        kwargs = ', '.join('%s=%r' % kv for kv in sorted(self.kwargs.items()))
        return "%s(%s)" % (self.__class__.__name__, kwargs)

    def __repr__(self):
        """
        Default string representation for GSIM instances. It contains
        the name and values of the arguments, if any.
        """
        return repr(str(self))
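
The ``get_poes`` implementation above delegates to two helpers, ``_norm_sf`` and ``_truncnorm_sf``, that are not shown in this excerpt. The following is a minimal sketch (not the library's actual implementation) of what they presumably compute, using ``scipy.stats``; the function names and example values are illustrative only.

import numpy
from scipy import stats

def norm_sf_sketch(values):
    # probability that a standard normal variable exceeds ``values``
    # (full, untruncated distribution)
    return stats.norm.sf(values)

def truncnorm_sf_sketch(truncation_level, values):
    # same, for a standard normal truncated at +/- truncation_level sigmas
    return stats.truncnorm(-truncation_level, truncation_level).sf(values)

# example: PoE of exceeding a level one sigma above the mean, with
# truncation at three sigmas (roughly 0.158)
poe = truncnorm_sf_sketch(3.0, numpy.array([[1.0]]))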
Ejemplo n.º 18
0
class BaseSurface(with_metaclass(abc.ABCMeta)):
    """
    Base class for a surface in 3D-space.
    """

    @abc.abstractmethod
    def get_min_distance(self, mesh):
        """
        Compute and return the minimum distance from the surface to each point
        of ``mesh``. This distance is sometimes called ``Rrup``.

        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
            minimum distance to.
        :returns:
            A numpy array of distances in km.
        """

    @abc.abstractmethod
    def get_closest_points(self, mesh):
        """
        For each point from ``mesh`` find a closest point belonging to surface.

        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to find
            closest points to.
        :returns:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of the same shape as
            ``mesh`` with the closest points of the surface at the
            corresponding indices.
        """

    @abc.abstractmethod
    def get_joyner_boore_distance(self, mesh):
        """
        Compute and return Joyner-Boore (also known as ``Rjb``) distance
        to each point of ``mesh``.

        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
            Joyner-Boore distance to.
        :returns:
            Numpy array of closest distances, in km, between each point of
            the ``mesh`` and the projection of the surface onto the earth
            surface.
        """

    @abc.abstractmethod
    def get_ry0_distance(self, mesh):
        """
        Compute the minimum distance between each point of a mesh and the great
        circle arcs perpendicular to the average strike direction of the
        fault trace and passing through the end-points of the trace.

        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
            Ry0-distance to.
        :returns:
            Numpy array of distances in km.
        """

    @abc.abstractmethod
    def get_rx_distance(self, mesh):
        """
        Compute distance between each point of mesh and surface's great circle
        arc.

        Distance is measured perpendicular to the rupture strike, from
        the surface projection of the updip edge of the rupture, with
        the down dip direction being positive (this distance is usually
        called ``Rx``).

        In other words, it is the horizontal distance to the top edge of the
        rupture, measured perpendicular to the strike. Values on the hanging wall
        are positive, values on the footwall are negative.

        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
            Rx-distance to.
        :returns:
            Numpy array of distances in km.
        """

    @abc.abstractmethod
    def get_top_edge_depth(self):
        """
        Compute minimum depth of surface's top edge.

        :returns:
            Float value, the vertical distance between the earth surface
            and the shallowest point in surface's top edge in km.
        """

    @abc.abstractmethod
    def get_strike(self):
        """
        Compute surface's strike as decimal degrees in a range ``[0, 360)``.

        The actual definition of the strike might depend on surface geometry.

        :returns:
            Float value, the azimuth (in degrees) of the surface top edge
        """

    @abc.abstractmethod
    def get_dip(self):
        """
        Compute surface's dip as decimal degrees in a range ``(0, 90]``.

        The actual definition of the dip might depend on surface geometry.

        :returns:
            Float value, the inclination (in degrees) of the surface with
            respect to the Earth surface
        """

    @abc.abstractmethod
    def get_width(self):
        """
        Compute surface's width (that is surface extension along the
        dip direction) in km.

        The actual definition depends on the type of surface geometry.

        :returns:
            Float value, the surface width
        """

    @abc.abstractmethod
    def get_area(self):
        """
        Compute surface's area in square km.

        :returns:
            Float value, the surface area
        """

    @abc.abstractmethod
    def get_bounding_box(self):
        """
        Compute surface geographical bounding box.

        :return:
            A tuple of four items. These items represent western, eastern,
            northern and southern borders of the bounding box respectively.
            Values are floats in decimal degrees.
        """

    @abc.abstractmethod
    def get_middle_point(self):
        """
Ejemplo n.º 19
0
class BaseQuadrilateralSurface(with_metaclass(abc.ABCMeta, BaseSurface)):
    """
    Base class for a quadrilateral surface in 3D-space.

    Subclasses must implement :meth:`_create_mesh`, and superclass methods
    :meth:`get_strike() <.base.BaseSurface.get_strike>`,
    :meth:`get_dip() <.base.BaseSurface.get_dip>` and
    :meth:`get_width() <.base.BaseSurface.get_width>`,
    and can override any others just for the sake of performance
    """

    def __init__(self):
        self._mesh = None

    def get_min_distance(self, mesh):
        """
        See :meth:`superclass method
        <.base.BaseSurface.get_min_distance>`
        for spec of input and result values.

        Base class implementation calls the :meth:`corresponding
        <openquake.hazardlib.geo.mesh.Mesh.get_min_distance>` method of the
        surface's :meth:`mesh <get_mesh>`.

        Subclasses may override this method in order to make use
        of knowledge of a specific surface shape and thus perform
        better.
        """
        return self.get_mesh().get_min_distance(mesh)

    def get_closest_points(self, mesh):
        """
        See :meth:`superclass method
        <.base.BaseSurface.get_closest_points>`
        for spec of input and result values.

        Base class implementation calls the :meth:`corresponding
        <openquake.hazardlib.geo.mesh.Mesh.get_closest_points>` method of the
        surface's :meth:`mesh <get_mesh>`.
        """
        return self.get_mesh().get_closest_points(mesh)

    def get_joyner_boore_distance(self, mesh):
        """
        See :meth:`superclass method
        <.base.BaseSurface.get_joyner_boore_distance>`
        for spec of input and result values.

        Base class calls surface mesh's method
        :meth:`~openquake.hazardlib.geo.mesh.Mesh.get_joyner_boore_distance`.
        """
        return self.get_mesh().get_joyner_boore_distance(mesh)

    def get_ry0_distance(self, mesh):
        """
        :param mesh:
            :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
            Ry0-distance to.
        :returns:
            Numpy array of distances in km.

        See also :meth:`superclass method <.base.BaseSurface.get_ry0_distance>`
        for spec of input and result values.

        This method uses an average strike direction to compute ry0.
        """
        # This computes ry0 by using an average strike direction
        top_edge = self.get_mesh()[0:1]
        mean_strike = self.get_strike()

        dst1 = geodetic.distance_to_arc(top_edge.lons[0, 0],
                                        top_edge.lats[0, 0],
                                        (mean_strike + 90.) % 360,
                                        mesh.lons, mesh.lats)

        dst2 = geodetic.distance_to_arc(top_edge.lons[0, -1],
                                        top_edge.lats[0, -1],
                                        (mean_strike + 90.) % 360,
                                        mesh.lons, mesh.lats)
        # Find the points on the rupture

        # Get the shortest distance from the two lines
        idx = numpy.sign(dst1) == numpy.sign(dst2)
        dst = numpy.zeros_like(dst1)
        dst[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx]))

        return dst

    def get_rx_distance(self, mesh):
        """
        See :meth:`superclass method
        <.base.BaseSurface.get_rx_distance>`
        for spec of input and result values.

        The method extracts the top edge of the surface. For each point in mesh
        it computes the Rx distance to each segment the top edge is made
        of. The calculation is done by calling the function
        :func:`openquake.hazardlib.geo.geodetic.distance_to_arc`. The final Rx
        distance matrix is then constructed by taking, for each point in mesh,
        the minimum Rx distance value computed.
        """
        top_edge = self.get_mesh()[0:1]

        dists = []
        if top_edge.lons.shape[1] < 3:

            i = 0
            p1 = Point(
                top_edge.lons[0, i],
                top_edge.lats[0, i],
                top_edge.depths[0, i]
            )
            p2 = Point(
                top_edge.lons[0, i + 1], top_edge.lats[0, i + 1],
                top_edge.depths[0, i + 1]
            )
            azimuth = p1.azimuth(p2)
            dists.append(
                geodetic.distance_to_arc(
                    p1.longitude, p1.latitude, azimuth,
                    mesh.lons, mesh.lats
                )
            )

        else:

            for i in range(top_edge.lons.shape[1] - 1):
                p1 = Point(
                    top_edge.lons[0, i],
                    top_edge.lats[0, i],
                    top_edge.depths[0, i]
                )
                p2 = Point(
                    top_edge.lons[0, i + 1],
                    top_edge.lats[0, i + 1],
                    top_edge.depths[0, i + 1]
                )
                # Swapping
                if i == 0:
                    pt = p1
                    p1 = p2
                    p2 = pt

                # Computing azimuth and distance
                if i == 0 or i == top_edge.lons.shape[1] - 2:
                    azimuth = p1.azimuth(p2)
                    tmp = geodetic.distance_to_semi_arc(p1.longitude,
                                                        p1.latitude,
                                                        azimuth,
                                                        mesh.lons, mesh.lats)
                else:
                    tmp = geodetic.min_distance_to_segment([p1.longitude,
                                                            p2.longitude],
                                                           [p1.latitude,
                                                            p2.latitude],
                                                           mesh.lons,
                                                           mesh.lats)
                # Correcting the sign of the distance
                if i == 0:
                    tmp *= -1
                dists.append(tmp)

        # Computing distances
        dists = numpy.array(dists)
        iii = abs(dists).argmin(axis=0)
        dst = dists[iii, list(range(dists.shape[1]))]

        return dst

    def get_top_edge_depth(self):
        """
        Return minimum depth of surface's top edge.

        :returns:
            Float value, the vertical distance between the earth surface
            and the shallowest point in surface's top edge in km.
        """
        top_edge = self.get_mesh()[0:1]
        if top_edge.depths is None:
            return 0
        else:
            return numpy.min(top_edge.depths)

    def _get_top_edge_centroid(self):
        """
        Return :class:`~openquake.hazardlib.geo.point.Point` representing the
        surface's top edge centroid.
        """
        top_edge = self.get_mesh()[0:1]
        return top_edge.get_middle_point()

    def get_mesh(self):
        """
        Return surface's mesh.

        Uses :meth:`_create_mesh` for creating the mesh for the first time.
        All subsequent calls to :meth:`get_mesh` return the same mesh object.

        .. warning::
            It is required that the mesh is constructed "top-to-bottom".
            That is, the first row of points should be the shallowest.
        """
        if self._mesh is None:
            self._mesh = self._create_mesh()
            assert (
                self._mesh.depths is None or len(self._mesh.depths) == 1
                or self._mesh.depths[0][0] < self._mesh.depths[-1][0]
            ), "the first row of points in the mesh must be the shallowest"
        return self._mesh

    def get_area(self):
        """
        Compute area as the sum of the mesh cells area values.
        """
        mesh = self.get_mesh()
        _, _, _, area = mesh.get_cell_dimensions()

        return numpy.sum(area)

    def get_bounding_box(self):
        """
        Compute surface bounding box from surface mesh representation. That is,
        extract longitudes and latitudes of mesh points and call
        :meth:`openquake.hazardlib.geo.utils.get_spherical_bounding_box`.

        :return:
            A tuple of four items. These items represent western, eastern,
            northern and southern borders of the bounding box respectively.
            Values are floats in decimal degrees.
        """
        mesh = self.get_mesh()

        return utils.get_spherical_bounding_box(mesh.lons, mesh.lats)

    def get_middle_point(self):
        """
        Compute middle point from surface mesh representation. Calls
        :meth:`openquake.hazardlib.geo.mesh.RectangularMesh.get_middle_point`
        """
        mesh = self.get_mesh()

        return mesh.get_middle_point()

    def get_resampled_top_edge(self, angle_var=0.1):
        """
        This method computes a simplified representation of a fault top edge
        by removing the points that are not describing a change of direction,
        provided a certain tolerance angle.

        :param float angle_var:
            Number representing the maximum deviation (in degrees) admitted
            without the creation of a new segment
        :returns:
            A :class:`~openquake.hazardlib.geo.line.Line` representing the
            rupture surface's top edge.
        """
        mesh = self.get_mesh()
        top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])]

        for i in range(len(mesh.triangulate()[1][0]) - 1):
            v1 = numpy.asarray(mesh.triangulate()[1][0][i])
            v2 = numpy.asarray(mesh.triangulate()[1][0][i + 1])
            cosang = numpy.dot(v1, v2)
            sinang = numpy.linalg.norm(numpy.cross(v1, v2))
            angle = math.degrees(numpy.arctan2(sinang, cosang))

            if abs(angle) > angle_var:

                top_edge.append(Point(mesh.lons[0][i + 1],
                                      mesh.lats[0][i + 1],
                                      mesh.depths[0][i + 1]))

        top_edge.append(Point(mesh.lons[0][-1],
                              mesh.lats[0][-1], mesh.depths[0][-1]))
        line_top_edge = Line(top_edge)

        return line_top_edge

    @abc.abstractmethod
    def _create_mesh(self):
        """
        Create and return the mesh of points covering the surface.

        :returns:
            An instance of
            :class:`openquake.hazardlib.geo.mesh.RectangularMesh`.
        """

    def get_hypo_location(self, mesh_spacing, hypo_loc=None):
        """
        The method determines the location of the hypocentre within the rupture

        :param mesh_spacing:
            The desired distance between two adjacent points in the source's
            ruptures' mesh, in km. Mainly this parameter allows balancing
            the trade-off between time needed to compute the distance
            between the rupture surface and a site and the precision of that
            computation.
        :param hypo_loc:
            Hypocentre location as fraction of rupture plane, as a tuple of
            (Along Strike, Down Dip), e.g. a hypocentre located in the centroid
            of the rupture would be input as (0.5, 0.5), whereas a
            hypocentre located in a position 3/4 along the length, and 1/4 of
            the way down dip of the rupture plane would be entered as
            (0.75, 0.25).
        :returns:
            Hypocentre location as instance of
            :class:`~openquake.hazardlib.geo.point.Point`
        """
        mesh = self.get_mesh()
        centroid = mesh.get_middle_point()
        if hypo_loc is None:
            return centroid

        total_len_y = (len(mesh.depths) - 1) * mesh_spacing
        y_distance = hypo_loc[1] * total_len_y
        y_node = int(numpy.round(y_distance / mesh_spacing))
        total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing
        x_distance = hypo_loc[0] * total_len_x
        x_node = int(numpy.round(x_distance / mesh_spacing))
        hypocentre = Point(mesh.lons[y_node][x_node],
                           mesh.lats[y_node][x_node],
                           mesh.depths[y_node][x_node])
        return hypocentre
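
A worked example of the fractional placement performed above, with made-up mesh dimensions:

# Suppose the rupture mesh has 5 rows and 11 columns with 5 km spacing,
# and hypo_loc = (0.7, 0.25): 70% along strike, 25% down dip.
mesh_spacing = 5.0
total_len_y = (5 - 1) * mesh_spacing                       # 20 km down dip
y_node = int(round(0.25 * total_len_y / mesh_spacing))     # row 1
total_len_x = (11 - 1) * mesh_spacing                      # 50 km along strike
x_node = int(round(0.7 * total_len_x / mesh_spacing))      # column 7
# the hypocentre is the mesh point at [row 1, column 7]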
Ejemplo n.º 20
0
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """

    oqparam = datastore.persistent_attribute('oqparam')
    sitemesh = datastore.persistent_attribute('sitemesh')
    sitecol = datastore.persistent_attribute('sitecol')
    rlzs_assoc = datastore.persistent_attribute('rlzs_assoc')
    realizations = datastore.persistent_attribute('realizations')
    assets_by_site = datastore.persistent_attribute('assets_by_site')
    assetcol = datastore.persistent_attribute('assetcol')
    cost_types = datastore.persistent_attribute('cost_types')
    taxonomies = datastore.persistent_attribute('taxonomies')
    job_info = datastore.persistent_attribute('job_info')
    source_chunks = datastore.persistent_attribute('source_chunks')
    source_pre_info = datastore.persistent_attribute('source_pre_info')
    performance = datastore.persistent_attribute('performance')
    csm = datastore.persistent_attribute('composite_source_model')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    def __init__(self,
                 oqparam,
                 monitor=DummyMonitor(),
                 calc_id=None,
                 persistent=True):
        self.monitor = monitor
        if persistent:
            self.datastore = datastore.DataStore(calc_id)
        else:
            self.datastore = general.AccumDict()
            self.datastore.hdf5 = {}
        self.datastore.export_dir = oqparam.export_dir
        if 'oqparam' not in self.datastore:  # new datastore
            self.oqparam = oqparam
        # else we are doing a precalculation; oqparam has been already stored
        self.persistent = persistent

    def run(self,
            pre_execute=True,
            clean_up=True,
            concurrent_tasks=None,
            **kw):
        """
        Run the calculation and return the exported outputs.
        """
        if concurrent_tasks is not None:
            self.oqparam.concurrent_tasks = concurrent_tasks
        vars(self.oqparam).update(kw)
        exported = {}
        try:
            if pre_execute:
                with self.monitor('pre_execute', autoflush=True):
                    self.pre_execute()
            with self.monitor('execute', autoflush=True):
                result = self.execute()
            with self.monitor('post_execute', autoflush=True):
                self.post_execute(result)
            with self.monitor('export', autoflush=True):
                exported = self.export()
        finally:
            etype = sys.exc_info()[0]
            if etype:
                logging.critical('', exc_info=True)
            if clean_up:
                try:
                    self.clean_up()
                except:
                    logging.error('Cleanup error', exc_info=True)
            return exported

    def core_func(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually will run in parallel the core
        function and return a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.

        :returns: dictionary output_key -> sorted list of exported paths
        """
        exported = {}
        individual_curves = self.oqparam.individual_curves
        fmts = exports.split(',') if exports else self.oqparam.exports
        for fmt in fmts:
            if not fmt:
                continue
            for key in self.datastore:
                if 'rlzs' in key and not individual_curves:
                    continue  # skip individual curves
                ekey = (key, fmt)
                try:
                    exported[ekey] = sorted(export.export(
                        ekey, self.datastore))
                    logging.info('exported %s: %s', key, exported[ekey])
                except KeyError:
                    logging.info('%s is not exportable in %s', key, fmt)
        return exported

    def clean_up(self):
        """
        Collect the realizations and the monitoring information,
        then close the datastore.
        """
        self.realizations = numpy.array(
            [(r.uid, r.weight) for r in self.rlzs_assoc.realizations], rlz_dt)
        performance = self.monitor.collect_performance()
        if performance is not None:
            self.performance = performance
        self.datastore.close()
        self.datastore.symlink(os.path.dirname(self.oqparam.inputs['job_ini']))
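
A purely illustrative sketch of what a concrete calculator has to provide on top of ``BaseCalculator``: the three abstract phases. The data handled here is made up and the datastore assignment is only indicative.

class DummyCalculator(BaseCalculator):
    """Toy calculator performing no real hazard or risk computation."""

    def pre_execute(self):
        # initialization phase: read inputs, build site collections, etc.
        self.data = list(range(10))

    def execute(self):
        # execution phase: normally distributes core_func over the workers
        return {'total': sum(self.data)}

    def post_execute(self, result):
        # post-processing phase: persist the aggregated result
        # (shown as a plain datastore assignment for illustration)
        self.datastore['total'] = result['total']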
Ejemplo n.º 21
0
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """
    from_engine = False  # set by engine.run_calc
    sitecol = datastore.persistent_attribute('sitecol')
    assetcol = datastore.persistent_attribute('assetcol')
    performance = datastore.persistent_attribute('performance')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    @property
    def taxonomies(self):
        return self.datastore['assetcol/taxonomies'].value

    def __init__(self, oqparam, monitor=Monitor(), calc_id=None):
        self._monitor = monitor
        self.datastore = datastore.DataStore(calc_id)
        self.oqparam = oqparam

    def monitor(self, operation, **kw):
        """
        Return a new Monitor instance
        """
        mon = self._monitor(operation, hdf5path=self.datastore.hdf5path)
        self._monitor.calc_id = mon.calc_id = self.datastore.calc_id
        vars(mon).update(kw)
        return mon

    def save_params(self, **kw):
        """
        Update the current calculation parameters and save engine_version
        """
        vars(self.oqparam).update(**kw)
        self.datastore['oqparam'] = self.oqparam  # save the updated oqparam
        attrs = self.datastore['/'].attrs
        attrs['engine_version'] = engine_version
        self.datastore.flush()

    def set_log_format(self):
        """Set the format of the root logger"""
        fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(
            self.datastore.calc_id)
        for handler in logging.root.handlers:
            handler.setFormatter(logging.Formatter(fmt))

    def run(self, pre_execute=True, concurrent_tasks=None, close=True, **kw):
        """
        Run the calculation and return the exported outputs.
        """
        global logversion
        self.close = close
        self.set_log_format()
        if logversion:  # make sure this is logged only once
            logging.info('Running %s', self.oqparam.inputs['job_ini'])
            logging.info('Using engine version %s', engine_version)
            logversion = False
        if concurrent_tasks is None:  # use the job.ini parameter
            ct = self.oqparam.concurrent_tasks
        else:  # use the parameter passed on the command line
            ct = concurrent_tasks
        if ct == 0:  # disable distribution temporarily
            oq_distribute = os.environ.get('OQ_DISTRIBUTE')
            os.environ['OQ_DISTRIBUTE'] = 'no'
        if ct != self.oqparam.concurrent_tasks:
            # save the used concurrent_tasks
            self.oqparam.concurrent_tasks = ct
        self.save_params(**kw)
        exported = {}
        try:
            if pre_execute:
                self.pre_execute()
            self.result = self.execute()
            if self.result is not None:
                self.post_execute(self.result)
            self.before_export()
            exported = self.export(kw.get('exports', ''))
        except KeyboardInterrupt:
            pids = ' '.join(str(p.pid) for p in executor._processes)
            sys.stderr.write(
                'You can manually kill the workers with kill %s\n' % pids)
            raise
        except:
            if kw.get('pdb'):  # post-mortem debug
                tb = sys.exc_info()[2]
                traceback.print_tb(tb)
                pdb.post_mortem(tb)
            else:
                logging.critical('', exc_info=True)
                raise
        finally:
            if ct == 0:  # restore OQ_DISTRIBUTE
                if oq_distribute is None:  # was not set
                    del os.environ['OQ_DISTRIBUTE']
                else:
                    os.environ['OQ_DISTRIBUTE'] = oq_distribute
        return exported

    def core_task(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually will run in parallel the core
        function and return a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.
        Individual outputs are not exported if there are multiple realizations.

        :returns: dictionary output_key -> sorted list of exported paths
        """
        num_rlzs = len(self.datastore['realizations'])
        exported = {}
        if isinstance(exports, tuple):
            fmts = exports
        elif exports:  # is a string
            fmts = exports.split(',')
        elif isinstance(self.oqparam.exports, tuple):
            fmts = self.oqparam.exports
        else:  # is a string
            fmts = self.oqparam.exports.split(',')
        keys = set(self.datastore)
        has_hcurves = 'hcurves' in self.datastore or 'poes' in self.datastore
        if has_hcurves:
            keys.add('hcurves')
        for fmt in fmts:
            if not fmt:
                continue
            for key in sorted(keys):  # top level keys
                if 'rlzs' in key and num_rlzs > 1:
                    continue  # skip individual curves
                self._export((key, fmt), exported)
            if has_hcurves and self.oqparam.hazard_maps:
                self._export(('hmaps', fmt), exported)
            if has_hcurves and self.oqparam.uniform_hazard_spectra:
                self._export(('uhs', fmt), exported)

        if self.close:  # in the engine we close later
            self.result = None
            try:
                self.datastore.close()
            except (RuntimeError, ValueError):
                # sometimes produces errors but they are difficult to
                # reproduce
                logging.warn('', exc_info=True)
        return exported

    def _export(self, ekey, exported):
        if ekey in exp:
            with self.monitor('export'):
                exported[ekey] = exp(ekey, self.datastore)
                logging.info('exported %s: %s', ekey[0], exported[ekey])

    def before_export(self):
        """
        Collect the realizations and set the attributes nbytes
        """
        sm_by_rlz = self.datastore['csm_info'].get_sm_by_rlz(
            self.rlzs_assoc.realizations) or collections.defaultdict(
                lambda: 'NA')
        self.datastore['realizations'] = numpy.array(
            [(r.uid, sm_by_rlz[r], gsim_names(r), r.weight)
             for r in self.rlzs_assoc.realizations], rlz_dt)
        if 'hcurves' in set(self.datastore):
            self.datastore.set_nbytes('hcurves')
        self.datastore.flush()
Ejemplo n.º 22
0
class BaseRupture(with_metaclass(abc.ABCMeta)):
    """
    Rupture object represents a single earthquake rupture.

    :param mag:
        Magnitude of the rupture.
    :param rake:
        Rake value of the rupture.
        See :class:`~openquake.hazardlib.geo.nodalplane.NodalPlane`.
    :param tectonic_region_type:
        Rupture's tectonic regime. One of constants
        in :class:`openquake.hazardlib.const.TRT`.
    :param hypocenter:
        A :class:`~openquake.hazardlib.geo.point.Point`, rupture's hypocenter.
    :param surface:
        An instance of subclass of
        :class:`~openquake.hazardlib.geo.surface.base.BaseSurface`.
        Object representing the rupture surface geometry.
    :param source_typology:
        Subclass of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`
        (class object, not an instance) referencing the typology
        of the source that produced this rupture.
    :param rupture_slip_direction:
        Angle describing rupture propagation direction in decimal degrees.

    :raises ValueError:
        If magnitude value is not positive, or tectonic region type is unknown.

    NB: if you want to convert the rupture into XML, you should set the
    attribute surface_nodes to an appropriate value.
    """
    _slots_ = '''mag rake tectonic_region_type hypocenter surface
    source_typology rupture_slip_direction'''.split()

    @classmethod
    def init(cls):
        """
        Initialize the class dictionaries `._code` and `.types` encoding the
        bidirectional correspondence between an integer in the range 0..255
        (the code) and a triplet of classes (rupture_class, surface_class,
        source_class). This is useful when serializing the rupture to and
        from HDF5.
        """
        source_classes = get_subclasses(BaseSeismicSource)
        rupture_classes = [BaseRupture] + list(get_subclasses(BaseRupture))
        surface_classes = get_subclasses(BaseSurface)
        code, types, n = {}, {}, 0
        for src, rup, sur in itertools.product(source_classes, rupture_classes,
                                               surface_classes):
            code[rup, sur, src] = n
            types[n] = rup, sur, src
            n += 1
        if n >= 256:
            raise ValueError('Too many rupture codes: %d' % n)
        cls._code = code
        cls.types = types

    def __init__(self,
                 mag,
                 rake,
                 tectonic_region_type,
                 hypocenter,
                 surface,
                 source_typology,
                 rupture_slip_direction=None):
        if not mag > 0:
            raise ValueError('magnitude must be positive')
        NodalPlane.check_rake(rake)
        self.tectonic_region_type = tectonic_region_type
        self.rake = rake
        self.mag = mag
        self.hypocenter = hypocenter
        self.surface = surface
        self.source_typology = source_typology
        self.rupture_slip_direction = rupture_slip_direction

    @property
    def code(self):
        """Returns the code (integer in the range 0 .. 255) of the rupture"""
        return self._code[self.__class__, self.surface.__class__,
                          self.source_typology]

    def get_probability_no_exceedance(self, poes):
        """
        Compute and return the probability that in the time span for which the
        rupture is defined, the rupture itself never generates a ground motion
        value higher than a given level at a given site.

        Such calculation is performed starting from the conditional probability
        that an occurrence of the current rupture is producing a ground motion
        value higher than the level of interest at the site of interest.
        The actual formula used for such calculation depends on the temporal
        occurrence model the rupture is associated with.
        The calculation can be performed for multiple intensity measure levels
        and multiple sites in a vectorized fashion.

        :param poes:
            2D numpy array containing conditional probabilities that a
            rupture occurrence causes a ground shaking value exceeding a
            ground motion level at a site. The first dimension represents
            sites, the second intensity measure levels. ``poes`` can be obtained
            calling the :meth:`method
            <openquake.hazardlib.gsim.base.GroundShakingIntensityModel.get_poes>`.
        """
        raise NotImplementedError

    def sample_number_of_occurrences(self):
        """
        Randomly sample number of occurrences from temporal occurrence model
        probability distribution.

        .. note::
            This method uses random numbers. In order to reproduce the
            same results, the numpy random number generator needs to be seeded, see
            http://docs.scipy.org/doc/numpy/reference/generated/numpy.random.seed.html

        :returns:
            int, Number of rupture occurrences
        """
        raise NotImplementedError
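
A short sketch of how the ``_code``/``types`` mapping built by ``init`` above could be used when serializing ruptures to HDF5; ``some_rupture`` is an assumed, already built rupture instance:

BaseRupture.init()  # build the _code and types dictionaries once

# from a rupture instance to its integer code (0..255)...
code = some_rupture.code

# ...and back from the code to the triple of registered classes
rupture_cls, surface_cls, source_cls = BaseRupture.types[code]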
Ejemplo n.º 23
0
class BaseSeismicSource(with_metaclass(abc.ABCMeta)):
    """
    Base class representing a seismic source, that is a structure generating
    earthquake ruptures.

    :param source_id:
        Some (numeric or literal) source identifier. Supposed to be unique
        within the source model.
    :param name:
        String, a human-readable name of the source.
    :param tectonic_region_type:
        Source's tectonic regime. See :class:`openquake.hazardlib.const.TRT`.
    """
    _slots_ = [
        'source_id', 'name', 'tectonic_region_type', 'src_group_id',
        'num_ruptures', 'seed', 'id'
    ]
    RUPTURE_WEIGHT = 1.  # overridden in (Multi)PointSource, AreaSource
    nsites = 1  # FIXME: remove this and fix all hazardlib tests
    ngsims = 1

    @abc.abstractproperty
    def MODIFICATIONS(self):
        pass

    @property
    def weight(self):
        """
        Determine the source weight from the number of ruptures, by
        multiplying with the scale factor RUPTURE_WEIGHT and the number
        of affected sites
        """
        if not self.num_ruptures:
            self.num_ruptures = self.count_ruptures()
        return (self.num_ruptures * self.RUPTURE_WEIGHT *
                self.nsites)  # * self.ngsims

    @property
    def src_group_ids(self):
        """
        :returns: a list of source group IDs (usually of 1 element)
        """
        grp_id = self.src_group_id
        return [grp_id] if isinstance(grp_id, int) else grp_id

    def __init__(self, source_id, name, tectonic_region_type):
        self.source_id = source_id
        self.name = name
        self.tectonic_region_type = tectonic_region_type
        self.src_group_id = None  # set by the engine
        self.num_ruptures = 0  # set by the engine
        self.seed = None  # set by the engine
        self.id = None  # set by the engine

    @abc.abstractmethod
    def iter_ruptures(self):
        """
        Get a generator object that yields probabilistic ruptures the source
        consists of.

        :returns:
            Generator of instances of subclass of
            :class:`~openquake.hazardlib.source.rupture.BaseProbabilisticRupture`.
        """

    @abc.abstractmethod
    def count_ruptures(self):
        """
        Return the number of ruptures that will be generated by the source.
        """

    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return minimum and maximum magnitudes of the ruptures generated
        by the source.
        """

    @abc.abstractmethod
    def get_rupture_enclosing_polygon(self, dilation=0):
        """
        Get a polygon which encloses all the ruptures generated by the source.

        The rupture enclosing polygon is meant to be used in all hazard
        calculators to filter out sources whose ruptures the user wants
        to be neglected because they are too far from the locations
        of interest.

        For performance reasons, the ``get_rupture_enclosing_polygon()``
        should compute the polygon, without creating all the ruptures.
        The rupture enclosing polygon may not be necessarily the *minimum*
        enclosing polygon, but must guarantee that all ruptures are within
        the polygon.

        This method must be implemented by subclasses.

        :param dilation:
            A buffer distance in km to extend the polygon borders to.
        :returns:
            Instance of :class:`openquake.hazardlib.geo.polygon.Polygon`.
        """

    def get_bounding_box(self, dilation=0):
        """
        Returns the bounding box of all the ruptures generated by the source,
        enlarged by the integration distance (dilation).
        """
        return self.get_rupture_enclosing_polygon(dilation).get_bbox()

    def filter_sites_by_distance_to_source(self, integration_distance, sites):
        """
        Filter out sites from the collection that are further from the source
        than some arbitrary threshold.

        :param integration_distance:
            Distance in km representing a threshold: sites that are further
            than that distance from the closest rupture produced by the source
            should be excluded.
        :param sites:
            Instance of :class:`openquake.hazardlib.site.SiteCollection`
            to filter.
        :returns:
            Filtered :class:`~openquake.hazardlib.site.SiteCollection`.

        Method can be overridden by subclasses in order to achieve
        higher performance for a specific typology. Base class method calls
        :meth:`get_rupture_enclosing_polygon` with ``integration_distance``
        as a dilation value and then filters the site collection by checking
        :meth:`containment <openquake.hazardlib.geo.polygon.Polygon.intersects>`
        of site locations.

        The main criteria for this method to decide whether a site should be
        filtered out or not is the minimum distance between the site and all
        the ruptures produced by the source. If at least one rupture is closer
        (in terms of great circle distance between surface projections) than
        integration distance to a site, it should not be filtered out. However,
        it is important not to make this method too computationally intensive.
        If short-circuits are taken, false positives are generally better than
        false negatives (it's better not to filter a site out if there is some
        uncertainty about its distance).
        """
        if integration_distance is None:  # no filtering
            return sites
        rup_enc_poly = self.get_rupture_enclosing_polygon(integration_distance)
        return sites.filter(rup_enc_poly.intersects(sites.mesh))

    def modify(self, modification, parameters):
        """
        Apply a single modification to the source parameters.

        Reflects the modification method and calls it, passing ``parameters``
        as keyword arguments.

        Modifications can be applied one on top of another. The logic
        of stacking modifications is up to a specific source implementation.

        :param modification:
            String name representing the type of modification.
        :param parameters:
            Dictionary of parameters needed for modification.
        :raises ValueError:
            If ``modification`` is missing from the attribute `MODIFICATIONS`.
        """
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        meth = getattr(self, 'modify_%s' % modification)
        meth(**parameters)
Ejemplo n.º 24
0
class BaseCalculator(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for all calculators.

    :param oqparam: OqParam object
    :param monitor: monitor object
    :param calc_id: numeric calculation ID
    """
    from_engine = False  # set by engine.run_calc
    sitecol = datastore.persistent_attribute('sitecol')
    performance = datastore.persistent_attribute('performance')
    pre_calculator = None  # to be overridden
    is_stochastic = False  # True for scenario and event based calculators

    def __init__(self, oqparam, monitor=Monitor(), calc_id=None):
        self._monitor = monitor
        self.datastore = datastore.DataStore(calc_id)
        self.oqparam = oqparam

    def monitor(self, operation, **kw):
        """
        :returns: a new Monitor instance
        """
        mon = self._monitor(operation, hdf5path=self.datastore.hdf5path)
        self._monitor.calc_id = mon.calc_id = self.datastore.calc_id
        vars(mon).update(kw)
        return mon

    def save_params(self, **kw):
        """
        Update the current calculation parameters and save engine_version
        """
        vars(self.oqparam).update(**kw)
        self.datastore['oqparam'] = self.oqparam  # save the updated oqparam
        attrs = self.datastore['/'].attrs
        attrs['engine_version'] = engine_version
        attrs['date'] = datetime.now().isoformat()[:19]
        if 'checksum32' not in attrs:
            attrs['checksum32'] = readinput.get_checksum32(self.oqparam)
        self.datastore.flush()

    def set_log_format(self):
        """Set the format of the root logger"""
        fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(
            self.datastore.calc_id)
        for handler in logging.root.handlers:
            handler.setFormatter(logging.Formatter(fmt))

    def run(self, pre_execute=True, concurrent_tasks=None, close=True, **kw):
        """
        Run the calculation and return the exported outputs.
        """
        global logversion
        self.close = close
        self.set_log_format()
        if logversion:  # make sure this is logged only once
            logging.info('Running %s', self.oqparam.inputs['job_ini'])
            logging.info('Using engine version %s', engine_version)
            logversion = False
        if concurrent_tasks is None:  # use the job.ini parameter
            ct = self.oqparam.concurrent_tasks
        else:  # use the parameter passed on the command line
            ct = concurrent_tasks
        if ct == 0:  # disable distribution temporarily
            oq_distribute = os.environ.get('OQ_DISTRIBUTE')
            os.environ['OQ_DISTRIBUTE'] = 'no'
        if ct != self.oqparam.concurrent_tasks:
            # save the used concurrent_tasks
            self.oqparam.concurrent_tasks = ct
        self.save_params(**kw)
        Starmap.init()
        try:
            if pre_execute:
                self.pre_execute()
            self.result = self.execute()
            if self.result is not None:
                self.post_execute(self.result)
            self.before_export()
            self.export(kw.get('exports', ''))
        except:
            if kw.get('pdb'):  # post-mortem debug
                tb = sys.exc_info()[2]
                traceback.print_tb(tb)
                pdb.post_mortem(tb)
            else:
                logging.critical('', exc_info=True)
                raise
        finally:
            # cleanup globals
            if ct == 0:  # restore OQ_DISTRIBUTE
                if oq_distribute is None:  # was not set
                    del os.environ['OQ_DISTRIBUTE']
                else:
                    os.environ['OQ_DISTRIBUTE'] = oq_distribute
            readinput.pmap = None
            readinput.exposure = None
            Starmap.shutdown()
        return getattr(self, 'exported', {})

    def core_task(*args):
        """
        Core routine running on the workers.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def pre_execute(self):
        """
        Initialization phase.
        """

    @abc.abstractmethod
    def execute(self):
        """
        Execution phase. Usually will run in parallel the core
        function and return a dictionary with the results.
        """

    @abc.abstractmethod
    def post_execute(self, result):
        """
        Post-processing phase of the aggregated output. It must be
        overridden with the export code. It will return a dictionary
        of output files.
        """

    def export(self, exports=None):
        """
        Export all the outputs in the datastore in the given export formats.
        Individual outputs are not exported if there are multiple realizations.
        """
        self.exported = getattr(self.precalc, 'exported', {})
        if isinstance(exports, tuple):
            fmts = exports
        elif exports:  # is a string
            fmts = exports.split(',')
        elif isinstance(self.oqparam.exports, tuple):
            fmts = self.oqparam.exports
        else:  # is a string
            fmts = self.oqparam.exports.split(',')
        keys = set(self.datastore)
        has_hcurves = 'hcurves' in self.datastore or 'poes' in self.datastore
        if has_hcurves:
            keys.add('hcurves')
        for fmt in fmts:
            if not fmt:
                continue
            for key in sorted(keys):  # top level keys
                if 'rlzs' in key and self.R > 1:
                    continue  # skip individual curves
                self._export((key, fmt))
            if has_hcurves and self.oqparam.hazard_maps:
                self._export(('hmaps', fmt))
            if has_hcurves and self.oqparam.uniform_hazard_spectra:
                self._export(('uhs', fmt))

        if self.close:  # in the engine we close later
            self.result = None
            try:
                self.datastore.close()
            except (RuntimeError, ValueError):
                # sometimes produces errors but they are difficult to
                # reproduce
                logging.warn('', exc_info=True)

    def _export(self, ekey):
        if ekey not in exp or self.exported.get(ekey):  # already exported
            return
        with self.monitor('export'):
            self.exported[ekey] = fnames = exp(ekey, self.datastore)
            if fnames:
                logging.info('exported %s: %s', ekey[0], fnames)

    def before_export(self):
        """
        Set the attributes nbytes
        """
        # sanity check that eff_ruptures have been set, i.e. are not -1
        csm_info = self.datastore['csm_info']
        for sm in csm_info.source_models:
            for sg in sm.src_groups:
                assert sg.eff_ruptures != -1, sg

        for key in self.datastore:
            self.datastore.set_nbytes(key)
        self.datastore.flush()
Ejemplo n.º 25
0
class BaseMFD(with_metaclass(abc.ABCMeta)):
    """
    Abstract base class for Magnitude-Frequency Distribution function.
    """

    #: The set of modification type names that are supported by an MFD.
    #: Each modification should have a corresponding method named
    #: ``modify_modificationname()`` where the actual modification
    #: logic resides.
    MODIFICATIONS = abc.abstractproperty()

    def modify(self, modification, parameters):
        """
        Apply a single modification to the MFD parameters.

        Reflects the modification method and calls it passing ``parameters``
        as keyword arguments. See also :attr:`MODIFICATIONS`.

        Modifications can be applied one on top of another. The logic
        of stacking modifications is up to a specific MFD implementation.

        :param modification:
            String name representing the type of modification.
        :param parameters:
            Dictionary of parameters needed for modification.
        :raises ValueError:
            If ``modification`` is missing from :attr:`MODIFICATIONS`.
        """
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        meth = getattr(self, 'modify_%s' % modification)
        meth(**parameters)
        self.check_constraints()

    @abc.abstractmethod
    def check_constraints(self):
        """
        Check MFD-specific constraints and raise :exc:`ValueError`
        in case of violation.

        This method must be implemented by subclasses.
        """

    @abc.abstractmethod
    def get_annual_occurrence_rates(self):
        """
        Return an MFD annual occurrence rates histogram.

        This method must be implemented by subclasses.

        :return:
            The list of tuples, each tuple containing a pair
            ``(magnitude, occurrence_rate)``. Each pair represents
            a single bin of the histogram with ``magnitude`` being
            the center of the bin. Magnitude values are monotonically
            increasing by value of bin width. ``occurrence_rate``
            represents the number of events per year with magnitude
            that falls in between bin's boundaries.
        """

    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return the minimum and maximum magnitudes this MFD is defined for.

        This method must be implemented by subclasses.

        :return:
            A tuple of two float values: minimum and maximum magnitudes.
        """

    def __repr__(self):
        """
        Returns the name of the magnitude frequency distribution class
        """
        return "<%s>" % self.__class__.__name__
Ejemplo n.º 26
0
class BaseSeismicSource(with_metaclass(abc.ABCMeta)):
    """
    Base class representing a seismic source, that is a structure generating
    earthquake ruptures.

    :param source_id:
        Some (numeric or literal) source identifier. Supposed to be unique
        within the source model.
    :param name:
        String, a human-readable name of the source.
    :param tectonic_region_type:
        Source's tectonic regime. See :class:`openquake.hazardlib.const.TRT`.
    """
    _slots_ = [
        'source_id', 'name', 'tectonic_region_type', 'src_group_id',
        'num_ruptures', 'seed', 'id'
    ]
    RUPTURE_WEIGHT = 1.  # overridden in (Multi)PointSource, AreaSource
    ngsims = 1

    @abc.abstractproperty
    def MODIFICATIONS(self):
        pass

    @property
    def weight(self):
        """
        Determine the source weight from the number of ruptures, the scale
        factor RUPTURE_WEIGHT, the square root of the number of affected
        sites and the number of GSIMs
        """
        if not self.num_ruptures:
            self.num_ruptures = self.count_ruptures()
        # the weight is proportional to the number of ruptures and GSIMs;
        # the dependence on the number of sites is unclear, but it is
        # certainly less than linear, so a square root is used as a heuristic
        return (self.num_ruptures * self.RUPTURE_WEIGHT *
                math.sqrt(self.nsites) * self.ngsims)

    @property
    def nsites(self):
        """
        :returns: the number of sites affected by this source
        """
        try:
            # the engine sets self.indices when filtering the sources
            return len(self.indices)
        except AttributeError:
            # this happens in several hazardlib tests, therefore we return
            # a fake number of affected sites to avoid changing all tests
            return 1

    @property
    def src_group_ids(self):
        """
        :returns: a list of source group IDs (usually of 1 element)
        """
        grp_id = getattr(self, 'src_group_id', [0])
        return [grp_id] if isinstance(grp_id, int) else grp_id

    def __init__(self, source_id, name, tectonic_region_type):
        self.source_id = source_id
        self.name = name
        self.tectonic_region_type = tectonic_region_type
        self.src_group_id = 0  # set by the engine
        self.num_ruptures = 0  # set by the engine
        self.seed = None  # set by the engine
        self.id = None  # set by the engine

    @abc.abstractmethod
    def iter_ruptures(self):
        """
        Get a generator object that yields probabilistic ruptures the source
        consists of.

        :returns:
            Generator of instances of subclass of
            :class:`~openquake.hazardlib.source.rupture.BaseProbabilisticRupture`.
        """

    def __iter__(self):
        """
        Override to implement source splitting
        """
        yield self

    @abc.abstractmethod
    def count_ruptures(self):
        """
        Return the number of ruptures that will be generated by the source.
        """

    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return minimum and maximum magnitudes of the ruptures generated
        by the source.
        """

    def modify(self, modification, parameters):
        """
        Apply a single modification to the source parameters.
        Reflects the modification method and calls it passing ``parameters``
        as keyword arguments.

        Modifications can be applied one on top of another. The logic
        of stacking modifications is up to a specific source implementation.

        :param modification:
            String name representing the type of modification.
        :param parameters:
            Dictionary of parameters needed for modification.
        :raises ValueError:
            If ``modification`` is missing from the attribute ``MODIFICATIONS``.
        """
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        meth = getattr(self, 'modify_%s' % modification)
        meth(**parameters)
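As a worked illustration of the ``weight`` heuristic defined above (purely made-up numbers, not part of the class): a source with 200 ruptures, the default ``RUPTURE_WEIGHT`` of 1.0, 25 affected sites and 2 GSIMs gets a weight of 2000.

import math

num_ruptures = 200
rupture_weight = 1.0   # class default, overridden by point/area sources
nsites = 25            # len(self.indices) once the engine filters the source
ngsims = 2

weight = num_ruptures * rupture_weight * math.sqrt(nsites) * ngsims
print(weight)  # 2000.0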
Ejemplo n.º 27
0
class ParamSet(with_metaclass(MetaParamSet, hdf5.LiteralAttrs)):
    """
    A set of valid interrelated parameters. Here is an example
    of usage:

    >>> class MyParams(ParamSet):
    ...     a = Param(positiveint)
    ...     b = Param(positivefloat)
    ...
    ...     def is_valid_not_too_big(self):
    ...         "The sum of a and b must be under 10: a={a} and b={b}"
    ...         return self.a + self.b < 10

    >>> mp = MyParams(a='1', b='7.2')
    >>> mp
    <MyParams a=1, b=7.2>

    >>> MyParams(a='1', b='9.2').validate()
    Traceback (most recent call last):
    ...
    ValueError: The sum of a and b must be under 10: a=1 and b=9.2

    The constraints are applied in lexicographic order. The attribute
    corresponding to a Param descriptor can be set as usual:

    >>> mp.a = '2'
    >>> mp.a
    '2'

    A list with the literal strings can be extracted as follows:

    >>> mp.to_params()
    [('a', "'2'"), ('b', '7.2')]

    It is possible to build a new object from a dictionary of parameters
    which are assumed to be already validated:

    >>> MyParams.from_(dict(a="'2'", b='7.2'))
    <MyParams a='2', b=7.2>
    """
    params = {}

    @classmethod
    def check(cls, dic):
        """
        Convert a dictionary name->string into a dictionary name->value
        by converting the string. If the name does not correspond to a
        known parameter, just ignore it and log a warning.
        """
        res = {}
        for name, text in dic.items():
            try:
                p = getattr(cls, name)
            except AttributeError:
                logging.warn('Ignored unknown parameter %s', name)
            else:
                res[name] = p.validator(text)
        return res

    @classmethod
    def from_(cls, dic):
        """
        Build a new ParamSet from a dictionary of string-valued parameters
        which are assumed to be already valid.
        """
        self = cls.__new__(cls)
        for k, v in dic.items():
            setattr(self, k, ast.literal_eval(v))
        return self

    def to_params(self):
        """
        Convert the instance dictionary into a sorted list of pairs
        (name, valrepr) where valrepr is the string representation of
        the underlying value.
        """
        dic = self.__dict__
        return [(k, repr(dic[k])) for k in sorted(dic)
                if not k.startswith('_')]

    def __init__(self, **names_vals):
        for name, val in names_vals.items():
            if name.startswith(('_', 'is_valid_')):
                raise NameError('The parameter name %s is not acceptable' %
                                name)
            try:
                convert = getattr(self.__class__, name).validator
            except AttributeError:
                logging.warn("The parameter '%s' is unknown, ignoring" % name)
                continue
            try:
                value = convert(val)
            except Exception as exc:
                raise ValueError('%s: could not convert to %s: %s=%s' %
                                 (exc, convert.__name__, name, val))
            setattr(self, name, value)

    def validate(self):
        """
        Apply the `is_valid` methods to self and possibly raise a ValueError.
        """
        # it is important to have the validator applied in a fixed order
        valids = [
            getattr(self, valid) for valid in sorted(dir(self.__class__))
            if valid.startswith('is_valid_')
        ]
        for is_valid in valids:
            if not is_valid():
                docstring = is_valid.__doc__.strip()
                doc = textwrap.fill(docstring.format(**vars(self)))
                raise ValueError(doc)

    def __iter__(self):
        for item in sorted(vars(self).items()):
            yield item
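A self-contained sketch of the convert-or-ignore behaviour implemented by ``check`` and ``__init__`` above: each parameter name maps to a validator callable that converts the incoming string, and unknown names are skipped with a logged warning. Plain ``int``/``float`` stand in for the real ``positiveint``/``positivefloat`` validators, which are not shown in this excerpt; this mimics the behaviour only and does not use the real ``Param`` descriptors.

import logging

validators = {'a': int, 'b': float}   # stand-ins for the real validators

def check(dic):
    res = {}
    for name, text in dic.items():
        try:
            convert = validators[name]
        except KeyError:
            logging.warning('Ignored unknown parameter %s', name)
        else:
            res[name] = convert(text)
    return res

print(check({'a': '3', 'b': '0.5', 'other': 'x'}))  # {'a': 3, 'b': 0.5}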
Ejemplo n.º 28
0
class BaseSeismicSource(with_metaclass(abc.ABCMeta)):
    """
    Base class representing a seismic source, that is a structure generating
    earthquake ruptures.

    :param source_id:
        Some (numeric or literal) source identifier. Supposed to be unique
        within the source model.
    :param name:
        String, a human-readable name of the source.
    :param tectonic_region_type:
        Source's tectonic regime. See :class:`openquake.hazardlib.const.TRT`.
    """

    __slots__ = [
        'source_id', 'name', 'tectonic_region_type', 'trt_model_id', 'weight',
        'seed', 'id'
    ]

    def __init__(self, source_id, name, tectonic_region_type):
        self.source_id = source_id
        self.name = name
        self.tectonic_region_type = tectonic_region_type
        self.trt_model_id = None  # set by the engine
        self.weight = 1  # set by the engine
        self.seed = None  # set by the engine
        self.id = None  # set by the engine

    @abc.abstractmethod
    def iter_ruptures(self):
        """
        Get a generator object that yields probabilistic ruptures the source
        consists of.

        :returns:
            Generator of instances of a subclass of
            :class:`~openquake.hazardlib.source.rupture.BaseProbabilisticRupture`.
        """

    @abc.abstractmethod
    def count_ruptures(self):
        """
        Return the number of ruptures that will be generated by the source.
        """

    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return minimum and maximum magnitudes of the ruptures generated
        by the source.
        """

    @abc.abstractmethod
    def get_rupture_enclosing_polygon(self, dilation=0):
        """
        Get a polygon which encloses all the ruptures generated by the source.

        The rupture enclosing polygon is meant to be used in all hazard
        calculators to filter out sources whose ruptures the user wants
        to be neglected because they are too far from the locations
        of interest.

        For performance reasons, the ``get_rupture_enclosing_polygon()``
        should compute the polygon, without creating all the ruptures.
        The rupture enclosing polygon may not necessarily be the *minimum*
        enclosing polygon, but must guarantee that all ruptures are within
        the polygon.

        This method must be implemented by subclasses.

        :param dilation:
            A buffer distance in km to extend the polygon borders to.
        :returns:
            Instance of :class:`openquake.hazardlib.geo.polygon.Polygon`.
        """

    def filter_sites_by_distance_to_source(self, integration_distance, sites):
        """
        Filter out sites from the collection that are further from the source
        than some arbitrary threshold.

        :param integration_distance:
            Distance in km representing a threshold: sites that are further
            than that distance from the closest rupture produced by the source
            should be excluded.
        :param sites:
            Instance of :class:`openquake.hazardlib.site.SiteCollection`
            to filter.
        :returns:
            Filtered :class:`~openquake.hazardlib.site.SiteCollection`.

        Method can be overridden by subclasses in order to achieve
        higher performance for a specific typology. Base class method calls
        :meth:`get_rupture_enclosing_polygon` with ``integration_distance``
        as a dilation value and then filters site collection by checking
        :meth:`containment <openquake.hazardlib.geo.polygon.Polygon.intersects>`
        of site locations.

        The main criteria for this method to decide whether a site should be
        filtered out or not is the minimum distance between the site and all
        the ruptures produced by the source. If at least one rupture is closer
        (in terms of great circle distance between surface projections) than
        integration distance to a site, it should not be filtered out. However,
        it is important not to make this method too computationally intensive.
        If short-circuits are taken, false positives are generally better than
        false negatives (it's better not to filter a site out if there is some
        uncertainty about its distance).
        """
        rup_enc_poly = self.get_rupture_enclosing_polygon(integration_distance)
        return sites.filter(rup_enc_poly.intersects(sites.mesh))
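A toy sketch of the filtering idea behind the base implementation above: keep only the sites within ``integration_distance`` of the source, drop the rest. Plain Euclidean distance on (x, y) pairs in km stands in for the geodetic and ``Polygon.intersects`` machinery, which is not reproduced here; names and numbers are illustrative only.

import math

def filter_sites(source_xy, integration_distance, sites_xy):
    """Return the (x, y) sites within integration_distance km of source_xy."""
    sx, sy = source_xy
    return [(x, y) for x, y in sites_xy
            if math.hypot(x - sx, y - sy) <= integration_distance]

sites = [(0, 0), (50, 0), (250, 0)]
print(filter_sites((0, 0), 200, sites))  # [(0, 0), (50, 0)] -- far site dropped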