Example #1
    def _populate_from(self, asset_nodes, param):
        asset_refs = set()
        for idx, asset_node in enumerate(asset_nodes):
            asset_id = asset_node['id']
            if asset_id in asset_refs:
                raise nrml.DuplicatedID(asset_id)
            asset_refs.add(asset_id)
            self._add_asset(idx, asset_node, param)
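
A minimal, self-contained sketch of the same duplicate-ID check, runnable outside the engine: DuplicatedID is defined locally here as a stand-in for nrml.DuplicatedID and the asset records are plain dicts.

class DuplicatedID(Exception):
    """Local stand-in for nrml.DuplicatedID, only for this sketch."""

def check_assets(asset_nodes):
    asset_refs = set()
    for idx, asset_node in enumerate(asset_nodes):
        asset_id = asset_node['id']
        if asset_id in asset_refs:
            raise DuplicatedID(asset_id)
        asset_refs.add(asset_id)
        print('adding asset #%d with id %s' % (idx, asset_id))

check_assets([{'id': 'a1'}, {'id': 'a2'}])   # ok
check_assets([{'id': 'a1'}, {'id': 'a1'}])   # raises DuplicatedID: a1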
Example #2
    def _populate_from(self, asset_array, param, check_dupl):
        asset_refs = set()
        for idx, asset in enumerate(asset_array):
            asset_id = asset['id']
            # check_dupl is False only in oq prepare_site_model since
            # in that case we are only interested in the asset locations
            if check_dupl and asset_id in asset_refs:
                raise nrml.DuplicatedID(asset_id)
            asset_refs.add(param['asset_prefix'] + asset_id)
            self._add_asset(idx, asset, param)
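
Two details distinguish this version from Example #1: the check can be switched off via check_dupl (oq prepare_site_model only cares about asset locations), and the stored references are namespaced with a per-exposure asset_prefix, so the same asset ID may appear in different exposure files. A hedged sketch of the prefixing idea, with invented prefixes and IDs:

def prefixed_refs(exposures):
    """Collect references of the form <prefix><asset_id> across exposures."""
    refs = set()
    for prefix, asset_ids in exposures.items():
        for asset_id in asset_ids:
            refs.add(prefix + asset_id)
    return refs

# 'a1' occurs in both exposures, but the prefixed references do not clash:
# the result has three elements, exp1_a1, exp1_a2 and exp2_a1
print(prefixed_refs({'exp1_': ['a1', 'a2'], 'exp2_': ['a1']}))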
Example #3
    def parse_branchset(self, branchset_node, depth):
        """
        :param branchset_node:
            ``etree.Element`` object with tag "logicTreeBranchSet".
        :param depth:
            The sequential number of this branching level (0-based).

        Enumerates child branchsets and calls :meth:`parse_branchset`,
        :meth:`validate_branchset`, :meth:`parse_branches` and finally
        :meth:`apply_branchset` for each.

        Keeps track of "open ends" -- the set of branches that do not have
        any child branchset at this step of execution. After a branchset is
        processed, only the branches listed in it can have child branchsets
        (if there is one at the next level).
        """
        attrs = branchset_node.attrib.copy()
        uncertainty_type = branchset_node.attrib.get('uncertaintyType')
        dic = dict((filtername, branchset_node.attrib.get(filtername))
                   for filtername in self.FILTERS
                   if filtername in branchset_node.attrib)
        self.validate_filters(branchset_node, uncertainty_type, dic)
        filters = self.parse_filters(branchset_node, uncertainty_type, dic)

        ordinal = len(self.bsetdict)
        branchset = BranchSet(uncertainty_type, ordinal, filters)
        branchset.id = bsid = attrs.pop('branchSetID')
        if bsid in self.bsetdict:
            raise nrml.DuplicatedID('%s in %s' % (bsid, self.filename))
        self.bsetdict[bsid] = attrs
        self.validate_branchset(branchset_node, depth, branchset)
        self.parse_branches(branchset_node, branchset)
        dummies = []  # dummy branches in case of applyToBranches
        if self.root_branchset is None:  # not set yet
            self.num_paths = 1
            self.root_branchset = branchset
        else:
            prev_ids = ' '.join(pb.branch_id for pb in self.previous_branches)
            app2brs = branchset_node.attrib.get('applyToBranches') or prev_ids
            if app2brs != prev_ids:
                branchset.applied = app2brs
                self.apply_branchset(
                    app2brs, branchset_node.lineno, branchset)
                for brid in set(prev_ids.split()) - set(app2brs.split()):
                    self.branches[brid].bset = dummy = dummy_branchset()
                    [dummybranch] = dummy.branches
                    self.branches[dummybranch.branch_id] = dummybranch
                    dummies.append(dummybranch)
            else:  # apply to all previous branches
                for branch in self.previous_branches:
                    branch.bset = branchset
        self.previous_branches = branchset.branches + dummies
        self.num_paths *= len(branchset)
        self.branchsets.append(branchset)
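
The applyToBranches handling is the subtle part: when the new branchset applies only to some of the previous branches, the remaining branches receive a dummy child branchset so that every path through the tree keeps the same depth, and the "open ends" for the next level are the new branches plus those dummies. The following is a simplified, self-contained sketch of that bookkeeping; Branch, BranchSet and attach are illustrative stand-ins, not the OpenQuake classes.

class Branch:
    def __init__(self, branch_id):
        self.branch_id = branch_id
        self.bset = None  # child branchset, set when the next level is parsed

class BranchSet:
    def __init__(self, branches):
        self.branches = branches

def dummy_branchset():
    # a branchset with a single do-nothing branch, mirroring the dummies above
    return BranchSet([Branch('.')])

def attach(previous_branches, new_branchset, apply_to=None):
    """
    Attach new_branchset to the branches named in apply_to (all of them by
    default) and return the new "open ends" for the next level.
    """
    names = set(apply_to or [b.branch_id for b in previous_branches])
    dummies = []
    for br in previous_branches:
        if br.branch_id in names:
            br.bset = new_branchset
        else:
            br.bset = dummy = dummy_branchset()
            dummies.extend(dummy.branches)
    return new_branchset.branches + dummies

# two previous branches, the new branchset applies only to 'b1'
prev = [Branch('b1'), Branch('b2')]
new = BranchSet([Branch('b11'), Branch('b12')])
open_ends = attach(prev, new, apply_to=['b1'])
print([b.branch_id for b in open_ends])   # ['b11', 'b12', '.']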
Example #4
def get_composite_source_model(oqparam, in_memory=True):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param in_memory:
        if False, just parse the XML without instantiating the sources
    """
    smodels = []
    grp_id = 0
    idx = 0
    gsim_lt = get_gsim_lt(oqparam)
    source_model_lt = get_source_model_lt(oqparam)
    if source_model_lt.on_each_source():
        logging.info('There is a logic tree on each source')
    for source_model in get_source_models(oqparam,
                                          gsim_lt,
                                          source_model_lt,
                                          in_memory=in_memory):
        for src_group in source_model.src_groups:
            src_group.sources = sorted(src_group, key=getid)
            src_group.id = grp_id
            for src in src_group:
                # there are two cases depending on the flag in_memory:
                # 1) src is a hazardlib source and has a src_group_id
                #    attribute; in that case the source has to be numbered
                # 2) src is a Node object, then nothing must be done
                if isinstance(src, Node):
                    continue
                src.src_group_id = grp_id
                src.id = idx
                idx += 1
            grp_id += 1
            if grp_id >= TWO16:
                # the limit is really needed only for event based calculations
                raise ValueError('There is a limit of %d src groups!' % TWO16)
        smodels.append(source_model)
    csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels,
                                      oqparam.optimize_same_id_sources)
    for sm in csm.source_models:
        counter = collections.Counter()
        for sg in sm.src_groups:
            for srcid in map(getid, sg):
                counter[srcid] += 1
        dupl = [srcid for srcid in counter if counter[srcid] > 1]
        if dupl:
            raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' %
                                    (sm, dupl))
    return csm
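
The final loop is a generic duplicate-detection pass: count every source ID within a source model and report the ones seen more than once. A minimal standalone version of that check, using plain string IDs instead of source objects:

import collections

def find_duplicates(ids):
    """Return the IDs occurring more than once."""
    counter = collections.Counter(ids)
    return [i for i in counter if counter[i] > 1]

print(find_duplicates(['src1', 'src2', 'src1']))   # ['src1']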
Example #5
    def parse_branchset(self, branchset_node, depth):
        """
        :param branchset_node:
            ``etree.Element`` object with tag "logicTreeBranchSet".
        :param depth:
            The sequential number of this branching level (0-based).

        Enumerates child branchsets and calls :meth:`parse_branchset`,
        :meth:`validate_branchset`, :meth:`parse_branches` and finally
        :meth:`apply_branchset` for each.

        Keeps track of "open ends" -- the set of branches that do not have
        any child branchset at this step of execution. After a branchset is
        processed, only the branches listed in it can have child branchsets
        (if there is one at the next level).
        """
        attrs = branchset_node.attrib.copy()
        uncertainty_type = branchset_node.attrib.get('uncertaintyType')
        filters = dict((filtername, branchset_node.attrib.get(filtername))
                       for filtername in self.FILTERS
                       if filtername in branchset_node.attrib)
        self.validate_filters(branchset_node, uncertainty_type, filters)
        filters = self.parse_filters(branchset_node, uncertainty_type, filters)

        branchset = BranchSet(uncertainty_type, len(self.bsetdict), filters)
        bsid = attrs.pop('branchSetID')
        if bsid in self.bsetdict:
            raise nrml.DuplicatedID('%s in %s' % (bsid, self.filename))
        self.bsetdict[bsid] = attrs
        self.validate_branchset(branchset_node, depth, branchset)
        self.parse_branches(branchset_node, branchset)
        if self.root_branchset is None:  # not set yet
            self.num_paths = 1
            self.root_branchset = branchset
        else:
            apply_to_branches = branchset_node.attrib.get('applyToBranches')
            if apply_to_branches:
                self.apply_branchset(apply_to_branches, branchset_node.lineno,
                                     branchset)
            else:
                for branch in self.previous_branches:
                    branch.bset = branchset
        self.previous_branches = branchset.branches
        self.num_paths *= len(branchset)
        self.branchsets.append(branchset)
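
The filters are read straight from the XML attributes of the branchset node: only the attribute names listed in self.FILTERS are picked up, everything else is ignored at this stage. A hedged sketch of that extraction using xml.etree, with an assumed (partial) filter list:

import xml.etree.ElementTree as etree

FILTERS = ('applyToSources', 'applyToTectonicRegionType')  # assumed names

node = etree.fromstring(
    '<logicTreeBranchSet branchSetID="bs1" uncertaintyType="sourceModel"'
    ' applyToSources="src_a src_b"/>')
filters = {name: node.attrib[name] for name in FILTERS if name in node.attrib}
print(filters)   # {'applyToSources': 'src_a src_b'}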
Example #6
def get_composite_source_model(oqparam,
                               monitor=None,
                               in_memory=True,
                               srcfilter=SourceFilter(None, {})):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param monitor:
        an `openquake.baselib.performance.Monitor` instance
    :param in_memory:
        if False, just parse the XML without instantiating the sources
    :param srcfilter:
        if not None, use it to prefilter the sources
    """
    ucerf = oqparam.calculation_mode.startswith('ucerf')
    source_model_lt = get_source_model_lt(oqparam, validate=not ucerf)
    trts = source_model_lt.tectonic_region_types
    trts_lower = {trt.lower() for trt in trts}
    reqv = oqparam.inputs.get('reqv', {})
    for trt in reqv:  # these are lowercase because they come from the job.ini
        if trt not in trts_lower:
            raise ValueError('Unknown TRT=%s in %s [reqv]' %
                             (trt, oqparam.inputs['job_ini']))
    gsim_lt = get_gsim_lt(oqparam, trts or ['*'])
    p = source_model_lt.num_paths * gsim_lt.get_num_paths()
    if oqparam.number_of_logic_tree_samples:
        logging.info('Considering {:,d} logic tree paths out of {:,d}'.format(
            oqparam.number_of_logic_tree_samples, p))
    else:  # full enumeration
        if oqparam.is_event_based() and p > oqparam.max_potential_paths:
            raise ValueError(
                'There are too many potential logic tree paths (%d) '
                'use sampling instead of full enumeration' % p)
        logging.info('Potential number of logic tree paths = {:,d}'.format(p))

    if source_model_lt.on_each_source:
        logging.info('There is a logic tree on each source')
    if monitor is None:
        monitor = performance.Monitor()
    smodels = []
    for source_model in get_source_models(oqparam, gsim_lt, source_model_lt,
                                          monitor, in_memory, srcfilter):
        for src_group in source_model.src_groups:
            src_group.sources = sorted(src_group, key=getid)
            for src in src_group:
                # there are two cases depending on the flag in_memory:
                # 1) src is a hazardlib source and has a src_group_id
                #    attribute; in that case the source has to be numbered
                # 2) src is a Node object; nothing needs to be done
                if isinstance(src, Node):
                    continue
        smodels.append(source_model)
    csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels,
                                      oqparam.optimize_same_id_sources)
    for sm in csm.source_models:
        counter = collections.Counter()
        for sg in sm.src_groups:
            for srcid in map(getid, sg):
                counter[srcid] += 1
        dupl = [srcid for srcid in counter if counter[srcid] > 1]
        if dupl:
            raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' %
                                    (sm, dupl))
    if not in_memory:
        return csm

    if oqparam.is_event_based():
        # initialize the rupture serial numbers before splitting/filtering; in
        # this way the serials are independent from the site collection
        csm.init_serials(oqparam.ses_seed)

    if oqparam.disagg_by_src:
        csm = csm.grp_by_src()  # one group per source

    csm.info.gsim_lt.check_imts(oqparam.imtls)
    return csm
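
The full-enumeration guard multiplies the path counts of the two logic trees before any source is instantiated, so it is cheap. A worked example of that arithmetic, with invented branch counts:

# a source model logic tree whose branchsets have 2, 3 and 2 branches and a
# GSIM logic tree with 4 branches (all numbers invented for illustration)
source_model_paths = 2 * 3 * 2        # plays the role of source_model_lt.num_paths
gsim_paths = 4                        # plays the role of gsim_lt.get_num_paths()
p = source_model_paths * gsim_paths   # 48 potential logic tree paths
print('Potential number of logic tree paths = {:,d}'.format(p))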
Example #7
def get_composite_source_model(oqparam,
                               monitor=None,
                               in_memory=True,
                               split_all=True,
                               srcfilter=None):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param monitor:
        an `openquake.baselib.performance.Monitor` instance
    :param in_memory:
        if False, just parse the XML without instantiating the sources
    :param split_all:
        if True, split all the sources in the models
    :param srcfilter:
        if not None, use it to prefilter the sources
    """
    smodels = []
    gsim_lt = get_gsim_lt(oqparam)
    source_model_lt = get_source_model_lt(oqparam)
    if oqparam.number_of_logic_tree_samples == 0:
        logging.info('Potential number of logic tree paths = {:,d}'.format(
            source_model_lt.num_paths * gsim_lt.get_num_paths()))
    if source_model_lt.on_each_source:
        logging.info('There is a logic tree on each source')
    if monitor is None:
        monitor = performance.Monitor()
    for source_model in get_source_models(oqparam,
                                          gsim_lt,
                                          source_model_lt,
                                          monitor,
                                          in_memory=in_memory):
        for src_group in source_model.src_groups:
            src_group.sources = sorted(src_group, key=getid)
            for src in src_group:
                # there are two cases depending on the flag in_memory:
                # 1) src is a hazardlib source and has a src_group_id
                #    attribute; in that case the source has to be numbered
                # 2) src is a Node object; nothing needs to be done
                if isinstance(src, Node):
                    continue
        smodels.append(source_model)
    csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels,
                                      oqparam.optimize_same_id_sources)
    for sm in csm.source_models:
        counter = collections.Counter()
        for sg in sm.src_groups:
            for srcid in map(getid, sg):
                counter[srcid] += 1
        dupl = [srcid for srcid in counter if counter[srcid] > 1]
        if dupl:
            raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s' %
                                    (sm, dupl))
    if not in_memory:
        return csm

    if 'event_based' in oqparam.calculation_mode:
        if oqparam.pointsource_distance == 0:
            # remove splitting/floating ruptures
            for src in csm.get_sources():
                if hasattr(src, 'hypocenter_distribution'):
                    src.hypocenter_distribution.reduce()
                    src.nodal_plane_distribution.reduce()
                    src.num_ruptures = src.count_ruptures()

        # initialize the rupture serial numbers before splitting/filtering; in
        # this way the serials are independent from the site collection
        csm.init_serials(oqparam.ses_seed)

    # TODO: check why the seeds still depend on the minimum_magnitude
    set_min_mag(csm.get_sources(), oqparam.minimum_magnitude)

    if oqparam.disagg_by_src:
        csm = csm.grp_by_src()  # one group per source

    csm.info.gsim_lt.check_imts(oqparam.imtls)
    if monitor.hdf5:
        csm.info.gsim_lt.store_gmpe_tables(monitor.hdf5)

    # splitting assumes that the serials have been initialized already
    if split_all and 'ucerf' not in oqparam.calculation_mode:
        csm = parallel_split_filter(csm, srcfilter, oqparam.random_seed,
                                    monitor('prefilter'))
    return csm
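
For context, these get_* helpers are module-level functions of openquake.commonlib.readinput, and get_composite_source_model is the usual entry point for the calculators. A hedged usage sketch, assuming a valid job.ini and that readinput.get_oqparam is available (as it is in the engine versions these examples come from):

from openquake.commonlib import readinput

# parse the job configuration into an OqParam instance
oqparam = readinput.get_oqparam('job.ini')
# build the composite source model; this raises nrml.DuplicatedID if two
# sources in the same source model share the same ID
csm = readinput.get_composite_source_model(oqparam)
print('%d sources in %d source models' %
      (len(csm.get_sources()), len(csm.source_models)))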