Example #1
 def test_wrong_trts(self):
     # 'active Shallow Crust' is missing, 'Active Shallow Crust' is there
     oq = readinput.get_oqparam('job.ini', case_16)
     with self.assertRaises(logictree.InvalidLogicTree) as c:
         readinput.get_gsim_lt(oq, ['active Shallow Crust'])
     self.assertIn("is missing the TRT 'active Shallow Crust'",
                   str(c.exception))
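The test above exercises the TRT filtering performed by readinput.get_gsim_lt. As a rough, hypothetical sketch of the call pattern shared by most of the snippets on this page (the 'job.ini' path is a placeholder, and the attributes used are only those appearing in the other examples, not guaranteed for every engine version):

from openquake.commonlib import readinput

oq = readinput.get_oqparam('job.ini')      # parse the configuration file
gsim_lt = readinput.get_gsim_lt(oq)        # build the full GSIM logic tree
print('GSIM logic tree paths:', gsim_lt.get_num_paths())
for trt, gsims in gsim_lt.values.items():  # GSIMs grouped by tectonic region type
    print(trt, [str(gsim) for gsim in gsims])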
Example #2
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = oq.get_correl_model()
     rup = readinput.get_rupture(oq)
     rup.seed = self.oqparam.random_seed
     self.gsims = readinput.get_gsims(oq)
     maxdist = oq.maximum_distance['default']
     with self.monitor('filtering sites', autoflush=True):
         self.sitecol = filters.filter_sites_by_distance_to_rupture(
             rup, maxdist, self.sitecol)
     if self.sitecol is None:
         raise RuntimeError(
             'All sites were filtered out! maximum_distance=%s km' %
             maxdist)
     # eid, ses, occ, sample
     events = numpy.array(
         [(eid, 1, 1, 0)
          for eid in range(oq.number_of_ground_motion_fields)],
         calc.event_dt)
     rupture = calc.EBRupture(
         rup, self.sitecol.sids, events, 'single_rupture', 0, 0)
     self.datastore['ruptures/grp-00/0'] = rupture
     self.computer = GmfComputer(
         rupture, self.sitecol, oq.imtls, self.gsims,
         trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #3
def zip(job_ini, archive_zip):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if not archive_zip.endswith('.zip'):
        sys.exit('%s does not end with .zip' % archive_zip)
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    oq = readinput.get_oqparam(job_ini)
    files = set()

    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    files.add(table)

    # collect all other files
    for key in oq.inputs:
        fname = oq.inputs[key]
        if isinstance(fname, list):
            for f in fname:
                files.add(os.path.normpath(f))
        else:
            files.add(os.path.normpath(fname))
    general.zipfiles(files, archive_zip, log=logging.info)
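A hypothetical invocation of the helper above; both file names are placeholders:

zip('job_hazard.ini', 'job_inputs.zip')  # archives job.ini together with every referenced input file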
Example #4
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.save_mesh()
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
     [self.source
      ] = parser.parse_sources(self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     min_mag, max_mag = self.source.min_mag, None
     source_models = []
     num_gsim_paths = self.gsim_lt.get_num_paths()
     for ordinal, (name, branch) in enumerate(branches):
         tm = source.TrtModel(DEFAULT_TRT, [],
                              min_mag,
                              max_mag,
                              ordinal,
                              eff_ruptures=-1)
         sm = source.SourceModel(name, branch.weight, [name], [tm],
                                 num_gsim_paths, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(self.gsim_lt,
                                            self.smlt,
                                            source_models,
                                            set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #5
def main(argv):
    """
    Print a short summary of the GSIMs used in a hazard model and of the
    parameters required to define ruptures, sites and rupture-site distances.
    """
    #
    # get the name of the .ini file
    ini_fname = argv[0]
    #
    # read the content of the configuration file
    oqparam = readinput.get_oqparam(ini_fname)
    gmmlt = readinput.get_gsim_lt(oqparam)
    gmmlist = set(readinput.get_gsims(oqparam))
    #
    # print results
    print('\nGMPEs:')
    for tstr in gmmlist:
        print('  - ', tstr)
    ctx = ContextMaker(gmmlist)
    #
    # parameters
    print('\nRequired rupture-site distances')
    print('   ', getattr(ctx, 'REQUIRES_DISTANCES'))
    print('Required site parameters')
    print('   ', getattr(ctx, 'REQUIRES_SITES_PARAMETERS'))
    print('Required rupture parameters')
    print('   ', getattr(ctx, 'REQUIRES_RUPTURE_PARAMETERS'))
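The script above expects the path of a job.ini file as its only command-line argument; a hypothetical direct call (with a placeholder path) would be:

main(['job.ini'])  # argv[0] is the .ini file name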
Example #6
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     oq = self.oqparam
     self.read_risk_data()  # read the site collection
     self.gsim_lt = readinput.get_gsim_lt(oq, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(oq)
     job_info = dict(hostname=socket.gethostname())
     self.datastore.save('job_info', job_info)
     parser = source.SourceModelParser(
         UCERFSourceConverter(oq.investigation_time,
                              oq.rupture_mesh_spacing))
     [src_group] = parser.parse_src_groups(oq.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     source_models = []
     num_gsim_paths = self.gsim_lt.get_num_paths()
     for grp_id, rlz in enumerate(self.smlt):
         [name] = rlz.lt_path
         branch = self.smlt.branches[name]
         sg = _copy_grp(src_group, grp_id, name, branch.value)
         sm = source.SourceModel(
             name, branch.weight, [name], [sg], num_gsim_paths, grp_id, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(
         self.gsim_lt, self.smlt, source_models, set_weight=False)
     self.datastore['csm_info'] = self.csm.info
     logging.info('Found %d x %d logic tree branches', len(branches),
                  self.gsim_lt.get_num_paths())
     self.rlzs_assoc = self.csm.info.get_rlzs_assoc()
     self.infos = []
     self.eid = collections.Counter()  # sm_id -> event_id
     self.sm_by_grp = self.csm.info.get_sm_by_grp()
     if not self.oqparam.imtls:
         raise ValueError('Missing intensity_measure_types!')
Example #7
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer, etags and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = readinput.get_correl_model(oq)
     n_gmfs = oq.number_of_ground_motion_fields
     rupture = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     maxdist = oq.maximum_distance['default']
     with self.monitor('filtering sites', autoflush=True):
         self.sitecol = filters.filter_sites_by_distance_to_rupture(
             rupture, maxdist, self.sitecol)
     if self.sitecol is None:
         raise RuntimeError(
             'All sites were filtered out! maximum_distance=%s km' %
             maxdist)
     self.etags = numpy.array(
         sorted(['scenario-%010d~ses=1' % i for i in range(n_gmfs)]),
         (bytes, 100))
     self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                 self.gsims, trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #8
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer, etags and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = readinput.get_correl_model(oq)
     n_gmfs = oq.number_of_ground_motion_fields
     rupture = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     maxdist = oq.maximum_distance['default']
     with self.monitor('filtering sites', autoflush=True):
         self.sitecol = filters.filter_sites_by_distance_to_rupture(
             rupture, maxdist, self.sitecol)
     if self.sitecol is None:
         raise RuntimeError(
             'All sites were filtered out! maximum_distance=%s km' %
             maxdist)
     self.etags = numpy.array(
         sorted(['scenario-%010d~ses=1' % i for i in range(n_gmfs)]),
         (bytes, 100))
     self.computer = GmfComputer(
         rupture, self.sitecol, oq.imtls, self.gsims,
         trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #9
    def pre_execute(self):
        """
        parse the logic tree and source model input
        """
        self.sitecol = readinput.get_site_collection(self.oqparam)
        self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
        self.smlt = readinput.get_source_model_lt(self.oqparam)
        parser = source.SourceModelParser(
            UCERFClassicalSourceConverter(self.oqparam.investigation_time,
                                          self.oqparam.rupture_mesh_spacing))
        [self.src_group] = parser.parse_src_groups(
            self.oqparam.inputs["source_model"])
        [src] = self.src_group
        branches = sorted(self.smlt.branches.items())
        source_models = []
        num_gsim_paths = self.gsim_lt.get_num_paths()
        for ordinal, (name, branch) in enumerate(branches):
            sg = copy.copy(self.src_group)
            sg.id = ordinal

            # Update the event set
            src.branch_id = branch.value
            src.idx_set = src.build_idx_set()
            sm = source.SourceModel(
                name, branch.weight, [name], [sg], num_gsim_paths, ordinal, 1)
            source_models.append(sm)
        self.csm = source.CompositeSourceModel(
            self.gsim_lt, self.smlt, source_models, set_weight=False)
        self.rlzs_assoc = self.csm.info.get_rlzs_assoc()
        self.rup_data = {}
        self.num_tiles = 1
        self.infos = {}
Example #10
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.datastore['oqparam'] = oq
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
     if 'rupture_model' not in oq.inputs:
         logging.warn('There is no rupture_model, the calculator will just '
                      'import data without performing any calculation')
         return
     ebr, self.sitecol = readinput.get_rupture_sitecol(oq, self.sitecol)
     self.gsims = readinput.get_gsims(oq)
     self.datastore['events'] = ebr.events
     rupser = calc.RuptureSerializer(self.datastore)
     rupser.save([ebr])
     rupser.close()
     trunc_level = oq.truncation_level
     correl_model = oq.get_correl_model()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls,
                                 ContextMaker(self.gsims), trunc_level,
                                 correl_model)
Example #11
    def are_parallel(self, lnSA):
        """
        Returns the vector-valued annual rate of exceedance (ARE)

        :param lnSA: tuple, natural logarithms of acceleration values, in units of g.
        """
        args_list = list()
        for rlz in self.ssm_lt:  # Loop over source-model realizations
            _, weight = parser.get_value_and_weight_from_rlz(rlz)
            srcs = parser.get_sources_from_rlz(
                rlz, self.oqparam, self.ssm_lt, sourcefilter=self.srcfilter)

            for src in srcs:  # Loop over (filtered) seismic sources (area, fault, etc.)

                for pt in self.srcfilter.filter(src):  # Loop over point sources

                    gsim_lt = get_gsim_lt(self.oqparam, trts=[src.tectonic_region_type])
                    for gsim_rlz in gsim_lt:  # Loop over the GSIM logic tree
                        gsim_model, gsim_weight = parser.get_value_and_weight_from_gsim_rlz(gsim_rlz)

                        # Distribute the ARE computation over (point, GSIM) pairs
                        pt_weight = weight * gsim_weight
                        args = (self, pt, gsim_model, pt_weight, lnSA)
                        args_list.append(args)

        # Sum the contributions from all the distributed tasks
        are = 0
        for value in Starmap(self.pt_src_are.__func__, args_list):
            are += value
        return are
Example #12
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     oq = self.oqparam
     cinfo = source.CompositionInfo.fake(readinput.get_gsim_lt(oq))
     self.datastore['csm_info'] = cinfo
     if 'rupture_model' not in oq.inputs:
         logging.warn('There is no rupture_model, the calculator will just '
                      'import data without performing any calculation')
         super().pre_execute()
         return
     self.rup = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     self.cmaker = ContextMaker(self.gsims, oq.maximum_distance,
                                {'filter_distance': oq.filter_distance})
     super().pre_execute()
     self.datastore['oqparam'] = oq
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
     E = oq.number_of_ground_motion_fields
     events = numpy.zeros(E, readinput.stored_event_dt)
     events['eid'] = numpy.arange(E)
     ebr = EBRupture(self.rup, 0, self.sitecol.sids, events)
     self.datastore['events'] = ebr.events
     rupser = calc.RuptureSerializer(self.datastore)
     rupser.save([ebr])
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                 oq.truncation_level, oq.correl_model)
Example #13
def zip(job_ini, archive_zip, risk_ini, oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    files = set()
    if risk_ini:
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        oq.inputs.update(readinput.get_params([risk_ini])['inputs'])
        files.add(os.path.normpath(os.path.abspath(job_ini)))

    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    files.add(table)

    # collect exposure.csv, if any
    exposure_xml = oq.inputs.get('exposure')
    if exposure_xml:
        dname = os.path.dirname(exposure_xml)
        expo = nrml.read(exposure_xml, stop='asset')[0]
        if not expo.assets:
            exposure_csv = (~expo.assets).strip()
            for csv in exposure_csv.split():
                if csv and os.path.exists(os.path.join(dname, csv)):
                    files.add(os.path.join(dname, csv))

    # collect the .hdf5 UCERF file, if any
    if oq.calculation_mode.startswith('ucerf_'):
        sm = nrml.read(oq.inputs['source_model'])
        fname = sm.sourceModel.UCERFSource['filename']
        f = os.path.join(os.path.dirname(oq.inputs['source_model']), fname)
        files.add(os.path.normpath(f))

    # collect all other files
    for key in oq.inputs:
        fname = oq.inputs[key]
        if isinstance(fname, list):
            for f in fname:
                files.add(os.path.normpath(f))
        elif isinstance(fname, dict):
            for f in fname.values():
                files.add(os.path.normpath(f))
        else:
            files.add(os.path.normpath(fname))
    general.zipfiles(files, archive_zip, log=log)
Example #14
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.save_mesh()
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
     [self.source] = parser.parse_sources(
         self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     min_mag, max_mag = self.source.min_mag, None
     source_models = []
     for ordinal, (name, branch) in enumerate(branches):
         tm = source.TrtModel(DEFAULT_TRT, [], min_mag, max_mag,
                              ordinal, eff_ruptures=-1)
         sm = source.SourceModel(
             name, branch.weight, [name], [tm], self.gsim_lt, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(
         self.smlt, source_models, set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #15
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        G = gsim_lt.get_num_paths()
        if oq.calculation_mode.startswith('scenario'):
            ngmfs = oq.number_of_ground_motion_fields
        if oq.inputs['rupture_model'].endswith('.xml'):
            self.gsims = [gsim_rlz.value[0] for gsim_rlz in gsim_lt]
            self.cmaker = ContextMaker('*', self.gsims, {
                'maximum_distance': oq.maximum_distance,
                'imtls': oq.imtls
            })
            rup = readinput.get_rupture(oq)
            if self.N > oq.max_sites_disagg:  # many sites, split rupture
                ebrs = [
                    EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              0,
                              0,
                              G,
                              e0=i * G) for i in range(ngmfs)
                ]
            else:  # keep a single rupture with a big occupation number
                ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
            aw = get_rup_array(ebrs, self.srcfilter)
            if len(aw) == 0:
                raise RuntimeError(
                    'The rupture is too far from the sites! Please check the '
                    'maximum_distance and the position of the rupture')
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            num_gsims = numpy.array(
                [len(gsim_lt.values[trt]) for trt in gsim_lt.values], U32)
            if oq.calculation_mode.startswith('scenario'):
                # rescale n_occ
                aw['n_occ'] *= ngmfs * num_gsims[aw['trt_smr']]
        rup_array = aw.array
        hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' %
                oq.maximum_distance(rup.tectonic_region_type, rup.mag))

        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        imp = calc.RuptureImporter(self.datastore)
        imp.import_rups_events(rup_array, get_rupture_getters)
Example #16
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        G = gsim_lt.get_num_paths()
        if oq.inputs['rupture_model'].endswith('.xml'):
            ngmfs = oq.number_of_ground_motion_fields
            self.gsims = readinput.get_gsims(oq)
            self.cmaker = ContextMaker('*', self.gsims, {
                'maximum_distance': oq.maximum_distance,
                'imtls': oq.imtls
            })
            rup = readinput.get_rupture(oq)
            mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
            if self.N > oq.max_sites_disagg:  # many sites, split rupture
                ebrs = [
                    EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              0,
                              0,
                              G,
                              e0=i * G) for i in range(ngmfs)
                ]
                meshes = numpy.array([mesh] * ngmfs, object)
            else:  # keep a single rupture with a big occupation number
                ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
                meshes = numpy.array([mesh] * ngmfs, object)
            rup_array = get_rup_array(ebrs, self.srcfilter).array
            hdf5.extend(self.datastore['rupgeoms'], meshes)
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            aw.array['n_occ'] = G
            rup_array = aw.array
            hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' %
                oq.maximum_distance(rup.tectonic_region_type, rup.mag))

        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        calc.RuptureImporter(self.datastore).import_rups(rup_array)
Example #17
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     oq = self.oqparam
     cinfo = logictree.FullLogicTree.fake(readinput.get_gsim_lt(oq))
     self.realizations = cinfo.get_realizations()
     self.datastore['full_lt'] = cinfo
     if 'rupture_model' not in oq.inputs:
         logging.warning(
             'There is no rupture_model, the calculator will just '
             'import data without performing any calculation')
         super().pre_execute()
         return
     self.rup = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     R = len(self.gsims)
     self.cmaker = ContextMaker(
         '*', self.gsims, {
             'maximum_distance': oq.maximum_distance,
             'filter_distance': oq.filter_distance
         })
     super().pre_execute()
     self.datastore['oqparam'] = oq
     self.store_rlz_info({})
     rlzs_by_gsim = cinfo.get_rlzs_by_gsim(0)
     E = oq.number_of_ground_motion_fields
     n_occ = numpy.array([E])
     ebr = EBRupture(self.rup, 0, 0, n_occ)
     ebr.e0 = 0
     events = numpy.zeros(E * R, events_dt)
     for rlz, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
         events[rlz * E:rlz * E + E]['id'] = eids
         events[rlz * E:rlz * E + E]['rlz_id'] = rlz
     self.datastore['events'] = self.events = events
     rupser = calc.RuptureSerializer(self.datastore)
     rup_array = get_rup_array([ebr], self.src_filter())
     if len(rup_array) == 0:
         maxdist = oq.maximum_distance(self.rup.tectonic_region_type,
                                       self.rup.mag)
         raise RuntimeError('There are no sites within the maximum_distance'
                            ' of %s km from the rupture' % maxdist)
     rupser.save(rup_array)
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                 oq.truncation_level, oq.correl_model,
                                 self.amplifier)
     M32 = (numpy.float32, len(self.oqparam.imtls))
     self.sig_eps_dt = [('eid', numpy.uint64), ('sig', M32), ('eps', M32)]
Example #18
def get_composite_source_model(oq):
    """
    :param oq: :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns: a :class:`openquake.commonlib.source.CompositeSourceModel`
    """
    [src_group] = nrml.to_python(
        oq.inputs["source_model"],
        SourceConverter(oq.investigation_time, oq.rupture_mesh_spacing))
    source_models = []
    gsim_lt = readinput.get_gsim_lt(oq, [DEFAULT_TRT])
    smlt = readinput.get_source_model_lt(oq)
    for sm in smlt.gen_source_models(gsim_lt):
        sg = copy.copy(src_group)
        sg.id = sm.ordinal
        sm.src_groups = [sg]
        sg.sources = [sg[0].new(sm.ordinal, sm.names)]
        source_models.append(sm)
    return source.CompositeSourceModel(gsim_lt, smlt, source_models)
Example #19
    def read_inputs(self):
        """
        Read risk data and sources if any
        """
        oq = self.oqparam
        self._read_risk_data()
        self.check_overflow()  # check if self.sitecol is too large

        if ('amplification' in oq.inputs
                and oq.amplification_method == 'kernel'):
            logging.info('Reading %s', oq.inputs['amplification'])
            df = readinput.get_amplification(oq)
            check_amplification(df, self.sitecol)
            self.af = AmplFunction.from_dframe(df)

        if (oq.calculation_mode == 'disaggregation'
                and oq.max_sites_disagg < len(self.sitecol)):
            raise ValueError('Please set max_sites_disagg=%d in %s' %
                             (len(self.sitecol), oq.inputs['job_ini']))
        if ('source_model_logic_tree' in oq.inputs
                and oq.hazard_calculation_id is None):
            with self.monitor('composite source model', measuremem=True):
                self.csm = csm = readinput.get_composite_source_model(
                    oq, self.datastore.hdf5)
                srcs = [src for sg in csm.src_groups for src in sg]
                if not srcs:
                    raise RuntimeError('All sources were discarded!?')
                logging.info('Checking the sources bounding box')
                sids = self.src_filter().within_bbox(srcs)
                if len(sids) == 0:
                    raise RuntimeError('All sources were discarded!?')
                self.full_lt = csm.full_lt
        self.init()  # do this at the end of pre-execute

        if (not oq.hazard_calculation_id
                and oq.calculation_mode != 'preclassical'
                and not oq.save_disk_space):
            self.gzip_inputs()

        # check DEFINED_FOR_REFERENCE_VELOCITY
        if self.amplifier:
            gsim_lt = readinput.get_gsim_lt(oq)
            self.amplifier.check(self.sitecol.vs30, oq.vs30_tolerance,
                                 gsim_lt.values)
Example #20
    def read_inputs(self):
        """
        Read risk data and sources if any
        """
        oq = self.oqparam
        self._read_risk_data()
        self.check_overflow()  # check if self.sitecol is too large

        if ('amplification' in oq.inputs
                and oq.amplification_method == 'kernel'):
            logging.info('Reading %s', oq.inputs['amplification'])
            df = readinput.get_amplification(oq)
            check_amplification(df, self.sitecol)
            self.af = AmplFunction.from_dframe(df)

        if (oq.calculation_mode == 'disaggregation'
                and oq.max_sites_disagg < len(self.sitecol)):
            raise ValueError('Please set max_sites_disagg=%d in %s' %
                             (len(self.sitecol), oq.inputs['job_ini']))
        if ('source_model_logic_tree' in oq.inputs
                and oq.hazard_calculation_id is None):
            with self.monitor('composite source model', measuremem=True):
                self.csm = csm = readinput.get_composite_source_model(
                    oq, self.datastore.hdf5)
                mags_by_trt = csm.get_mags_by_trt()
                oq.maximum_distance.interp(mags_by_trt)
                for trt in mags_by_trt:
                    self.datastore['source_mags/' + trt] = numpy.array(
                        mags_by_trt[trt])
                self.full_lt = csm.full_lt
        self.init()  # do this at the end of pre-execute
        self.pre_checks()

        if (not oq.hazard_calculation_id
                and oq.calculation_mode != 'preclassical'
                and not oq.save_disk_space):
            self.gzip_inputs()

        # check DEFINED_FOR_REFERENCE_VELOCITY
        if self.amplifier:
            gsim_lt = readinput.get_gsim_lt(oq)
            self.amplifier.check(self.sitecol.vs30, oq.vs30_tolerance,
                                 gsim_lt.values)
Example #21
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = oq.get_correl_model()
     ebr, self.sitecol = readinput.get_rupture_sitecol(oq, self.sitecol)
     self.gsims = readinput.get_gsims(oq)
     self.datastore['events'] = ebr.events
     rupser = calc.RuptureSerializer(self.datastore)
     rupser.save([ebr])
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.gsims,
                                 trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #22
def parse_openquake_ini(job_ini):
    """
    Parse an OpenQuake Engine configuration file and build the corresponding
    logic trees.

    Acknowledgement: M. Pagani

    :param job_ini: str, path to the OpenQuake Engine configuration file.

    :return: a 3-element tuple containing an instance of the class
        "openquake.commonlib.oqvalidation.OqParam", the source model logic tree
        and the GSIM logic tree.
    """
    # Read the .ini file
    oqparam = get_oqparam(job_ini)
    # Read the ssc logic tree
    ssm_lt = get_source_model_lt(oqparam)
    # Read the gsim logic tree
    gsim_lt = get_gsim_lt(oqparam)
    # imts = get_imts(oqparam)
    return oqparam, ssm_lt, gsim_lt
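A hypothetical use of the parser above, with 'job.ini' as a placeholder path and attribute names taken from the other snippets on this page:

oqparam, ssm_lt, gsim_lt = parse_openquake_ini('job.ini')
print('source model branches:', len(ssm_lt.branches))
print('GSIM logic tree paths:', gsim_lt.get_num_paths())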
Example #23
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = oq.get_correl_model()
     self.datastore["rupture"] = rupture = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     maxdist = oq.maximum_distance["default"]
     with self.monitor("filtering sites", autoflush=True):
         self.sitecol = filters.filter_sites_by_distance_to_rupture(rupture, maxdist, self.sitecol)
     if self.sitecol is None:
         raise RuntimeError("All sites were filtered out! maximum_distance=%s km" % maxdist)
     self.computer = GmfComputer(rupture, self.sitecol, oq.imtls, self.gsims, trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore["csm_info"] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #24
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        if oq.inputs['rupture_model'].endswith(('.xml', '.toml', '.txt')):
            self.gsims = readinput.get_gsims(oq)
            self.cmaker = ContextMaker(
                '*', self.gsims,
                {'maximum_distance': oq.maximum_distance,
                 'filter_distance': oq.filter_distance})
            n_occ = numpy.array([oq.number_of_ground_motion_fields])
            rup = readinput.get_rupture(oq)
            ebr = EBRupture(rup, 0, 0, n_occ)
            ebr.e0 = 0
            rup_array = get_rup_array([ebr], self.srcfilter).array
            mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
            hdf5.extend(self.datastore['rupgeoms'],
                        numpy.array([mesh], object))
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            rup_array = aw.array
            hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' % oq.maximum_distance(
                    rup.tectonic_region_type, rup.mag))

        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        calc.RuptureImporter(self.datastore).import_rups(rup_array)
Example #25
def read_config_file(cfg):
    gmf_file = cfg['input']['gmf_file']
    gmf_file_gmpe_rate = cfg['input']['gmf_file_gmpe_rate']
    job_ini = cfg['input']['job_ini']
    oq_param = get_oqparam(job_ini)
    get_risk_model(oq_param)  # read risk functions and set imtls
    haz_sitecol = get_site_collection(oq_param)
    sites, assets_by_site, _ = get_sitecol_assetcol(oq_param, haz_sitecol)
    gsimlt = get_gsim_lt(oq_param)
    gsim_list = [br.uncertainty for br in gsimlt.branches]
    cinfo = source.CompositionInfo.fake(gsimlt)
    mean_shift_inter_residuals = float(
        cfg['input']['mean_shift_inter_residuals'])
    realizations_inter = int(cfg['input']['realizations_inter'])
    realizations_intra = int(cfg['input']['realizations_intra'])
    intra_files_name = cfg['input']['intra_files_name']
    intra_files = cfg['input']['intra_files'].split()
    csv_rate_gmf_file = cfg['output']['csv_rate_gmf_file']
    seed = int(cfg['input']['seed'])
    return (gmf_file, gmf_file_gmpe_rate, sites, gsim_list, cinfo, oq_param,
            mean_shift_inter_residuals, realizations_inter, realizations_intra,
            intra_files_name, intra_files, csv_rate_gmf_file, seed)
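The function above expects a ConfigParser-like object with an [input] and an [output] section. A minimal sketch of the keys it reads, where every value is a placeholder, could look like this:

import configparser

cfg = configparser.ConfigParser()
cfg['input'] = {
    'gmf_file': 'gmfs.txt',                      # placeholder file names and values
    'gmf_file_gmpe_rate': 'gmfs_gmpe_rate.txt',
    'job_ini': 'job.ini',
    'mean_shift_inter_residuals': '0.0',
    'realizations_inter': '10',
    'realizations_intra': '10',
    'intra_files_name': 'intra',
    'intra_files': 'intra_1.csv intra_2.csv',
    'seed': '42',
}
cfg['output'] = {'csv_rate_gmf_file': 'rate_gmf.csv'}
# read_config_file(cfg) would then return the parsed inputs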
Example #26
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     super(ScenarioCalculator, self).pre_execute()
     oq = self.oqparam
     trunc_level = oq.truncation_level
     correl_model = oq.get_correl_model()
     rup = readinput.get_rupture(oq)
     rup.seed = self.oqparam.random_seed
     self.gsims = readinput.get_gsims(oq)
     maxdist = oq.maximum_distance['default']
     with self.monitor('filtering sites', autoflush=True):
         self.sitecol = filters.filter_sites_by_distance_to_rupture(
             rup, maxdist, self.sitecol)
     if self.sitecol is None:
         raise RuntimeError(
             'All sites were filtered out! maximum_distance=%s km' %
             maxdist)
     # eid, ses, occ, sample
     events = numpy.zeros(oq.number_of_ground_motion_fields,
                          calc.stored_event_dt)
     events['eid'] = numpy.arange(oq.number_of_ground_motion_fields)
     rupture = calc.EBRupture(rup, self.sitecol.sids, events, 0, 0)
     rupture.sidx = 0
     rupture.eidx1 = 0
     rupture.eidx2 = len(events)
     self.datastore['sids'] = self.sitecol.sids
     self.datastore['events/grp-00'] = events
     array, nbytes = calc.RuptureSerializer.get_array_nbytes([rupture])
     self.datastore.extend('ruptures/grp-00', array, nbytes=nbytes)
     self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                 self.gsims, trunc_level, correl_model)
     gsim_lt = readinput.get_gsim_lt(oq)
     cinfo = source.CompositionInfo.fake(gsim_lt)
     self.datastore['csm_info'] = cinfo
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #27
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
     self.src_groups = parser.parse_src_groups(
         self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     source_models = []
     num_gsim_paths = self.gsim_lt.get_num_paths()
     for ordinal, (name, branch) in enumerate(branches):
         sm = source.SourceModel(
             name, branch.weight, [name], self.src_groups,
             num_gsim_paths, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(
         self.gsim_lt, self.smlt, source_models, set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #28
def main(cfg_file):
    startTime = datetime.now()
    cfg = configparser.ConfigParser()
    cfg.read(cfg_file)

    (oq_param, source_model_file, matrixMagsMin, matrixMagsMax,
     matrixMagsStep, matrixDistsMin, matrixDistsMax,
     matrixDistsStep, limitIM, imt_filtering, trunc_level,
     im_filter, gmf_file, gmf_file_gmpe_rate, rup_mesh_spac,
     complex_mesh_spac, mfd_bin, area_discre, limit_max_mag,
     limit_min_mag) = read_config_file(cfg)

    # Set up the source model configuration
    conv1 = SourceConverter(1.0,  # Investigation time
                            rup_mesh_spac,   # Rupture mesh spacing
                            complex_fault_mesh_spacing=complex_mesh_spac,
                            width_of_mfd_bin=mfd_bin,
                            area_source_discretization=area_discre)
    # Parse the source Model
    if source_model_file:  # only one source model file
        source_model = to_python(source_model_file, conv1)
    else:  # source model has many files (in this case 2 - adapt for more)
        source_model_file2 = "demo_data/SA_RA_CATAL1_05.xml"
        source_model2 = to_python(source_model_file2, conv1)
        source_model = source_model+source_model2

    # Calculate total number of ruptures in the erf
    # num_rup = 0
    # rate_rup = []
    # for a in range(len(source_model)):
        # model_trt = source_model[a]
        # for b in range(len(model_trt)):
            # num_rup = num_rup + len(list(model_trt[b].iter_ruptures()))
            # for rup in model_trt[b].iter_ruptures():
                # rate_rup.append(rup.occurrence_rate)
    # print(num_rup)
    # print(sum(rate_rup))
    # print(rate_rup[0:10])
    
    # If exposure model is provided:
    haz_sitecol = get_site_collection(oq_param)
    sites, assets_by_site, _ = get_sitecol_assetcol(oq_param, haz_sitecol)
    # print(list(sites)[0:10])
    # np.savetxt('sites.csv',list(zip(sites.lons, sites.lats)))
    # If region coordinates are provided:
    # sites = get_site_collection(oq_param)

    gsimlt = get_gsim_lt(oq_param)
    gsim_list = [br.uncertainty for br in gsimlt.branches]
    GMPEmatrix = build_gmpe_table(matrixMagsMin, matrixMagsMax, matrixMagsStep,
                                  matrixDistsMin, matrixDistsMax,
                                  matrixDistsStep, imt_filtering, limitIM,
                                  gsim_list, limit_max_mag, limit_min_mag)

    # Calculate minimum distance between rupture and assets
    # Import exposure from .ini file
    depths = np.zeros(len(sites))
    exposureCoords = Mesh(sites.lons, sites.lats, depths)
    # To calculate Joyner Boore distance:
    exposurePoints = (exposureCoords, exposureCoords)
    recMeshExposure = RectangularMesh.from_points_list(exposurePoints)
    imts = ['PGA', 'SA(0.3)']
    cmake = ContextMaker(gsim_list)
    
    filter1 = SourceFilter(sites, oq_param.maximum_distance)

    if im_filter == 'True':  # Here we consider the IM and the MaxDist filter
        gmfs_median = calculate_gmfs_filter(source_model, gsimlt, filter1,
                                            cmake, gsim_list, recMeshExposure,
                                            matrixMagsMin, matrixMagsStep,
                                            matrixDistsMin, matrixDistsStep,
                                            GMPEmatrix, imts, trunc_level)
    else:  # No IM filter, just the MAxDist filter
        gmfs_median = calc_gmfs_no_IM_filter(source_model, imts, gsim_list,
                                             trunc_level, gsimlt,
                                             filter1, cmake)

    print("%s Ground Motion Fields" % len(gmfs_median))

    save_gmfs(gmf_file, gmf_file_gmpe_rate, gmfs_median, exposureCoords,
              gsim_list, imts)
    print(datetime.now() - startTime)
Example #29
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        G = gsim_lt.get_num_paths()
        if oq.calculation_mode.startswith('scenario'):
            ngmfs = oq.number_of_ground_motion_fields
        if oq.inputs['rupture_model'].endswith('.xml'):
            # check the number of branchsets
            bsets = len(gsim_lt._ltnode)
            if bsets > 1:
                raise InvalidFile(
                    '%s for a scenario calculation must contain a single '
                    'branchset, found %d!' % (oq.inputs['job_ini'], bsets))
            [(trt, rlzs_by_gsim)] = gsim_lt.get_rlzs_by_gsim_trt().items()
            self.cmaker = ContextMaker(
                trt, rlzs_by_gsim, {
                    'maximum_distance': oq.maximum_distance(trt),
                    'minimum_distance': oq.minimum_distance,
                    'truncation_level': oq.truncation_level,
                    'imtls': oq.imtls
                })
            rup = readinput.get_rupture(oq)
            if self.N > oq.max_sites_disagg:  # many sites, split rupture
                ebrs = [
                    EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              'NA',
                              0,
                              G,
                              e0=i * G,
                              scenario=True) for i in range(ngmfs)
                ]
            else:  # keep a single rupture with a big occupation number
                ebrs = [
                    EBRupture(rup,
                              'NA',
                              0,
                              G * ngmfs,
                              rup.rup_id,
                              scenario=True)
                ]
            srcfilter = SourceFilter(self.sitecol, oq.maximum_distance(trt))
            aw = get_rup_array(ebrs, srcfilter)
            if len(aw) == 0:
                raise RuntimeError(
                    'The rupture is too far from the sites! Please check the '
                    'maximum_distance and the position of the rupture')
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = get_ruptures(oq.inputs['rupture_model'])
            if len(gsim_lt.values) == 1:  # fix for scenario_damage/case_12
                aw['trt_smr'] = 0  # a single TRT
            if oq.calculation_mode.startswith('scenario'):
                # rescale n_occ by ngmfs and nrlzs
                aw['n_occ'] *= ngmfs * gsim_lt.get_num_paths()
        else:
            raise InvalidFile("Something wrong in %s" % oq.inputs['job_ini'])
        rup_array = aw.array
        hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' %
                oq.maximum_distance(rup.tectonic_region_type)(rup.mag))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        imp = calc.RuptureImporter(self.datastore)
        imp.import_rups_events(rup_array, get_rupture_getters)