Example #1
    def testcase02(self):
        """
        Area source straddling the IDL
        """
        datafold = '../data/tools/area/case02/'
        datafold = os.path.join(BASE_DATA_PATH, datafold)
        #
        # create the model
        model = OQtModel('0', 'test')
        #
        # read the source geometries from the shapefile
        shapefile = os.path.join(datafold, 'area_16.shp')
        srcs = load_geometry_from_shapefile(shapefile)
        model.sources = srcs
        #
        # read the catalogue
        self.catalogue_fname = os.path.join(datafold, 'catalogue.csv')
        cat = get_catalogue(self.catalogue_fname)
        #
        # select earthquakes within the polygon of source '16'
        scat = create_catalogue(model, cat, ['16'])
        #
        # remove the pickled catalogue created by get_catalogue
        os.remove(os.path.join(datafold, 'catalogue.pkl'))
        #
        # check the number of selected earthquakes
        self.assertEqual(len(scat.data['longitude']), 4)
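Note: the cleanup step above implies that get_catalogue caches a pickled copy of the parsed catalogue next to the source file (Example #3 checks for it explicitly). A minimal sketch of that round trip, assuming the cache sits beside the .csv:

    cat = get_catalogue('catalogue.csv')  # parses the .csv and writes catalogue.pkl
    cat = get_catalogue('catalogue.csv')  # later calls can reuse the cached pickle
    os.remove('catalogue.pkl')            # drop the cache to force a fresh parse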
Example #2
def main(argv):
    catalogue_filename = 'catalogue.csv'
    c = get_catalogue(catalogue_filename)

    edges_folder = './profiles/int/'
    tedges = _read_edges(edges_folder)

    minlo = 9.0
    maxlo = 11.0
    minla = 44.0
    maxla = 46.0
    npo = 500

    # draw npo random points inside the lon/lat box; get_points_on_plane
    # fills the third column with the depths on the surface
    poi = np.zeros((npo, 3))
    poi[:, 0] = minlo + np.random.rand(npo) * (maxlo - minlo)
    poi[:, 1] = minla + np.random.rand(npo) * (maxla - minla)
    poi = get_points_on_plane(tedges, poi)

    fig, ax = plot_complex_surface(tedges)
    ax.plot(c.data['longitude'], c.data['latitude'], c.data['depth'], 'og')
    ax.plot(poi[:, 0], poi[:, 1], poi[:, 2], '.r')

    circle = Circle((10, 45), .5, alpha=.8)
    ax.add_patch(circle)
    art3d.pathpatch_2d_to_3d(circle, z=30, zdir='z')

    for i, (lo, la, de) in enumerate(
            zip(c.data['longitude'], c.data['latitude'], c.data['depth'])):
        ts = '{:d}'.format(i)
        ax.text(lo, la, de, ts)
    ax.set_zlim([0, 70])
    ax.invert_zaxis()
    plt.show()
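Example #2 is a script fragment; to run it stand-alone it needs roughly the preamble below. The import paths for the project helpers (_read_edges, get_points_on_plane, plot_complex_surface, get_catalogue) are assumptions, not confirmed by the snippet:

    import sys
    import numpy as np
    import matplotlib.pyplot as plt
    import mpl_toolkits.mplot3d.art3d as art3d
    from matplotlib.patches import Circle
    # project helpers, module paths assumed:
    # from openquake.sub.<module> import _read_edges, get_points_on_plane
    # from openquake.<module> import plot_complex_surface, get_catalogue

    if __name__ == '__main__':
        main(sys.argv[1:])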
Example #3
    def testcase01(self):
        """
        Read a .csv catalogue
        """
        tmps = './../../data/tr/catalogue_sample.csv'
        tmpo = os.path.join(BASE_PATH, tmps)
        cat = get_catalogue(tmpo)
        expected = 11
        self.assertEqual(expected, len(cat.data['longitude']))
        #
        # check that the pickled copy was created, then remove it
        assert os.path.exists(re.sub('csv$', 'pkl', tmpo))
        os.remove(re.sub('csv$', 'pkl', tmpo))
Example #4
def classify(ini_fname, compute_distances, rf):
    """
    :param str ini_fname:
        The path to the .ini file containing the settings
    :param bool compute_distances:
        A boolean controlling the calculation of distances between the slab
        surfaces and the earthquakes in the catalogue
    :param str rf:
        The root folder (all the paths in the .ini file are interpreted
        relative to this folder)
    """
    logger = logging.getLogger('classify')
    #
    #
    assert os.path.exists(ini_fname)
    #
    # Parse .ini file
    config = configparser.ConfigParser()
    config.read(ini_fname)
    #
    #
    if rf is False:
        assert 'root_folder' in config['general']
        rf = config['general']['root_folder']
    #
    # set root folder
    distance_folder = os.path.join(rf, config['general']['distance_folder'])
    catalogue_fname = os.path.join(rf, config['general']['catalogue_filename'])
    assert os.path.exists(catalogue_fname)
    #
    # Read priority list
    priorityl = str_to_list(config['general']['priority'])
    #
    # Tectonic regionalisation fname
    tmps = config['general']['treg_filename']
    treg_filename = os.path.join(rf, tmps)
    if not os.path.exists(treg_filename):
        logger.info('Creating: {:s}'.format(treg_filename))
        f = h5py.File(treg_filename, "w")
        f.close()
    else:
        logger.info('{:s} exists'.format(treg_filename))
    #
    # Log filename
    log_fname = 'log.hdf5'
    if os.path.exists(log_fname):
        os.remove(log_fname)
    logger.info('Creating: {:s}'.format(log_fname))
    f = h5py.File(log_fname, 'w')
    f.close()
    #
    # process the input information
    remove_from = []
    for key in priorityl:
        #
        # Set TR label
        if 'label' in config[key]:
            trlab = config[key]['label']
        else:
            trlab = key
        #
        # subduction earthquakes
        if re.search('^slab', key) or re.search('^int', key):
            #
            # Info
            logger.info('Classifying: {:s}'.format(key))
            #
            # Reading parameters
            edges_folder = os.path.join(rf, config[key]['folder'])
            distance_buffer_below = None
            if 'distance_buffer_below' in config[key]:
                tmps = config[key]['distance_buffer_below']
                distance_buffer_below = float(tmps)
            distance_buffer_above = None
            if 'distance_buffer_above' in config[key]:
                tmps = config[key]['distance_buffer_above']
                distance_buffer_above = float(tmps)
            lower_depth = None
            if 'lower_depth' in config[key]:
                lower_depth = float(config[key]['lower_depth'])
            #
            # Selecting earthquakes within a time period
            low_year = -10000
            if 'low_year' in config[key]:
                low_year = float(config[key]['low_year'])
            upp_year = 10000
            if 'upp_year' in config[key]:
                upp_year = float(config[key]['upp_year'])
            #
            # Selecting earthquakes within a magnitude range
            low_mag = -5
            if 'low_mag' in config[key]:
                low_mag = float(config[key]['low_mag'])
            upp_mag = 15
            if 'upp_mag' in config[key]:
                upp_mag = float(config[key]['upp_mag'])
            #
            #
            sse = SetSubductionEarthquakes(
                trlab, treg_filename, distance_folder, edges_folder,
                distance_buffer_below, distance_buffer_above, lower_depth,
                catalogue_fname, log_fname, low_year, upp_year, low_mag,
                upp_mag)
            sse.classify(compute_distances, remove_from)
        #
        # crustal earthquakes
        elif re.search('^crustal', key) or re.search('^volcanic', key):
            #
            # info
            logger.info('Classifying: {:s}'.format(key))
            #
            # set data files
            tmps = config[key]['crust_filename']
            distance_delta = config[key]['distance_delta']
            #
            # set shapefile name
            shapefile = None
            if ('shapefile' in config[key]):
                shapefile = os.path.join(rf, config[key]['shapefile'])
                assert os.path.exists(shapefile)
            #
            # crust filename
            crust_filename = os.path.join(rf, tmps)
            #
            # classifying
            sce = SetCrustalEarthquakes(crust_filename,
                                        catalogue_fname,
                                        treg_filename,
                                        distance_delta,
                                        label=trlab,
                                        shapefile=shapefile,
                                        log_fname=log_fname)
            sce.classify(remove_from)
        #
        #
        else:
            raise ValueError('Undefined option: {:s}'.format(key))
        #
        # Updating the list of TR with lower priority
        if trlab not in remove_from:
            remove_from.append(trlab)
    #
    # read the catalogue and write the final classification to a .csv file
    c = get_catalogue(catalogue_fname)
    csvfname = 'classified_earthquakes.csv'
    fou = open(csvfname, 'w')
    f = h5py.File(treg_filename, 'r')
    fou.write('eventID,id,longitude,latitude,tr\n')
    for i, (eid, lo, la) in enumerate(
            zip(c.data['eventID'], c.data['longitude'], c.data['latitude'])):
        fnd = False
        for k in list(f.keys()):
            if f[k][i] and not fnd:
                fou.write('{:s},{:d},{:f},{:f},{:s}\n'.format(
                    str(eid), i, lo, la, k))
                fnd = True
        if not fnd:
            fou.write('{:s},{:d},{:f},{:f},{:s}\n'.format(
                str(eid), i, lo, la, 'unknown'))
    f.close()
    fou.close()
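The keys read by classify imply an .ini layout along the lines of the sketch below; section names, paths and values are placeholders, not a file shipped with the project:

    [general]
    root_folder = /data/classification
    distance_folder = distances/
    catalogue_filename = catalogue.csv
    treg_filename = classified.hdf5
    priority = [crustal, int_A]

    [crustal]
    label = crustal
    crust_filename = crust1pt0.xyz
    distance_delta = 20.

    [int_A]
    label = int
    folder = profiles/int/
    distance_buffer_above = 10.
    distance_buffer_below = 10.
    lower_depth = 60.
    low_mag = 4.0
    upp_mag = 9.5

A call then looks like classify('classify.ini', compute_distances=True, rf=False); with rf=False the root folder is taken from the [general] section.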
    """
    def classify(self, compute_distances, remove_from):
        """
        :param bool compute_distances:
            A boolean indicating if distances between earthquakes and the
            subduction surface should be computed. If False the distances
            stored in `self.distance_folder` will be used.
        :param list remove_from:
            A list of labels identifying TR from where the earthquakes assigned
            to this TR must be removed
        """
        #
        # set parameters
        treg_filename = self.treg_filename
        distance_folder = self.distance_folder
        edges_folder = self.edges_folder
        distance_buffer_below = self.distance_buffer_below
        distance_buffer_above = self.distance_buffer_above
        catalogue_filename = self.catalogue_filename
        lower_depth = self.lower_depth
        if lower_depth is None:
            lower_depth = 400
        #
        # open log file and prepare the group
        flog = h5py.File(self.log_fname, 'a')
        is_new_group = self.label not in flog.keys()
        if is_new_group:
            grp = flog.create_group('/{:s}'.format(self.label))
        else:
            grp = flog['/{:s}'.format(self.label)]
        #
        # read the catalogue
        catalogue = get_catalogue(catalogue_filename)
        neq = len(catalogue.data['longitude'])
        f = h5py.File(treg_filename, "a")
        if self.label in f.keys():
            treg = f[self.label][:]
        else:
            treg = np.full((neq), False, dtype=bool)
        f.close()
        #
        # create the spatial index
        sidx = get_rtree_index(catalogue)
        #
        # build the complex fault surface
        tedges = _read_edges(edges_folder)
        surface = build_complex_surface_from_edges(edges_folder)
        mesh = surface.mesh
        #
        # create polygon encompassing the mesh
        plo = list(mesh.lons[0, :])
        pla = list(mesh.lats[0, :])
        #
        plo += list(mesh.lons[:, -1])
        pla += list(mesh.lats[:, -1])
        #
        plo += list(mesh.lons[-1, ::-1])
        pla += list(mesh.lats[-1, ::-1])
        #
        plo += list(mesh.lons[::-1, 0])
        pla += list(mesh.lats[::-1, 0])
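        # (the four blocks above collect the border of the mesh in order:
        # first row, last column, last row reversed, first column reversed,
        # so plo/pla trace a closed ring around the slab's surface projection)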
        #
        # set variables used in griddata
        data = np.array([mesh.lons.flatten().T, mesh.lats.flatten().T]).T
        values = mesh.depths.flatten().T

        ddd = np.array([
            mesh.lons.flatten().T,
            mesh.lats.flatten().T,
            mesh.depths.flatten()
        ]).T
        if is_new_group:
            grp.create_dataset('mesh', data=ddd)
        #
        # set bounding box of the subduction surface
        min_lo_sub = np.amin(mesh.lons)
        min_la_sub = np.amin(mesh.lats)
        max_lo_sub = np.amax(mesh.lons)
        max_la_sub = np.amax(mesh.lats)
        #
        # select earthquakes within the bounding box
        idxs = sorted(
            list(
                sidx.intersection(
                    (min_lo_sub - DELTA, min_la_sub - DELTA, 0,
                     max_lo_sub + DELTA, max_la_sub + DELTA, lower_depth))))
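        # (the rtree query is three-dimensional: longitude/latitude bounds
        # padded by DELTA, depth bounded by 0 and lower_depth)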
        #
        # Select earthquakes within the bounding box of the surface
        # projection of the fault
        sel_idxs = get_idx_points_inside_polygon(
            catalogue.data['longitude'][idxs],
            catalogue.data['latitude'][idxs],
            plo, pla, idxs, buff_distance=5000.)
        #
        # Select earthquakes and store the indexes of the selected ones
        ccc = []
        idxs = []
        for idx in sel_idxs:
            #
            # Preselection based on magnitude and time of occurrence
            if ((catalogue.data['magnitude'][idx] >= self.low_mag) &
                (catalogue.data['magnitude'][idx] <= self.upp_mag) &
                (catalogue.data['year'][idx] >= self.low_year) &
                (catalogue.data['year'][idx] <= self.upp_year)):
                idxs.append(idx)
                #
                # Update the log file
                ccc.append([
                    catalogue.data['longitude'][idx],
                    catalogue.data['latitude'][idx],
                    catalogue.data['depth'][idx]
                ])
        if is_new_group:
            grp.create_dataset('cat', data=np.array(ccc))
        #
        # Prepare array for the selection of the catalogue
        flags = np.full((len(catalogue.data['longitude'])), False, dtype=bool)
        flags[idxs] = True
        #
        # Create a selector for the catalogue and select earthquakes within
        # bounding box
        sel = CatalogueSelector(catalogue, create_copy=True)
        cat = sel.select_catalogue(flags)
        self.cat = cat
        #
        # If none of the earthquakes in the catalogue is in the bounding box
        # used for the selection we stop the processing
        if len(cat.data['longitude']) < 1:
            f = h5py.File(treg_filename, "a")
            if self.label in f.keys():
                del f[self.label]
            f[self.label] = treg
            f.close()
            return
        #
        # compute distances between the earthquakes in the catalogue and
        # the surface of the fault
        out_filename = os.path.join(distance_folder,
                                    'dist_{:s}.pkl'.format(self.label))
        #
        # compute the distances or load them from the cached file
        if compute_distances:
            tmps = 'Computing distances: {:s}'
            logging.info(tmps.format(out_filename))
            surf_dist = get_distances_from_surface(cat, surface)
            with open(out_filename, 'wb') as fdst:
                pickle.dump(surf_dist, fdst)
        else:
            if not os.path.exists(out_filename):
                raise IOError('Distance file does not exist')
            with open(out_filename, 'rb') as fdst:
                surf_dist = pickle.load(fdst)
            tmps = 'Loading distances from file: {:s}'
            logging.info(tmps.format(out_filename))
            tmps = '    number of values loaded: {:d}'
            logging.info(tmps.format(len(surf_dist)))
        #
        # info
        neqks = len(cat.data['longitude'])
        tmps = 'Number of eqks in the new catalogue     : {:d}'
        logging.info(tmps.format(neqks))
        #
        # Calculate the depth of the top of the slab for every earthquake
        # location
        points = np.array(
            [[lo, la]
             for lo, la in zip(cat.data['longitude'], cat.data['latitude'])])
        #
        # compute the depth of the top of the slab at every epicenter
        # sub_depths = griddata(data, values, (points[:, 0], points[:, 1]),
        #                      method='cubic')
        #
        # interpolate the depth to the top of the slab at each epicentre
        rbfi = Rbf(data[:, 0], data[:, 1], values)
        sub_depths = rbfi(points[:, 0], points[:, 1])
        #
        # cache the slab-top depths computed at each epicentre
        tmps = 'vert_dist_to_slab_{:s}.pkl'.format(self.label)
        out_filename = os.path.join(distance_folder, tmps)
        if not os.path.exists(out_filename):
            with open(out_filename, 'wb') as fdst:
                pickle.dump(sub_depths, fdst)
        #
        #
        # find earthquakes close to the top of the slab
        idxa = []
        for srfd, subd, dept in zip(surf_dist, sub_depths, cat.data['depth']):
            if np.isfinite(srfd) & np.isfinite(subd) & np.isfinite(dept):
                if (float(srfd) <
                        min(distance_buffer_below, distance_buffer_above) *
                        0.90):
                    idxa.append(True)
                elif ((float(srfd) < distance_buffer_below) &
                      (float(subd) < float(dept))):
                    idxa.append(True)
                elif ((float(srfd) < distance_buffer_above) &
                      (float(subd) >= float(dept))):
                    idxa.append(True)
                else:
                    idxa.append(False)
            else:
                idxa.append(False)
        idxa = np.array(idxa)
        #
        # checking the size of lists
        assert len(idxa) == len(cat.data['longitude']) == len(idxs)
        #
        #
        self.surf_dist = surf_dist
        self.sub_depths = sub_depths
        self.tedges = tedges
        self.idxa = idxa
        self.treg = treg
        #
        #
        tl = np.zeros(len(idxa),
                      dtype={
                          'names':
                          ('lon', 'lat', 'dep', 'subd', 'srfd', 'idx'),
                          'formats': ('f8', 'f8', 'f8', 'f8', 'f8', 'i4')
                      })
        tl['lon'] = cat.data['longitude']
        tl['lat'] = cat.data['latitude']
        tl['dep'] = cat.data['depth']
        tl['subd'] = sub_depths
        tl['srfd'] = surf_dist
        tl['idx'] = idxa
        #
        # store log data
        if is_new_group:
            grp.create_dataset('data', data=np.array(tl))
        #
        # updating the selection array
        for i, flag in enumerate(idxa):
            gidx = idxs[i]
            assert catalogue.data['eventID'][gidx] == cat.data['eventID'][i]
            treg[gidx] = bool(flag)
        #
        # storing results in the .hdf5 file
        logging.info('Storing data in:\n{:s}'.format(treg_filename))
        f = h5py.File(treg_filename, "a")
        if len(remove_from):
            fmt = '    treg: {:d}'
            logging.info(fmt.format(len(treg)))
            iii = np.nonzero(treg)[0]
            for tkey in remove_from:
                logging.info('    Cleaning {:s}'.format(tkey))
                old = f[tkey][:]
                fmt = '     before: {:d}'
                logging.info(fmt.format(len(np.nonzero(old)[0])))
                del f[tkey]
                old[iii] = False
                f[tkey] = old
                fmt = '     after: {:d}'
                logging.info(fmt.format(len(np.nonzero(old)[0])))
        #
        # Removing the old classification and adding the new one
        if self.label in f.keys():
            del f[self.label]
        f[self.label] = treg
        #
        # closing files
        f.close()
        flog.close()
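The buffer logic in the selection loop of Example #5 reduces to the rule sketched below (a distilled restatement with assumed names, not project code): an earthquake is attached to the slab when it lies within distance_buffer_below of the surface if it sits below the interpolated slab top, within distance_buffer_above if it sits above it, and unconditionally when it is closer than 90% of the smaller buffer.

    def within_buffer(surf_dist, slab_top_depth, eq_depth, buf_below, buf_above):
        # unconditional acceptance very close to the surface
        if surf_dist < 0.9 * min(buf_below, buf_above):
            return True
        if eq_depth > slab_top_depth:
            # hypocentre below the top of the slab
            return surf_dist < buf_below
        # hypocentre above (or on) the top of the slab
        return surf_dist < buf_above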
Example #6
    def classify(self, remove_from):
        """
        :param str remove_from:
        """
        #
        # get catalogue
        icat = get_catalogue(self.catalogue_fname)
        #
        # open log file and prepare the group
        flog = h5py.File(self.log_fname, 'a')
        if self.label not in flog.keys():
            grp = flog.create_group('/{:s}'.format(self.label))
        else:
            grp = flog['/{:s}'.format(self.label)]
        #
        # load the crust model
        crust, sidx = get_crust_model(self.crust_filename)
        #
        # classify earthquakes
        treg, data = set_crustal(icat, crust, sidx, self.delta)
        #
        # select earthquakes within the polygon
        if self.shapefile is not None:
            #
            # create an array with the coordinates of the earthquakes in the
            # catalogue
            cp = np.column_stack((icat.data['longitude'],
                                  icat.data['latitude']))
            idxs = list(range(len(cp)))
            #
            # prepare array where to store the classification
            isel = np.full((len(icat.data['longitude'])), False, dtype=bool)
            #
            # read polygon using geopandas - get a geodataframe
            gdf = gpd.read_file(self.shapefile)
            #
            # process the geometry i.e. finds points inside
            idx_all_sel = []
            for pol in gdf.geometry:
                pcoo = np.array(pol.exterior.coords)
                sel_idx = get_idx_points_inside_polygon(
                    cp[:, 0], cp[:, 1], pcoo[:, 0], pcoo[:, 1], idxs)
                idx_all_sel += sel_idx
            #
            # final catalogue
            isel[idx_all_sel] = True
            #
            # final TR
            treg = np.logical_and(treg, isel)

        tl = np.zeros(len(treg),
                      dtype={
                          'names': ('lon', 'lat', 'dep', 'moh', 'idx'),
                          'formats': ('f8', 'f8', 'f8', 'f8', 'i4')
                      })
        tl['lon'] = icat.data['longitude']
        tl['lat'] = icat.data['latitude']
        tl['dep'] = icat.data['depth']
        tl['moh'] = np.array(data)[:, 1]
        tl['idx'] = treg
        #
        # store log data
        if 'data' not in grp:
            grp.create_dataset('data', data=np.array(tl))
        #
        # storing results in the .hdf5 file
        f = h5py.File(self.treg_filename, "a")
        if len(remove_from):
            iii = np.nonzero(treg)
            for tkey in remove_from:
                logging.info('    Cleaning {:s}'.format(tkey))
                old = f[tkey][:]
                del f[tkey]
                old[iii] = False
                f[tkey] = old
        if self.label in f.keys():
            del f[self.label]
        f[self.label] = treg
        #
        #
        f.close()
        flog.close()
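In both classifiers the remove_from loop clears the newly assigned earthquakes from every group classified earlier, so the boolean arrays stored in the .hdf5 file stay mutually exclusive. A quick check of that invariant (file name and labels are placeholders):

    import h5py
    import numpy as np

    with h5py.File('classified.hdf5', 'r') as f:
        crustal = f['crustal'][:]
        slab = f['slab'][:]
    # an event kept by the later classification must have been removed
    # from the earlier one
    assert not np.any(np.logical_and(crustal, slab))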