Code example #1
    def set_event(self, public_id, hypocenter = None):
        ''' Set the event to process.

        Parameters
        ----------
        public_id: str
            The public ID of the event.

        hypocenter: :obj:`tuple`
            The hypocenter of the event as (lon, lat, depth),
            with lon and lat in degrees and the depth in meters [m].
        '''
        self.event_public_id = public_id
        self.event_dir = util.event_dir_from_publicid(public_id)
        self.event_hypocenter = hypocenter
        # Load the event metadata from the supplement file.
        self.event_meta = util.get_supplement_data(self.event_public_id,
                                                   category = 'detectiondata',
                                                   name = 'metadata',
                                                   directory = self.supplement_dir)
        geom_dict = util.get_supplement_data(self.event_public_id,
                                             category = 'detectiondata',
                                             name = 'geometryinventory',
                                             directory = self.supplement_dir)
        self.geom_inventory = geom.inventory.Inventory.from_dict(geom_dict['inventory'])
        self.geom_inventory.compute_utm_coordinates()

        if self.event_hypocenter:
            self.compute_hypodistance()
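
A minimal usage sketch for context. The class name, directory paths, event ID and hypocenter below are hypothetical placeholders; only set_event and plot_seismogram come from the excerpts shown here.

# Hypothetical usage; the visualizer class and its constructor
# arguments are assumptions, not part of the code above.
vis = SupplementVisualizer(supplement_dir = '/data/supplements',
                           output_dir = '/data/reports')
vis.set_event('event_2021_0001',
              hypocenter = (16.27, 47.92, 5000.0))  # lon, lat [deg], depth [m]
vis.plot_seismogram()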
Code example #2
    @property
    def meta(self):
        ''' The metadata supplement, loaded lazily from the supplement file.
        '''
        if self._meta is None:
            # Load the event metadata from the supplement file.
            self._meta = util.get_supplement_data(self.event.public_id,
                                                  category = 'detectiondata',
                                                  name = 'metadata',
                                                  directory = self.supplement_dir)
        return self._meta['metadata']
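
This is the common lazy-loading property pattern: the supplement file is read once on first access and cached in _meta. A self-contained sketch of the same pattern, with a placeholder dict standing in for the file read:

class LazyMetaSketch:
    ''' Illustrative sketch of the lazy-loading property above. '''

    def __init__(self):
        # The cache starts empty and is filled on first access.
        self._meta = None

    @property
    def meta(self):
        if self._meta is None:
            # The expensive load runs only once; a placeholder
            # stands in for util.get_supplement_data here.
            self._meta = {'metadata': {'db_id': 1}}
        return self._meta['metadata']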
Code example #3
    def compute_detection_sequence_supplement(self):
        ''' Compute the supplement data representing the detection sequence triangles.
        '''
        # Load the event detection data from the supplement file.
        detection_data = util.get_supplement_data(self.event.public_id,
                                                  category = 'detectiondata',
                                                  name = 'detectiondata',
                                                  directory = self.supplement_dir)
        detection_data = detection_data['detection_data']

        # Load the event metadata from the supplement file.
        meta = self.meta

        # Compute the dataframe using the trigger data.
        sequence_df = None
        for cur_pw_key, cur_process_window in detection_data.items():
            trigger_data = cur_process_window['trigger_data']
            cur_df = self.compute_detection_data_df(trigger_data)
            if sequence_df is None:
                sequence_df = cur_df
            else:
                # DataFrame.append was removed in pandas 2.0;
                # use pd.concat instead.
                sequence_df = pd.concat([sequence_df, cur_df],
                                        ignore_index = True)

        # Limit the data to the event timespan.
        pre_window = 6
        end_window = 6
        win_start = meta['start_time'] - pre_window
        win_end = meta['end_time'] + end_window
        df_utctime = np.array([obspy.UTCDateTime(x) for x in sequence_df.time])
        mask = (df_utctime >= win_start) & (df_utctime <= win_end)
        sequence_df = sequence_df.loc[mask, :]

        # Get some event properties to add to the properties of the feature collections.
        props = {'db_id': meta['db_id'],
                 'event_start': util.isoformat_tz(meta['start_time']),
                 'event_end': util.isoformat_tz(meta['end_time']),
                 'sequence_start': min(sequence_df.time),
                 'sequence_end': max(sequence_df.time),
                 'author_uri': self.project.author_uri,
                 'agency_uri': self.project.agency_uri}

        # Write the sequence dataframe to a geojson file.
        filepath = util.save_supplement(self.event.public_id,
                                        sequence_df.loc[:, ['geom_simp', 'time', 'pgv',
                                                            'pgv_min', 'pgv_max', 'triggered',
                                                            'added_to_event']],
                                        output_dir = self.supplement_dir,
                                        category = 'detectionsequence',
                                        name = 'simplices',
                                        props = props)
        self.logger.info('Saved detection sequence simplices to file %s.',
                         filepath)
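
Because the loop only accumulates the per-window frames, an alternative is to collect them in a list and concatenate once, which avoids repeated copying. A sketch under the same assumptions (pandas imported as pd, detection_data as loaded above), replacing the loop body:

# Sketch: build the sequence dataframe with a single concatenation.
frames = [self.compute_detection_data_df(pw['trigger_data'])
          for pw in detection_data.values()]
sequence_df = pd.concat(frames, ignore_index = True)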
Code example #4
    def compute_pgv_sequence_supplement(self):
        ''' Compute the supplement data representing the PGV sequence.
        '''
        # Load the event metadata from the supplement file.
        meta = self.meta

        # Load the PGV data stream.
        pgv_stream = util.get_supplement_data(self.event.public_id,
                                              category = 'detectiondata',
                                              name = 'pgv',
                                              directory = self.supplement_dir)

        # Trim the stream.
        pgv_stream.trim(starttime = meta['start_time'] - 6,
                        endtime = meta['end_time'] + 6,
                        pad = True)

        inventory = self.project.inventory

        station_nsl = [('MSSNet', x.stats.station, x.stats.location) for x in pgv_stream]
        station_nsl = [':'.join(x) for x in station_nsl]
        stations = [inventory.get_station(nsl_string = x)[0] for x in station_nsl]
        times = pgv_stream[0].times("utcdatetime")
        data = np.array([x.data for x in pgv_stream]).transpose()

        # Get the stations with no available data.
        available_stations = inventory.get_station()
        no_data_stations = [x for x in available_stations if x.nsl_string not in station_nsl]

        detection_limits = meta['detection_limits']

        sequence_df = None

        for k in range(len(times)):
            cur_time = times[k]
            triggered = []
            for cur_station in stations:
                if cur_station.nsl_string not in detection_limits.keys():
                    cur_trigger = False
                else:
                    cur_detection_limit = detection_limits[cur_station.nsl_string]
                    if cur_time >= cur_detection_limit[0] and cur_time <= cur_detection_limit[1]:
                        cur_trigger = True
                    else:
                        cur_trigger = False
                triggered.append(cur_trigger)

            cur_points = [shapely.geometry.Point(x.x, x.y) for x in stations]
            cur_df = gpd.GeoDataFrame({'geom_vor': [shapely.geometry.Polygon([])] * len(stations),
                                       'geom_stat': cur_points,
                                       'time': [util.isoformat_tz(cur_time)] * len(stations),
                                       'nsl': [x.nsl_string for x in stations],
                                       'x': [x.x for x in stations],
                                       'y': [x.y for x in stations],
                                       'x_utm': [x.x_utm for x in stations],
                                       'y_utm': [x.y_utm for x in stations],
                                       'pgv': data[k, :],
                                       'triggered': triggered},
                                      crs = "epsg:4326",
                                      geometry = 'geom_stat')

            # Add the station amplification factors.
            self.add_station_amplification(cur_df)

            # Compute the voronoi cells and add them as a geometry to the dataframe.
            # Compute the boundary clipping using shapely because geopandas clipping 
            # throws exceptions e.g. because of None values.
            voronoi.compute_voronoi_geometry(cur_df,
                                             boundary = self.network_boundary.loc[0, 'geometry'])

            # Add the no-data stations
            cur_nd_points = [shapely.geometry.Point(x.x, x.y) for x in no_data_stations]
            cur_nd_df = gpd.GeoDataFrame({'geom_vor': [shapely.geometry.Polygon([])] * len(no_data_stations),
                                          'geom_stat': cur_nd_points,
                                          'time': [util.isoformat_tz(cur_time)] * len(no_data_stations),
                                          'nsl': [x.nsl_string for x in no_data_stations],
                                          'x': [x.x for x in no_data_stations],
                                          'y': [x.y for x in no_data_stations],
                                          'x_utm': [x.x_utm for x in no_data_stations],
                                          'y_utm': [x.y_utm for x in no_data_stations],
                                          'pgv': [None] * len(no_data_stations),
                                          'triggered': [None] * len(no_data_stations)},
                                         crs = "epsg:4326",
                                         geometry = 'geom_stat')

            # Add the station amplification factors to the no-data stations.
            self.add_station_amplification(cur_nd_df)

            # Append the no-data stations (DataFrame.append was removed
            # in pandas 2.0; use pd.concat instead).
            cur_df = pd.concat([cur_df, cur_nd_df])

            # Add the dataframe to the sequence.
            if sequence_df is None:
                sequence_df = cur_df
            else:
                sequence_df = pd.concat([sequence_df, cur_df])

        # Get some event properties to add to the properties of the feature collections.
        props = {'db_id': meta['db_id'],
                 'event_start': util.isoformat_tz(meta['start_time']),
                 'event_end': util.isoformat_tz(meta['end_time']),
                 'sequence_start': min(sequence_df.time),
                 'sequence_end': max(sequence_df.time),
                 'author_uri': self.project.author_uri,
                 'agency_uri': self.project.agency_uri}

        # Write the voronoi dataframe to a geojson file.
        sequence_df = sequence_df.set_geometry('geom_vor')
        filepath = util.save_supplement(self.event.public_id,
                                        sequence_df.loc[:, ['geom_vor', 'time', 'nsl',
                                                            'pgv', 'sa', 'triggered']],
                                        output_dir = self.supplement_dir,
                                        category = 'pgvsequence',
                                        name = 'pgvvoronoi',
                                        props = props)
        self.logger.info('Saved pgv voronoi sequence to file %s.', filepath)

        sequence_df = sequence_df.set_geometry('geom_stat')
        filepath = util.save_supplement(self.event.public_id,
                                        sequence_df.loc[:, ['geom_stat', 'time', 'nsl',
                                                            'pgv', 'sa', 'triggered']],
                                        output_dir = self.supplement_dir,
                                        category = 'pgvsequence',
                                        name = 'pgvstation',
                                        props = props)
        self.logger.info('Saved pgv station marker sequence to file %s.',
                         filepath)
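
The per-sample trigger test inside the time loop can be isolated into a small helper. A sketch, assuming detection_limits maps an NSL string to a (start, end) pair of UTCDateTime objects as in the code above; the function name is hypothetical:

def is_triggered(nsl, time, detection_limits):
    ''' Sketch of the trigger test used in the time loop above. '''
    limits = detection_limits.get(nsl)
    if limits is None:
        # Stations without detection limits never trigger.
        return False
    return limits[0] <= time <= limits[1]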
Code example #5
    def plot_seismogram(self, width = 120,
                        trace_height = 10,
                        stations_per_panel = 8,
                        start = 10,
                        length = 60):
        ''' Plot the seismogram data.

        Parameters
        ----------
        width: float 
            The figure width [mm].

        trace_height: float
            The height of a seismogram trace [mm].

        stations_per_panel: int 
            The number of stations per image.

        start: float
            The time after the minimum available start time to start the plot [s].

        length: float
            The length of the seismogram measured from the minimum available start time [s].
            
        '''
        # Load the velocity seismogram data.
        vel_st = util.get_supplement_data(public_id = self.event_public_id,
                                          category = 'detectiondata',
                                          name = 'velocity',
                                          directory = self.supplement_dir)
        
        if start:
            min_start = np.min([x.stats.starttime for x in vel_st])
            vel_st = vel_st.trim(starttime = min_start + start)
        if length:
            min_start = np.min([x.stats.starttime for x in vel_st])
            vel_st = vel_st.trim(endtime = min_start + length)

        # Determine the stations to plot.
        stations_to_plot = sorted(list(set([x.stats.station for x in vel_st])))
        channels_to_plot = ['Hno', 'Hpa']

        channel_colors = ['black', 'grey']

        # Get the station instances from the inventory.
        stations_to_plot = [self.geom_inventory.get_station(name = x)[0] for x in stations_to_plot]
        
        # If a hypocenter is available sort the stations by
        # hypodistance.
        if self.event_hypocenter:
            stations_to_plot = sorted(stations_to_plot,
                                      key = lambda x: x.hypodist)
            
        max_amp = []

        if stations_per_panel:
            n_panels = int(np.ceil(len(stations_to_plot) / stations_per_panel))
        else:
            n_panels = 1
            stations_per_panel = len(stations_to_plot)

        for cur_panel_num in range(n_panels):
            start_ind = cur_panel_num * stations_per_panel
            end_ind = start_ind + stations_per_panel
            cur_stations_to_plot = stations_to_plot[start_ind:end_ind]
            
            # Create the figure.
            height = trace_height * len(cur_stations_to_plot) * len(channels_to_plot)
            fig = self.create_figure(width = width,
                                     height = height)
            gs = gridspec.GridSpec(len(cur_stations_to_plot), 1)
            gs.update(hspace = 0.1)
            
            for k, cur_station in enumerate(cur_stations_to_plot):
                cur_stat_st = vel_st.select(station = cur_station.name)
                cur_stat_gs = gs[k].subgridspec(len(channels_to_plot), 1,
                                                wspace = 0,
                                                hspace = 0)

                for m, cur_channel_name in enumerate(channels_to_plot):
                    cur_chan_st = cur_stat_st.select(channel = cur_channel_name)
                    cur_trace = cur_chan_st[0]
                    cur_trace.detrend('constant')
                    cur_ax = fig.add_subplot(cur_stat_gs[m])

                    cur_max_amp = np.max(np.abs(cur_trace.data))
                    cur_ax.plot(cur_trace.times(type = 'relative'),
                                cur_trace.data,
                                linewidth = 0.5,
                                color = channel_colors[m])

                    if (k == 0) and (m == 0):
                        cur_ax.tick_params(axis = 'x',
                                           direction = 'in',
                                           top = True,
                                           bottom = False,
                                           labeltop = False,
                                           labelbottom = False)
                    elif (k == len(cur_stations_to_plot) - 1) and (m == len(channels_to_plot) - 1):
                        cur_ax.tick_params(axis = 'x',
                                           direction = 'in',
                                           top = False,
                                           bottom = True,
                                           labelsize = 6)
                    else:
                        cur_ax.tick_params(axis = 'x',
                                           direction = 'in',
                                           top = False,
                                           bottom = False,
                                           labeltop = False,
                                           labelbottom = False)

                    cur_ax.tick_params(axis = 'y',
                                       direction = 'in',
                                       left = False,
                                       right = False,
                                       labelleft = False,
                                       labelright = False)

                    cur_ax.set_xlim(0, cur_trace.times(type = 'relative')[-1])
                    max_amp.append(cur_max_amp)
                    cur_ax.set_ylim(-cur_max_amp, cur_max_amp)

                    props = dict(boxstyle = 'round',
                                 edgecolor = 'black',
                                 facecolor='white')

                    stat_string = '{stat}:{chan}'.format(stat = cur_station.name,
                                                         chan = cur_channel_name)
                    cur_ax.text(0.02, 0.9, stat_string,
                                transform = cur_ax.transAxes,
                                fontsize = 6,
                                va = 'top',
                                ha = 'left',
                                bbox = props)

                    amp_string = "{amp:.3f} mm/s".format(amp = cur_max_amp * 1000)
                    cur_xlim = cur_ax.get_xlim()
                    cur_ax.text(cur_xlim[1], cur_max_amp, amp_string,
                                transform = cur_ax.transData,
                                fontsize = 6,
                                va = 'top',
                                ha = 'right')
                    cur_ax.text(cur_xlim[1], 0, '0',
                                transform = cur_ax.transData,
                                fontsize = 6,
                                va = 'top',
                                ha = 'right')

            plt.tight_layout()
            
            # Create the output directory.
            img_output_dir = os.path.join(self.output_dir,
                                          self.event_dir,
                                          'seismogram',
                                          'images')
            if not os.path.exists(img_output_dir):
                os.makedirs(img_output_dir)
            filename = '{pub_id}_seismogram_panel_{panel:02d}.png'.format(pub_id = self.event_public_id,
                                                                          panel = cur_panel_num)
            filepath = os.path.join(img_output_dir, filename)
            plt.savefig(filepath,
                        dpi = 300,
                        bbox_inches = 'tight',
                        pad_inches = 0)
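
create_figure is not shown in these excerpts. Since matplotlib sizes figures in inches while the parameters above are in millimetres, a helper could look like the following sketch (an assumption, not the actual implementation):

import matplotlib.pyplot as plt

def create_figure(width, height):
    ''' Sketch: create a figure with width and height given in mm. '''
    mm_per_inch = 25.4
    return plt.figure(figsize = (width / mm_per_inch,
                                 height / mm_per_inch))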
Code example #6
    def compute_pgv_contour_sequence_supplement(self):
        ''' Compute the supplement data representing the PGV sequence.
        '''
        # Load the event metadata from the supplement file.
        meta = self.meta

        # Load the PGV data stream.
        pgv_stream = util.get_supplement_data(self.event_public_id,
                                              category = 'detectiondata',
                                              name = 'pgv',
                                              directory = self.supplement_dir)

        # Trim the stream.
        pgv_stream.trim(starttime = meta['start_time'] - 6,
                        endtime = meta['end_time'] + 6,
                        pad = True)

        inventory = self.project.inventory

        station_nsl = [('MSSNet', x.stats.station, x.stats.location) for x in pgv_stream]
        station_nsl = [':'.join(x) for x in station_nsl]
        stations = [inventory.get_station(nsl_string = x)[0] for x in station_nsl]
        times = pgv_stream[0].times("utcdatetime")
        data = np.array([x.data for x in pgv_stream]).transpose()

        detection_limits = meta['detection_limits']

        sequence_df = None
        last_pgv_df = None
        last_krig_z = None
        no_change_cnt = 0
        
        for k in range(len(times)):
            cur_time = times[k]
            self.logger.info("Computing frame {time}.".format(time = str(cur_time)))
            triggered = []
            for cur_station in stations:
                if cur_station.nsl_string not in detection_limits.keys():
                    cur_trigger = False
                else:
                    cur_detection_limit = detection_limits[cur_station.nsl_string]
                    if cur_time >= cur_detection_limit[0] and cur_time <= cur_detection_limit[1]:
                        cur_trigger = True
                    else:
                        cur_trigger = False
                triggered.append(cur_trigger)

            cur_points = [shapely.geometry.Point(x.x, x.y) for x in stations]
            cur_df = gpd.GeoDataFrame({'geom_vor': [shapely.geometry.Polygon([])] * len(stations),
                                       'geom_stat': cur_points,
                                       'time': [util.isoformat_tz(cur_time)] * len(stations),
                                       'nsl': [x.nsl_string for x in stations],
                                       'x': [x.x for x in stations],
                                       'y': [x.y for x in stations],
                                       'x_utm': [x.x_utm for x in stations],
                                       'y_utm': [x.y_utm for x in stations],
                                       'pgv': data[k, :],
                                       'triggered': triggered},
                                      crs = "epsg:4326",
                                      geometry = 'geom_stat')

            # Add the station amplification factors.
            self.add_station_amplification(cur_df)

            # Compute the corrected pgv values.
            cur_df['pgv_corr'] = cur_df.pgv / cur_df.sa

            # Use only the stations with a valid corrected pgv.
            cur_df = cur_df[cur_df['pgv_corr'].notna()]
            cur_df = cur_df.reset_index()

            # Update the pgv values to keep the event maximum pgv.
            # Track changes of the event maximum pgv.
            if last_pgv_df is not None:
                # Use the current PGV values only if they are higher than
                # the last ones.
                #
                # Update the last_pgv_df with the current df. It is possible
                # that rows are missing or new ones are available.
                # Remove the rows that are not present in the cur_df.
                tmp_df = last_pgv_df[last_pgv_df.nsl.isin(cur_df.nsl)]
                # Add the rows that are not present in the last_pgv_df.
                mask_df = pd.concat([tmp_df,
                                     cur_df[~cur_df.nsl.isin(last_pgv_df.nsl)]],
                                    ignore_index = True)

                # Sort the two dataframes using the nsl.
                tmp_df = tmp_df.sort_values(by = 'nsl',
                                            ignore_index = True)
                mask_df = mask_df.sort_values(by = 'nsl',
                                              ignore_index = True)

                # Check for matching station NSL codes.
                if np.any(tmp_df['nsl'].values != mask_df['nsl'].values):
                    raise RuntimeError("The station NSL codes of the two dataframes to compare are not equal.")

                # Reset the values for the stations that already had a
                # larger pgv value.
                mask = cur_df.pgv_corr < mask_df.pgv_corr
                cur_df.loc[mask, 'pgv_corr'] = mask_df.loc[mask, 'pgv_corr']

                if np.all(mask):
                    no_change_cnt += 1
                else:
                    no_change_cnt = 0
                self.logger.info('no_change_cnt: ' + str(no_change_cnt))

            # Exit if there was no change of the maximum event pgv for some time.
            if no_change_cnt >= 5:
                self.logger.info('No change for some time, stop computation of contours.')
                break

            # Keep the last pgv dataframe.
            # Get the rows that are not available in cur_df and keep them.
            if last_pgv_df is not None:
                tmp_df = last_pgv_df[~last_pgv_df.nsl.isin(cur_df.nsl)]
                last_pgv_df = pd.concat([cur_df.copy(), tmp_df.copy()],
                                        ignore_index = True)
            else:
                last_pgv_df = cur_df.copy()
           
            # Interpolate to a regular grid using ordinary kriging.
            self.logger.info("Interpolate")
            krig_z, krig_sigmasq, grid_x, grid_y = util.compute_pgv_krigging(x = cur_df.x_utm.values,
                                                                             y = cur_df.y_utm.values,
                                                                             z = np.log10(cur_df.pgv_corr),
                                                                             nlags = 40,
                                                                             verbose = False,
                                                                             enable_plotting = False,
                                                                             weight = True)

            # Update the interpolated pgv values only if they are higher than the last ones.
            #if last_krig_z is not None:
            #    cur_mask = krig_z < last_krig_z
            #    krig_z[cur_mask] = last_krig_z[cur_mask]
            #last_krig_z = krig_z

            self.logger.info("Contours")
            # Compute the contours.
            intensity = np.arange(2, 8.1, 0.1)
            # Add lower and upper limits to catch all the data below or
            # above the desired intensity range.
            intensity = np.hstack([[-10], intensity, [20]])
            # Use a low intensity_I_pgv value to make sure that the lowest
            # contour level captures all PGV values.
            intensity_pgv = util.intensity_to_pgv(intensity = intensity,
                                                  intensity_I_pgv = 1e-9)

            # Create and delete a figure to prevent pyplot from plotting the
            # contours.
            fig = plt.figure()
            ax = fig.add_subplot(111)
            cs = ax.contourf(grid_x, grid_y, krig_z, np.log10(intensity_pgv[:, 1]))
            contours = util.contourset_to_shapely(cs)
            fig.clear()
            plt.close(fig)
            del ax
            del fig
            del cs

            self.logger.info('dataframe')
            # Create a geodataframe of the contour polygons.
            cont_data = {'time': [],
                         'geometry': [],
                         'intensity': [],
                         'pgv': []}

            for cur_level, cur_poly in contours.items():
                cur_intensity = util.pgv_to_intensity(pgv = [10**cur_level] * len(cur_poly))
                cont_data['time'].extend([util.isoformat_tz(cur_time)] * len(cur_poly))
                cont_data['geometry'].extend(cur_poly)
                cont_data['intensity'].extend(cur_intensity[:, 1].tolist())
                cont_data['pgv'].extend([10**cur_level] * len(cur_poly))
            cur_cont_df = gpd.GeoDataFrame(data = cont_data)

            # Convert the polygon coordinates to EPSG:4326.
            src_proj = pyproj.Proj(init = 'epsg:' + self.project.inventory.get_utm_epsg()[0][0])
            dst_proj = pyproj.Proj(init = 'epsg:4326')
            cur_cont_df = util.reproject_polygons(df = cur_cont_df,
                                                  src_proj = src_proj,
                                                  dst_proj = dst_proj)

            # Clip to the network boundary.
            # Clipping a polygon may create multiple polygons.
            # Therefore create a new dataframe holding only one polygon per
            # entry, thus avoiding possible problems due to a mixture of
            # multipolygons and polygons.
            self.logger.info('Clipping.')
            cont_data = {'time': [],
                         'geometry': [],
                         'intensity': [],
                         'pgv': []}
            for cur_id, cur_row in cur_cont_df.iterrows():
                cur_poly = cur_row.geometry
                clipped_poly = cur_poly.intersection(self.network_boundary.loc[0, 'geometry'])
                self.logger.info(type(clipped_poly))
                if isinstance(clipped_poly, shapely.geometry.multipolygon.MultiPolygon):
                    # Shapely 2.0 removed iteration over multi-part
                    # geometries; use the .geoms accessor instead.
                    cur_polys = list(clipped_poly.geoms)
                    cont_data['time'].extend([cur_row.time] * len(cur_polys))
                    cont_data['geometry'].extend(cur_polys)
                    cont_data['intensity'].extend([cur_row.intensity] * len(cur_polys))
                    cont_data['pgv'].extend([cur_row.pgv] * len(cur_polys))
                else:
                    cont_data['time'].append(cur_row.time)
                    cont_data['geometry'].append(clipped_poly)
                    cont_data['intensity'].append(cur_row.intensity)
                    cont_data['pgv'].append(cur_row.pgv)
            cur_cont_df = gpd.GeoDataFrame(data = cont_data)

            # Remove rows having an empty geometry.
            self.logger.info(cur_cont_df['geometry'])
            cur_cont_df = cur_cont_df[~cur_cont_df['geometry'].is_empty]
            self.logger.info(cur_cont_df['geometry'])
            
            self.logger.info('Appending to sequence.')
            # Add the dataframe to the sequence.
            if sequence_df is None:
                sequence_df = cur_cont_df
            else:
                sequence_df = pd.concat([sequence_df, cur_cont_df])

        # Get some event properties to add to the properties of the feature collections.
        props = {'db_id': meta['db_id'],
                 'event_start': util.isoformat_tz(meta['start_time']),
                 'event_end': util.isoformat_tz(meta['end_time']),
                 'sequence_start': min(sequence_df.time),
                 'sequence_end': max(sequence_df.time),
                 'author_uri': self.project.author_uri,
                 'agency_uri': self.project.agency_uri,
                 'station_correction_applied': True}

        # Write the voronoi dataframe to a geojson file.
        filepath = util.save_supplement(self.event_public_id,
                                        sequence_df,
                                        output_dir = self.supplement_dir,
                                        category = 'pgvsequence',
                                        name = 'pgvcontour',
                                        props = props)
        self.logger.info('Saved pgv contour sequence to file %s.', filepath)
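
The running-maximum bookkeeping and the early exit in the frame loop above can be summarized in isolation. A sketch, with frames as a hypothetical iterable of per-frame PGV arrays and a hypothetical function name:

import numpy as np

def track_event_maximum(frames, patience = 5):
    ''' Sketch of the running-maximum early exit used above. '''
    running_max = None
    no_change_cnt = 0
    for frame in frames:
        if running_max is None:
            running_max = np.asarray(frame, dtype = float)
            continue
        mask = frame < running_max
        # Keep the event maximum per station.
        frame = np.where(mask, running_max, frame)
        # Count consecutive frames without a new maximum.
        no_change_cnt = no_change_cnt + 1 if np.all(mask) else 0
        if no_change_cnt >= patience:
            # Stop once the maxima have been stable for `patience` frames.
            break
        running_max = frame
    return running_max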