Example 1
    def create_result(self, scnl, origin_resource=None):
        ''' Write the collected PSD data for the given SCNL to a shelve file.
        '''
        export_data = self.psd_data[scnl]

        sorted_keys = sorted(export_data.keys())
        first_time = UTCDateTime(sorted_keys[0])
        last_time = UTCDateTime(sorted_keys[-1])

        shelve_result = result.ShelveResult(name='psd',
                                            start_time=first_time,
                                            end_time=last_time,
                                            origin_name=self.name,
                                            origin_resource=origin_resource,
                                            sub_directory=(scnl[0],
                                                           scnl[1],
                                                           "{0:04d}_{1:03d}".format(first_time.year,
                                                                                    first_time.julday)),
                                            postfix='_'.join(scnl),
                                            db=export_data)
        self.result_bag.add(shelve_result)
        self.logger.info("Published the result for scnl %s (%s to %s).", scnl,
                                                                         first_time.isoformat(),
                                                                         last_time.isoformat())

        # Clear the exported PSD data.
        self.psd_data[scnl] = {}
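
A minimal usage sketch (hypothetical caller, not part of the original snippet; collector stands for an instance of the class that defines create_result):

# Publish the collected PSD data of every recorded SCNL.
for cur_scnl in list(collector.psd_data.keys()):
    collector.create_result(cur_scnl, origin_resource=None)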
Example 2
    def test_write_detection_to_database(self):
        ''' Test writing a detection to the database.
        '''
        # Create a detection.
        # Set the date values.
        start_time = '2000-01-01T00:00:00'
        end_time = '2000-01-01T01:00:00'
        creation_time = UTCDateTime()

        # Get the stations from the inventory.
        inventory = self.project.db_inventory
        stat1 = inventory.get_station(name='DUBA')[0]
        stat2 = inventory.get_station(name='WADU')[0]
        stat3 = inventory.get_station(name='WAPE')[0]

        det = detection.Detection(start_time=start_time,
                                  end_time=end_time,
                                  creation_time=creation_time,
                                  stations=[stat1, stat2, stat3],
                                  max_pgv={
                                      stat1.nsl_string: 0.1,
                                      stat2.nsl_string: 0.2,
                                      stat3.nsl_string: 0.3
                                  })
        det.write_to_database(self.project)

        detection_orm = self.project.db_tables['detection']
        db_session = self.project.get_db_session()
        try:
            result = db_session.query(detection_orm).\
                options(sqlalchemy.orm.subqueryload(detection_orm.stat1)).\
                options(sqlalchemy.orm.subqueryload(detection_orm.stat2)).\
                options(sqlalchemy.orm.subqueryload(detection_orm.stat3)).\
                filter(detection_orm.id == det.db_id).all()
        finally:
            db_session.close()

        self.assertEqual(len(result), 1)
        tmp = result[0]
        self.assertEqual(tmp.start_time, UTCDateTime(start_time).timestamp)
        self.assertEqual(tmp.end_time, UTCDateTime(end_time).timestamp)
        self.assertEqual(tmp.creation_time, creation_time.isoformat())
        self.assertEqual(tmp.stat1_id, stat1.id)
        self.assertEqual(tmp.stat2_id, stat2.id)
        self.assertEqual(tmp.stat3_id, stat3.id)
        self.assertEqual(tmp.stat1.id, stat1.id)
        self.assertEqual(tmp.stat2.id, stat2.id)
        self.assertEqual(tmp.stat3.id, stat3.id)
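
Note that the session is closed before the assertions run, so the stat1, stat2 and stat3 relationships are loaded eagerly with subqueryload; accessing an unloaded relationship on a detached instance would raise a DetachedInstanceError instead. The pattern in isolation (a sketch; session, DetectionOrm and detection_id are assumed placeholders):

import sqlalchemy.orm

# Eagerly load the stat1 relationship so it survives session.close().
rows = (session.query(DetectionOrm)
        .options(sqlalchemy.orm.subqueryload(DetectionOrm.stat1))
        .filter(DetectionOrm.id == detection_id)
        .all())
session.close()
station = rows[0].stat1  # already loaded, safe on the detached instance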
Example 3
    def test_write_to_database(self):
        ''' Test the write_to_database method.
        '''
        creation_time = UTCDateTime()
        catalog = ev_core.Catalog(name='test',
                                  description='A test description.',
                                  agency_uri='uot',
                                  author_uri='tester',
                                  creation_time=creation_time)
        catalog.write_to_database(self.project)

        db_catalog_orm = self.project.dbTables['event_catalog']
        db_session = self.project.getDbSession()
        result = db_session.query(db_catalog_orm).all()
        db_session.close()
        self.assertEqual(len(result), 1)
        tmp = result[0]
        self.assertEqual(tmp.name, 'test')
        self.assertEqual(tmp.description, 'A test description.')
        self.assertEqual(tmp.agency_uri, 'uot')
        self.assertEqual(tmp.author_uri, 'tester')
        self.assertEqual(tmp.creation_time, creation_time.isoformat())
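
The assertions in the two tests above suggest two storage conventions (an inference from the tests, not stated in the snippets): start and end times are stored as POSIX timestamps, creation times as ISO 8601 strings. UTCDateTime accepts both forms, so reading the values back is straightforward:

from obspy import UTCDateTime

start = UTCDateTime(tmp.start_time)       # from a POSIX timestamp (float)
created = UTCDateTime(tmp.creation_time)  # from an ISO 8601 string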
Example 4
    def process(self,
                start_time,
                end_time,
                station_names,
                channel_names,
                event_catalog,
                event_ids=None):
        ''' Start the detection.

        Parameters
        ----------
        start_time : :class:`~obspy.core.utcdatetime.UTCDateTime`
            The start time of the timespan for which to detect the events.

        end_time : :class:`~obspy.core.utcdatetime.UTCDateTime`
            The end time of the timespan for which to detect the events.

        station_names : list of str
            The names of the stations to process.

        channel_names : list of str
            The names of the channels to process.

        event_catalog : str
            The name of the event catalog to process.

        event_ids : list of int, optional
            If individual events are specified, this list contains the
            database IDs of the events to process.
        '''
        self.logger.info("Processing timespan %s to %s.",
                         start_time.isoformat(), end_time.isoformat())

        result_bag = ResultBag()

        event_lib = ev_core.Library('events')
        event_lib.load_catalog_from_db(self.project, name=event_catalog)
        catalog = event_lib.catalogs[event_catalog]
        if event_ids is None:
            # Load the events for the given time span from the database.
            # TODO: Remove the hardcoded min_event_length value and create
            # user-selectable filter fields.
            catalog.load_events(project=self.project,
                                start_time=start_time,
                                end_time=end_time,
                                min_event_length=1)
        else:
            # Load the events with the given ids from the database. Ignore the
            # time-span.
            catalog.load_events(event_id=event_ids)

        # Abort the execution if no events are available for the time span.
        if not catalog.events:
            if event_ids is None:
                self.logger.info('No events found for the timespan %s to %s.',
                                 start_time.isoformat(), end_time.isoformat())
            else:
                self.logger.info(
                    'No events found for the specified event IDs: %s.',
                    event_ids)
            return

        # Get the channels to process.
        channels = []
        for cur_station in station_names:
            for cur_channel in channel_names:
                channels.extend(
                    self.project.geometry_inventory.get_channel(
                        station=cur_station, name=cur_channel))
        scnl = [x.scnl for x in channels]

        n_events = len(catalog.events)
        active_timespan = ()
        try:
            for k, cur_event in enumerate(catalog.events):
                self.logger.info("Processing event %d (%d/%d).",
                                 cur_event.db_id, k, n_events)

                # Load the waveform data for the event and the given stations and
                # channels.
                # TODO: Add a feature which allows adding a window before and after
                # the event time limits.
                pre_event_time = 20
                post_event_time = 10

                # TODO: Make the length of the waveform load interval user
                # selectable.
                waveform_load_interval = 3600

                # When many short events with small gaps in between are
                # processed, it is very inefficient to load the waveform data
                # for each event individually. Load a larger time-span and
                # then request the waveform data from the waveclient stock.
                timespan_begin = UTCDateTime(cur_event.start_time.year,
                                             cur_event.start_time.month,
                                             cur_event.start_time.day,
                                             cur_event.start_time.hour)
                timespan_end = UTCDateTime(
                    cur_event.end_time.year, cur_event.end_time.month,
                    cur_event.end_time.day,
                    cur_event.end_time.hour) + waveform_load_interval
                if not active_timespan:
                    self.logger.info(
                        "Initial stream request for hourly time-span: %s to %s.",
                        timespan_begin.isoformat(), timespan_end.isoformat())
                    stream = self.request_stream(start_time=timespan_begin,
                                                 end_time=timespan_end,
                                                 scnl=scnl)
                    active_timespan = (timespan_begin, timespan_end)

                cur_start_time = cur_event.start_time - pre_event_time
                cur_end_time = cur_event.end_time + post_event_time

                if not (((cur_start_time >= active_timespan[0]) and
                         (cur_start_time <= active_timespan[1])) and
                        ((cur_end_time >= active_timespan[0]) and
                         (cur_end_time <= active_timespan[1]))):
                    self.logger.info(
                        "Requesting stream for hourly time-span: %s to %s.",
                        timespan_begin.isoformat(), timespan_end.isoformat())
                    stream = self.request_stream(start_time=timespan_begin,
                                                 end_time=timespan_end,
                                                 scnl=scnl)
                    active_timespan = (timespan_begin, timespan_end)

                stream = self.request_stream(start_time=cur_start_time,
                                             end_time=cur_end_time,
                                             scnl=scnl)

                # Execute the processing stack.
                # TODO: The 0.5 seconds were added because there's currently no
                # access to the event detection of the individual channels. Make
                # sure that this hard-coded value is turned into a
                # user-selectable one or removed completely.
                process_limits = (cur_event.start_time - 0.5,
                                  cur_event.end_time)
                self.processing_stack.execute(stream=stream,
                                              process_limits=process_limits)

                # Put the results of the processing stack into the results bag.
                results = self.processing_stack.get_results()
                resource_id = self.project.rid + cur_event.rid
                result_bag.add(resource_id=resource_id, results=results)

        finally:
            # Add the time-span directory to the output directory. If the
            # loop was interrupted before the last event, use the end time of
            # the last processed event; otherwise use the requested end time.
            if k != n_events - 1:
                cur_end_time = cur_event.end_time
            else:
                cur_end_time = end_time
            timespan_dir = start_time.strftime(
                '%Y%m%dT%H%M%S') + '_to_' + cur_end_time.strftime(
                    '%Y%m%dT%H%M%S')
            cur_output_dir = os.path.join(self.output_dir, timespan_dir)
            # Save the processing results to files.
            result_bag.save(output_dir=cur_output_dir, scnl=scnl)
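
A usage sketch (hypothetical driver code; node stands for an instance of the class that defines process, and the station, channel and catalog names are illustrative):

from obspy import UTCDateTime

# Process one day of events from the given catalog.
node.process(start_time=UTCDateTime('2013-06-01T00:00:00'),
             end_time=UTCDateTime('2013-06-02T00:00:00'),
             station_names=['DUBA', 'WADU'],
             channel_names=['HHZ'],
             event_catalog='test_catalog')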
Example 5
from obspy import UTCDateTime


def read_gcmt_file(cmt_file):
    """Read the moment tensor information from a GCMT moment tensor file and
    store its content in a Python dictionary.

    :param cmt_file: location of the text file to be read
    :type cmt_file: string
    """
    with open(cmt_file, 'r') as input_file:
        lines = [line.split() for line in input_file]

    # The first token sometimes fuses the catalog code and the year (e.g.
    # PDEQ2011); split them apart.
    if 'Q' in lines[0][0]:
        new_line = lines[0]
        code, year = lines[0][0].split('Q')
        lines[0] = [code, year] + new_line[1:]
    elif 'W' in lines[0][0]:
        new_line = lines[0]
        code, year = lines[0][0].split('W')
        lines[0] = [code, year] + new_line[1:]

    year, month, day, hour, minute, second, lat, lon, hyp_depth\
        = lines[0][1:10]
    year = int(year)
    month = int(month)
    day = int(day)
    hour = int(hour)
    minute = int(minute)
    second = int(float(second))
    origin_time = UTCDateTime(year, month, day, hour, minute, second)
    date_time = origin_time.isoformat()
    time_shift = max(float(lines[2][2]), 5)
    half_duration = float(lines[3][2])

    centroid_lat = float(lines[4][1])
    centroid_lon = float(lines[5][1])
    centroid_depth = float(lines[6][1])

    mrr = float(lines[7][1])
    mtt = float(lines[8][1])
    mpp = float(lines[9][1])
    mrt = float(lines[10][1])
    mrp = float(lines[11][1])
    mtp = float(lines[12][1])

    input_dict = {
        'mrr': mrr,
        'mtt': mtt,
        'mpp': mpp,
        'mrt': mrt,
        'mrp': mrp,
        'mtp': mtp,
        'datetime': date_time,
        'date_origin': origin_time,
        'lat': lat,
        'lon': lon,
        'depth': hyp_depth,
        'time_shift': time_shift,
        'half_duration': half_duration,
        'centroid_lat': centroid_lat,
        'centroid_lon': centroid_lon,
        'centroid_depth': centroid_depth
    }

    return input_dict
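
The line indices used above match the CMTSOLUTION layout: a header line with the date, time and hypocentre, followed by keyword/value lines, with the six moment tensor components on lines 8 to 13. A usage sketch (the file name is illustrative):

params = read_gcmt_file('CMTSOLUTION')
print(params['date_origin'], params['time_shift'], params['mrr'])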
Example 6
import xml.etree.ElementTree as ET

from obspy import UTCDateTime


def read_quake_file(quake_file):
    """Read the moment tensor information from a QuakeML moment tensor file
    and store its content in a Python dictionary.

    :param quake_file: location of the QuakeML file to be read
    :type quake_file: string
    """
    tree = ET.parse(quake_file)
    root = tree.getroot()
    tensor = next(elem for elem in root.iter() if 'momentTensor' in elem.tag)

    # Each tensor component is stored in a child <value> element; scale from
    # N*m to dyne*cm (factor 10**7). Use float() because the values are
    # usually written in scientific notation.
    MRR = next(elem for elem in tensor.iter() if 'Mrr' in elem.tag)
    mrr = float(next(child.text for child in MRR)) * 10**7
    MTT = next(elem for elem in tensor.iter() if 'Mtt' in elem.tag)
    mtt = float(next(child.text for child in MTT)) * 10**7
    MPP = next(elem for elem in tensor.iter() if 'Mpp' in elem.tag)
    mpp = float(next(child.text for child in MPP)) * 10**7
    MRT = next(elem for elem in tensor.iter() if 'Mrt' in elem.tag)
    mrt = float(next(child.text for child in MRT)) * 10**7
    MRP = next(elem for elem in tensor.iter() if 'Mrp' in elem.tag)
    mrp = float(next(child.text for child in MRP)) * 10**7
    MTP = next(elem for elem in tensor.iter() if 'Mtp' in elem.tag)
    mtp = float(next(child.text for child in MTP)) * 10**7
    time_shift = next(elem.text for elem in tensor.iter()
                      if 'riseTime' in elem.tag)
    M0 = next(elem for elem in tensor.iter() if 'scalarMoment' in elem.tag)
    m0 = float(next(child.text for child in M0)) * 10**7
    half_duration = 1.2 * 10**-8 * m0**(1 / 3)

    # The file contains two origins; in this parsing the origin without a
    # depthType element is treated as the hypocentre, the one with it as the
    # centroid.
    origins = [elem for elem in root.iter() if 'origin' in elem.tag]
    first_origin = None
    second_origin = None
    for origin in origins:
        values = ['depthType' in elem.tag for elem in origin.iter()]
        if not any(values):
            first_origin = origin
        else:
            second_origin = origin

    Event_Lat = next(elem for elem in first_origin.iter()
                     if 'latitude' in elem.tag)
    event_lat = float(next(child.text for child in Event_Lat))
    Event_Lon = next(elem for elem in first_origin.iter()
                     if 'longitude' in elem.tag)
    event_lon = float(next(child.text for child in Event_Lon))
    Depth = next(elem for elem in first_origin.iter() if 'depth' in elem.tag)
    depth = float(next(child.text for child in Depth)) / 1000
    Centroid_Lat = next(elem for elem in second_origin.iter()
                        if 'latitude' in elem.tag)
    centroid_lat = float(next(child.text for child in Centroid_Lat))
    Centroid_Lon = next(elem for elem in second_origin.iter()
                        if 'longitude' in elem.tag)
    centroid_lon = float(next(child.text for child in Centroid_Lon))
    Centroid_Depth = next(elem for elem in second_origin.iter()
                          if 'depth' in elem.tag)
    centroid_depth = float(next(child.text for child in Centroid_Depth)) / 1000
    Time = next(elem for elem in first_origin.iter() if 'time' in elem.tag)
    time = next(child.text for child in Time)
    origin_time = UTCDateTime(time)
    date_time = origin_time.isoformat()

    input_dict = {
        'mrr': mrr,
        'mtt': mtt,
        'mpp': mpp,
        'mrt': mrt,
        'mrp': mrp,
        'mtp': mtp,
        'datetime': date_time,
        'date_origin': origin_time,
        'lat': event_lat,
        'lon': event_lon,
        'depth': depth,
        'time_shift': time_shift,
        'half_duration': half_duration,
        'centroid_lat': centroid_lat,
        'centroid_lon': centroid_lon,
        'centroid_depth': centroid_depth
    }

    return input_dict
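
A usage sketch (the file name is illustrative). Note that, unlike read_gcmt_file, time_shift is returned as a string here, taken verbatim from the riseTime element:

params = read_quake_file('quakeml.xml')
print(params['date_origin'], params['centroid_depth'], params['mrr'])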