Example #1
    def __init__(self, **args):
        ''' Initialize the instance.
        '''
        package_nodes.LooperCollectionChildNode.__init__(self, **args)

        # No waveform data is needed.
        self.need_waveform_data = False

        # The detection catalog library.
        self.detection_library = detect.Library('binder')

        # The working detection catalog.
        self.detection_catalog = None

        # The event catalog library
        self.event_library = event_core.Library('binder')

        # The working event catalog.
        self.event_catalog = None

        # The detection binder.
        self.binder = None

        # Create the preference items.
        self.create_preferences()
Example #2
 def test_library_creation(self):
     ''' Test the creation of a pSysmon event Library.
     '''
     # Create an event with valid time limits.
     library = ev_core.Library(name='test_name')
     self.assertIsInstance(library, ev_core.Library)
     self.assertEqual(library.name, 'test_name')
     self.assertIsInstance(library.catalogs, dict)
     self.assertEqual(library.catalogs, {})
Example #3
    def __init__(self, **args):
        package_nodes.LooperCollectionNode.__init__(self, **args)

        # The event catalog library
        self.event_library = event_core.Library('binder')

        self.create_selector_preferences()
        self.create_component_selector_preferences()
        self.create_filter_preferences()
        self.create_processing_preferences()
        self.create_output_preferences()
Example #4
    def __init__(self, **args):
        ''' Initialize the instance.
        '''
        package_nodes.CollectionNode.__init__(self, **args)

        # The event catalog library
        self.event_library = event_core.Library('binder')

        self.create_selector_preferences()
        self.create_filter_preferences()
        self.create_output_preferences()
Example #5
    def test_load_catalog_from_db(self):
        ''' Test the loading of catalogs from the database.
        '''
        # Write event data to the database.
        bulletin_file = os.path.join(self.data_path, 'bulletin_ims1.0_1.txt')
        parser = ev_bulletin.ImsParser()
        parser.parse(bulletin_file)
        catalog = parser.get_catalog(name='REB', agency_uri='REB')
        catalog.write_to_database(self.project)

        bulletin_file = os.path.join(self.data_path,
                                     'bulletin_zamg_ims1.0_1.txt')
        parser = ev_bulletin.ImsParser()
        parser.parse(bulletin_file)
        catalog = parser.get_catalog(name='ZAMG_AUTODRM', agency_uri='ZAMG')
        catalog.write_to_database(self.project)

        # Create the library and test the loading of one catalog.
        library = ev_core.Library(name='test_name')
        library.load_catalog_from_db(project=self.project, name='REB')

        self.assertEqual(len(library.catalogs), 1)
        self.assertEqual(list(library.catalogs.keys()), ['REB'])
        self.assertIsInstance(library.catalogs['REB'], ev_core.Catalog)

        cur_catalog = library.catalogs['REB']
        self.assertEqual(len(cur_catalog.events), 1)
        cur_event = cur_catalog.events[0]
        self.assertIsInstance(cur_event, ev_core.Event)
        self.assertEqual(cur_event.public_id, '112460')

        # Create the library and test the loading of multiple catalogs.
        library = ev_core.Library(name='test_name')
        library.load_catalog_from_db(project=self.project,
                                     name=['REB', 'ZAMG_AUTODRM'])
        self.assertEqual(len(library.catalogs), 2)
        self.assertListEqual(sorted(library.catalogs.keys()),
                             ['REB', 'ZAMG_AUTODRM'])
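
Outside of a test, the loading pattern exercised above condenses to a few lines. This is a minimal sketch, not code from the project: `project` is assumed to be an initialized pSysmon project, a catalog named 'REB' is assumed to already exist in its database, and `ev_core` refers to the same event core module imported in the examples above.

# Minimal sketch (assumptions: `project` is an initialized pSysmon project and
# a catalog named 'REB' has already been written to its database).
library = ev_core.Library(name='my_library')
library.load_catalog_from_db(project=project, name='REB')
catalog = library.catalogs['REB']
for cur_event in catalog.events:
    print(cur_event.public_id)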
Example #6
    def __init__(self):
        ''' Initialize the instance.

        '''
        InteractivePlugin.__init__(self,
                                   name='create event',
                                   category='edit',
                                   tags=['create', 'event'])

        # Create the logging logger instance.
        logger_prefix = psysmon.logConfig['package_prefix']
        loggerName = logger_prefix + "." + __name__ + "." + self.__class__.__name__
        self.logger = logging.getLogger(loggerName)

        self.icons['active'] = icons.event_new_16
        self.cursor = wx.CURSOR_CROSS

        # The event catalog library used to manage the catalogs.
        self.library = event_core.Library('event library')

        # The name of the selected catalog.
        self.selected_catalog_name = None

        self.begin_line = {}
        self.end_line = {}
        self.bg = {}
        self.motion_notify_cid = []
        self.startTime = None
        self.endTime = None

        # Add the pages to the preferences manager.
        self.pref_manager.add_page('tool options')

        # Add the plugin preferences.
        item = psy_pm.SingleChoicePrefItem(
            name='event_catalog',
            label='event catalog',
            group='catalog',
            value='',
            limit=[],
            tool_tip='Select an event catalog to work on.',
            hooks={'on_value_change': self.on_select_catalog})
        self.pref_manager.add_item(pagename='tool options', item=item)

        item = psy_pm.ActionItem(name='create_new_catalog',
                                 label='create new catalog',
                                 group='catalog',
                                 mode='button',
                                 action=self.on_create_new_catalog)
        self.pref_manager.add_item(pagename='tool options', item=item)
Example #7
    def test_get_catalogs_in_db(self):
        ''' Test the query of catalog names from the database.
        '''
        # Write event data to the database.
        bulletin_file = os.path.join(self.data_path,
                                     'bulletin_zamg_ims1.0_1.txt')
        parser = ev_bulletin.ImsParser()
        parser.parse(bulletin_file)
        catalog = parser.get_catalog(name='ZAMG_AUTODRM', agency_uri='ZAMG')
        catalog.write_to_database(self.project)

        bulletin_file = os.path.join(self.data_path, 'bulletin_ims1.0_1.txt')
        parser = ev_bulletin.ImsParser()
        parser.parse(bulletin_file)
        catalog = parser.get_catalog(name='REB', agency_uri='REB')
        catalog.write_to_database(self.project)

        # Create the library.
        library = ev_core.Library(name='test_name')
        catalog_names = library.get_catalogs_in_db(project=self.project)

        self.assertIsInstance(catalog_names, list)
        self.assertEqual(len(catalog_names), 2)
        self.assertListEqual(catalog_names, ['REB', 'ZAMG_AUTODRM'])
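
The query above can be combined with the multi-catalog loading shown in Example #5. A hedged sketch, again assuming an initialized pSysmon project and the `ev_core` alias used in these examples:

# Sketch: query the available catalog names, then load them all at once.
library = ev_core.Library(name='all_catalogs')
catalog_names = library.get_catalogs_in_db(project=project)
library.load_catalog_from_db(project=project, name=catalog_names)
for cur_name, cur_catalog in library.catalogs.items():
    print(cur_name, len(cur_catalog.events))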
Example #8
    def __init__(self):
        ''' Initialize the instance.

        '''
        OptionPlugin.__init__(self,
                              name='select event',
                              category='select',
                              tags=['event', 'select'])

        # Create the logging logger instance.
        logger_prefix = psysmon.logConfig['package_prefix']
        loggerName = logger_prefix + "." + __name__ + "." + self.__class__.__name__
        self.logger = logging.getLogger(loggerName)

        self.icons['active'] = icons.flag_icon_16

        # The events library.
        self.library = ev_core.Library(name=self.rid)

        # The currently selected event.
        self.selected_event = {}

        # The plot colors used by the plugin.
        self.colors = {}

        # Setup the pages of the preference manager.
        select_page = self.pref_manager.add_page('Select')
        dts_group = select_page.add_group('detection time span')
        es_group = select_page.add_group('event selection')

        item = psy_pm.DateTimeEditPrefItem(
            name='start_time',
            label='start time',
            value=UTCDateTime('2015-01-01T00:00:00'),
            tool_tip=
            'The start time of the detection time span (UTCDateTime string format YYYY-MM-DDTHH:MM:SS).'
        )
        dts_group.add_item(item)

        item = psy_pm.FloatSpinPrefItem(
            name='window_length',
            label='window length [s]',
            value=3600,
            limit=(0, 86400),
            digits=1,
            tool_tip=
            'The length of the time window for which events should be loaded.')
        dts_group.add_item(item)

        item = psy_pm.SingleChoicePrefItem(
            name='event_catalog',
            label='event catalog',
            value='',
            limit=[],
            tool_tip='Select an event catalog for which to load the events.')
        es_group.add_item(item)

        item = psy_pm.ActionItem(name='load_events',
                                 label='load events',
                                 mode='button',
                                 action=self.on_load_events)
        es_group.add_item(item)

        column_labels = [
            'db_id', 'start_time', 'length', 'public_id', 'description',
            'agency_uri', 'author_uri', 'comment'
        ]
        item = psy_pm.ListCtrlEditPrefItem(
            name='events',
            label='events',
            value=[],
            column_labels=column_labels,
            limit=[],
            hooks={'on_value_change': self.on_event_selected},
            tool_tip='The available events.')
        es_group.add_item(item)
Example #9
    def process(self,
                looper_nodes,
                start_time,
                end_time,
                processing_interval,
                scnl,
                event_catalog,
                event_ids=None,
                event_types=None,
                event_tags=None):
        ''' Start the detection.

        Parameters
        ----------
        looper_nodes : list of looper node instances
            The looper nodes to execute.

        start_time : :class:`~obspy.core.utcdatetime.UTCDateTime`
            The start time of the timespan for which to detect the events.

        end_time : :class:`~obspy.core.utcdatetime.UTCDateTime`
            The end time of the timespan for which to detect the events.

        processing_interval : String or Float
            The interval used to group events for processing; one of 'whole',
            'hour', 'day', 'week', 'month' or a float value. If a float is
            passed, it is interpreted as the interval length in seconds.

        scnl : list of Strings
            The scnl codes of the components to process.

        event_catalog : String
            The name of the event catalog to process.

        event_ids : List of Integer
            If individual events are specified, this list contains the database IDs of the events
            to process.
        '''
        self.logger.info("Processing whole timespan %s to %s.",
                         start_time.isoformat(), end_time.isoformat())

        # Use only the enabled looper nodes.
        looper_nodes = [x for x in looper_nodes if x.enabled]

        if not looper_nodes:
            self.logger.warning("No looper nodes found.")
            return

        if event_tags is None:
            event_tags = []

        if event_types is None:
            event_types = []

        interval_start = self.compute_intervals(start_time=start_time,
                                                end_time=end_time,
                                                interval=processing_interval)

        event_lib = event_core.Library('events')
        event_lib.load_catalog_from_db(self.project, name=event_catalog)
        catalog = event_lib.catalogs[event_catalog]

        for k, cur_start_time in enumerate(interval_start[:-1]):
            cur_end_time = interval_start[k + 1]
            self.logger.info("Processing interval timespan %s to %s.",
                             cur_start_time.isoformat(),
                             cur_end_time.isoformat())
            catalog.clear_events()

            if event_ids is None:
                # Load the events for the given time span from the database.
                # TODO: Remove the hardcoded min_event_length value and create
                # user-selectable filter fields.
                catalog.load_events(project=self.project,
                                    start_time=cur_start_time,
                                    end_time=cur_end_time,
                                    min_event_length=1,
                                    event_tags=event_tags)
            else:
                # Load the events with the given ids from the database. Ignore the
                # time-span.
                catalog.load_events(event_id=event_ids)

            # Abort the execution if no events are available for the time span.
            if not catalog.events:
                if event_ids is None:
                    self.logger.info(
                        'No events found for the timespan %s to %s.',
                        cur_start_time.isoformat(), cur_end_time.isoformat())
                else:
                    self.logger.info(
                        'No events found for the specified event IDs: %s.',
                        event_ids)
                continue

            # Get the channels to process.
            channels = []
            #for cur_station in station_names:
            #    for cur_channel in channel_names:
            #        channels.extend(self.project.geometry_inventory.get_channel(station = cur_station,
            #                                                                    name = cur_channel))
            for cur_scnl in scnl:
                channels.extend(
                    self.project.geometry_inventory.get_channel(
                        network=cur_scnl[2],
                        station=cur_scnl[0],
                        location=cur_scnl[3],
                        name=cur_scnl[1]))
            scnl = [x.scnl for x in channels]

            n_events = len(catalog.events)
            try:
                for k, cur_event in enumerate(
                        sorted(catalog.events, key=lambda x: x.start_time)):
                    self.logger.info("Processing event %d (%d/%d).",
                                     cur_event.db_id, k, n_events)

                    # Assign the channel instance to the detections.
                    cur_event.assign_channel_to_detections(
                        self.project.geometry_inventory)

                    # Get the pre- and post-event time window lengths required
                    # by the looper children. These may be needed to account
                    # for filter build-up effects.
                    pre_event_length = [
                        x.pre_stream_length for x in looper_nodes
                    ]
                    post_event_length = [
                        x.post_stream_length for x in looper_nodes
                    ]
                    pre_event_length = max(pre_event_length)
                    post_event_length = max(post_event_length)

                    cur_window_start = cur_event.start_time
                    cur_window_end = cur_event.end_time

                    # Execute the looper nodes.
                    resource_id = self.parent_rid + '/event_processor/' + str(
                        cur_event.db_id)
                    process_limits = (cur_window_start, cur_window_end)
                    waveform_loaded = False
                    stream = None
                    for cur_node in looper_nodes:
                        if not cur_node.initialized:
                            cur_node.initialize()

                        # Load the waveform data when it is needed by a looper
                        # node.
                        if not waveform_loaded and cur_node.need_waveform_data:
                            stream = self.project.request_data_stream(
                                start_time=cur_window_start - pre_event_length,
                                end_time=cur_window_end + post_event_length,
                                scnl=scnl)
                            waveform_loaded = True

                        self.logger.debug("Executing node %s.", cur_node.name)
                        ret = cur_node.execute(stream=stream,
                                               process_limits=process_limits,
                                               origin_resource=resource_id,
                                               channels=channels,
                                               event=cur_event)

                        self.logger.debug("Finished execution of node %s.",
                                          cur_node.name)

                        # Get the results of the node.
                        if cur_node.result_bag:
                            if len(cur_node.result_bag.results) > 0:
                                for cur_result in cur_node.result_bag.results:
                                    cur_result.event_id = cur_event.db_id
                                    cur_result.base_output_dir = self.output_dir
                                    cur_result.save()

                                cur_node.result_bag.clear()

                        # Handle the looper child return value.
                        if ret and ret == 'abort':
                            break
            finally:
                pass

        # Call the cleanup method for all nodes.
        for cur_node in looper_nodes:
            cur_node.cleanup(origin_resource=resource_id)

            # Get the remaining results of the node and save them.
            if cur_node.result_bag:
                for cur_result in cur_node.result_bag.results:
                    cur_result.base_output_dir = self.output_dir
                    cur_result.save()

            cur_node.result_bag.clear()

        # Save the collection settings to the result directory if it exists.
        if os.path.exists(self.output_dir):
            exec_meta = {}
            exec_meta['rid'] = self.parent_rid
            exec_meta['node_settings'] = looper_nodes[0].parent.get_settings()
            settings_filename = 'execution_metadata.json'
            settings_filepath = os.path.join(self.output_dir,
                                             settings_filename)
            with open(settings_filepath, 'w') as fp:
                json.dump(exec_meta, fp=fp, cls=json_util.GeneralFileEncoder)
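
The per-interval event loop above recurs in the export examples further below (Examples #12 and #13). The following is a condensed, hedged sketch of just that loop; `project`, the `compute_intervals` helper, the start/end times and the catalog name 'my_catalog' are assumptions, not part of the original code.

# Condensed sketch of the per-interval event loop (all names are assumptions).
event_lib = event_core.Library('events')
event_lib.load_catalog_from_db(project, name='my_catalog')
catalog = event_lib.catalogs['my_catalog']

interval_start = compute_intervals(start_time=start_time,
                                   end_time=end_time,
                                   interval='day')
for k, cur_start_time in enumerate(interval_start[:-1]):
    cur_end_time = interval_start[k + 1]
    catalog.clear_events()
    catalog.load_events(project=project,
                        start_time=cur_start_time,
                        end_time=cur_end_time)
    for cur_event in sorted(catalog.events, key=lambda x: x.start_time):
        pass  # process cur_event here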
Example #10
    def execute(self, prevNodeOutput={}):
        ''' Execute the looper collection node.

        '''
        output_dir = self.pref_manager.get_value('output_dir')
        prefix = self.pref_manager.get_value('prefix')
        blast_filename = os.path.join(output_dir,
                                      prefix + 'quarry_blasts.json')

        # Download the quarry information file.
        src_filename = self.pref_manager.get_value('filename')
        tmp_fid, tmp_filename = tempfile.mkstemp(prefix='quarry_validation',
                                                 dir=self.project.tmpDir)
        ftp = ftplib.FTP(host=self.pref_manager.get_value('host'),
                         user=self.pref_manager.get_value('username'),
                         passwd=self.pref_manager.get_value('password'))
        try:
            with open(tmp_filename, 'wb') as fp:
                ftp.retrbinary('RETR ' + src_filename, fp.write)
        except:
            self.logger.error(
                "Couldn't download the blast exchange file %s from %s.",
                src_filename, ftp.host)
        finally:
            ftp.quit()
            os.close(tmp_fid)

        if os.path.exists(blast_filename):
            with open(blast_filename, 'r') as fp:
                quarry_blast = json.load(fp=fp, cls=QuarryFileDecoder)
        else:
            quarry_blast = {}

        # Parse the quarry information file.
        with open(tmp_filename, 'r') as fp:
            reader = csv.DictReader(fp, delimiter=';')
            for cur_row in reader:
                if cur_row['Sprengnummer'] not in iter(quarry_blast.keys()):
                    tmp = {}
                    tmp['id'] = int(cur_row['ID'])

                    date = datetime.datetime.strptime(
                        cur_row['Datum_Sprengung'], '%d.%m.%Y %H:%M:%S')
                    time = datetime.datetime.strptime(
                        cur_row['Uhrzeit_Sprengung'], '%d.%m.%Y %H:%M:%S')
                    local = pytz.timezone("Europe/Vienna")
                    orig_time = utcdatetime.UTCDateTime(year=date.year,
                                                        month=date.month,
                                                        day=date.day,
                                                        hour=time.hour,
                                                        minute=time.minute,
                                                        second=time.second)
                    local_dt = local.localize(orig_time.datetime, is_dst=None)
                    utc_dt = local_dt.astimezone(pytz.utc)
                    tmp['time'] = utcdatetime.UTCDateTime(utc_dt)

                    # Compute the coordinate of the blast using the two points
                    # of the line.
                    x = []
                    try:
                        x.append(float(cur_row['Koord_y1'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_y1 couldn't be converted.")

                    try:
                        x.append(float(cur_row['Koord_y2'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_y2 couldn't be converted.")

                    y = []
                    try:
                        y.append(float(cur_row['Koord_x1'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_x1 couldn't be converted.")

                    try:
                        y.append(float(cur_row['Koord_x2'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_x2 couldn't be converted.")

                    y = [k - 5000000 for k in y]

                    z = []
                    try:
                        z.append(float(cur_row['Koord_z1'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_z1 couldn't be converted.")

                    try:
                        z.append(float(cur_row['Koord_z2'].replace(',', '.')))
                    except:
                        self.logger.warning("Koord_z2 couldn't be converted.")

                    if x:
                        tmp['x'] = np.mean(x)
                    else:
                        tmp['x'] = -9999

                    if y:
                        tmp['y'] = np.mean(y)
                    else:
                        tmp['y'] = -9999

                    if z:
                        tmp['z'] = np.mean(z)
                    else:
                        tmp['z'] = -9999

                    # Get the coordinates of the DUBAM station.
                    try:
                        x_dubam = float(
                            cur_row['Tab_Messorte_y_koord'].replace(',', '.'))
                    except:
                        self.logger.warning(
                            "Tab_Messorte_y_Koord couldn't be converted.")
                        x_dubam = None

                    try:
                        y_dubam = float(
                            cur_row['Tab_Messorte_x_Koord'].replace(',', '.'))
                        y_dubam = y_dubam - 5000000
                    except:
                        self.logger.warning(
                            "Tab_Messorte_x_Koord couldn't be converted.")
                        y_dubam = None

                    try:
                        z_dubam = float(
                            cur_row['Tab_Messorte_z_Koord'].replace(',', '.'))
                    except:
                        self.logger.warning(
                            "Tab_Messorte_z_Koord couldn't be converted.")
                        z_dubam = None

                    tmp['x_dubam'] = x_dubam
                    tmp['y_dubam'] = y_dubam
                    tmp['z_dubam'] = z_dubam

                    # Get the alternative coordinates of the DUBAM station.
                    # These coordinates are used if the DUBAM is positioned
                    # somewhere other than at the standard DUBAM location.
                    try:
                        x_dubam_1 = float(
                            cur_row['Tab_Messorte_1_y_koord'].replace(
                                ',', '.'))
                    except:
                        self.logger.warning(
                            "Tab_Messorte_1_y_Koord couldn't be converted.")
                        x_dubam_1 = None

                    try:
                        y_dubam_1 = float(
                            cur_row['Tab_Messorte_1_x_Koord'].replace(
                                ',', '.'))
                        y_dubam_1 = y_dubam_1 - 5000000
                    except:
                        self.logger.warning(
                            "Tab_Messorte_1_x_Koord couldn't be converted.")
                        y_dubam_1 = None

                    try:
                        z_dubam_1 = float(
                            cur_row['Tab_Messorte_1_z_Koord'].replace(
                                ',', '.'))
                    except:
                        self.logger.warning(
                            "Tab_Messorte_1_z_Koord couldn't be converted.")
                        z_dubam_1 = None

                    tmp['x_dubam_1'] = x_dubam_1
                    tmp['y_dubam_1'] = y_dubam_1
                    tmp['z_dubam_1'] = z_dubam_1

                    # MGI / Austria GK M34
                    tmp['epsg'] = '31256'

                    quarry_blast[cur_row['Sprengnummer']] = tmp

        # Search for related events in the database.
        catalog_name = 'rt_binding'
        event_lib = event_core.Library('events')
        event_lib.load_catalog_from_db(self.project, name=catalog_name)
        catalog = event_lib.catalogs[catalog_name]

        # The search window for the psysmon event [s].
        # TODO: Make this a preference item.
        search_win = 600
        for cur_key, cur_blast in quarry_blast.items():
            catalog.clear_events()
            catalog.load_events(project=self.project,
                                start_time=cur_blast['time'] - search_win,
                                end_time=cur_blast['time'] + search_win)
            # Select by event type.
            quarry_events = [
                x for x in catalog.events
                if x.event_type and x.event_type.name == 'duernbach'
            ]
            if len(quarry_events) > 1:
                self.logger.error(
                    "More than one event related to the quarry blast %s. Using the one nearest to the blast time.",
                    cur_key)
                time_diff = [
                    np.abs(cur_blast['time'] - x.start_time)
                    for x in quarry_events
                ]
                nearest_ind = np.argmin(time_diff)
                quarry_events = [quarry_events[nearest_ind]]

            if quarry_events:
                quarry_blast[cur_key]['psysmon_event_id'] = [
                    x.db_id for x in quarry_events
                ]
                for cur_event in [
                        x for x in quarry_events
                        if 'mss_result_computed' not in x.tags
                ]:
                    cur_event.tags = [
                        'mss_result_needed',
                        'baumit_id:' + cur_key.replace(',', ';')
                    ]
                    cur_event.write_to_database(self.project)

        # Save the results.
        with open(blast_filename, 'w') as fp:
            json.dump(quarry_blast, fp=fp, cls=QuarryFileEncoder)
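
The local-time handling in Example #10 (Vienna local time converted to UTC before building the UTCDateTime) is easy to get wrong. Below is a standalone sketch of just that conversion, using the same libraries as above; the date and time values are hypothetical.

# Sketch of the local-to-UTC conversion used above.
import datetime
import pytz
from obspy.core import utcdatetime

local = pytz.timezone('Europe/Vienna')
naive_dt = datetime.datetime(2017, 6, 1, 10, 30, 0)  # hypothetical local blast time
local_dt = local.localize(naive_dt, is_dst=None)      # raises on ambiguous DST times
utc_dt = local_dt.astimezone(pytz.utc)
blast_time = utcdatetime.UTCDateTime(utc_dt)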
Example #11
    def __init__(self):
        ''' Initialize the instance.

        '''
        InteractivePlugin.__init__(self,
                              name = 'create event',
                              category = 'edit',
                              tags = ['create', 'event']
                             )

        # Create the logging logger instance.
        logger_prefix = psysmon.logConfig['package_prefix']
        loggerName = logger_prefix + "." + __name__ + "." + self.__class__.__name__
        self.logger = logging.getLogger(loggerName)

        self.icons['active'] = icons.event_new_16
        self.cursor = wx.CURSOR_CROSS

        # The event catalog library used to manage the catalogs.
        self.library = event_core.Library('event library')

        # The name of the selected catalog.
        self.selected_catalog_name = None

        # The plot colors used by the plugin.
        self.colors = {}
        self.colors['event_vspan'] = '0.9'

        # Animation stuff.
        self.bg = {}
        self.startTime = None
        self.endTime = None


        # Add the pages to the preferences manager.
        options_page = self.pref_manager.add_page('tool options')
        event_group = options_page.add_group('event')
        catalog_group = options_page.add_group('catalog')

        item = psy_pm.SingleChoicePrefItem(name = 'default_event_type',
                                           label = 'default event type',
                                           limit = [],
                                           value = None,
                                           tool_tip = 'The default event type when creating a new event.',
                                           hooks = {'on_value_change': self.on_select_default_event_type})
        event_group.add_item(item)

        # Add the plugin preferences.
        item = psy_pm.SingleChoicePrefItem(name = 'event_catalog',
                                           label = 'event catalog',
                                           value = '',
                                           limit = [],
                                           tool_tip = 'Select an event catalog to work on.',
                                           hooks = {'on_value_change': self.on_select_catalog})
        catalog_group.add_item(item)

        item = psy_pm.ActionItem(name = 'create_new_catalog',
                                 label = 'create new catalog',
                                 mode = 'button',
                                 action = self.on_create_new_catalog)
        catalog_group.add_item(item)
Example #12
    def export(self, start_time, end_time, output_interval,
               event_catalog, event_ids = None,
               event_types = None, event_tags = None,
               pick_catalog_name = None):
        ''' Export the picks of events.
        '''
        self.logger.info("Exporting event picks for timespan timespan %s to %s.",
                         start_time.isoformat(),
                         end_time.isoformat())

        if event_tags is None:
            event_tags = []

        if event_types is None:
            event_types = []


        interval_start = self.compute_intervals(start_time = start_time,
                                                end_time = end_time,
                                                interval = output_interval)
        event_lib = event_core.Library('events')
        event_lib.load_catalog_from_db(self.project, name = event_catalog)
        catalog = event_lib.catalogs[event_catalog]

        pick_lib = pick_core.Library('picks')
        pick_lib.load_catalog_from_db(self.project,
                                      name = pick_catalog_name)
        pick_catalog = pick_lib.catalogs[pick_catalog_name]

        for k, cur_start_time in enumerate(interval_start[:-1]):
            cur_end_time = interval_start[k + 1]
            self.logger.info("Processing interval timespan %s to %s.",
                             cur_start_time.isoformat(),
                             cur_end_time.isoformat())
            catalog.clear_events()

            if event_ids is None:
                # Load the events for the given time span from the database.
                # TODO: Remove the hardcoded min_event_length value and create
                # user-selectable filter fields.
                catalog.load_events(project = self.project,
                                    start_time = cur_start_time,
                                    end_time = cur_end_time,
                                    min_event_length = 0.1,
                                    event_tags = event_tags)
            else:
                # Load the events with the given ids from the database. Ignore the
                # time-span.
                catalog.load_events(event_id = event_ids)

            # Abort the execution if no events are available for the time span.
            if not catalog.events:
                if event_ids is None:
                    self.logger.info('No events found for the timespan %s to %s.', cur_start_time.isoformat(), cur_end_time.isoformat())
                else:
                    self.logger.info('No events found for the specified event IDs: %s.', event_ids)
                continue

            res_columns = ['event_start_time', 'event_end_time', 'network',
                           'station', 'location', 'channel', 'pick_label',
                           'time']

            # Loop through the events.
            n_events = len(catalog.events)
            sorted_events = sorted(catalog.events,
                                   key = lambda x: x.start_time)
            for k, cur_event in enumerate(sorted_events):
                self.logger.info("Processing event %d (%d/%d).",
                                 cur_event.db_id,
                                 k + 1,
                                 n_events)
                
                pick_catalog.clear_picks()

                # Create the result.
                cur_res = result.TableResult(name = 'event_picks',
                                             key_name = 'id',
                                             event_id = cur_event.db_id,
                                             start_time = cur_event.start_time,
                                             end_time = cur_event.end_time,
                                             origin_name = self.parent_name,
                                             origin_resource = self.parent_rid,
                                             column_names = res_columns)

                # TODO: Maybe it is more efficient to load all picks of the
                # processing timespan and then request the picks using the
                # start- and endtimes of the events in get_pick().
                pick_catalog.load_picks(project = self.project,
                                        start_time = cur_event.start_time,
                                        end_time = cur_event.end_time)

                event_picks = pick_catalog.get_pick(start_time = cur_event.start_time,
                                                    end_time = cur_event.end_time)

                self.logger.debug("event_picks: %s", event_picks)

                for cur_pick in event_picks:
                    cur_res.add_row(key = cur_event.db_id,
                                    event_start_time = cur_event.start_time.isoformat(),
                                    event_end_time = cur_event.end_time.isoformat(),
                                    network = cur_pick.channel.parent_station.network,
                                    station = cur_pick.channel.parent_station.name,
                                    location = cur_pick.channel.parent_station.location,
                                    channel = cur_pick.channel.name,
                                    pick_label = cur_pick.label,
                                    time = cur_pick.time)

                cur_res.base_output_dir = self.output_dir
                
                if len(event_picks) > 0:
                    cur_res.save()
Example #13
    def export(self, start_time, end_time, output_interval,
               event_catalog, event_ids = None,
               event_types = None, event_tags = None):
        ''' Export the events of a catalog.
        '''
        self.logger.info("Exporting events for timespan timespan %s to %s.",
                         start_time.isoformat(),
                         end_time.isoformat())

        if event_tags is None:
            event_tags = []

        if event_types is None:
            event_types = []


        interval_start = self.compute_intervals(start_time = start_time,
                                                end_time = end_time,
                                                interval = output_interval)
        event_lib = event_core.Library('events')
        event_lib.load_catalog_from_db(self.project, name = event_catalog)
        catalog = event_lib.catalogs[event_catalog]

        available_event_types = self.load_event_types()

        for k, cur_start_time in enumerate(interval_start[:-1]):
            cur_end_time = interval_start[k + 1]
            self.logger.info("Processing interval timespan %s to %s.",
                             cur_start_time.isoformat(),
                             cur_end_time.isoformat())
            catalog.clear_events()

            if event_ids is None:
                # Load the events for the given time span from the database.
                # TODO: Remove the hardcoded min_event_length value and create
                # user-selectable filter fields.
                catalog.load_events(project = self.project,
                                    start_time = cur_start_time,
                                    end_time = cur_end_time,
                                    min_event_length = 0.1,
                                    event_tags = event_tags)
            else:
                # Load the events with the given ids from the database. Ignore the
                # time-span.
                catalog.load_events(project = self.project,
                                    event_id = event_ids)

            # Abort the execution if no events are available for the time span.
            if not catalog.events:
                if event_ids is None:
                    self.logger.info('No events found for the timespan %s to %s.', cur_start_time.isoformat(), cur_end_time.isoformat())
                else:
                    self.logger.info('No events found for the specified event IDs: %s.', event_ids)
                continue

            res_columns = ['event_start_time', 'event_end_time', 'n_stations',
                           'detection_scnl', 'detection_start',
                           'detection_end', 'catalog_name', 'event_type_id',
                           'event_type']
            # Create the result.
            cur_res = result.TableResult(name = 'event',
                                         key_name = 'id',
                                         start_time = cur_start_time,
                                         end_time = cur_end_time,
                                         origin_name = self.parent_name,
                                         origin_resource = self.parent_rid,
                                         column_names = res_columns)

            # Loop through the events.
            n_events = len(catalog.events)
            for k, cur_event in enumerate(sorted(catalog.events, key = lambda x: x.start_time)):
                self.logger.info("Processing event %d (%d/%d).", cur_event.db_id, k, n_events)

                # Assign the channel instance to the detections.
                cur_event.assign_channel_to_detections(self.project.geometry_inventory)

                # TODO: Load the event type from the database when loading the event.
                # Get the related event_type name.
                cur_event_type = None
                if cur_event.event_type is not None:
                    cur_event_type = [x.name for x in available_event_types if x.id == cur_event.event_type]
                    if len(cur_event_type) == 1:
                        cur_event_type = cur_event_type[0]
                    else:
                        cur_event_type = None
                

                detection_scnl = [str(x.channel.scnl_string) for x in cur_event.detections]
                detection_start = [str(x.start_time.timestamp) for x in cur_event.detections]
                detection_end = [str(x.end_time.timestamp) for x in cur_event.detections]
                cur_res.add_row(key = cur_event.db_id,
                                event_start_time = cur_event.start_time.isoformat(),
                                event_end_time = cur_event.end_time.isoformat(),
                                n_stations = len(cur_event.detections),
                                detection_scnl = ','.join(detection_scnl),
                                detection_start = ','.join(detection_start),
                                detection_end = ','.join(detection_end),
                                catalog_name = catalog.name,
                                event_type_id = cur_event.event_type,
                                event_type = cur_event_type)

            cur_res.base_output_dir = self.output_dir
            cur_res.save()