Example #1
def qc_fields():
    '''
       Check each column against its regular expression, type, and range constraints.
    '''
    ret = []
    # Row numbers are 1-based so messages can point at the offending row.
    for n, t in enumerate(TABLE, 1):
        for i, v in enumerate(t):
            key = NAMES[i]
            if key == 'Ignore':
                continue
            # Pull the optional per-column constraints from the schema.
            ttype = COLS[key].get('type')
            hhelp = COLS[key].get('help')
            rre = COLS[key].get('re')
            rrange = COLS[key].get('range')
            # Check regular expression
            if not match_re(v, rre):
                ret.append(
                    "{0}: Value of column {1} {2} does not match re. Help:"
                    " {3}".format(n, key, v, hhelp))
            # Check type
            if not match_type(v, ttype):
                ret.append(
                    "{0}: Value of column {1} {2} does not match type. Type:"
                    " {3}".format(n, key, v, ttype))
            # Check range
            if not match_range(v, rrange):
                ret.append(
                    "{0}: Value of column {1} {2} does not match expected"
                    " range. Range: {3}".format(n, key, v, rrange))
            # Check if ascii time
            if timeRE.match(key):
                try:
                    timedoy.fdsn2epoch(v, fepoch=True)
                except timedoy.TimeError:
                    try:
                        timedoy.passcal2epoch(v, fepoch=True)
                    except timedoy.TimeError:
                        ret.append(
                            "{0}: Value of column {1} {2} does not match"
                            " expected time string".format(n, key, v))

    return ret
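A minimal, hypothetical driver for qc_fields(): the module is expected to supply TABLE (rows), NAMES (column names), COLS (per-column constraints), timeRE, and the match_* helpers, so the stubs below are illustrative stand-ins, not the module's real definitions.

import re

# Stand-ins for the module globals qc_fields() reads.
NAMES = ['id_s', 'elev_m']
COLS = {
    'id_s': {'re': r'^\d{1,5}$', 'type': str, 'help': '1-5 digits'},
    'elev_m': {'type': float, 'range': (-100.0, 9000.0)},
}
TABLE = [['500', 1875.0], ['50X', 99999.0]]
timeRE = re.compile('.*time.*')  # stand-in for the time-column pattern


def match_re(v, rre):
    return rre is None or re.match(rre, str(v)) is not None


def match_type(v, ttype):
    return ttype is None or isinstance(v, ttype)


def match_range(v, rrange):
    return rrange is None or rrange[0] <= v <= rrange[1]


for problem in qc_fields():
    print(problem)
# Row 2 is reported twice: id_s fails the regular expression and
# elev_m falls outside the allowed range.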
Example #2
    def qc_time(t1, t2):
        '''
           Measure difference between deployment and pickup times.
        '''
        try:
            # Accept FDSN (ISO-style) time strings, falling back to
            # PASSCAL-style strings on failure.
            e1 = timedoy.fdsn2epoch(t1)
        except timedoy.TimeError:
            e1 = timedoy.passcal2epoch(t1)

        try:
            e2 = timedoy.fdsn2epoch(t2)
        except timedoy.TimeError:
            e2 = timedoy.passcal2epoch(t2)

        return e2 - e1
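A hedged usage sketch (shown as a plain call for brevity, though in the source this is defined inside a class); the FDSN time-string layout below is an assumption based on how timedoy is used in these examples.

# Hypothetical inputs: FDSN/ISO-style deploy and pickup times.
seconds_deployed = qc_time("2019-02-22T15:39:36.0",
                           "2019-03-01T15:39:36.0")
print(seconds_deployed / 86400.0)  # -> 7.0 days between deploy and pickup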
Example #3
def get_times(key, value):
    '''
       Create time entries for Array_t_xxx or Event_t[_xxx]
    '''
    try:
        fepoch = timedoy.fdsn2epoch(value, fepoch=True)
    except timedoy.TimeError:
        try:
            fepoch = timedoy.passcal2epoch(value, fepoch=True)
        except timedoy.TimeError:
            # This SHOULD never happen
            pre = key.split('/')[0]
            LOGGER.error("Bad time value for {0} {1}.".format(key, value))
            line = "\t{0}/ascii_s = {1}\n".format(pre, time.ctime(int(0)))
            line += "\t{0}/epoch_l = {1}\n".format(pre, int(0))
            line += "\t{0}/micro_seconds_i = {1}\n".format(
                pre, int(0. * 1000000.))
            line += "\t{0}/type_s = {1}\n".format(pre, 'BOTH')
            return line

    # modf() splits the float epoch into fractional and integral parts.
    f, i = math.modf(fepoch)
    pre = key.split('/')[0]
    line = "\t{0}/ascii_s = {1}\n".format(pre, time.ctime(int(i)))
    line += "\t{0}/epoch_l = {1}\n".format(pre, int(i))
    line += "\t{0}/micro_seconds_i = {1}\n".format(pre, int(f * 1000000.))
    line += "\t{0}/type_s = {1}\n".format(pre, 'BOTH')

    return line
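A hypothetical call; the key 'deploy_time/ascii_s' is an illustrative stand-in for whatever time column the caller passes.

print(get_times('deploy_time/ascii_s', '2019-02-22T15:39:36.5'))
# Expected shape of the output (the ascii_s value depends on the local
# timezone used by time.ctime):
#   deploy_time/ascii_s = Fri Feb 22 15:39:36 2019
#   deploy_time/epoch_l = 1550849976
#   deploy_time/micro_seconds_i = 500000
#   deploy_time/type_s = BOTH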
Example #4
    def parse_inventory(self, inventory):
        """
        :type inventory: class: obspy.core.inventory.inventory.Inventory
        :param inventory:
        :return: list of dictionaries containing array data to write to PH5
        """
        array_list = []
        for network in inventory:
            for station in network:

                array_station = {}
                array_station['seed_station_name_s'] = station.code.encode(
                    'ascii', 'ignore')
                array_station['id_s'] = station.code.encode('ascii', 'ignore')
                LOGGER.info('*****************')
                LOGGER.info('Found station {0}'.format(station.code))
                for channel in station:
                    LOGGER.info('Found channel {0}'.format(channel.code))
                    array_channel = {}
                    if channel.start_date:
                        array_channel['deploy_time/ascii_s'] = (
                            channel.start_date.isoformat())
                        # float epoch; fractional part becomes microseconds
                        time = timedoy.fdsn2epoch(
                            channel.start_date.isoformat(), fepoch=True)
                        microsecond = (time % 1) * 1000000
                        array_channel['deploy_time/epoch_l'] = (int(time))
                        array_channel['deploy_time/micro_seconds_i'] = (
                            microsecond)
                    else:
                        array_channel['deploy_time/ascii_s'] = ""
                        array_channel['deploy_time/epoch_l'] = ""
                        array_channel['deploy_time/micro_seconds_i'] = ""
                    array_channel['deploy_time/type_s'] = "BOTH"

                    if channel.end_date:
                        array_channel['pickup_time/ascii_s'] = (
                            channel.end_date.isoformat())
                        time = timedoy.fdsn2epoch(channel.end_date.isoformat(),
                                                  fepoch=True)
                        microsecond = (time % 1) * 1000000
                        array_channel['pickup_time/epoch_l'] = (int(time))
                        array_channel['pickup_time/micro_seconds_i'] = (
                            microsecond)
                    else:
                        array_channel['pickup_time/ascii_s'] = ""
                        array_channel['pickup_time/epoch_l'] = ""
                        array_channel['pickup_time/micro_seconds_i'] = ""
                    array_channel['pickup_time/type_s'] = "BOTH"

                    channel_list = list(channel.code)
                    array_channel['seed_band_code_s'] = (
                        channel_list[0].encode('ascii', 'ignore'))
                    array_channel['seed_instrument_code_s'] = (
                        channel_list[1].encode('ascii', 'ignore'))
                    array_channel['seed_orientation_code_s'] = (
                        channel_list[2].encode('ascii', 'ignore'))

                    orientation = array_channel['seed_orientation_code_s']
                    if orientation in {'3', 'Z', 'z'}:
                        array_channel['channel_number_i'] = 3
                    elif orientation in {'2', 'E', 'e'}:
                        array_channel['channel_number_i'] = 2
                    elif orientation in {'1', 'N', 'n'}:
                        array_channel['channel_number_i'] = 1
                    elif orientation.isdigit():
                        array_channel['channel_number_i'] = int(orientation)
                    elif channel.code == 'LOG':
                        array_channel['channel_number_i'] = -2
                    else:
                        array_channel['channel_number_i'] = -5

                    array_channel['seed_location_code_s'] = (
                        channel.location_code)

                    # Sub-1-Hz channels are stored as a rate/multiplier pair.
                    if channel.sample_rate >= 1 or channel.sample_rate == 0:
                        array_channel['sample_rate_i'] = channel.sample_rate
                        array_channel['sample_rate_multiplier_i'] = 1
                    else:
                        array_channel['sample_rate_i'] = 1
                        array_channel['sample_rate_multiplier_i'] = (
                            1 / channel.sample_rate)
                    if channel.longitude != 0.0:
                        array_channel['location/X/value_d'] = channel.longitude
                        array_channel['location/X/units_s'] = "degrees"
                        array_channel['location/Y/value_d'] = channel.latitude
                        array_channel['location/Y/units_s'] = "degrees"
                        array_channel['location/Z/value_d'] = channel.elevation
                        array_channel['location/Z/units_s'] = "m"
                    else:
                        array_channel['location/X/value_d'] = station.longitude
                        array_channel['location/X/units_s'] = "degrees"
                        array_channel['location/Y/value_d'] = station.latitude
                        array_channel['location/Y/units_s'] = "degrees"
                        array_channel['location/Z/value_d'] = station.elevation
                        array_channel['location/Z/units_s'] = "m"
                    if channel.sensor:
                        array_channel['sensor/model_s'] = str(
                            channel.sensor.type)
                        array_channel['sensor/manufacturer_s'] = str(
                            channel.sensor.manufacturer)
                        array_channel['sensor/serial_number_s'] = str(
                            channel.sensor.serial_number)
                        array_channel['sensor/notes_s'] = str(
                            channel.sensor.description)
                    else:
                        array_channel['sensor/model_s'] = ""
                        array_channel['sensor/manufacturer_s'] = ""
                        array_channel['sensor/serial_number_s'] = ""
                        array_channel['sensor/notes_s'] = ""

                    if channel.data_logger:
                        array_channel['das/model_s'] = str(
                            channel.data_logger.type)
                        array_channel['das/manufacturer_s'] = str(
                            channel.data_logger.manufacturer)
                        array_channel['das/serial_number_s'] = str(
                            channel.data_logger.serial_number)
                        if not channel.data_logger.serial_number:
                            LOGGER.error(
                                "Datalogger serial required for Station {0} "
                                "before data can be loaded".format(
                                    array_station['seed_station_name_s']))
                        array_channel['das/notes_s'] = str(
                            channel.data_logger.description)
                    else:
                        array_channel['das/model_s'] = ""
                        array_channel['das/manufacturer_s'] = ""
                        array_channel['das/serial_number_s'] = ""
                        array_channel['das/notes_s'] = ""
                        LOGGER.error(
                            "Datalogger serial required for Station {0} "
                            "Channel {1} before data can be loaded".format(
                                array_station['seed_station_name_s'],
                                channel.code))
                    # Build a response table entry only when actual response
                    # stages exist; otherwise record a placeholder of -1.
                    response_stages = getattr(
                        getattr(channel, 'response', None),
                        'response_stages', None)
                    if response_stages:
                        LOGGER.info('Response found for station {0} '
                                    'channel {1}'.format(
                                        station.code, channel.code))
                        array_channel['response_table_n_i'] = (
                            self.load_response(array_channel, channel,
                                               station.code))
                    else:
                        array_channel['response_table_n_i'] = -1
                        self.response_t.append({'n_i': -1})

                    # Select receiver table n_i
                    if channel.azimuth == 0.0 and channel.dip == 90.0:
                        array_channel['receiver_table_n_i'] = 0

                    elif channel.azimuth == 0.0 and channel.dip == 0:
                        array_channel['receiver_table_n_i'] = 1

                    elif channel.azimuth == 90.0 and channel.dip == 0:
                        array_channel['receiver_table_n_i'] = 2

                    elif channel.azimuth == 0.0 and channel.dip == -90.0:
                        array_channel['receiver_table_n_i'] = 3
                    elif channel.code == 'LOG':
                        # new receiver table entry
                        array_channel['receiver_table_n_i'] = 1
                    else:
                        array_channel['receiver_table_n_i'] = -1

                    array_dict = array_station.copy()
                    array_dict.update(array_channel)

                    array_list.append(array_dict)
                    LOGGER.info("Loaded channel {0}".format(channel.code))
                LOGGER.info("Loaded Station {0}".format(station.code))
                LOGGER.info("******************\n".format(station.code))

        return array_list
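A hypothetical driver for parse_inventory(), assuming obspy is installed; the class and constructor names are stand-ins, since the example shows only the method body.

from obspy import read_inventory

# "stations.xml" is an illustrative StationXML file name.
inv = read_inventory("stations.xml")
converter = MetadataToPH5(ph5_object)  # hypothetical class and constructor
array_list = converter.parse_inventory(inv)
for row in array_list:
    print(row['seed_station_name_s'], row['channel_number_i'])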
Example #5
    def toph5(self, file_tuple):
        """
        Takes a tuple (file_name or obspy stream, type)
        and loads it into ph5_object
        :type tuple
        :param file_tuple containing
        file_handle or obspy stream and file type as str
        :return:
        """
        index_t = list()
        time_corrected = False
        correction = False
        current_mini = None
        in_type = None
        das_station_map = self.get_das_station_map()
        existing_minis = self.get_minis(self.ph5_path)

        if not das_station_map:
            err = "Array metadata must exist before loading data"
            LOGGER.error(err)
            return "stop", index_t

        # get a mapping of which DASes each mini file contains
        minis = self.mini_map(existing_minis)

        # check if we are opening a file or have an obspy stream
        if isinstance(file_tuple[0], str):
            st = reader(file_tuple[0], format=file_tuple[1])
            in_type = "file"
            if file_tuple[1] == 'MSEED':
                try:
                    flags = get_flags(file_tuple[0])
                    if flags['activity_flags_counts'][
                            'time_correction_applied'] > 0:
                        LOGGER.info("Timing correction has been applied")
                        time_corrected = True
                    if flags["timing_correction"] != 0.0:
                        LOGGER.info('Timing Correction found')
                        correction = True

                except BaseException:
                    # flag information is optional; ignore unreadable flags
                    pass
        # is this an obspy stream?
        elif isinstance(file_tuple[0], Stream):
            st = file_tuple[0]
            in_type = 'stream'
        # is this an obspy trace?
        elif isinstance(file_tuple[0], Trace):
            st = Stream(traces=[file_tuple[0]])
            in_type = 'trace'
        else:
            LOGGER.error("Unknown input type; expected a file name, Stream,"
                         " or Trace")
            return "stop", index_t

        # Loop through data and load it in to PH5
        LOGGER.info('Processing {0} traces in stream for {1}'.format(
            len(st), file_tuple[0]))
        count = 1
        for trace in st:
            if self.verbose:
                LOGGER.info('Processing trace {0} in {1}'.format(
                    trace.stats.channel, trace.stats.station))
            if trace.stats.channel != 'LOG':
                if not trace.data.any():
                    LOGGER.info("No data for trace {0}...skipping".format(
                        trace.stats.channel))
                    continue
            if not existing_minis:
                current_mini = self.first_mini
            else:
                current_mini = None
                for mini in minis:
                    for entry in das_station_map:
                        if (entry['serial'] in mini[1]
                                and entry['station'] == trace.stats.station):
                            current_mini = mini[0]
                if not current_mini:
                    # no existing mini file holds this DAS: append to the
                    # largest mini file, or start a new one if it is full
                    largest = 0
                    for x in minis:
                        if x[0] >= largest:
                            largest = x[0]
                    if self.get_size_mini(largest) < self.mini_size_max:
                        current_mini = largest
                    else:
                        current_mini = largest + 1
            # iterate through das_station_map
            for entry in das_station_map:
                time_t = {}
                das = {}
                index_t_entry = {}
                # only load data if it matches
                if trace.stats.station == entry['station']:

                    # open mini file
                    mini_handle, mini_name = self.openmini(current_mini)
                    # get node reference or create new node
                    d = mini_handle.ph5_g_receivers.getdas_g(entry['serial'])
                    if not d:
                        d, t, r, ti = mini_handle.ph5_g_receivers.newdas(
                            entry['serial'])

                    # start populating das table and data arrays
                    das['time/ascii_s'] = trace.stats.starttime
                    index_t_entry['start_time/ascii_s'] = (
                        trace.stats.starttime.isoformat())
                    # convert ISO time to a float epoch; the fractional part
                    # becomes the microseconds field
                    time = timedoy.fdsn2epoch(
                        trace.stats.starttime.isoformat(), fepoch=True)
                    microsecond = (time % 1) * 1000000
                    das['time/epoch_l'] = (int(time))
                    das['time/micro_seconds_i'] = microsecond
                    das['time/type_s'] = 'BOTH'
                    index_t_entry['start_time/epoch_l'] = (int(time))
                    index_t_entry['start_time/micro_seconds_i'] = (microsecond)
                    index_t_entry['start_time/type_s'] = 'BOTH'
                    time = timedoy.fdsn2epoch(trace.stats.endtime.isoformat(),
                                              fepoch=True)
                    microsecond = (time % 1) * 1000000
                    index_t_entry['end_time/ascii_s'] = (
                        trace.stats.endtime.isoformat())
                    index_t_entry['end_time/epoch_l'] = (int(time))
                    index_t_entry['end_time/micro_seconds_i'] = (microsecond)
                    index_t_entry['end_time/type_s'] = 'BOTH'
                    now = UTCDateTime.now()
                    index_t_entry['time_stamp/ascii_s'] = (now.isoformat())
                    time = timedoy.fdsn2epoch(now.isoformat(), fepoch=True)
                    microsecond = (time % 1) * 1000000
                    index_t_entry['time_stamp/epoch_l'] = (int(time))
                    index_t_entry['time_stamp/micro_seconds_i'] = (
                        int(microsecond))
                    index_t_entry['time_stamp/type_s'] = 'BOTH'

                    if correction or time_corrected:
                        time_t['das/serial_number_s'] = entry['serial']

                        if in_type == 'file':
                            time_t['description_s'] = file_tuple[0]
                        else:
                            time_t['description_s'] = (
                                str(trace.stats.station) +
                                str(trace.stats.channel))
                        # SEED time correction
                        # units are 0.0001 seconds per unit
                        time_t['offset_d'] = \
                            flags["timing_correction"] * 0.0001
                        time_t['start_time/epoch_l'] =\
                            index_t_entry['start_time/epoch_l']
                        time_t['start_time/micro_seconds_i'] =\
                            index_t_entry['start_time/micro_seconds_i']
                        time_t['end_time/epoch_l'] =\
                            index_t_entry['end_time/epoch_l']
                        time_t['end_time/micro_seconds_i'] =\
                            index_t_entry['end_time/micro_seconds_i']
                        length = trace.stats.npts * trace.stats.delta
                        if length != 0:
                            time_t['slope_d'] = time_t['offset_d'] / length
                        else:
                            time_t['slope_d'] = 0

                    if time_corrected:
                        time_t['corrected_i'] = 1

                    if time_t:
                        self.time_t.append(time_t)

                    if (trace.stats.sampling_rate >= 1
                            or trace.stats.sampling_rate == 0):
                        das['sample_rate_i'] = trace.stats.sampling_rate
                        das['sample_rate_multiplier_i'] = 1
                    else:
                        das['sample_rate_i'] = 0
                        das['sample_rate_multiplier_i'] = (
                            1 / trace.stats.sampling_rate)
                    channel_list = list(trace.stats.channel)
                    if channel_list[2] in {'3', 'Z', 'z'}:
                        das['channel_number_i'] = 3
                    elif channel_list[2] in {'2', 'E', 'e'}:
                        das['channel_number_i'] = 2
                    elif channel_list[2] in {'1', 'N', 'n'}:
                        das['channel_number_i'] = 1
                    elif channel_list[2].isdigit():
                        das['channel_number_i'] = int(channel_list[2])
                    elif trace.stats.channel == 'LOG':
                        das['channel_number_i'] = -2
                        das['sample_rate_i'] = 0
                        das['sample_rate_multiplier_i'] = 1
                    else:
                        das['channel_number_i'] = -5
                    if in_type == 'file':
                        das['raw_file_name_s'] = file_tuple[0]
                    else:
                        das['raw_file_name_s'] = 'obspy_stream'
                    if trace.stats.channel == 'LOG':
                        das['sample_count_i'] = 0
                    else:
                        das['sample_count_i'] = trace.stats.npts

                    # figure out receiver and response n_i
                    for array_entry in self.arrays:
                        if (array_entry['sample_rate_i']
                                == trace.stats.sampling_rate
                                and array_entry['channel_number_i']
                                == das['channel_number_i'] and
                                array_entry['id_s'] == trace.stats.station):
                            das['receiver_table_n_i'] =\
                                array_entry['receiver_table_n_i']
                            das['response_table_n_i'] =\
                                array_entry['response_table_n_i']

                    # Make sure we aren't overwriting a data array
                    while True:
                        next_ = str(count).zfill(5)
                        das['array_name_data_a'] = "Data_a_{0}".format(next_)
                        node = mini_handle.ph5_g_receivers.find_trace_ref(
                            das['array_name_data_a'])
                        if not node:
                            break
                        count += 1

                    mini_handle.ph5_g_receivers.setcurrent(d)
                    data = array(trace.data)
                    if trace.stats.channel == 'LOG':
                        mini_handle.ph5_g_receivers.newarray(
                            das['array_name_data_a'],
                            data,
                            dtype='|S1',
                            description=None)
                    else:
                        data_type = data[0].__class__.__name__
                        mini_handle.ph5_g_receivers.newarray(
                            das['array_name_data_a'],
                            data,
                            dtype=data_type,
                            description=None)
                    mini_handle.ph5_g_receivers.populateDas_t(das)

                    index_t_entry['external_file_name_s'] = "./{}".format(
                        mini_name)
                    das_path = "/Experiment_g/Receivers_g/" \
                               "Das_g_{0}".format(entry['serial'])
                    index_t_entry['hdf5_path_s'] = das_path
                    index_t_entry['serial_number_s'] = entry['serial']

                    index_t.append(index_t_entry)
                    # Don't forget to close minifile
                    mini_handle.ph5close()
        LOGGER.info('Finished processing {0}'.format(file_tuple[0]))

        # last thing is to return the index table so far.
        # index_t will be populated in main() after all
        # files are loaded
        return "done", index_t