Example #1
def getParam(args, key, msgLib, value=None):
    """get a run argument for the given key"""
    if key in args.keys():
        msgLib.info(': '.join([key, args[key]]))
        return args[key]
    elif value is not None:
        return value
    else:
        msgLib.error("missing parameter " + key, 1)
        usage()
        sys.exit()
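
A minimal usage sketch for getParam, assuming args has already been parsed into a key/value dictionary (for example by getArgs) and that msgLib provides the info/error calls used above; the argument values are hypothetical:

# Hypothetical arguments for illustration only.
args = {'net': 'IU', 'sta': 'ANMO', 'chan': 'BHZ'}
network = getParam(args, 'net', msgLib)              # returns 'IU' and logs "net: IU"
window = getParam(args, 'win', msgLib, value=3600)   # key missing, falls back to 3600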
Example #2
def get_param(these_args, this_key, value=None):
    """Get a run argument for a given key.
   """
    if this_key in these_args.keys():
        msgLib.info('{}: {}'.format(this_key, these_args[this_key]))
        return these_args[this_key]
    elif value is not None:
        return value
    else:
        msgLib.error('Missing parameter {}'.format(this_key), 1)
        usage()
        sys.exit()
Example #3
def make_path(directory):
    """Checks a directory for existance and if it does not exist, create it.
    If needed, create all parent directories."""

    # The path must be an absolute path.
    if not os.path.isabs(directory):
        msg_lib.error(f'path must be an absolute path {directory}', 2)
        return None

    # Create the directories.
    path = os.path.abspath(directory)
    if not os.path.exists(path):
        os.makedirs(path)
    return path
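
A short usage sketch for make_path; the paths are hypothetical and msg_lib is assumed to behave as in the snippet above:

out_dir = make_path('/tmp/ntk/psd/2020')   # creates /tmp/ntk/psd/2020 (and any parents) if needed
bad_dir = make_path('relative/path')       # not absolute: logs an error and returns None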
Example #4
def mkdir(baseDir, dir):
    """Make a directory under baseDir if it does not already exist."""
    thisDir = os.path.join(baseDir, dir)
    try:
        if not os.path.exists(thisDir):
            os.makedirs(thisDir)
        return thisDir
    except Exception:
        return msgLib.error("failed to create directory " + thisDir, None)
Example #5
def get_param(arg_dict, key, default_value, usage):
    """ Get a run argument for the given key.
  """
    if key in arg_dict.keys():
        return arg_dict[key]
    elif default_value is not None:
        return default_value
    else:
        usage()
        code = msg_lib.error(f'missing parameter {key}', 1)
        sys.exit(code)
Example #6
def mkdir(target_directory):
    """ Make a directory if it does not exist."""
    directory = None
    try:
        directory = target_directory
        if not os.path.exists(directory):
            os.makedirs(directory)
        return directory
    except Exception as _er:
        code = msg_lib.error(f'Failed to create directory {directory}\n{_er}',
                             3)
        return None
Example #7
def addMonths(thisDate=None, add=1):
    """add number of months (add) to a given/current date"""
    import datetime
    if thisDate is None:
        today = datetime.date.today()
        month = today.month
        year = today.year
    else:
        values = thisDate.split('-')
        if len(values) < 3:
            msgLib.error("bad date " + thisDate)
            usage()
            sys.exit()
        year = int(values[0])
        month = int(values[1])
    # Work with a zero-based month, carry whole years into the year, then shift back.
    month = month - 1 + int(add)
    year += month // 12
    month = month % 12 + 1
    return "%4d-%02d-01" % (year, month)
Example #8
def get_args(arg_list, usage):
    """Get the run arguments.
  """
    args_dict = dict()

    for _i, arg_value in enumerate(arg_list):

        # Skip the script's name.
        if _i <= 0:
            continue
        if arg_value.strip() == 'usage':
            usage()
            sys.exit()
        try:
            key, value = arg_value.split('=')
            args_dict[key] = value
        except Exception as _ex:
            usage()
            code = msg_lib.error(f'Bad {arg_value} parameter!\n{_ex}', 3)
            sys.exit(code)
    return args_dict
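
A usage sketch for get_args, assuming the script is invoked with key=value arguments (the values shown are hypothetical):

# e.g., sys.argv == ['ntk_script.py', 'net=IU', 'sta=ANMO', 'chan=BHZ']
args = get_args(sys.argv, usage)
print(args)   # {'net': 'IU', 'sta': 'ANMO', 'chan': 'BHZ'}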
Example #9
def get_response_inventory(resp_dir, debug=False):
    """"
    get_response_inventory
        build a list of response inventories under a given met directory

        2016-10-27 Manoch: craeted
    """

    t0 = time()
    accept_count = 0
    reject_count = 0

    msg_lib.info('reading response files')
    file_list = [f for f in listdir(resp_dir) if isfile(join(resp_dir, f))]
    inventory = list()
    for resp_file in file_list:
        try:
            validation = validate_StationXML(os.path.join(resp_dir, resp_file))
            if not validation[0]:
                msg_lib.error(
                    f'Skipped invalid response file {os.path.join(resp_dir, resp_file)} {validation[1]}',
                    4)
                reject_count += 1
                continue
            inv = read_inventory(os.path.join(resp_dir, resp_file))
            accept_count += 1
            inventory.append(inv)
        except TypeError as e:
            if debug:
                msg_lib.error(
                    f'Skipped invalid response file {os.path.join(resp_dir, resp_file)} {e.message}',
                    4)
            reject_count += 1
        except Exception as ex:
            if debug:
                msg_lib.error(
                    f'Skipped, could not read {os.path.join(resp_dir, resp_file)} {ex}',
                    4)
            reject_count += 1
    t1 = utils_lib.time_it("Response Inventory", t0)
    msg_lib.info(
        f'response inventory:{accept_count} valid and {reject_count} rejected')
    return inventory
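
A usage sketch for get_response_inventory, assuming resp_dir holds StationXML files and that msg_lib, utils_lib and validate_StationXML are importable as in the snippet; the directory name is hypothetical:

# Each returned item is an ObsPy Inventory object.
inventories = get_response_inventory('/data/resp', debug=True)
for inv in inventories:
    print(inv.get_contents()['channels'])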
Example #10
#
libraryPath = os.path.join(os.path.dirname(__file__), '..', 'lib')
sys.path.append(libraryPath)

import msgLib as msgLib
import fileLib as fileLib
import staLib as staLib

args = getArgs(sys.argv)

#
# see if user has provided the run arguments
#

if len(args) < 8:
    msgLib.error("missing argument(s)", 1)
    usage()
    sys.exit()

#
# import the user-provided parameter file
#
# os.path.dirname(__file__) gives the current directory
#

paramFile = getParam(args, 'param', msgLib, None)
import importlib

paramPath = os.path.join(os.path.dirname(__file__), '..', 'param')

#
Example #11
        msgLib.error('Missing parameter {}'.format(this_key), 1)
        usage()
        sys.exit()


# Get user-provided arguments and script libraries.
args = get_args(sys.argv)
param_file_name = '{}_param'.format(script.replace('.py', ''))

print('\n\n\n')
msgLib.info(', '.join([script, param_file_name, version]))

try:
    param = importlib.import_module(param_file_name)
except Exception as e:
    msgLib.error('failed to load: {}\n{}'.format(param_file_name, e), 1)
    sys.exit()

verbose = int(get_param(args, 'verbose', value=param.verbose))
do_plot = int(get_param(args, 'plot', value=param.plot))
plot_nnm = int(get_param(args, 'plotnnm', value=param.plotnnm))
if verbose:
    msgLib.info('Param Path: {}'.format(paramPath))
    msgLib.info('loaded: {}'.format(param_file_name))

# If plot is not requested, turn off the display requirement.
if do_plot <= 0:
    msgLib.info('Plot OFF!')
    matplotlib.use('agg')
else:
    from obspy.imaging.cm import pqlx
Example #12
def param(params, var):
    """Get a variable from the parameter file by name"""
    if var in dir(params):
        return params
    code = msg_lib.error(f'variable {var} is not in the parameter file', 3)
    sys.exit(code)
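
The function returns the parameter module itself, so the caller re-references the variable by attribute; a hedged usage sketch (param_module is a hypothetical name for the imported parameter file):

# Verify that 'chan' exists in the parameter file, then read it off the returned module.
channels = param(param_module, 'chan').chan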
Example #13
#
libraryPath = os.path.join(os.path.dirname(__file__), '..', 'lib')
sys.path.append(libraryPath)

import msgLib as msgLib
import fileLib as fileLib
import staLib as staLib

args = getArgs(sys.argv)

#
# see if user has provided the run arguments
#

if len(args) < 9:
    msgLib.error("missing argument(s)", 1)
    usage()
    sys.exit()

script = sys.argv[0]

#
# import the user-provided parameter file
#
# os.path.dirname(__file__) gives the current directory
#

paramFile = getParam(args, 'param', msgLib, None)
import importlib

#
Example #14
################################################################################################

#
# set parameters
#
args = getArgs(sys.argv)
paramFileName = script.replace('.py', '') + '_param'
print("\n\n\n")
msgLib.info(', '.join([script, paramFileName, version]))
msgLib.info('Param Path: ' + paramPath)

try:
    param = importlib.import_module(paramFileName)
    msgLib.info("loaded: " + paramFileName)
except Exception:
    msgLib.error("failed to load: " + paramFileName, 1)
    sys.exit()

channelList = getParam(args, 'chan', msgLib, param.chan)
network = getParam(args, 'net', msgLib)
if network is None:
    msgLib.error('network not defined!', 1)
    sys.exit()
station = getParam(args, 'sta', msgLib)
if station is None:
    msgLib.error('station not defined!', 1)
    sys.exit()
location = getParam(args, 'loc', msgLib)
if location == 'DASH':
    location = '--'
if location is None:
Example #15
version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'binPsdDay'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(
        f'could not load the default parameter file  [param/{default_param_file}.py]',
        2)
    sys.exit(code)


def usage():
    """ Usage message.
   """
    print(
        f'\n\n{script} version {version}\n\n'
        f'A Python 3 script to bin PSDs to daily files for a given channel and bounding parameters. '
        f'\n\nUsage:\n\t{script} to display the usage message (this message)'
        f'\n\t  OR'
        f'\n\t{script} param=FileName net=network sta=station loc=location chan=channel(s)'
        f' start=YYYY-MM-DDTHH:MM:SS end=YYYY-MM-DDTHH:MM:SS xtype=[period|frequency] verbose=[0|1]\n'
        f'\n\twhere:'
Example #16
sys.path.append(libraryPath)

import msgLib as msgLib
import fileLib as fileLib
import staLib as staLib
import sfLib as SFL
import tsLib as TSL

args = getArgs(sys.argv)

#
# see if user has provided the run arguments
#

if len(args) < 8:
   msgLib.error("missing argument(s)",1)
   usage()
   sys.exit()

script = sys.argv[0]

#
# import the user-provided parameter file
#
# os.path.dirname(__file__) gives the current directory
#

paramFile = getParam(args, 'param', msgLib, None)
import importlib
paramPath = os.path.join(os.path.dirname(__file__), '..', 'param')
Example #17
def get_channel_waveform_files(network,
                               station,
                               location,
                               channel,
                               start_time,
                               end_time,
                               client,
                               file_tag,
                               resp_dir=None,
                               inventory=None):
    """
    get_channel_waveform_files gets data from files and
    the response from the FDSN client for the requested
    network/station/location/channel and time window.

    The output is the corresponding data stream.
    file_tag should be set so that it guides the function in
    selecting waveform files, for example:

    {this Path}/*.SAC

    channel may have the last one or two letters wildcarded (e.g. channel="EH*")
    to select all components with a common band/instrument code.

    All other selection criteria that accept strings (network, station, location)
    may also contain Unix style wildcards (*, ?, ...).

    HISTORY:
       2015-03-17 Manoch: added the "waterLevel" parameter to give the user more control over how the ObsPy module
       shrinks values below the water level of the maximum spectral amplitude when removing the instrument response.
       2015-02-24 Manoch: introduced two new parameters (performInstrumentCorrection, applyScale) to let the user
       skip instrument correction; the user can now also turn off the deconvolution filter.

       2014-03-15 Manoch: created
    """
    debug = True
    sender = 'get_channel_waveform_files'

    # Stream holds the final stream
    this_start_time = UTCDateTime(start_time)
    this_end_time = UTCDateTime(end_time)
    stream = Stream()
    try:
        # Read in the files to a stream.
        msg_lib.info(f'checking: {file_tag}')
        msg_lib.info('Apply scaling')
        stream_in = read(file_tag,
                         start_time=this_start_time,
                         end_time=this_end_time,
                         nearest_sample=True)
    except Exception as ex:
        msg_lib.error(
            f'{network}, {station}, {location}, {channel}, {start_time}, {end_time} {ex}',
            2)
        return None, None

    try:

        # Select the desired streams only.
        if location == "--":
            stream_out = stream_in.select(network=network,
                                          station=station,
                                          location="",
                                          channel=channel)
        else:
            stream_out = stream_in.select(network=network,
                                          station=station,
                                          location=location,
                                          channel=channel)

        for i in range(len(stream_out)):

            # Get the network, station, location and channel information.
            this_nslc, this_time, junk = str(stream_out[i]).split('|')
            net, sta, loc, chan = this_nslc.strip().split('.')
            if len(loc) == 0:
                loc = "--"

            # If resp_dir is defined, first look into user's resp_dir for stationXML files,
            # if not found get it from FDSN
            start, end = this_time.split(' - ')
            inv = None
            if resp_dir is not None:
                msg_lib.info(f'Getting response from {resp_dir}')
                this_loc = loc
                if loc == '--':
                    this_loc = ''

                inventory, inv = get_response_from_file(
                    inventory, resp_dir, net, sta, this_loc, chan, start_time,
                    debug)
                if inv is not None:
                    if debug:
                        msg_lib.info(f'Attaching {inv}')
                    stream_out[i].attach_response(inv)
                    stream += stream_out[i]
                else:
                    this_start_time = UTCDateTime(start.strip())
                    msg_lib.warning(
                        sender,
                        f'NO RESPONSE FILE: {net}, {sta}, {loc}, {chan}, {this_start_time}'
                    )
            if inv is None and client is not None:
                # The FDSN webservices return StationXML metadata.
                msg_lib.info('Getting response from IRIS')
                try:
                    this_start_time = UTCDateTime(start.strip())
                    this_end_time = UTCDateTime(end.strip())
                    inv = client.get_stations(network=net,
                                              station=sta,
                                              location=loc,
                                              channel=chan,
                                              starttime=this_start_time,
                                              endtime=this_end_time,
                                              level="response")
                    stream_out[i].attach_response(inv)
                    stream += stream_out[i]
                    if debug:
                        msg_lib.info(f'Response attached: {inv}')
                except Exception as ex:
                    this_start_time = UTCDateTime(start.strip())
                    msg_lib.warning(
                        sender, f'NO RESPONSE: {net}, {sta}, {loc}, {chan}, '
                        f'{this_start_time}, {this_end_time} {ex}')
                    continue

    except Exception as ex:
        print(str(ex))
        msg_lib.error(
            f'get_channel_waveform_files {network}, {station}, {location}, {channel}, {start_time}, '
            f'{end_time}, {ex}', 2)
        return None, None

    return inventory, stream
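
A usage sketch for get_channel_waveform_files, assuming local SAC files and an ObsPy FDSN client for the response; the paths, codes and times are hypothetical:

from obspy.clients.fdsn import Client

client = Client('IRIS')
inventory, stream = get_channel_waveform_files(
    'NM', 'SIUC', '--', 'BH*',
    '2009-11-01T11:00:00', '2009-11-01T12:00:00',
    client, '/data/sac/*.SAC')
if stream is not None:
    print(stream)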
Example #18
def get_fedcatalog_station(req_url,
                           request_start,
                           request_end,
                           chunk_length,
                           chunk_count=1):
    """Get station list from fedcatalog service."""

    # This dictionary stores all the fedcatalog information.
    fedcatalog_info = dict()

    # This dictionary provides a template for fedcatalog creation.
    catalog_info = dict()

    bulk_list = collections.OrderedDict()
    dc_chunk_list = dict()

    msg_lib.info(f'sending request to fedcatalog: {req_url}')

    try:
        content = utils_lib.read_url(req_url)
    except Exception as _er:
        code = msg_lib.error(f'Request  {req_url}: {_er}', 4)
        sys.exit(code)

    # Go through the station list and see if they qualify.
    _lines = content.split('\n')

    _line_index = -1
    previous_dc = None
    dc_name = None
    for _line in _lines:
        _line_index += 1

        # Skip the blank and the comment lines.
        if not _line.strip() or _line.startswith('#'):
            continue

        # From the parameter=value lines, we are interested in the DATACENTER and DATASELECTSERVICE lines.
        elif '=' in _line:
            _par, _value = _line.split('=')

            # Found the data center name.
            if _par == 'DATACENTER':
                if dc_name is not None:
                    previous_dc = dc_name
                msg_lib.info(f'from the {_value} data center')
                dc_name, dc_url = _value.strip().split(',')

                # Initialize the data center information, create chunk_count containers for chunked requests.
                if dc_name not in catalog_info.keys():
                    msg_lib.info(
                        f'Initiating fedcatalog request for {dc_name}')
                    catalog_info[dc_name] = utils_lib.ObjDict({
                        'url':
                        dc_url,
                        'dataselect_service':
                        '',
                        'bulk':
                        list()
                    })

                # if this is not the first data center, save the previous data center's bulk list
                if bulk_list:
                    this_dc_list = list()
                    for _key in bulk_list:
                        this_dc_list.append(bulk_list[_key])

                    # Break the  list into chunks and add it to fedcatalog_info. We incorporate band_index,
                    # in case multiple bands are requested. Otherwise, chunk_index of the next band will overwrite
                    # chunk_index of this band.
                    for chunk_index, chunk in enumerate(
                            divide_to_chunks(this_dc_list, chunk_count)):
                        chunk_dc = f'{previous_dc}_{chunk_index}'

                        # Keep track of chunks for each DC for later use.
                        if previous_dc not in dc_chunk_list.keys():
                            dc_chunk_list[previous_dc] = list()
                        dc_chunk_list[previous_dc].append(chunk_dc)

                        fedcatalog_info[chunk_dc] = catalog_info[
                            previous_dc].copy()
                        fedcatalog_info[chunk_dc]['bulk'] = chunk

                    # The list is saved. Now, reset the bulk_list.
                    bulk_list = collections.OrderedDict()

                continue
            # Found the dataselect service address.
            elif _par == 'DATASELECTSERVICE':
                # Save the dataselect service address for all chunks.
                if dc_name in dc_chunk_list.keys():
                    for chunk_dc in dc_chunk_list[dc_name]:
                        fedcatalog_info[chunk_dc][
                            'dataselect_service'] = _value.strip()

                # Save the dataselect service address in the catalog for this DC,
                catalog_info[dc_name]['dataselect_service'] = _value.strip()
                msg_lib.info(f'dataselect service is {_value.strip()}')
                continue
            else:
                # Ignore the other definitions.
                continue

        # The rest are the station lines.
        # Skip the blank lines.
        if not (_line.strip()):
            continue

        # Get the station information.
        net, sta, loc, chan, sta_start, sta_end = get_request_items(_line)

        start = UTCDateTime(request_start)
        end = UTCDateTime(request_end)
        segment = -1

        while start < end:
            segment += 1
            req_start = start.strftime('%Y-%m-%dT%H:%M:%S')
            if start + chunk_length <= end:
                req_end = (start + chunk_length).strftime('%Y-%m-%dT%H:%M:%S')
            else:
                req_end = end.strftime('%Y-%m-%dT%H:%M:%S')
            _net_sta_key = f'{net}_{sta}_{chan}_{segment}'
            bulk_list[_net_sta_key] = (net, sta, loc, chan, req_start, req_end)
            start += chunk_length + 0.0001

    # Save the last data center's bulk list.
    if bulk_list:
        this_dc_list = list()
        for _key in bulk_list.keys():
            this_dc_list.append(bulk_list[_key])

        # Break the  list into chunks and add it to fedcatalog_info.
        for chunk_index, chunk in enumerate(
                divide_to_chunks(this_dc_list, chunk_count)):
            chunk_dc = f'{dc_name}_{chunk_index}'
            # Keep track of chunks for each DC for later use.
            if dc_name not in dc_chunk_list.keys():
                dc_chunk_list[dc_name] = list()
            dc_chunk_list[dc_name].append(chunk_dc)

            fedcatalog_info[chunk_dc] = catalog_info[dc_name].copy()
            fedcatalog_info[chunk_dc]['bulk'] = chunk

        # Reset the bulk_list.

        bulk_list = collections.OrderedDict()
    return utils_lib.ObjDict(fedcatalog_info)
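
divide_to_chunks and get_request_items are used above but not shown; a minimal sketch of the chunking helper, under the assumption that chunk_count is the desired number of roughly equal sub-lists, might look like this:

import math

def divide_to_chunks(items, chunk_count):
    # Hypothetical sketch: split 'items' into chunk_count roughly equal sub-lists.
    size = max(1, math.ceil(len(items) / max(1, chunk_count)))
    return [items[i:i + size] for i in range(0, len(items), size)]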
Example #19
version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'medianPower'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(
        f'could not load the default parameter file  [param/{default_param_file}.py]',
        2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    print(
        f'\n\n{script} version {version}\n\n'
        f'A Python 3 script to calculate median power for a given window length '
        f'based on the computed PSD powers.'
        f'\n\nUsage:\n\t{script} to display the usage message (this message)'
        f'\n\n\t  OR'
        f'\n\n\t{script} param=FileName net=network sta=station loc=location chan=channel '
        f' start=YYYY-MM-DDTHH:MM:SS end=YYYY-MM-DDTHH:MM:SS win=hour verbose=[0|1] file=PSD_file_name'
Example #20
"""

version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'computePower'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(f'could not load the default parameter file  [param/{default_param_file}.py]', 2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    print(f'\n\n{script} version {version}\n\n'
          f'A Python 3 script to calculate power of each PSD window over selected bin period bands.'
          f'\n\nUsage:\n\t{script} to display the usage message (this message)'
          f'\n\t  OR'
          f'\n\t{script} param=FileName net=network sta=station loc=location chandir=channel directory'
          f' start=YYYY-MM-DDTHH:MM:SS end=YYYY-MM-DDTHH:MM:SS xtype=[period|frequency] verbose=[0|1]\n'
          f'\n\tto perform extraction where:'
          f'\n\t param\t\t[default: {default_param_file}] the configuration file name '
          f'\n\t net\t\t[required] network code'
Example #21
version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'plotPower'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(
        f'could not load the default parameter file  [param/{default_param_file}.py]',
        2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    print(
        f'\n\n{script} version {version}\n\n'
        f'A Python 3 script to plot median power obtained from the median power file produced by '
        f'ntk_medianPower.py.'
        f'\n\nUsage:\n\t{script} to display the usage message (this message)'
        f'\n\t  OR'
        f'\n\t{script} param=FileName net=network sta=station loc=location chandir=channel_directory'
        f' start=YYYY-MM-DDTHH:MM:SS end=YYYY-MM-DDTHH:MM:SS xtype=[period|frequency] verbose=[0|1] '
Example #22
def qc_3c_stream(stream, segment_length, window, sorted_channel_list,
                 channel_groups, verbose):
    """
      qc_3c_stream performs a QC on a 3-C stream by making sure
      all channels are present, traces are the same
      length and have same start and end times  mostly needed for polarization analysis
     
      the output is an array of trace record numbers in the stream that passed
      the QC
      
      HISTORY:
         2014-04-21 Manoch: created
    """
    sender = 'qc_3c_stream'
    traces = str(stream)
    traces = traces.split("\n")
    if verbose:
        msg_lib.info(f'{sender}, there are total of {len(traces)} traces.')

    stream_list = list()

    # The first line is the stream header; skip it.
    for trace_index, trace in enumerate(traces):
        if trace_index == 0:
            continue
        stream_list.append(f'{trace}|{trace_index}')
    # Sort to make sure related records are one after another.
    streams = sorted(stream_list)

    # extract the list, one record (line) at a time and group them
    qc_record_list = list()
    previous_group_name = ""
    group_count = -1
    group_channels = list()
    group_records = list()
    group_names = list()
    station_info_list = list()
    time_info_list = list()
    channel_info_list = list()
    record_info_list = list()

    for line_index, line in enumerate(stream_list):
        # Reset the list for each record (line).
        this_station_info_list = list()
        this_time_info_list = list()
        this_channel_info_list = list()
        this_record_info_list = list()
        """
          RECORD: NM.SIUC..BHE | 2009-11-01T11:00:00.019537Z - 2009-11-01T11:59:59.994537Z | 40.0 Hz, 144000 samples|1
                  sta_info       time_info                                                   chan_info               rec_info

          from each record extract the parts
        """
        sta_info, time_info, chan_info, rec_info = line.split("|")
        """
          from each part extract list
         
                              0   1   2    3
          this_station_info_list = [NET,STA,LOC,CHAN]
        """
        this_station_info_list = sta_info.strip().split(".")

        # Replace blank locations with "--".
        this_station_info_list[2] = sta_lib.get_location(
            this_station_info_list[2])
        """
                                0   1  
          this_time_info_list = [START,END]
        """
        this_time_info_list.append(time_info.strip().split(" - "))
        """
                                0        1     2      3
          this_channel_info_list = [SAMPLING,UNIT,SAMPLES,TEXT]
        """
        this_channel_info_list.append(chan_info.strip().split(" "))

        # This_record_info_list = RECORD.
        this_record_info_list.append(int(rec_info.strip()))

        # Name each record as a channel group (do not include channel).
        this_group_name = ".".join(
            this_station_info_list[ii]
            for ii in range(len(this_station_info_list) - 1))

        # Starting the first group, start saving info.
        if this_group_name != previous_group_name:
            group_count += 1
            if verbose:
                msg_lib.info(
                    f'{sender}, started group {group_count}: {this_group_name}'
                )
            group_names.append(this_group_name)

            group_channels.append(list())
            group_records.append(list())

            previous_group_name = this_group_name

        # Save the channel names.
        group_channels[group_count].append(this_station_info_list[-1])
        group_records[group_count].append(line_index)

        #
        # note: the following arrays are not grouped, hence extend and not append
        #
        time_info_list.extend(this_time_info_list)
        channel_info_list.extend(this_channel_info_list)
        station_info_list.extend([this_station_info_list])
        record_info_list.extend(this_record_info_list)

    if verbose:
        msg_lib.info(f'{sender}, found {len(group_records)} record groups.')

    # QC each group
    for rec_index, rec in enumerate(group_records):
        # All group elements are in, start the QC.
        qc_passed = True

        if verbose:
            msg_lib.info(
                f'{sender}, QC for record group {group_names[rec_index]}')

        # Create a sorted list of unique channels.
        channel_list = sorted(set(group_channels[rec_index]))
        if verbose:
            msg_lib.info(f'{sender}, channel list: {channel_list}')

        # Missing Channels?
        # - based on missing records.
        if len(group_records[rec_index]) < 3:
            msg_lib.info(
                f'{sender}, missing channels records, received {len(group_records[rec_index])}'
            )
            qc_passed = False
        else:
            # - based on channels missing from channel list.
            if channel_list not in sorted_channel_list:
                msg_lib.info(
                    f'{sender}, missing channels records from {group_names[rec_index]} got '
                    f'{channel_list} while expecting {sorted_channel_list}')
                qc_passed = False
            # - channel list is valid
            else:
                msg_lib.info(f'{sender}, channel list complete {channel_list}')
                """
                  Gaps?
                  This is a simple rejection based on gaps. A better choice will be to take segments and process 
                  those with sufficient length but with 3 channels involved, this will be too complicated 
                  -- manoch 2014-04-18
                """
                if len(group_records[rec_index]) > 3:
                    msg_lib.info(f'{sender}, gaps in {group_names[rec_index]}')
                    qc_passed = False
                else:
                    msg_lib.info(
                        f'{sender}, no gaps in {group_names[rec_index]}')

                    # Check for sampling rates.
                    rec1, rec2, rec3 = map(int, group_records[rec_index])
                    sampling_frequency_01 = float(channel_info_list[rec1][0])
                    sampling_frequency_02 = float(channel_info_list[rec2][0])
                    sampling_frequency_03 = float(channel_info_list[rec3][0])
                    if sampling_frequency_01 != sampling_frequency_02 or sampling_frequency_01 != sampling_frequency_03:
                        msg_lib.info(
                            f'{sender}, sampling frequencies do not match! ({sampling_frequency_01}, '
                            f'{sampling_frequency_02}, {sampling_frequency_03}'
                        )
                        qc_passed = False
                    else:
                        msg_lib.info(
                            f'{sender}, sampling frequencies: [{sampling_frequency_01}, '
                            f'{sampling_frequency_02},'
                            f'{sampling_frequency_03}]')

                        # Check for mismatched start time - Note: there are exactly 3 records.
                        delay01 = np.abs(
                            UTCDateTime(time_info_list[rec1][0]) -
                            UTCDateTime(time_info_list[rec2][0]))
                        delay02 = np.abs(
                            UTCDateTime(time_info_list[rec1][0]) -
                            UTCDateTime(time_info_list[rec3][0]))
                        samplerate = 1.0 / float(channel_info_list[rec1][0])

                        # Calculate number of points needed for FFT (as a power of 2) based on the run parameters.
                        num_samp_needed_03 = 2**int(
                            math.log(
                                int((float(segment_length) / samplerate + 1) /
                                    window), 2))  # make sure it is power of 2
                        if delay01 == 0.0 and delay02 == 0.0:
                            msg_lib.info(f'{sender}, start times OK')
                        else:
                            if 0.0 < delay01 < samplerate:
                                msg_lib.info(
                                    f'{sender}, start time difference between '
                                    f'{".".join(station_info_list[rec1])} '
                                    f'and {".".join(station_info_list[rec2])} is {delay01}s and is less '
                                    f'than 1 sample')
                            elif delay01 > 0.0 and delay01 >= samplerate:
                                msg_lib.info(
                                    f'{sender}, start time difference between '
                                    f'{".".join(station_info_list[rec1])} '
                                    f'and {".".join(station_info_list[rec2])} is {delay01}s and is  '
                                    f'one sample or more')
                                qc_passed = False
                            if 0.0 < delay02 < samplerate:
                                msg_lib.info(
                                    f'{sender}, start time difference between '
                                    f'{".".join(station_info_list[rec1])} '
                                    f'and {".".join(station_info_list[rec3])} is {delay02}s and is less '
                                    f'than 1 sample')
                            elif delay02 > 0.0 and delay02 >= samplerate:
                                msg_lib.info(
                                    f'{sender}, start time difference between '
                                    f'{".".join(station_info_list[rec1])} '
                                    f'and {".".join(station_info_list[rec3])} is {delay02}s and is  '
                                    f'one sample or more')

                        # Check for insufficient number of samples.
                        if qc_passed:
                            samples_list = list()
                            for _rec in (rec1, rec2, rec3):
                                samples_list.append(
                                    float(channel_info_list[_rec][2]))

                            if verbose:
                                msg_lib.info(
                                    f'{sender}, samples: {samples_list}')
                            minimum_samples = np.min(samples_list)
                            if minimum_samples < num_samp_needed_03:
                                msg_lib.info(
                                    f'{sender}, wanted minimum of {num_samp_needed_03} '
                                    f'but got only {minimum_samples}')
                                qc_passed = False
                            else:
                                msg_lib.info(
                                    f'{sender}, wanted minimum of '
                                    f'{num_samp_needed_03} got {minimum_samples}, OK'
                                )

                                # mismatched end time.
                                delay01 = np.abs(
                                    UTCDateTime(time_info_list[rec1][1]) -
                                    UTCDateTime(time_info_list[rec2][1]))
                                delay02 = np.abs(
                                    UTCDateTime(time_info_list[rec1][1]) -
                                    UTCDateTime(time_info_list[rec3][1]))
                                samplerate = 1.0 / float(
                                    channel_info_list[rec1][0])
                                qc_passed = True
                                if delay01 == 0.0 and delay02 == 0.0:
                                    msg_lib.info(f'{sender}, end times OK')

                                # For information only, we know we have enough samples!
                                else:
                                    if 0.0 < delay01 < samplerate:
                                        msg_lib.info(
                                            f'{sender}, end time difference between '
                                            f'{".".join(station_info_list[rec1])}'
                                            f' and {".".join(station_info_list[rec2])} is {delay01}s less '
                                            f'than 1 sample')
                                    elif 0.0 < delay01 >= samplerate:
                                        msg_lib.info(
                                            f'{sender}, end time difference between '
                                            f'{".".join(station_info_list[rec1])}'
                                            f' and {".".join(station_info_list[rec2])} '
                                            f'is {delay01}s is 1 sample or more'
                                        )
                                    if 0.0 < delay02 < samplerate:
                                        msg_lib.info(
                                            f'{sender}, end time difference between '
                                            f'{".".join(station_info_list[rec1])}'
                                            f' and {".".join(station_info_list[rec3])} is {delay02}s and is '
                                            f'less than 1 sample')
                                    elif delay02 > 0.0 and delay02 >= samplerate:
                                        msg_lib.info(
                                            f'{sender}, end time difference between '
                                            f'{".".join(station_info_list[rec1])}'
                                            f' and {".".join(station_info_list[rec3])}'
                                            f' is {delay02}s and is 1 sample or more'
                                        )

                # End of the QC save qc_passed flag.
                if qc_passed:
                    chan_group_found = False
                    # qc_record_list provides index of the record for each channel_groups element.
                    for chans in channel_groups:
                        # found the matching channel group?
                        if group_channels[rec_index][0] in chans and group_channels[rec_index][1] in \
                                chans and group_channels[rec_index][2] in chans:
                            msg_lib.info(
                                f'{sender}, output channel order should be {chans}'
                            )
                            ordered_group_records = list()
                            group_channels_list = group_channels[rec_index]
                            chan_group_found = True
                            for chan in chans:
                                qc_record_list.append(
                                    group_channels_list.index(chan))
                            break
                    if not chan_group_found:
                        code = msg_lib.error(
                            f'{sender}, channel_groups parameter matching the '
                            f'output channel order [{group_channels[rec_index][0]}, '
                            f'{group_channels[rec_index][1]}, {group_channels[rec_index][2]}] '
                            f'not found', 4)
                        sys.exit(code)

    if verbose:
        msg_lib.info(f'{sender}, passed records: {qc_record_list}')
    return qc_record_list
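
A usage sketch for qc_3c_stream, assuming a 3-component ObsPy Stream and run parameters comparable to the polarization scripts; all values are hypothetical:

# channel_groups lists acceptable 3-C combinations; sorted_channel_list holds their sorted forms.
channel_groups = [['BHE', 'BHN', 'BHZ'], ['BH1', 'BH2', 'BHZ']]
sorted_channel_list = [sorted(group) for group in channel_groups]
passed = qc_3c_stream(stream, segment_length=3600.0, window=4,
                      sorted_channel_list=sorted_channel_list,
                      channel_groups=channel_groups, verbose=True)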
Example #23
version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'extractPolarHour'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(
        f'could not load the default parameter file  [param/{default_param_file}.py]',
        2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    print(
        f'\n\n{script} version {version}\n\n'
        f'A Python 3 script to extract hourly polarization values for each of the variables '
        f'defined by the "variables" parameter in the computePolarization parameter file (the parameter file for '
        f'this script must point to these variables in the "computePolarization" parameter file).'
        f'\n\nUsage:\n\t{script} to display the usage message (this message)'
        f'\n\t  OR'
        f'\n\t{script} param=FileName net=network sta=station loc=location chandir=channel_directory'
Example #24
# import the Noise Toolkit libraries
#
libraryPath      = os.path.join(os.path.dirname(__file__), '..', 'lib')
sys.path.append(libraryPath)

import msgLib as msgLib
import fileLib as fileLib
import staLib as staLib

args = getArgs(sys.argv)

#
# see if user has provided the run arguments
#
if len(args) < 9:
   msgLib.error("missing argument(s)",1)
   usage()
   sys.exit()

script = sys.argv[0]

#
# import the user-provided parameter file
#
# os.path.dirname(__file__) gives the current directory
#
paramFile = getParam(args, 'param', msgLib, None)
import importlib

#
# check to see if param file exists
Example #25
sys.path.append(libraryPath)

import msgLib as msgLib
import fileLib as fileLib
import staLib as staLib
import sfLib as SFL
import tsLib as TSL

args = getArgs(sys.argv)

#
# see if user has provided the run arguments
#

if len(args) < 8:
    msgLib.error("missing argument(s)", 1)
    usage()
    sys.exit()

script = sys.argv[0]

#
# import the user-provided parameter file
#
# os.path.dirname(__file__) gives the current directory
#

paramFile = getParam(args, 'param', msgLib, None)
import importlib
paramPath = os.path.join(os.path.dirname(__file__), '..', 'param')
Example #26
version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'binPolarDay'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(
        f'could not load the default parameter file  [param/{default_param_file}.py]',
        2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    print(
        f'\n\n{script} version {version}\n\n'
        f'A Python 3 script to bin polarization parameters into daily files for a given channel '
        f'tag and bounding parameters for each of the variables defined by the "variables" parameter '
        f'in the computePolarization parameter file (the parameter file for this script must point to this '
        f'"variables" parameter). The output of this script is similar to those available '
        f'from the former IRIS PDF/PSD Bulk Data Delivery System.'
        f'\n\nUsage:\n\t{script} display the usage message (this message)'
Example #27
"""

version = 'V.2.0.0'
script = sys.argv[0]
script = os.path.basename(script)

# Initial mode settings.
timing = False
do_plot = False
verbose = False
mode_list = ['0', 'plot', 'time', 'verbose']
default_param_file = 'computePSD'
if os.path.isfile(os.path.join(param_path, f'{default_param_file}.py')):
    param = importlib.import_module(default_param_file)
else:
    code = msg_lib.error(f'could not load the default parameter file  [param/{default_param_file}.py]', 2)
    sys.exit(code)


def usage():
    """ Usage message.
    """
    sw_width = param.octaveWindowWidth
    sw_shift = param.octaveWindowShift
    print(f'\n\n{script} version {version}\n\n'
          f'A Python 3 script that calculates average power spectral densities for a given station. '
          f'The script:'
          f'\n\t- identifies the FDSN data provider for the requested station using the Fedcatalog service '
          f'\n\t  from IRIS (https://service.iris.edu/irisws/fedcatalog/1/)'
          f'\n\t- requests waveform and response data for the given station(s)/channels(s) using '
          f'ObsPy\'s FDSN client'