Example #1
def read_nsmn(filename):
    """Read the Turkish NSMN strong motion data format.

    Args:
        filename (str): path to NSMN data file.

    Returns:
        list: Sequence of one StationStream object containing 3 StationTrace objects.
    """
    header = _read_header(filename)
    header1 = copy.deepcopy(header)
    header2 = copy.deepcopy(header)
    header3 = copy.deepcopy(header)
    header1['standard']['horizontal_orientation'] = 0.0
    header1['channel'] = get_channel_name(header['sampling_rate'], True, False,
                                          True)
    header2['standard']['horizontal_orientation'] = 90.0
    header2['channel'] = get_channel_name(header['sampling_rate'], True, False,
                                          False)
    header3['standard']['horizontal_orientation'] = 0.0
    header3['channel'] = get_channel_name(header['sampling_rate'], True, True,
                                          False)
    # three columns of NS, EW, UD
    # data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
    #                      delimiter=[COLWIDTH] * NCOLS, encoding=ENCODING)
    data = np.loadtxt(filename, skiprows=TEXT_HDR_ROWS, encoding=ENCODING)
    data1 = data[:, 0]
    data2 = data[:, 1]
    data3 = data[:, 2]
    trace1 = StationTrace(data=data1, header=header1)
    trace2 = StationTrace(data=data2, header=header2)
    trace3 = StationTrace(data=data3, header=header3)
    stream = StationStream(traces=[trace1, trace2, trace3])
    return [stream]
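A minimal usage sketch of the reader above; the file path and the printed fields are illustrative assumptions, not part of the original module:

streams = read_nsmn('/path/to/nsmn_file.txt')  # hypothetical path
for trace in streams[0]:
    # each StationTrace carries the channel name and orientation set above
    print(trace.stats['channel'],
          trace.stats['standard']['horizontal_orientation'])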
Example #2
def read_nsmn(filename, **kwargs):
    """Read the Turkish NSMN strong motion data format.

    Args:
        filename (str):
            path to NSMN data file.
        kwargs (ref):
            Other arguments will be ignored.

    Returns:
        list: Sequence of one StationStream object containing 3
        StationTrace objects.
    """
    header = _read_header(filename)
    header1 = copy.deepcopy(header)
    header2 = copy.deepcopy(header)
    header3 = copy.deepcopy(header)
    header1['standard']['horizontal_orientation'] = 0.0
    header1['standard']['vertical_orientation'] = np.nan
    header1['channel'] = get_channel_name(header['sampling_rate'], True, False,
                                          True)
    header1['standard']['units_type'] = get_units_type(header1['channel'])
    header2['standard']['horizontal_orientation'] = 90.0
    header2['standard']['vertical_orientation'] = np.nan
    header2['channel'] = get_channel_name(header['sampling_rate'], True, False,
                                          False)
    header2['standard']['units_type'] = get_units_type(header2['channel'])
    header3['standard']['horizontal_orientation'] = 0.0
    header3['standard']['vertical_orientation'] = np.nan
    header3['channel'] = get_channel_name(header['sampling_rate'], True, True,
                                          False)
    header3['standard']['units_type'] = get_units_type(header3['channel'])

    # three columns of NS, EW, UD
    # data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
    #                      delimiter=[COLWIDTH] * NCOLS, encoding=ENCODING)
    data = np.loadtxt(filename, skiprows=TEXT_HDR_ROWS, encoding=ENCODING)
    data1 = data[:, 0]
    data2 = data[:, 1]
    data3 = data[:, 2]
    trace1 = StationTrace(data=data1, header=header1)
    response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
    trace1.setProvenance('remove_response', response)
    trace2 = StationTrace(data=data2, header=header2)
    trace2.setProvenance('remove_response', response)
    trace3 = StationTrace(data=data3, header=header3)
    trace3.setProvenance('remove_response', response)
    stream = StationStream(traces=[trace1, trace2, trace3])
    return [stream]
Example #3
def read_nsmn(filename, config=None):
    """Read the Turkish NSMN strong motion data format.

    Args:
        filename (str):
            path to NSMN data file.
        config (dict):
            Dictionary containing configuration.

    Returns:
        list: Sequence of one StationStream object containing 3 StationTrace
        objects.
    """
    header = _read_header(filename)
    header1 = copy.deepcopy(header)
    header2 = copy.deepcopy(header)
    header3 = copy.deepcopy(header)
    header1["standard"]["horizontal_orientation"] = 0.0
    header1["standard"]["vertical_orientation"] = np.nan
    header1["channel"] = get_channel_name(header["sampling_rate"], True, False,
                                          True)
    header1["standard"]["units_type"] = get_units_type(header1["channel"])
    header2["standard"]["horizontal_orientation"] = 90.0
    header2["standard"]["vertical_orientation"] = np.nan
    header2["channel"] = get_channel_name(header["sampling_rate"], True, False,
                                          False)
    header2["standard"]["units_type"] = get_units_type(header2["channel"])
    header3["standard"]["horizontal_orientation"] = 0.0
    header3["standard"]["vertical_orientation"] = np.nan
    header3["channel"] = get_channel_name(header["sampling_rate"], True, True,
                                          False)
    header3["standard"]["units_type"] = get_units_type(header3["channel"])

    # three columns of NS, EW, UD
    # data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
    #                      delimiter=[COLWIDTH] * NCOLS, encoding=ENCODING)
    data = np.loadtxt(filename, skiprows=TEXT_HDR_ROWS, encoding=ENCODING)
    data1 = data[:, 0]
    data2 = data[:, 1]
    data3 = data[:, 2]
    trace1 = StationTrace(data=data1, header=header1)
    response = {"input_units": "counts", "output_units": "cm/s^2"}
    trace1.setProvenance("remove_response", response)
    trace2 = StationTrace(data=data2, header=header2)
    trace2.setProvenance("remove_response", response)
    trace3 = StationTrace(data=data3, header=header3)
    trace3.setProvenance("remove_response", response)
    stream = StationStream(traces=[trace1, trace2, trace3])
    return [stream]
Example #4
def test_channel():
    rate = 50
    tchannel1 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=False, is_north=True)
    assert tchannel1 == 'BN1'

    tchannel2 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=False, is_north=False)
    assert tchannel2 == 'BN2'

    tchannel3 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=True, is_north=False)
    assert tchannel3 == 'BNZ'

    rate = 100
    tchannel4 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=False, is_north=True)
    assert tchannel4 == 'HN1'

    tchannel5 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=False, is_north=False)
    assert tchannel5 == 'HN2'

    tchannel6 = get_channel_name(rate, is_acceleration=True,
                                 is_vertical=True, is_north=False)
    assert tchannel6 == 'HNZ'

    tchannel4 = get_channel_name(rate, is_acceleration=False,
                                 is_vertical=False, is_north=True)
    assert tchannel4 == 'HH1'
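The assertions above pin down the naming convention: a SEED-style band code from the sample rate ('B' below roughly 80 sps, 'H' at or above), an instrument code 'N' for accelerometers and 'H' otherwise, and an orientation code '1'/'2'/'Z'. A minimal sketch consistent with those assertions; the real get_channel_name in gmprocess may differ in detail:

def get_channel_name_sketch(sample_rate, is_acceleration=True,
                            is_vertical=False, is_north=True):
    # band code from the sample rate (assumed 80 sps threshold)
    band = 'H' if sample_rate >= 80 else 'B'
    # instrument code: strong motion vs. high gain
    instrument = 'N' if is_acceleration else 'H'
    # orientation code
    orientation = 'Z' if is_vertical else ('1' if is_north else '2')
    return band + instrument + orientation

assert get_channel_name_sketch(50) == 'BN1'
assert get_channel_name_sketch(100, is_acceleration=False) == 'HH1'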
Example #5
def _get_channel(angle, sampling_rate):
    if angle == 500 or angle == 600 or (angle >= 0 and angle <= 360):
        if angle == 500 or angle == 600:
            channel = get_channel_name(
                sampling_rate, is_acceleration=True, is_vertical=True, is_north=False
            )
        elif angle >= 315 or angle < 45 or (angle >= 135 and angle < 225):
            channel = get_channel_name(
                sampling_rate, is_acceleration=True, is_vertical=False, is_north=True
            )
        else:
            channel = get_channel_name(
                sampling_rate, is_acceleration=True, is_vertical=False, is_north=False
            )
    else:
        errstr = (
            "Not enough information to distinguish horizontal from "
            "vertical channels."
        )
        raise BaseException("DMG: " + errstr)
    return channel
Example #6
def _get_channel(angle, sampling_rate):
    if angle == 500 or angle == 600 or (angle >= 0 and angle <= 360):
        if angle == 500 or angle == 600:
            channel = get_channel_name(sampling_rate,
                                       is_acceleration=True,
                                       is_vertical=True,
                                       is_north=False)
        elif angle > 315 or angle < 45 or (angle > 135 and angle < 225):
            channel = get_channel_name(sampling_rate,
                                       is_acceleration=True,
                                       is_vertical=False,
                                       is_north=True)
        else:
            channel = get_channel_name(sampling_rate,
                                       is_acceleration=True,
                                       is_vertical=False,
                                       is_north=False)
    else:
        errstr = ('Not enough information to distinguish horizontal from '
                  'vertical channels.')
        raise GMProcessException('DMG: ' + errstr)
    return channel
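The horizontal branch in both variants encodes the same quadrant rule: azimuths within 45 degrees of due north or due south name the channel as "north", everything else as "east". A standalone restatement for clarity (a sketch only; note the two examples disagree on whether 315 itself counts as north):

def _is_north_quadrant(angle):
    # within 45 degrees of due north (0/360) or due south (180)
    return angle > 315 or angle < 45 or (135 < angle < 225)

assert _is_north_quadrant(10)         # near north
assert _is_north_quadrant(180)        # near south: same axis
assert not _is_north_quadrant(90)     # near east
assert not _is_north_quadrant(270)    # near west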
Example #7
def read_fdsn(filename):
    """Read Obspy data file (SAC, MiniSEED, etc).

    Args:
        filename (str):
            Path to data file.
        kwargs (ref):
            Other arguments will be ignored.
    Returns:
        Stream: StationStream object.
    """
    logging.debug("Starting read_fdsn.")
    if not is_fdsn(filename):
        raise Exception('%s is not a valid Obspy file format.' % filename)

    streams = []
    tstream = read(filename)
    xmlfile = _get_station_file(filename, tstream)
    inventory = read_inventory(xmlfile)
    traces = []
    for ttrace in tstream:
        trace = StationTrace(data=ttrace.data,
                             header=ttrace.stats,
                             inventory=inventory)
        location = ttrace.stats.location

        trace.stats.channel = get_channel_name(
            trace.stats.sampling_rate, trace.stats.channel[1] == 'N',
            inventory.get_orientation(trace.id)['dip'] in [90, -90]
            or trace.stats.channel[2] == 'Z',
            is_channel_north(inventory.get_orientation(trace.id)['azimuth']))

        if trace.stats.location == '':
            trace.stats.location = '--'

        network = ttrace.stats.network
        if network in LOCATION_CODES:
            codes = LOCATION_CODES[network]
            if location in codes:
                sdict = codes[location]
                if sdict['free_field']:
                    trace.stats.standard.structure_type = 'free_field'
                else:
                    trace.stats.standard.structure_type = sdict['description']
        head, tail = os.path.split(filename)
        trace.stats['standard']['source_file'] = tail or os.path.basename(head)
        traces.append(trace)
    stream = StationStream(traces=traces)
    streams.append(stream)

    return streams
Example #8
def test_channel():
    rate = 50
    tchannel1 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=False,
                                 is_north=True)
    assert tchannel1 == 'BN1'

    tchannel2 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=False,
                                 is_north=False)
    assert tchannel2 == 'BN2'

    tchannel3 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=True,
                                 is_north=False)
    assert tchannel3 == 'BNZ'

    rate = 100
    tchannel4 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=False,
                                 is_north=True)
    assert tchannel4 == 'HN1'

    tchannel5 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=False,
                                 is_north=False)
    assert tchannel5 == 'HN2'

    tchannel6 = get_channel_name(rate,
                                 is_acceleration=True,
                                 is_vertical=True,
                                 is_north=False)
    assert tchannel6 == 'HNZ'

    tchannel4 = get_channel_name(rate,
                                 is_acceleration=False,
                                 is_vertical=False,
                                 is_north=True)
    assert tchannel4 == 'HH1'
Example #9
def test_channel():
    rate = 50
    tchannel1 = get_channel_name(
        rate, is_acceleration=True, is_vertical=False, is_north=True
    )
    assert tchannel1 == "BN1"

    tchannel2 = get_channel_name(
        rate, is_acceleration=True, is_vertical=False, is_north=False
    )
    assert tchannel2 == "BN2"

    tchannel3 = get_channel_name(
        rate, is_acceleration=True, is_vertical=True, is_north=False
    )
    assert tchannel3 == "BNZ"

    rate = 100
    tchannel4 = get_channel_name(
        rate, is_acceleration=True, is_vertical=False, is_north=True
    )
    assert tchannel4 == "HN1"

    tchannel5 = get_channel_name(
        rate, is_acceleration=True, is_vertical=False, is_north=False
    )
    assert tchannel5 == "HN2"

    tchannel6 = get_channel_name(
        rate, is_acceleration=True, is_vertical=True, is_north=False
    )
    assert tchannel6 == "HNZ"

    tchannel4 = get_channel_name(
        rate, is_acceleration=False, is_vertical=False, is_north=True
    )
    assert tchannel4 == "HH1"
Example #10
def _get_header_info(filename, any_structure=False, accept_flagged=False, location=""):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network
     - station
     - channel
     - location (str): Set to the floor the sensor is located on. If not a
            multi-sensor array, default is '--'. Can be set manually by
            the user.
     - starttime
     - sampling_rate
     - npts
     - coordinates:
       - latitude
       - longitude
       - elevation
     - standard:
       - horizontal_orientation
       - instrument_period
       - instrument_damping
       - process_level
       - station_name
       - sensor_serial_number
       - instrument
       - comments
       - structure_type
       - corner_frequency
       - units
       - source
       - source_format
     - format_specific:
       - vertical_orientation
       - building_floor (0=basement, 1=floor above basement,
         -1=1st sub-basement, etc.)
       - bridge_number_spans
       - bridge_transducer_location (
             "free field",
             "at the base of a pier or abutment",
             "on an abutment",
             "on the deck at the top of a pier",
             "on the deck between piers or between an
             abutment and a pier.")
       - dam_transducer_location (
             "upstream or downstream free field",
             "at the base of the dam",
             "on the crest of the dam",
             "on the abutment of the dam")
       - construction_type (
             "Reinforced concrete gravity",
             "Reinforced concrete arch",
             "earth fill",
             "other")
       - filter_poles
       - data_source
    """
    stats = {}
    standard = {}
    format_specific = {}
    coordinates = {}
    # read the ascii header lines
    with open(filename) as f:
        ascheader = [next(f).strip() for x in range(ASCII_HEADER_LINES)]

    standard["process_level"] = PROCESS_LEVELS[VALID_HEADERS[ascheader[0]]]
    logging.debug(f"process_level: {standard['process_level']}")

    # station code is in the third line
    stats["station"] = ""
    if len(ascheader[2]) >= 4:
        stats["station"] = ascheader[2][0:4].strip()
        stats["station"] = stats["station"].strip("\x00")
    logging.debug(f"station: {stats['station']}")

    standard["process_time"] = ""
    standard["station_name"] = ascheader[5][10:40].strip()
    # sometimes the data source has nothing in it,
    # most of the time it seems to have USGS in it;
    # sometimes it's something like JPL/USGS, CDOT/USGS, etc.
    # if it's got USGS in it, let's just say network=US, otherwise "--"
    stats["network"] = "--"
    if ascheader[7].find("USGS") > -1:
        stats["network"] = "US"

    try:
        standard["source"] = ascheader[7].split("=")[2].strip()
    except IndexError:
        standard["source"] = "USGS"
    if standard["source"] == "":
        standard["source"] = "USGS"
    standard["source_format"] = "smc"

    # read integer header data

    intheader = np.genfromtxt(
        filename,
        dtype=np.int32,
        max_rows=INTEGER_HEADER_LINES,
        skip_header=ASCII_HEADER_LINES,
        delimiter=INT_HEADER_WIDTHS,
    )
    # 8 columns per line
    # first line is start time information, and then inst. serial number
    missing_data = intheader[0, 0]
    year = intheader[0, 1]

    # sometimes the year field has a 0 in it. When this happens, we
    # can try to get a timestamp from line 4 of the ascii header.
    if year == 0:
        parts = ascheader[3].split()
        try:
            year = int(parts[0])
        except ValueError as ve:
            fmt = (
                "Could not find year in SMC file %s. Not present "
                "in integer header and not parseable from line "
                '4 of ASCII header. Error: "%s"'
            )
            raise ValueError(fmt % (filename, str(ve)))

    jday = intheader[0, 2]
    hour = intheader[0, 3]
    minute = intheader[0, 4]
    if (
        year != missing_data
        and jday != missing_data
        and hour != missing_data
        and minute != missing_data
    ):

        # Handle second if missing
        second = 0
        if not intheader[0, 5] == missing_data:
            second = intheader[0, 5]

        # Handle microsecond if missing and convert milliseconds to
        # microseconds
        microsecond = 0
        if not intheader[0, 6] == missing_data:
            microsecond = intheader[0, 6] * 1e3
        datestr = "%i %03i %i %i %i %i" % (
            year,
            jday,
            hour,
            minute,
            second,
            microsecond,
        )

        stats["starttime"] = datetime.strptime(datestr, "%Y %j %H %M %S %f")
    else:
        logging.warning(
            "No start time provided. "
            "This must be set manually for network/station: "
            "%s/%s." % (stats["network"], stats["station"])
        )
        standard["comments"] = "Missing start time."

    standard["sensor_serial_number"] = ""
    if intheader[1, 3] != missing_data:
        standard["sensor_serial_number"] = str(intheader[1, 3])

    # we never get a two character location code so floor location is used
    if location == "":
        location = intheader.flatten()[24]
        if location != missing_data:
            location = str(location)
            if len(location) < 2:
                location = location.zfill(2)
            stats["location"] = location
        else:
            stats["location"] = "--"
    else:
        stats["location"] = location

    # second line is information about number of channels, orientations
    # we care about orientations
    format_specific["vertical_orientation"] = np.nan
    if intheader[1, 4] != missing_data:
        format_specific["vertical_orientation"] = int(intheader[1, 4])

    standard["horizontal_orientation"] = np.nan
    standard["vertical_orientation"] = np.nan
    if intheader[1, 5] != missing_data:
        standard["horizontal_orientation"] = float(intheader[1, 5])

    if intheader[1, 6] == missing_data or intheader[1, 6] not in INSTRUMENTS:
        standard["instrument"] = ""
    else:
        standard["instrument"] = INSTRUMENTS[intheader[1, 6]]

    num_comments = intheader[1, 7]

    # third line contains number of data points
    stats["npts"] = intheader[2, 0]
    problem_flag = intheader[2, 1]
    if problem_flag == 1:
        if not accept_flagged:
            fmt = "SMC: Record found in file %s has a problem flag!"
            raise BaseException(fmt % filename)
        else:
            logging.warning(
                "SMC: Data contains a problem flag for network/station: "
                "%s/%s. See comments." % (stats["network"], stats["station"])
            )
    stype = intheader[2, 2]
    if stype == missing_data:
        stype = np.nan
    elif stype not in STRUCTURES:
        # structure type is not defined and will be considered 'other'
        stype = 4
    fmt = "SMC: Record found in file %s is not a free-field sensor!"
    standard["structure_type"] = STRUCTURES[stype]
    if standard["structure_type"] == "building" and not any_structure:
        raise Exception(fmt % filename)

    format_specific["building_floor"] = np.nan
    if intheader[3, 0] != missing_data:
        format_specific["building_floor"] = intheader[3, 0]

    format_specific["bridge_number_spans"] = np.nan
    if intheader[3, 1] != missing_data:
        format_specific["bridge_number_spans"] = intheader[3, 1]

    format_specific["bridge_transducer_location"] = BRIDGE_LOCATIONS[0]
    if intheader[3, 2] != missing_data:
        bridge_number = intheader[3, 2]
        format_specific["bridge_transducer_location"] = BRIDGE_LOCATIONS[bridge_number]

    format_specific["dam_transducer_location"] = DAM_LOCATIONS[0]
    if intheader[3, 3] != missing_data:
        dam_number = intheader[3, 3]
        format_specific["dam_transducer_location"] = DAM_LOCATIONS[dam_number]

    c1 = format_specific["bridge_transducer_location"].find("free field") == -1
    c2 = format_specific["dam_transducer_location"].find("free field") == -1
    if (c1 or c2) and not any_structure:
        raise Exception(fmt % filename)

    format_specific["construction_type"] = CONSTRUCTION_TYPES[4]
    if intheader[3, 4] != missing_data:
        format_specific["construction_type"] = CONSTRUCTION_TYPES[intheader[3, 4]]

    # station is repeated here if all numeric
    if not len(stats["station"]):
        stats["station"] = "%i" % intheader[3, 5]

    # read float header data
    skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES
    floatheader = np.genfromtxt(
        filename,
        max_rows=FLOAT_HEADER_LINES,
        skip_header=skip,
        delimiter=FLOAT_HEADER_WIDTHS,
    )

    # float headers are 10 lines of 5 floats each
    missing_data = floatheader[0, 0]
    stats["sampling_rate"] = floatheader[0, 1]
    if stats["sampling_rate"] >= MAX_ALLOWED_SAMPLE_RATE:
        fmt = "Sampling rate of %.2g samples/second is nonsensical."
        raise Exception(fmt % stats["sampling_rate"])
    coordinates["latitude"] = floatheader[2, 0]
    # the documentation for SMC says that sometimes longitudes are
    # positive in the western hemisphere. Since it is very unlikely
    # any of these files exist for the eastern hemisphere, check for
    # positive longitudes and fix them.
    lon = floatheader[2, 1]
    if lon > 0:
        lon = -1 * lon
    coordinates["longitude"] = lon
    coordinates["elevation"] = 0.0
    if floatheader[2, 2] != missing_data:
        coordinates["elevation"] = floatheader[2, 2]
    else:
        logging.warning("Setting elevation to 0.0")

    # figure out the channel code
    if format_specific["vertical_orientation"] in [0, 180]:
        stats["channel"] = get_channel_name(
            stats["sampling_rate"],
            is_acceleration=True,
            is_vertical=True,
            is_north=False,
        )
    else:
        ho = standard["horizontal_orientation"]
        quad1 = ho > 315 and ho <= 360
        quad2 = ho > 0 and ho <= 45
        quad3 = ho > 135 and ho <= 225
        if quad1 or quad2 or quad3:
            stats["channel"] = get_channel_name(
                stats["sampling_rate"],
                is_acceleration=True,
                is_vertical=False,
                is_north=True,
            )
        else:
            stats["channel"] = get_channel_name(
                stats["sampling_rate"],
                is_acceleration=True,
                is_vertical=False,
                is_north=False,
            )

    logging.debug(f"channel: {stats['channel']}")
    sensor_frequency = floatheader[4, 1]
    standard["instrument_period"] = 1 / sensor_frequency
    standard["instrument_damping"] = floatheader[4, 2]

    standard["corner_frequency"] = floatheader[3, 4]
    format_specific["filter_poles"] = floatheader[4, 0]
    standard["units"] = "acc"
    standard["units_type"] = get_units_type(stats["channel"])

    # this field can be used for instrument correction
    # when data is in counts
    standard["instrument_sensitivity"] = np.nan

    # read in the comment lines
    with open(filename) as f:
        skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES + FLOAT_HEADER_LINES
        _ = [next(f) for x in range(skip)]
        standard["comments"] = [
            next(f).strip().lstrip("|") for x in range(num_comments)
        ]

    standard["comments"] = " ".join(standard["comments"])
    stats["coordinates"] = coordinates
    stats["standard"] = standard
    stats["format_specific"] = format_specific

    head, tail = os.path.split(filename)
    stats["standard"]["source_file"] = tail or os.path.basename(head)

    return (stats, num_comments)
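The start-time logic above assembles a "%Y %j %H %M %S %f" string from year, julian day, hour, minute, second, and sub-second fields; a quick standalone check of that pattern with made-up values:

from datetime import datetime

datestr = "%i %03i %i %i %i %i" % (2004, 275, 13, 2, 17, 0)
assert datetime.strptime(datestr, "%Y %j %H %M %S %f") == \
    datetime(2004, 10, 1, 13, 2, 17)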
Example #11
def _read_header(lines, filename, table):
    header = {}
    standard = {}
    coords = {}
    format_specific = {}

    # fill out the standard dictionary
    standard["source"] = SOURCE
    standard["source_format"] = SOURCE_FORMAT
    standard["instrument"] = ""
    standard["sensor_serial_number"] = ""
    standard["process_level"] = PROCESS_LEVELS["V1"]
    standard["process_time"] = lines[0].split(":")[1].strip()
    # station name line can look like this:
    # VIÑA DEL MAR CENTRO S/N 675
    sparts = lines[5].split()
    station_name = " ".join(sparts[0:sparts.index("S/N")])
    standard["station_name"] = station_name

    # this table gives us station coordinates and structure type
    station_row = table[table["Name"] == station_name]
    if not len(station_row):
        logging.warning("Unknown structure type.")
        standard["structure_type"] = ""
    else:
        row = station_row.iloc[0]
        standard["structure_type"] = row["Structure Type"]
    standard["corner_frequency"] = np.nan
    standard["units"] = "cm/s^2"
    standard["units_type"] = "acc"

    inst_dict = {}
    for part in lines[9].split(","):
        key, value = part.split("=")
        fvalue_str = re.search(FLOATRE, value.strip()).group()
        inst_dict[key.strip()] = float(fvalue_str)

    standard["instrument_period"] = inst_dict["INSTR PERIOD"]
    standard["instrument_damping"] = inst_dict["DAMPING"]
    standard["horizontal_orientation"] = np.nan
    standard["vertical_orientation"] = np.nan
    standard["comments"] = " ".join(lines[11:13]).replace("\n", "")
    head, tail = os.path.split(filename)
    standard["source_file"] = tail or os.path.basename(head)

    # this field can be used for instrument correction
    # when data is in counts
    standard["instrument_sensitivity"] = inst_dict["SENSITIVITY"]
    standard["volts_to_counts"] = np.nan

    # fill out the stats stuff
    try:
        stimestr = re.search(TIME_RE, lines[3]).group()
    except AttributeError:
        try:
            stimestr = re.search(TIME_RE2, lines[3]).group()
        except AttributeError:
            logging.warning("Setting time to epoch.")
            stimestr = "01/01/1970 00:00:00.000"

    # 2/27/2010 2:45:46.000 GMT
    stime = datetime.strptime(stimestr, TIMEFMT)

    # it appears that sometimes the trigger time is set to Jan 1, 1980
    # by default.
    if stime.year == 1980 and stime.month == 1 and stime.day == 1:
        fmt = "Trigger time set to %s in file %s"
        logging.warning(fmt % (str(stime), standard["source_file"]))

    header["starttime"] = stime
    npts, duration = re.findall(FLOATRE, lines[10])
    npts = int(npts)
    duration = float(duration)
    header["npts"] = npts
    header["delta"] = duration / (npts - 1)
    header["sampling_rate"] = (npts - 1) / duration
    header["duration"] = duration
    raw_channel = lines[6][9:11].strip()
    if raw_channel in NORTH_CHANNELS:
        channel = get_channel_name(header["sampling_rate"], True, False, True)
    elif raw_channel in WEST_CHANNELS:
        channel = get_channel_name(header["sampling_rate"], True, False, False)
    elif raw_channel in VERTICAL_CHANNELS:
        channel = get_channel_name(header["sampling_rate"], True, True, False)
    else:
        raise KeyError(f"Channel name {raw_channel} not defined")

    header["channel"] = channel
    header["station"] = lines[5].split()[-1]
    header["location"] = "--"
    header["network"] = NETWORK

    # these files seem to have all zeros for station coordinates!
    if not len(station_row):
        logging.warning(f"Could not find station match for {station_name}")
        coordparts = lines[4].split()
        lat = float(re.search(FLOATRE, coordparts[2]).group())
        lon = float(re.search(FLOATRE, coordparts[3]).group())
        if lon == 0 or lat == 0:
            logging.warning("Latitude or Longitude values are 0")
        if "S" in coordparts[2]:
            lat = -1 * lat
        if "W" in coordparts[3]:
            lon = -1 * lon
    else:
        row = station_row.iloc[0]
        lat = row["Lat"]
        lon = row["Lon"]

    altitude = 0.0
    logging.warning("Setting elevation to 0.0")
    coords = {"latitude": lat, "longitude": lon, "elevation": altitude}

    header["coordinates"] = coords
    header["standard"] = standard
    header["format_specific"] = format_specific

    return header
Example #12
def _read_header(hdr_data, station, name, component, data_format,
                 instrument, resolution):
    """Construct stats dictionary from header lines.

    Args:
        hdr_data (ndarray): (10,10) numpy array containing header data.
        station (str): Station code obtained from previous text portion of
            header.
        name (str): Station name obtained from previous text portion of
            header.
        component (str): Component direction (N18E, S72W, etc.)
        data_format (str): Data format indicator ('V1' or 'V2').
        instrument (str): Instrument description.
        resolution (int): Sensor bit resolution.
    Returns:
        Dictionary containing fields:
            - network "NZ"
            - station
            - channel H1,H2,or Z.
            - location
            - sampling_rate Samples per second.
            - delta Interval between samples (seconds)
            - calib Calibration factor (always 1.0)
            - npts Number of samples in record.
            - starttime Datetime object containing start of record.
            - standard:
              - station_name
              - units "acc"
              - source 'New Zealand Institute of Geological and Nuclear
                Science'
              - horizontal_orientation
              - instrument_period
              - instrument_damping
              - processing_time
              - process_level
              - sensor_serial_number
              - instrument
              - comments
              - structure_type
              - corner_frequency
              - source_format
            - coordinates:
              - lat Latitude of station.
              - lon Longitude of station.
              - elevation Elevation of station.
            - format_specific:
              - sensor_bit_resolution

    """
    hdr = {}
    standard = {}
    coordinates = {}
    format_specific = {}
    hdr['station'] = station
    standard['station_name'] = name
    if data_format == 'V1':
        hdr['sampling_rate'] = hdr_data[4, 0]
        sampling_rate = hdr['sampling_rate']
        hdr['delta'] = 1 / hdr['sampling_rate']
    else:
        hdr['delta'] = hdr_data[6, 5]
        hdr['sampling_rate'] = 1 / hdr['delta']
        # V2 files have been resampled, we need sensor rate for
        # channel naming.
        sampling_rate = 1 / hdr_data[6, 4]
    hdr['calib'] = 1.0
    if data_format == 'V1':
        hdr['npts'] = int(hdr_data[3, 0])
    else:
        hdr['npts'] = int(hdr_data[3, 3])
    hdr['network'] = 'NZ'
    standard['units'] = 'acc'
    standard['source'] = ('New Zealand Institute of Geological and '
                          'Nuclear Science')
    logging.debug('component: %s' % component)
    if component.lower() in ['up', 'down']:
        standard['horizontal_orientation'] = np.nan
        hdr['channel'] = get_channel_name(
            sampling_rate,
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        angle = _get_channel(component)
        logging.debug('angle: %s' % angle)
        standard['horizontal_orientation'] = float(angle)
        if (angle > 315 or angle < 45) or (angle > 135 and angle < 225):
            hdr['channel'] = get_channel_name(
                sampling_rate,
                is_acceleration=True,
                is_vertical=False,
                is_north=True)
        else:
            hdr['channel'] = get_channel_name(
                sampling_rate,
                is_acceleration=True,
                is_vertical=False,
                is_north=False)

    logging.debug('channel: %s' % hdr['channel'])
    hdr['location'] = '--'

    # figure out the start time
    milliseconds = hdr_data[3, 9]
    seconds = int(milliseconds / 1000)
    microseconds = int(np.round((milliseconds / 1000.0 - seconds) * 1e6))
    year = int(hdr_data[0, 8])
    month = int(hdr_data[0, 9])
    day = int(hdr_data[1, 8])
    hour = int(hdr_data[1, 9])
    minute = int(hdr_data[3, 8])
    hdr['starttime'] = datetime(
        year, month, day, hour, minute, seconds, microseconds)

    # figure out station coordinates
    latdg = hdr_data[2, 0]
    latmn = hdr_data[2, 1]
    latsc = hdr_data[2, 2]
    coordinates['latitude'] = _dms_to_dd(latdg, latmn, latsc) * -1
    londg = hdr_data[2, 3]
    lonmn = hdr_data[2, 4]
    lonsc = hdr_data[2, 5]
    coordinates['longitude'] = _dms_to_dd(londg, lonmn, lonsc)
    coordinates['elevation'] = 0.0

    # get other standard metadata
    standard['instrument_period'] = 1 / hdr_data[4, 0]
    standard['instrument_damping'] = hdr_data[4, 1]
    standard['process_time'] = ''
    standard['process_level'] = PROCESS_LEVELS[data_format]
    logging.debug("process_level: %s" % data_format)
    standard['sensor_serial_number'] = ''
    standard['instrument'] = instrument
    standard['comments'] = ''
    standard['structure_type'] = ''
    standard['corner_frequency'] = np.nan
    standard['source_format'] = 'geonet'

    # get format specific metadata
    format_specific['sensor_bit_resolution'] = resolution

    hdr['coordinates'] = coordinates
    hdr['standard'] = standard
    hdr['format_specific'] = format_specific

    return hdr
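The coordinate block above depends on a _dms_to_dd helper that is not shown; a minimal sketch of the degrees/minutes/seconds conversion it implies (the real helper may handle signs differently):

def _dms_to_dd_sketch(degrees, minutes, seconds):
    # decimal degrees from degrees/minutes/seconds
    return degrees + minutes / 60.0 + seconds / 3600.0

# 41 deg 17' 6" converts to exactly 41.285 decimal degrees;
# the caller above negates it for southern-hemisphere latitudes
assert abs(_dms_to_dd_sketch(41, 17, 6) - 41.285) < 1e-9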
Example #13
def _read_header_lines(filename, offset):
    """Read the header lines for each channel.

    Args:
        filename (str):
            Input BHRC file name.
        offset (int):
            Number of lines to skip from the beginning of the file.

    Returns:
        tuple: (header dictionary containing Stats dictionary with
        extra sub-dicts, updated offset rows)
    """
    with open(filename, "rt", encoding="utf-8") as f:
        for _ in range(offset):
            next(f)
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    offset += TEXT_HDR_ROWS

    header = {}
    standard = {}
    coords = {}
    format_specific = {}

    # get the sensor azimuth with respect to the earthquake
    # this data has been rotated so that the longitudinal channel (L)
    # is oriented at the sensor azimuth, and the transverse (T) is
    # 90 degrees off from that.
    station_info = lines[7][lines[7].index("Station"):]
    float_strings = re.findall(FLOATRE, station_info)
    (lat_str, lon_str, alt_str, lstr, tstr) = float_strings[0:5]
    component = lines[4].strip()
    if component == "V":
        angle = np.nan
    elif component == "L":
        angle = float(lstr)
    else:
        angle = float(tstr)
    coords = {
        "latitude": float(lat_str),
        "longitude": float(lon_str),
        "elevation": float(alt_str),
    }

    # fill out the standard dictionary
    standard["source"] = SOURCE
    standard["source_format"] = SOURCE_FORMAT
    standard["instrument"] = lines[1].split("=")[1].strip()
    standard["sensor_serial_number"] = ""
    volstr = lines[0].split()[1].strip()
    if volstr not in LEVELS:
        raise KeyError(f"Volume {volstr} files are not supported.")
    standard["process_level"] = PROCESS_LEVELS[LEVELS[volstr]]
    standard["process_time"] = ""
    station_name = lines[7][0:lines[7].index("Station")].strip()
    standard["station_name"] = station_name
    standard["structure_type"] = ""
    standard["corner_frequency"] = np.nan
    standard["units"] = "acc"
    period_str, damping_str = re.findall(FLOATRE, lines[9])
    standard["instrument_period"] = float(period_str)
    if standard["instrument_period"] == 0:
        standard["instrument_period"] = np.nan
    standard["instrument_damping"] = float(damping_str)
    standard["horizontal_orientation"] = angle
    standard["vertical_orientation"] = np.nan
    standard["comments"] = ""
    head, tail = os.path.split(filename)
    standard["source_file"] = tail or os.path.basename(head)

    # this field can be used for instrument correction
    # when data is in counts
    standard["instrument_sensitivity"] = np.nan

    # fill out the stats stuff
    # we don't know the start of the trace
    header["starttime"] = UTCDateTime(1970, 1, 1)
    npts_str, dur_str = re.findall(FLOATRE, lines[10])
    header["npts"] = int(npts_str)
    header["duration"] = float(dur_str)
    header["delta"] = header["duration"] / (header["npts"] - 1)
    header["sampling_rate"] = 1 / header["delta"]
    if np.isnan(angle):
        header["channel"] = get_channel_name(
            header["sampling_rate"],
            is_acceleration=True,
            is_vertical=True,
            is_north=False,
        )
    elif (angle > 315 or angle < 45) or (angle > 135 and angle < 225):
        header["channel"] = get_channel_name(
            header["sampling_rate"],
            is_acceleration=True,
            is_vertical=False,
            is_north=True,
        )
    else:
        header["channel"] = get_channel_name(
            header["sampling_rate"],
            is_acceleration=True,
            is_vertical=False,
            is_north=False,
        )

    standard["units_type"] = get_units_type(header["channel"])

    part1 = lines[0].split(":")[1]
    stationcode = part1.split("/")[0].strip()
    header["station"] = stationcode
    header["location"] = "--"
    header["network"] = NETWORK

    header["coordinates"] = coords
    header["standard"] = standard
    header["format_specific"] = format_specific

    offset += INT_HDR_ROWS
    offset += FLOAT_HDR_ROWS

    return (header, offset)
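The header arithmetic above treats the reported duration as the span from the first to the last sample, so delta = duration / (npts - 1) rather than duration / npts; a one-line check with round numbers:

npts, duration = 101, 1.0        # 101 samples span 100 intervals
delta = duration / (npts - 1)    # 0.01 s, i.e. 100 samples per second
assert abs(1 / delta - 100.0) < 1e-9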
Example #14
def _read_header(hdr_data, station, name, component, data_format,
                 instrument, resolution):
    """Construct stats dictionary from header lines.

    Args:
        hdr_data (ndarray):
            (10,10) numpy array containing header data.
        station (str):
            Station code obtained from previous text portion of header.
        name (str):
            Station name obtained from previous text portion of header.
        component (str):
            Component direction (N18E, S72W, etc.)
        data_format (str):
            Data format indicator ('V1' or 'V2').
        instrument (str):
            Instrument description.
        resolution (int):
            Sensor bit resolution.

    Returns:
        Dictionary containing fields:
            - network "NZ"
            - station
            - channel H1,H2,or Z.
            - location
            - sampling_rate Samples per second.
            - delta Interval between samples (seconds)
            - calib Calibration factor (always 1.0)
            - npts Number of samples in record.
            - starttime Datetime object containing start of record.
            - standard:
              - station_name
              - units "acc"
              - source 'New Zealand Institute of Geological and Nuclear
                Science'
              - horizontal_orientation
              - instrument_period
              - instrument_damping
              - processing_time
              - process_level
              - sensor_serial_number
              - instrument
              - comments
              - structure_type
              - corner_frequency
              - source_format
            - coordinates:
              - lat Latitude of station.
              - lon Longitude of station.
              - elevation Elevation of station.
            - format_specific:
              - sensor_bit_resolution

    """
    hdr = {}
    standard = {}
    coordinates = {}
    format_specific = {}
    hdr['station'] = station
    standard['station_name'] = name

    # Note: Original sample interval (s): hdr_data[6, 4]

    # Sample interval (s)
    hdr['delta'] = hdr_data[6, 5]
    hdr['sampling_rate'] = 1 / hdr['delta']

    hdr['calib'] = 1.0
    if data_format == 'V1':
        hdr['npts'] = int(hdr_data[3, 0])
    else:
        hdr['npts'] = int(hdr_data[3, 3])
    hdr['network'] = 'NZ'
    standard['units'] = 'acc'
    standard['source'] = ('New Zealand Institute of Geological and '
                          'Nuclear Science')
    logging.debug('component: %s' % component)
    standard['vertical_orientation'] = np.nan
    if component.lower() in ['up', 'down']:
        standard['horizontal_orientation'] = np.nan
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        angle = _get_channel(component)
        logging.debug('angle: %s' % angle)
        standard['horizontal_orientation'] = float(angle)
        if (angle > 315 or angle < 45) or (angle > 135 and angle < 225):
            hdr['channel'] = get_channel_name(
                hdr['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=True)
        else:
            hdr['channel'] = get_channel_name(
                hdr['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=False)

    logging.debug('channel: %s' % hdr['channel'])
    hdr['location'] = '--'

    # figure out the start time
    milliseconds = hdr_data[3, 9]
    seconds = int(milliseconds / 1000)
    microseconds = int(np.round((milliseconds / 1000.0 - seconds) * 1e6))
    year = int(hdr_data[0, 8])
    month = int(hdr_data[0, 9])
    day = int(hdr_data[1, 8])
    hour = int(hdr_data[1, 9])
    minute = int(hdr_data[3, 8])
    hdr['starttime'] = datetime(
        year, month, day, hour, minute, seconds, microseconds)

    # figure out station coordinates
    latdg = hdr_data[2, 0]
    latmn = hdr_data[2, 1]
    latsc = hdr_data[2, 2]
    coordinates['latitude'] = _dms_to_dd(latdg, latmn, latsc) * -1
    londg = hdr_data[2, 3]
    lonmn = hdr_data[2, 4]
    lonsc = hdr_data[2, 5]
    coordinates['longitude'] = _dms_to_dd(londg, lonmn, lonsc)
    logging.warning('Setting elevation to 0.0')
    coordinates['elevation'] = 0.0

    # get other standard metadata
    standard['units_type'] = get_units_type(hdr['channel'])
    standard['instrument_period'] = 1 / hdr_data[4, 0]
    standard['instrument_damping'] = hdr_data[4, 1]
    standard['process_time'] = ''
    standard['process_level'] = PROCESS_LEVELS[data_format]
    logging.debug("process_level: %s" % data_format)
    standard['sensor_serial_number'] = ''
    standard['instrument'] = instrument
    standard['comments'] = ''
    standard['structure_type'] = ''
    standard['corner_frequency'] = np.nan
    standard['source_format'] = 'geonet'

    # this field can be used for instrument correction
    # when data is in counts
    standard['instrument_sensitivity'] = np.nan

    # get format specific metadata
    format_specific['sensor_bit_resolution'] = resolution

    hdr['coordinates'] = coordinates
    hdr['standard'] = standard
    hdr['format_specific'] = format_specific

    return hdr
Example #15
def _read_header_lines(filename, offset):
    """Read the header lines for each channel.

    Args:
        filename (str): 
            Input BHRC file name.
        offset (int): 
            Number of lines to skip from the beginning of the file.

    Returns:
        tuple: (header dictionary containing Stats dictionary with extra sub-dicts, 
                updated offset rows)
    """
    with open(filename, 'rt') as f:
        for _ in range(offset):
            next(f)
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    offset += TEXT_HDR_ROWS

    header = {}
    standard = {}
    coords = {}
    format_specific = {}

    # get the sensor azimuth with respect to the earthquake
    # this data has been rotated so that the longitudinal channel (L)
    # is oriented at the sensor azimuth, and the transverse (T) is
    # 90 degrees off from that.
    station_info = lines[7][lines[7].index('Station'):]
    (lat_str, lon_str, alt_str, lstr, tstr) = re.findall(FLOATRE, station_info)
    component = lines[4].strip()
    if component == 'V':
        angle = np.nan
    elif component == 'L':
        angle = float(lstr)
    else:
        angle = float(tstr)
    coords = {
        'latitude': float(lat_str),
        'longitude': float(lon_str),
        'elevation': float(alt_str)
    }

    # fill out the standard dictionary
    standard['source'] = SOURCE
    standard['source_format'] = SOURCE_FORMAT
    standard['instrument'] = lines[1].split('=')[1].strip()
    standard['sensor_serial_number'] = ''
    volstr = lines[0].split()[1].strip()
    if volstr not in LEVELS:
        raise KeyError('Volume %s files are not supported.' % volstr)
    standard['process_level'] = PROCESS_LEVELS[LEVELS[volstr]]
    standard['process_time'] = ''
    station_name = lines[7][0:lines[7].index('Station')].strip()
    standard['station_name'] = station_name
    standard['structure_type'] = ''
    standard['corner_frequency'] = np.nan
    standard['units'] = 'acc'
    period_str, damping_str = re.findall(FLOATRE, lines[9])
    standard['instrument_period'] = float(period_str)
    standard['instrument_damping'] = float(damping_str)
    standard['horizontal_orientation'] = angle
    standard['comments'] = ''

    # fill out the stats stuff
    # we don't know the start of the trace
    header['starttime'] = UTCDateTime(1970, 1, 1)
    npts_str, dur_str = re.findall(FLOATRE, lines[10])
    header['npts'] = int(npts_str)
    header['duration'] = float(dur_str)
    header['delta'] = header['duration'] / (header['npts'] - 1)
    header['sampling_rate'] = 1 / header['delta']
    if np.isnan(angle):
        header['channel'] = get_channel_name(header['sampling_rate'],
                                             is_acceleration=True,
                                             is_vertical=True,
                                             is_north=False)
    elif (angle > 315 or angle < 45) or (angle > 135 and angle < 225):
        header['channel'] = get_channel_name(header['sampling_rate'],
                                             is_acceleration=True,
                                             is_vertical=False,
                                             is_north=True)
    else:
        header['channel'] = get_channel_name(header['sampling_rate'],
                                             is_acceleration=True,
                                             is_vertical=False,
                                             is_north=False)

    part1 = lines[0].split(':')[1]
    stationcode = part1.split('/')[0].strip()
    header['station'] = stationcode
    header['location'] = '--'
    header['network'] = NETWORK

    header['coordinates'] = coords
    header['standard'] = standard
    header['format_specific'] = format_specific

    offset += INT_HDR_ROWS
    offset += FLOAT_HDR_ROWS

    return (header, offset)
Example #16
def _get_header_info(int_data, flt_data, lines, cmt_data, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): Default is '--'. Determined using COSMOS_NETWORKS
     - station (str)
     - channel (str): Determined using COSMOS_ORIENTATIONS
     - location (str): Set to location index of sensor site at station.
            If not a multi-site array, default is '--'.
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - delta (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always cosmos
    - format_specific
      - physical_units (str): See PHYSICAL_UNITS
      - v30 (float): Site geology V30 (km/s)
      - least_significant_bit: Recorder LSB in micro-volts (uv/count)
      - low_filter_type (str): Filter used for low frequency
            V2 filtering (see FILTERS)
      - low_filter_corner (float): Filter corner for low frequency
            V2 filtering (Hz)
      - low_filter_decay (float): Filter decay for low frequency
            V2 filtering (dB/octave)
      - high_filter_type (str): Filter used for high frequency
            V2 filtering (see FILTERS)
      - high_filter_corner (float): Filter corner for high frequency
            V2 filtering (Hz)
      - high_filter_decay (float): Filter decay for high frequency
            V2 filtering (dB/octave)
      - maximum (float): Maximum value
      - maximum_time (float): Time at which maximum occurs
      - station_code (int): Code for structure_type
      - record_flag (str): Either 'No problem', 'Fixed', 'Unfixed problem'.
            Should be described in more depth in comments.
      - scaling_factor (float): Scaling used for converting acceleration
            from g/10 to cm/s/s
      - sensor_sensitivity (float): Sensitivity in volts/g

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        cmt_data (ndarray): Array of comments (str)

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    # Get unknown parameter number
    try:
        unknown = int(lines[12][64:71])
    except ValueError:
        unknown = -999
    # required metadata
    network_num = int(int_data[10])
    # Get network from cosmos table or fdsn code sheet
    if network_num in COSMOS_NETWORKS:
        network = COSMOS_NETWORKS[network_num][0]
        source = COSMOS_NETWORKS[network_num][1]
        if network == '':
            network = COSMOS_NETWORKS[network_num][2]
    else:
        network_code = lines[4][25:27].upper()
        if network_code in CODES:
            network = network_code
            idx = np.argwhere(CODES == network_code)[0][0]
            source = SOURCES1[idx].decode(
                'utf-8') + ', ' + SOURCES2[idx].decode('utf-8')
        else:
            network = 'ZZ'
            source = ''
    hdr['network'] = network
    logging.debug('network: %s' % network)
    hdr['station'] = lines[4][28:34].strip()
    logging.debug('station: %s' % hdr['station'])
    horizontal_angle = int(int_data[53])
    logging.debug('horizontal_angle: %s' % horizontal_angle)
    if horizontal_angle not in VALID_AZIMUTH_INTS:
        logging.warning("Horizontal_angle in COSMOS header is not valid.")
    horizontal_angle = float(horizontal_angle)

    # Store delta and duration. Use them to calculate npts and sampling_rate

    # NOTE: flt_data[33] is the delta of the V0 format, and if we are reading
    # a V1 or V2 format then it may have been resampled. We should consider
    # adding flt_data[33] delta to the provenance record at some point.

    delta = float(flt_data[61]) * MSEC_TO_SEC
    if delta != unknown:
        hdr['delta'] = delta
        hdr['sampling_rate'] = 1 / delta

    # Determine the angle based upon the cosmos table
    # Set horizontal angles other than N,S,E,W to H1 and H2
    # Missing angle results in the channel number
    if horizontal_angle != unknown:
        if horizontal_angle in COSMOS_ORIENTATIONS:
            channel = COSMOS_ORIENTATIONS[horizontal_angle][1].upper()
            if channel == 'UP' or channel == 'DOWN' or channel == 'VERT':
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=True,
                                           is_north=False)
        elif horizontal_angle >= 0 and horizontal_angle <= 360:
            if (horizontal_angle > 315 or horizontal_angle < 45
                    or (horizontal_angle > 135 and horizontal_angle < 225)):
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=True)
            else:
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=False)
        horizontal_orientation = horizontal_angle
    else:
        errstr = ('Not enough information to distinguish horizontal from '
                  'vertical channels.')
        raise GMProcessException('COSMOS: ' + errstr)
    hdr['channel'] = channel
    logging.debug('channel: %s' % hdr['channel'])
    if location == '':
        location = int(int_data[55])
        location = str(_check_assign(location, unknown, '--'))
        if len(location) < 2:
            location = location.zfill(2)
        hdr['location'] = location
    else:
        hdr['location'] = location
    year = int(int_data[39])
    month = int(int_data[41])
    day = int(int_data[42])
    hour = int(int_data[43])
    minute = int(int_data[44])
    second = float(flt_data[29])
    # If anything more than seconds is missing,
    # it is considered inadequate time information
    if second == unknown:
        try:
            hdr['starttime'] = datetime(year, month, day, hour, minute)
        except Exception:
            raise GMProcessException(
                'COSMOS: Inadequate start time information.')
    else:
        microsecond = int((second - int(second)) * 1e6)
        try:
            hdr['starttime'] = datetime(year, month, day, hour, minute,
                                        int(second), microsecond)
        except Exception:
            raise GMProcessException(
                'COSMOS: Inadequate start time information.')

    if flt_data[62] != unknown:
        # COSMOS **defines** "length" as npts*dt (note this is a bit unusual)
        cosmos_length = flt_data[62]
        npts = int(cosmos_length / delta)
        hdr['duration'] = (npts - 1) * delta
        hdr['npts'] = npts
    else:
        raise ValueError('COSMOS file does not specify length.')

    # coordinate information
    coordinates['latitude'] = float(flt_data[0])
    coordinates['longitude'] = float(flt_data[1])
    coordinates['elevation'] = float(flt_data[2])
    for key in coordinates:
        if coordinates[key] == unknown:
            warnings.warn('Missing %r. Setting to np.nan.' % key, Warning)
            coordinates[key] = np.nan
    hdr['coordinates'] = coordinates

    # standard metadata
    standard['source'] = source
    standard['horizontal_orientation'] = horizontal_orientation
    station_name = lines[4][40:-1].strip()
    standard['station_name'] = station_name
    instrument_frequency = float(flt_data[39])
    standard['instrument_period'] = 1.0 / _check_assign(
        instrument_frequency, unknown, np.nan)
    instrument_damping = float(flt_data[40])
    standard['instrument_damping'] = _check_assign(instrument_damping, unknown,
                                                   np.nan)
    process_line = lines[10][10:40]
    if process_line.find('-') >= 0 or process_line.find('/') >= 0:
        if process_line.find('-') >= 0:
            delimiter = '-'
        elif process_line.find('/') >= 0:
            delimiter = '/'
        try:
            date = process_line.split(delimiter)
            month = int(date[0][-2:])
            day = int(date[1])
            year = int(date[2][:4])
            time = process_line.split(':')
            hour = int(time[0][-2:])
            minute = int(time[1])
            second = float(time[2][:2])
            microsecond = int((second - int(second)) * 1e6)
            etime = datetime(year, month, day, hour, minute, int(second),
                             microsecond)
            standard['process_time'] = etime.strftime(TIMEFMT)
        except Exception:
            standard['process_time'] = ''
    else:
        standard['process_time'] = ''
    process_level = int(int_data[0])
    if process_level == 0:
        standard['process_level'] = PROCESS_LEVELS['V0']
    elif process_level == 1:
        standard['process_level'] = PROCESS_LEVELS['V1']
    elif process_level == 2:
        standard['process_level'] = PROCESS_LEVELS['V2']
    elif process_level == 3:
        standard['process_level'] = PROCESS_LEVELS['V3']
    else:
        standard['process_level'] = PROCESS_LEVELS['V1']
    logging.debug("process_level: %s" % process_level)
    serial = int(int_data[52])
    if serial != unknown:
        standard['sensor_serial_number'] = str(
            _check_assign(serial, unknown, ''))
    else:
        standard['sensor_serial_number'] = ''
    instrument = int(int_data[51])
    if instrument != unknown and instrument in SENSOR_TYPES:
        standard['instrument'] = SENSOR_TYPES[instrument]
    else:
        standard['instrument'] = lines[6][57:-1].strip()
    structure_type = int(int_data[18])
    if structure_type != unknown and structure_type in BUILDING_TYPES:
        standard['structure_type'] = BUILDING_TYPES[structure_type]
    else:
        standard['structure_type'] = ''
    frequency = float(flt_data[25])
    standard['corner_frequency'] = _check_assign(frequency, unknown, np.nan)
    physical_parameter = int(int_data[2])
    units = int(int_data[1])
    if units != unknown and units in UNITS:
        standard['units'] = UNITS[units]
    else:
        if physical_parameter in [2, 4, 7, 10, 11, 12, 23]:
            standard['units'] = 'acc'
        elif physical_parameter in [5, 8, 24]:
            standard['units'] = 'vel'
        elif physical_parameter in [6, 9, 25]:
            standard['units'] = 'disp'
    standard['source_format'] = 'cosmos'
    standard['comments'] = ', '.join(cmt_data)

    # format specific metadata
    if physical_parameter in PHYSICAL_UNITS:
        physical_parameter = PHYSICAL_UNITS[physical_parameter][0]
    format_specific['physical_units'] = physical_parameter
    v30 = float(flt_data[3])
    format_specific['v30'] = _check_assign(v30, unknown, np.nan)
    least_significant_bit = float(flt_data[21])
    format_specific['least_significant_bit'] = _check_assign(
        least_significant_bit, unknown, np.nan)
    low_filter_type = int(int_data[60])
    if low_filter_type in FILTERS:
        format_specific['low_filter_type'] = FILTERS[low_filter_type]
    else:
        format_specific['low_filter_type'] = ''
    low_filter_corner = float(flt_data[53])
    format_specific['low_filter_corner'] = _check_assign(
        low_filter_corner, unknown, np.nan)
    low_filter_decay = float(flt_data[54])
    format_specific['low_filter_decay'] = _check_assign(
        low_filter_decay, unknown, np.nan)
    high_filter_type = int(int_data[61])
    if high_filter_type in FILTERS:
        format_specific['high_filter_type'] = FILTERS[high_filter_type]
    else:
        format_specific['high_filter_type'] = ''
    high_filter_corner = float(flt_data[56])
    format_specific['high_filter_corner'] = _check_assign(
        high_filter_corner, unknown, np.nan)
    high_filter_decay = float(flt_data[57])
    format_specific['high_filter_decay'] = _check_assign(
        high_filter_decay, unknown, np.nan)
    maximum = float(flt_data[63])
    format_specific['maximum'] = _check_assign(maximum, unknown, np.nan)
    maximum_time = float(flt_data[64])
    format_specific['maximum_time'] = _check_assign(maximum_time, unknown,
                                                    np.nan)
    format_specific['station_code'] = _check_assign(structure_type, unknown,
                                                    np.nan)
    record_flag = int(int_data[75])
    if record_flag == 0:
        format_specific['record_flag'] = 'No problem'
    elif record_flag == 1:
        format_specific['record_flag'] = 'Fixed'
    elif record_flag == 2:
        format_specific['record_flag'] = 'Unfixed problem'
    else:
        format_specific['record_flag'] = ''
    scaling_factor = float(flt_data[87])
    format_specific['scaling_factor'] = _check_assign(scaling_factor, unknown,
                                                      np.nan)
    sensor_sensitivity = float(flt_data[41])
    format_specific['sensor_sensitivity'] = _check_assign(
        sensor_sensitivity, unknown, np.nan)
    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr
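A minimal usage sketch (the int_data/flt_data arrays, header lines, and data_column below are hypothetical): the header dictionary returned above feeds an obspy Stats object and then a StationTrace, the same pattern the other readers in this collection follow.

hdr = _get_header_info(int_data, flt_data, lines, cmt_data)
trace = StationTrace(data=data_column, header=Stats(hdr))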
Example #17
def _get_header_info(int_data, flt_data, lines, cmt_data, location=""):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): Default is '--'. Determined using COSMOS_NETWORKS
     - station (str)
     - channel (str): Determined using COSMOS_ORIENTATIONS
     - location (str): Set to location index of sensor site at station.
            If not a multi-site array, default is '--'.
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - delta (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always cosmos
    - format_specific
      - physical_units (str): See PHYSICAL_UNITS
      - v30 (float): Site geology V30 (km/s)
      - least_significant_bit: Recorder LSB in micro-volts (uv/count)
      - low_filter_type (str): Filter used for low frequency
            V2 filtering (see FILTERS)
      - low_filter_corner (float): Filter corner for low frequency
            V2 filtering (Hz)
      - low_filter_decay (float): Filter decay for low frequency
            V2 filtering (dB/octave)
      - high_filter_type (str): Filter used for high frequency
            V2 filtering (see FILTERS)
      - high_filter_corner (float): Filter corner for high frequency
            V2 filtering (Hz)
      - high_filter_decay (float): Filter decay for high frequency
            V2 filtering (dB/octave)
      - maximum (float): Maximum value
      - maximum_time (float): Time at which maximum occurs
      - station_code (int): Code for structure_type
      - record_flag (str): Either 'No problem', 'Fixed', 'Unfixed problem'.
            Should be described in more depth in comments.
      - scaling_factor (float): Scaling used for converting acceleration
            from g/10 to cm/s/s
      - sensor_sensitivity (float): Sensitivity in volts/g

    Args:
        int_data (ndarray):
            Array of integer data.
        flt_data (ndarray):
            Array of float data.
        lines (list):
            List of text headers (str).
        cmt_data (ndarray):
            Array of comments (str).

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    # Get unknown parameter number
    try:
        unknown = int(lines[12][64:71])
    except ValueError:
        unknown = -999
    # required metadata
    network_num = int(int_data[10])
    # Get network from cosmos table or fdsn code sheet
    if network_num in COSMOS_NETWORKS:
        network = COSMOS_NETWORKS[network_num][0]
        source = COSMOS_NETWORKS[network_num][1]
        if network == "":
            network = COSMOS_NETWORKS[network_num][2]
    else:
        network_code = lines[4][25:27].upper()
        if network_code in CODES:
            network = network_code
            idx = np.argwhere(CODES == network_code)[0][0]
            source = (
                SOURCES1[idx].decode("utf-8") + ", " + SOURCES2[idx].decode("utf-8")
            )
        else:
            network = "--"
            source = ""
    hdr["network"] = network
    logging.debug(f"network: {network}")
    hdr["station"] = lines[4][28:34].strip()
    logging.debug(f"station: {hdr['station']}")

    # the channel orientation can be either relative to true north (idx 53)
    # or relative to sensor orientation (idx 54).
    horizontal_angle = int(int_data[53])
    logging.debug(f"horizontal_angle: {horizontal_angle}")
    if horizontal_angle not in VALID_AZIMUTH_INTS:
        angles = np.array(int_data[19:21]).astype(np.float32)
        angles[angles == unknown] = np.nan
        if np.isnan(angles).all():
            logging.warning("Horizontal_angle in COSMOS header is not valid.")
        else:
            ref = angles[~np.isnan(angles)][0]
            horizontal_angle = int(int_data[54])
            if horizontal_angle not in VALID_AZIMUTH_INTS:
                logging.warning("Horizontal_angle in COSMOS header is not valid.")
            else:
                horizontal_angle += ref
                if horizontal_angle > 360:
                    horizontal_angle -= 360

    horizontal_angle = float(horizontal_angle)

    # Store delta and duration. Use them to calculate npts and sampling_rate

    # NOTE: flt_data[33] is the delta of the V0 format, and if we are reading
    # a V1 or V2 format then it may have been resampled. We should consider
    # adding flt_data[33] delta to the provenance record at some point.

    delta = float(flt_data[61]) * MSEC_TO_SEC
    if delta != unknown:
        hdr["delta"] = delta
        hdr["sampling_rate"] = 1 / delta

    # Determine the angle based upon the cosmos table
    # Set horizontal angles other than N,S,E,W to H1 and H2
    # Missing angle results in the channel number
    if horizontal_angle != unknown:
        if horizontal_angle in COSMOS_ORIENTATIONS:
            channel = COSMOS_ORIENTATIONS[horizontal_angle][1].upper()
            if channel == "UP" or channel == "DOWN" or channel == "VERT":
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=True,
                    is_north=False,
                )
                horizontal_angle = 360.0
            elif channel == "RADL" or channel == "LONG" or channel == "H1":
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=True,
                )
                horizontal_angle = 0.0
            elif channel == "TRAN" or channel == "TANG" or channel == "H2":
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=False,
                )
                horizontal_angle = 90.0
            else:  # For the occasional 'OTHR' channel
                raise ValueError("Channel name is not valid.")

        elif horizontal_angle >= 0 and horizontal_angle <= 360:
            if (
                horizontal_angle > 315
                or horizontal_angle < 45
                or (horizontal_angle > 135 and horizontal_angle < 225)
            ):
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=True,
                )
            else:
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=False,
                )
        horizontal_orientation = horizontal_angle
    else:
        errstr = (
            "Not enough information to distinguish horizontal from "
            "vertical channels."
        )
        raise BaseException("COSMOS: " + errstr)
    hdr["channel"] = channel
    logging.debug(f"channel: {hdr['channel']}")
    if location == "":
        location = int(int_data[55])
        location = str(_check_assign(location, unknown, "--"))
        if len(location) < 2:
            location = location.zfill(2)
        hdr["location"] = location
    else:
        hdr["location"] = location
    year = int(int_data[39])
    month = int(int_data[41])
    day = int(int_data[42])
    hour = int(int_data[43])
    minute = int(int_data[44])
    second = float(flt_data[29])
    # Seconds may be missing; if anything coarser than seconds is missing,
    # the start time information is considered inadequate
    if second == unknown:
        try:
            hdr["starttime"] = datetime(year, month, day, hour, minute)
        except Exception:
            raise ValueError("COSMOS: Inadequate start time information.")
    else:
        microsecond = int((second - int(second)) * 1e6)
        try:
            hdr["starttime"] = datetime(
                year, month, day, hour, minute, int(second), microsecond
            )
        except Exception:
            raise ValueError("COSMOS: Inadequate start time information.")

    if flt_data[62] != unknown:
        # COSMOS **defines** "length" as npts*dt (note this is a bit unusual)
        cosmos_length = flt_data[62]
        npts = int(cosmos_length / delta)
        hdr["duration"] = (npts - 1) * delta
        hdr["npts"] = npts
    else:
        raise ValueError("COSMOS file does not specify length.")

    # coordinate information
    coordinates["latitude"] = float(flt_data[0])
    coordinates["longitude"] = float(flt_data[1])
    coordinates["elevation"] = float(flt_data[2])
    for key in coordinates:
        if coordinates[key] == unknown:
            if key != "elevation":
                logging.warning(f"Missing {key!r}. Setting to np.nan.", Warning)
                coordinates[key] = np.nan
            else:
                logging.warning(f"Missing {key!r}. Setting to 0.0.", Warning)
                coordinates[key] = 0.0

    hdr["coordinates"] = coordinates

    # standard metadata
    standard["units_type"] = get_units_type(channel)
    standard["source"] = source
    standard["horizontal_orientation"] = horizontal_orientation
    standard["vertical_orientation"] = np.nan
    station_name = lines[4][40:-1].strip()
    standard["station_name"] = station_name
    instrument_frequency = float(flt_data[39])
    if instrument_frequency == 0:
        standard["instrument_period"] = np.nan
        logging.warning("Instrument Frequency == 0")
    else:
        inst_freq = _check_assign(instrument_frequency, unknown, np.nan)
        standard["instrument_period"] = 1.0 / inst_freq
    instrument_damping = float(flt_data[40])
    standard["instrument_damping"] = _check_assign(instrument_damping, unknown, np.nan)
    process_line = lines[10][10:40]
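    # e.g. an (invented) process line '03/21/2005 10:12:30.5' parses below to
    # month=3, day=21, year=2005, hour=10, minute=12, second=30 (the fractional
    # second is dropped by the [:2] slice, so microsecond ends up 0)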
    if process_line.find("-") >= 0 or process_line.find("/") >= 0:
        if process_line.find("-") >= 0:
            delimeter = "-"
        elif process_line.find("/") >= 0:
            delimeter = "/"
        try:
            date = process_line.split(delimeter)
            month = int(date[0][-2:])
            day = int(date[1])
            year = int(date[2][:4])
            time = process_line.split(":")
            hour = int(time[0][-2:])
            minute = int(time[1])
            second = float(time[2][:2])
            microsecond = int((second - int(second)) * 1e6)
            etime = datetime(year, month, day, hour, minute, int(second), microsecond)
            standard["process_time"] = etime.strftime(TIMEFMT)
        except Exception:
            standard["process_time"] = ""
    else:
        standard["process_time"] = ""
    process_level = int(int_data[0])
    if process_level == 0:
        standard["process_level"] = PROCESS_LEVELS["V0"]
    elif process_level == 1:
        standard["process_level"] = PROCESS_LEVELS["V1"]
    elif process_level == 2:
        standard["process_level"] = PROCESS_LEVELS["V2"]
    elif process_level == 3:
        standard["process_level"] = PROCESS_LEVELS["V3"]
    else:
        standard["process_level"] = PROCESS_LEVELS["V1"]
    logging.debug(f"process_level: {process_level}")
    serial = int(int_data[52])
    if serial != unknown:
        standard["sensor_serial_number"] = str(_check_assign(serial, unknown, ""))
    else:
        standard["sensor_serial_number"] = ""
    instrument = int(int_data[51])
    if instrument != unknown and instrument in SENSOR_TYPES:
        standard["instrument"] = SENSOR_TYPES[instrument]
    else:
        standard["instrument"] = lines[6][57:-1].strip()
    structure_type = int(int_data[18])
    if structure_type != unknown and structure_type in BUILDING_TYPES:
        standard["structure_type"] = BUILDING_TYPES[structure_type]
    else:
        standard["structure_type"] = ""
    frequency = float(flt_data[25])
    standard["corner_frequency"] = _check_assign(frequency, unknown, np.nan)
    physical_parameter = int(int_data[2])
    units = int(int_data[1])
    if units != unknown and units in UNITS:
        standard["units_type"] = UNITS[units]
    else:
        if physical_parameter in [2, 4, 7, 10, 11, 12, 23]:
            standard["units_type"] = "acc"
        elif physical_parameter in [5, 8, 24]:
            standard["units_type"] = "vel"
        elif physical_parameter in [6, 9, 25]:
            standard["units_type"] = "disp"
    standard["source_format"] = "cosmos"
    standard["comments"] = ", ".join(cmt_data)

    # format specific metadata
    if physical_parameter in PHYSICAL_UNITS:
        physical_parameter = PHYSICAL_UNITS[physical_parameter][0]
    format_specific["physical_units"] = physical_parameter
    v30 = float(flt_data[3])
    format_specific["v30"] = _check_assign(v30, unknown, np.nan)
    least_significant_bit = float(flt_data[21])
    format_specific["least_significant_bit"] = _check_assign(
        least_significant_bit, unknown, np.nan
    )
    gain = float(flt_data[46])
    format_specific["gain"] = _check_assign(gain, unknown, np.nan)
    low_filter_type = int(int_data[60])
    if low_filter_type in FILTERS:
        format_specific["low_filter_type"] = FILTERS[low_filter_type]
    else:
        format_specific["low_filter_type"] = ""
    low_filter_corner = float(flt_data[53])
    format_specific["low_filter_corner"] = _check_assign(
        low_filter_corner, unknown, np.nan
    )
    low_filter_decay = float(flt_data[54])
    format_specific["low_filter_decay"] = _check_assign(
        low_filter_decay, unknown, np.nan
    )
    high_filter_type = int(int_data[61])
    if high_filter_type in FILTERS:
        format_specific["high_filter_type"] = FILTERS[high_filter_type]
    else:
        format_specific["high_filter_type"] = ""
    high_filter_corner = float(flt_data[56])
    format_specific["high_filter_corner"] = _check_assign(
        high_filter_corner, unknown, np.nan
    )
    high_filter_decay = float(flt_data[57])
    format_specific["high_filter_decay"] = _check_assign(
        high_filter_decay, unknown, np.nan
    )
    maximum = float(flt_data[63])
    format_specific["maximum"] = _check_assign(maximum, unknown, np.nan)
    maximum_time = float(flt_data[64])
    format_specific["maximum_time"] = _check_assign(maximum_time, unknown, np.nan)
    format_specific["station_code"] = _check_assign(structure_type, unknown, np.nan)
    record_flag = int(int_data[75])
    if record_flag == 0:
        format_specific["record_flag"] = "No problem"
    elif record_flag == 1:
        format_specific["record_flag"] = "Fixed"
    elif record_flag == 2:
        format_specific["record_flag"] = "Unfixed problem"
    else:
        format_specific["record_flag"] = ""

    scaling_factor = float(flt_data[87])
    format_specific["scaling_factor"] = _check_assign(scaling_factor, unknown, np.nan)
    sensor_sensitivity = float(flt_data[41])
    format_specific["sensor_sensitivity"] = _check_assign(
        sensor_sensitivity, unknown, np.nan
    )

    # for V0 files, set a standard field called instrument_sensitivity
    ctov = least_significant_bit / MICRO_TO_VOLT
    vtog = 1 / format_specific["sensor_sensitivity"]
    if not np.isnan(format_specific["gain"]):
        gain = format_specific["gain"]
    else:
        gain = 1.0
    if gain == 0:
        fmt = "%s.%s.%s.%s"
        tpl = (hdr["network"], hdr["station"], hdr["channel"], hdr["location"])
        nscl = fmt % tpl
        raise ValueError(f"Gain of 0 discovered for NSCL: {nscl}")
    denom = ctov * vtog * (1.0 / gain) * sp.g
    standard["instrument_sensitivity"] = 1 / denom
    standard["volts_to_counts"] = ctov

    # Set dictionary
    hdr["standard"] = standard
    hdr["coordinates"] = coordinates
    hdr["format_specific"] = format_specific
    return hdr
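The counts-to-physical-units chain at the end of the function above can be sanity-checked with a small worked example (all numbers invented; MICRO_TO_VOLT is assumed to be 1e6 and sp.g is scipy's standard gravity):

least_significant_bit = 2.0     # microvolts per count (invented)
sensor_sensitivity = 2.5        # volts per g (invented)
gain = 1.0
ctov = least_significant_bit / 1e6            # 2e-06 volts per count
vtog = 1 / sensor_sensitivity                 # 0.4 g per volt
denom = ctov * vtog * (1.0 / gain) * 9.80665  # (m/s^2) per count
instrument_sensitivity = 1 / denom            # ~127464 counts per (m/s^2)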
Example #18
def _read_header(hdr_data, station, name, component, data_format, instrument,
                 resolution):
    """Construct stats dictionary from header lines.

    Args:
        hdr_data (ndarray):
            (10,10) numpy array containing header data.
        station (str):
            Station code obtained from previous text portion of header.
        name (str):
            Station name obtained from previous text portion of header.
        component (str):
            Component direction (N18E, S72W, etc.)
        data_format (str):
            Data format version ('V1' or 'V2').
        instrument (str):
            Instrument description string.
        resolution (int):
            Sensor bit resolution.

    Returns:
        Dictionary containing fields:
            - network "NZ"
            - station
            - channel H1, H2, or Z.
            - location
            - sampling_rate Samples per second.
            - delta Interval between samples (seconds)
            - calib Calibration factor (always 1.0)
            - npts Number of samples in record.
            - starttime Datetime object containing start of record.
            - standard:
              - station_name
              - units "acc"
              - source 'New Zealand Institute of Geological and Nuclear
                Science'
              - horizontal_orientation
              - instrument_period
              - instrument_damping
              - processing_time
              - process_level
              - sensor_serial_number
              - instrument
              - comments
              - structure_type
              - corner_frequency
              - source_format
            - coordinates:
              - lat Latitude of station.
              - lon Longitude of station.
              - elevation Elevation of station.
            - format_specific:
              - sensor_bit_resolution

    """
    hdr = {}
    standard = {}
    coordinates = {}
    format_specific = {}
    hdr["station"] = station
    standard["station_name"] = name

    # Note: Original sample interval (s): hdr_data[6, 4]

    # Sample interval (s)
    hdr["delta"] = hdr_data[6, 5]
    hdr["sampling_rate"] = 1 / hdr["delta"]

    hdr["calib"] = 1.0
    if data_format == "V1":
        hdr["npts"] = int(hdr_data[3, 0])
    else:
        hdr["npts"] = int(hdr_data[3, 3])
    hdr["network"] = "NZ"
    standard["units_type"] = "acc"
    standard["units"] = "cm/s/s"
    standard["source"] = "New Zealand Institute of Geological and Nuclear Science"
    logging.debug(f"component: {component}")
    standard["vertical_orientation"] = np.nan
    if component.lower() in ["up", "down"]:
        standard["horizontal_orientation"] = np.nan
        hdr["channel"] = get_channel_name(hdr["delta"],
                                          is_acceleration=True,
                                          is_vertical=True,
                                          is_north=False)
    else:
        angle = _get_channel(component)
        logging.debug(f"angle: {angle}")
        standard["horizontal_orientation"] = float(angle)
        if (angle > 315 or angle < 45) or (angle > 135 and angle < 225):
            hdr["channel"] = get_channel_name(hdr["delta"],
                                              is_acceleration=True,
                                              is_vertical=False,
                                              is_north=True)
        else:
            hdr["channel"] = get_channel_name(hdr["delta"],
                                              is_acceleration=True,
                                              is_vertical=False,
                                              is_north=False)

    logging.debug(f"channel: {hdr['channel']}")
    hdr["location"] = "--"

    # figure out the start time
    milliseconds = hdr_data[3, 9]
    seconds = int(milliseconds / 1000)
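    # convert the leftover milliseconds to microseconds (1 ms = 1000 us)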
    microseconds = int(np.round((milliseconds - seconds * 1000) * 1000))
    year = int(hdr_data[0, 8])
    month = int(hdr_data[0, 9])
    day = int(hdr_data[1, 8])
    hour = int(hdr_data[1, 9])
    minute = int(hdr_data[3, 8])
    hdr["starttime"] = datetime(year, month, day, hour, minute, seconds,
                                microseconds)

    # figure out station coordinates
    latdg = hdr_data[2, 0]
    latmn = hdr_data[2, 1]
    latsc = hdr_data[2, 2]
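    # GeoNet stations are in the southern hemisphere, hence the negation below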
    coordinates["latitude"] = _dms_to_dd(latdg, latmn, latsc) * -1
    londg = hdr_data[2, 3]
    lonmn = hdr_data[2, 4]
    lonsc = hdr_data[2, 5]
    coordinates["longitude"] = _dms_to_dd(londg, lonmn, lonsc)
    logging.warning("Setting elevation to 0.0")
    coordinates["elevation"] = 0.0

    # get other standard metadata
    standard["units_type"] = get_units_type(hdr["channel"])
    standard["instrument_period"] = 1 / hdr_data[4, 0]
    standard["instrument_damping"] = hdr_data[4, 1]
    standard["process_time"] = ""
    standard["process_level"] = PROCESS_LEVELS[data_format]
    logging.debug(f"process_level: {data_format}")
    standard["sensor_serial_number"] = ""
    standard["instrument"] = instrument
    standard["comments"] = ""
    standard["structure_type"] = ""
    standard["corner_frequency"] = np.nan
    standard["source_format"] = "geonet"

    # these fields can be used for instrument correction
    # when data is in counts
    standard["instrument_sensitivity"] = np.nan
    standard["volts_to_counts"] = np.nan

    # get format specific metadata
    format_specific["sensor_bit_resolution"] = resolution

    hdr["coordinates"] = coordinates
    hdr["standard"] = standard
    hdr["format_specific"] = format_specific

    return hdr
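The _dms_to_dd helper is not shown in these examples; a conventional degrees-minutes-seconds conversion would look like this (a sketch, not necessarily the original implementation):

def _dms_to_dd(degrees, minutes, seconds):
    # conventional DMS -> decimal degrees
    return degrees + minutes / 60.0 + seconds / 3600.0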
Example #19
def _get_header_info(int_data, flt_data, lines, cmt_data, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): Default is '--'. Determined using COSMOS_NETWORKS
     - station (str)
     - channel (str): Determined using COSMOS_ORIENTATIONS
     - location (str): Set to location index of sensor site at station.
            If not a multi-site array, default is '--'.
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - delta (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always cosmos
    - format_specific
      - physical_units (str): See PHYSICAL_UNITS
      - v30 (float): Site geology V30 (km/s)
      - least_significant_bit: Recorder LSB in micro-volts (uv/count)
      - low_filter_type (str): Filter used for low frequency
            V2 filtering (see FILTERS)
      - low_filter_corner (float): Filter corner for low frequency
            V2 filtering (Hz)
      - low_filter_decay (float): Filter decay for low frequency
            V2 filtering (dB/octave)
      - high_filter_type (str): Filter used for high frequency
            V2 filtering (see FILTERS)
      - high_filter_corner (float): Filter corner for high frequency
            V2 filtering (Hz)
      - high_filter_decay (float): Filter decay for high frequency
            V2 filtering (dB/octave)
      - maximum (float): Maximum value
      - maximum_time (float): Time at which maximum occurs
      - station_code (int): Code for structure_type
      - record_flag (str): Either 'No problem', 'Fixed', 'Unfixed problem'.
            Should be described in more depth in comments.
      - scaling_factor (float): Scaling used for converting acceleration
            from g/10 to cm/s/s
      - sensor_sensitivity (float): Sensitivity in volts/g

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        cmt_data (ndarray): Array of comments (str)

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    # Get unknown parameter number
    try:
        unknown = int(lines[12][64:71])
    except ValueError:
        unknown = -999
    # required metadata
    network_num = int(int_data[10])
    # Get network from cosmos table or fdsn code sheet
    if network_num in COSMOS_NETWORKS:
        network = COSMOS_NETWORKS[network_num][0]
        source = COSMOS_NETWORKS[network_num][1]
        if network == '':
            network = COSMOS_NETWORKS[network_num][2]
    else:
        network_code = lines[4][25:27].upper()
        if network_code in CODES:
            network = network_code
            idx = np.argwhere(CODES == network_code)[0][0]
            source = SOURCES1[idx].decode(
                'utf-8') + ', ' + SOURCES2[idx].decode('utf-8')
        else:
            network = 'ZZ'
            source = ''
    hdr['network'] = network
    logging.debug('network: %s' % network)
    hdr['station'] = lines[4][28:34].strip()
    logging.debug('station: %s' % hdr['station'])
    horizontal_angle = int(int_data[53])
    logging.debug('horizontal_angle: %s' % horizontal_angle)
    if horizontal_angle not in VALID_AZIMUTH_INTS:
        logging.warning("Horizontal_angle in COSMOS header is not valid.")
    horizontal_angle = float(horizontal_angle)

    # Store delta and duration. Use them to calculate npts and sampling_rate

    # NOTE: flt_data[33] is the delta of the V0 format, and if we are reading
    # a V1 or V2 format then it may have been resampled. We should consider
    # adding flt_data[33] delta to the provenance record at some point.

    delta = float(flt_data[61]) * MSEC_TO_SEC
    if delta != unknown:
        hdr['delta'] = delta
        hdr['sampling_rate'] = 1 / delta

    # Determine the angle based upon the cosmos table
    # Set horizontal angles other than N,S,E,W to H1 and H2
    # Missing angle results in the channel number
    if horizontal_angle != unknown:
        if horizontal_angle in COSMOS_ORIENTATIONS:
            channel = COSMOS_ORIENTATIONS[horizontal_angle][1].upper()
            if channel == 'UP' or channel == 'DOWN' or channel == 'VERT':
                channel = get_channel_name(
                    hdr['sampling_rate'],
                    is_acceleration=True,
                    is_vertical=True,
                    is_north=False)
        elif horizontal_angle >= 0 and horizontal_angle <= 360:
            if (
                horizontal_angle > 315
                or horizontal_angle < 45
                or (horizontal_angle > 135 and horizontal_angle < 225)
            ):
                channel = get_channel_name(
                    hdr['sampling_rate'],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=True)
            else:
                channel = get_channel_name(
                    hdr['sampling_rate'],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=False)
        horizontal_orientation = horizontal_angle
    else:
        errstr = ('Not enough information to distinguish horizontal from '
                  'vertical channels.')
        raise GMProcessException('COSMOS: ' + errstr)
    hdr['channel'] = channel
    logging.debug('channel: %s' % hdr['channel'])
    if location == '':
        location = int(int_data[55])
        location = str(_check_assign(location, unknown, '--'))
        if len(location) < 2:
            location = location.zfill(2)
        hdr['location'] = location
    else:
        hdr['location'] = location
    year = int(int_data[39])
    month = int(int_data[41])
    day = int(int_data[42])
    hour = int(int_data[43])
    minute = int(int_data[44])
    second = float(flt_data[29])
    # Seconds may be missing; if anything coarser than seconds is missing,
    # the start time information is considered inadequate
    if second == unknown:
        try:
            hdr['starttime'] = datetime(
                year, month, day, hour, minute)
        except Exception:
            raise GMProcessException(
                'COSMOS: Inadequate start time information.')
    else:
        microsecond = int((second - int(second)) * 1e6)
        try:
            hdr['starttime'] = datetime(
                year, month, day, hour, minute, int(second), microsecond)
        except Exception:
            raise GMProcessException(
                'COSMOS: Inadequate start time information.')

    if flt_data[62] != unknown:
        # COSMOS **defines** "length" as npts*dt (note this is a bit unusual)
        cosmos_length = flt_data[62]
        npts = int(cosmos_length / delta)
        hdr['duration'] = (npts - 1) * delta
        hdr['npts'] = npts
    else:
        raise ValueError('COSMOS file does not specify length.')

    # coordinate information
    coordinates['latitude'] = float(flt_data[0])
    coordinates['longitude'] = float(flt_data[1])
    coordinates['elevation'] = float(flt_data[2])
    for key in coordinates:
        if coordinates[key] == unknown:
            warnings.warn('Missing %r. Setting to np.nan.' % key, Warning)
            coordinates[key] = np.nan
    hdr['coordinates'] = coordinates

    # standard metadata
    standard['source'] = source
    standard['horizontal_orientation'] = horizontal_orientation
    station_name = lines[4][40:-1].strip()
    standard['station_name'] = station_name
    instrument_frequency = float(flt_data[39])
    standard['instrument_period'] = 1.0 / _check_assign(instrument_frequency,
                                                        unknown, np.nan)
    instrument_damping = float(flt_data[40])
    standard['instrument_damping'] = _check_assign(instrument_damping,
                                                   unknown, np.nan)
    process_line = lines[10][10:40]
    if process_line.find('-') >= 0 or process_line.find('/') >= 0:
        if process_line.find('-') >= 0:
            delimiter = '-'
        elif process_line.find('/') >= 0:
            delimiter = '/'
        try:
            date = process_line.split(delimiter)
            month = int(date[0][-2:])
            day = int(date[1])
            year = int(date[2][:4])
            time = process_line.split(':')
            hour = int(time[0][-2:])
            minute = int(time[1])
            second = float(time[2][:2])
            microsecond = int((second - int(second)) * 1e6)
            etime = datetime(year, month, day, hour, minute,
                             int(second), microsecond)
            standard['process_time'] = etime.strftime(TIMEFMT)
        except Exception:
            standard['process_time'] = ''
    else:
        standard['process_time'] = ''
    process_level = int(int_data[0])
    if process_level == 0:
        standard['process_level'] = PROCESS_LEVELS['V0']
    elif process_level == 1:
        standard['process_level'] = PROCESS_LEVELS['V1']
    elif process_level == 2:
        standard['process_level'] = PROCESS_LEVELS['V2']
    elif process_level == 3:
        standard['process_level'] = PROCESS_LEVELS['V3']
    else:
        standard['process_level'] = PROCESS_LEVELS['V1']
    logging.debug("process_level: %s" % process_level)
    serial = int(int_data[52])
    if serial != unknown:
        standard['sensor_serial_number'] = str(_check_assign(
            serial, unknown, ''))
    else:
        standard['sensor_serial_number'] = ''
    instrument = int(int_data[51])
    if instrument != unknown and instrument in SENSOR_TYPES:
        standard['instrument'] = SENSOR_TYPES[instrument]
    else:
        standard['instrument'] = lines[6][57:-1].strip()
    structure_type = int(int_data[18])
    if structure_type != unknown and structure_type in BUILDING_TYPES:
        standard['structure_type'] = BUILDING_TYPES[structure_type]
    else:
        standard['structure_type'] = ''
    frequency = float(flt_data[25])
    standard['corner_frequency'] = _check_assign(frequency, unknown, np.nan)
    physical_parameter = int(int_data[2])
    units = int(int_data[1])
    if units != unknown and units in UNITS:
        standard['units'] = UNITS[units]
    else:
        if physical_parameter in [2, 4, 7, 10, 11, 12, 23]:
            standard['units'] = 'acc'
        elif physical_parameter in [5, 8, 24]:
            standard['units'] = 'vel'
        elif physical_parameter in [6, 9, 25]:
            standard['units'] = 'disp'
    standard['source_format'] = 'cosmos'
    standard['comments'] = ', '.join(cmt_data)

    # format specific metadata
    if physical_parameter in PHYSICAL_UNITS:
        physical_parameter = PHYSICAL_UNITS[physical_parameter][0]
    format_specific['physical_units'] = physical_parameter
    v30 = float(flt_data[3])
    format_specific['v30'] = _check_assign(v30, unknown, np.nan)
    least_significant_bit = float(flt_data[21])
    format_specific['least_significant_bit'] = _check_assign(
        least_significant_bit, unknown, np.nan)
    low_filter_type = int(int_data[60])
    if low_filter_type in FILTERS:
        format_specific['low_filter_type'] = FILTERS[low_filter_type]
    else:
        format_specific['low_filter_type'] = ''
    low_filter_corner = float(flt_data[53])
    format_specific['low_filter_corner'] = _check_assign(
        low_filter_corner, unknown, np.nan)
    low_filter_decay = float(flt_data[54])
    format_specific['low_filter_decay'] = _check_assign(
        low_filter_decay, unknown, np.nan)
    high_filter_type = int(int_data[61])
    if high_filter_type in FILTERS:
        format_specific['high_filter_type'] = FILTERS[high_filter_type]
    else:
        format_specific['high_filter_type'] = ''
    high_filter_corner = float(flt_data[56])
    format_specific['high_filter_corner'] = _check_assign(
        high_filter_corner, unknown, np.nan)
    high_filter_decay = float(flt_data[57])
    format_specific['high_filter_decay'] = _check_assign(
        high_filter_decay, unknown, np.nan)
    maximum = float(flt_data[63])
    format_specific['maximum'] = _check_assign(maximum, unknown, np.nan)
    maximum_time = float(flt_data[64])
    format_specific['maximum_time'] = _check_assign(
        maximum_time, unknown, np.nan)
    format_specific['station_code'] = _check_assign(
        structure_type, unknown, np.nan)
    record_flag = int(int_data[75])
    if record_flag == 0:
        format_specific['record_flag'] = 'No problem'
    elif record_flag == 1:
        format_specific['record_flag'] = 'Fixed'
    elif record_flag == 2:
        format_specific['record_flag'] = 'Unfixed problem'
    else:
        format_specific['record_flag'] = ''
    scaling_factor = float(flt_data[87])
    format_specific['scaling_factor'] = _check_assign(
        scaling_factor, unknown, np.nan)
    sensor_sensitivity = float(flt_data[41])
    format_specific['sensor_sensitivity'] = _check_assign(
        sensor_sensitivity, unknown, np.nan)
    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr
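The _check_assign helper used throughout the COSMOS readers is also not shown; judging from its call sites it substitutes a default when a value equals the file's "unknown" sentinel (a sketch under that assumption):

def _check_assign(value, unknown, default):
    # sketch: replace the sentinel with the caller-supplied default
    return default if value == unknown else value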
Example #20
def read_knet(filename, **kwargs):
    """Read Japanese KNET strong motion file.

    Args:
        filename (str): Path to possible KNET data file.
        kwargs (ref): Other arguments will be ignored.
    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
            (cm/s**2).
    """
    logging.debug("Starting read_knet.")
    if not is_knet(filename):
        raise Exception('%s is not a valid KNET file' % filename)

    # Parse the header portion of the file
    with open(filename, 'rt') as f:
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    hdr = {}
    coordinates = {}
    standard = {}
    hdr['network'] = 'BO'
    hdr['station'] = lines[5].split()[2]
    logging.debug('station: %s' % hdr['station'])
    standard['station_name'] = ''

    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two character field.  Most data providers,
    # including KNET here, don't provide this.  We'll flag it as "--".
    hdr['location'] = '--'

    coordinates['latitude'] = float(lines[6].split()[2])
    coordinates['longitude'] = float(lines[7].split()[2])
    coordinates['elevation'] = float(lines[8].split()[2])

    hdr['sampling_rate'] = float(
        re.search('\\d+', lines[10].split()[2]).group())
    hdr['delta'] = 1 / hdr['sampling_rate']
    standard['units'] = 'acc'

    dir_string = lines[12].split()[1].strip()
    # knet files have directions listed as N-S, E-W, or U-D,
    # whereas in kiknet those directions are '4', '5', or '6'.
    if dir_string in ['N-S', '1', '4']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=False,
            is_north=True)
    elif dir_string in ['E-W', '2', '5']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=False,
            is_north=False)
    elif dir_string in ['U-D', '3', '6']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        raise Exception('KNET: Could not parse direction %s' %
                        lines[12].split()[1])

    logging.debug('channel: %s' % hdr['channel'])
    scalestr = lines[13].split()[2]
    parts = scalestr.split('/')
    num = float(parts[0].replace('(gal)', ''))
    den = float(parts[1])
    calib = num / den
    hdr['calib'] = calib

    duration = float(lines[11].split()[2])

    hdr['npts'] = int(duration * hdr['sampling_rate'])

    timestr = ' '.join(lines[9].split()[2:4])
    # The K-NET and KiK-Net data logger adds a 15s time delay
    # this is removed here
    sttime = datetime.strptime(timestr, TIMEFMT) - timedelta(seconds=15.0)
    # Shift the time to utc (Japanese time is 9 hours ahead)
    sttime = sttime - timedelta(seconds=9 * 3600.)
    hdr['starttime'] = sttime

    # read in the data - there is a max of 8 columns per line
    # the code below handles the case when last line has
    # less than 8 columns
    if hdr['npts'] % COLS_PER_LINE != 0:
        nrows = int(np.floor(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 1
    else:
        nrows = int(np.ceil(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 0
    data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
                         max_rows=nrows, filling_values=np.nan)
    data = data.flatten()
    if nrows2:
        skip_header = TEXT_HDR_ROWS + nrows
        data2 = np.genfromtxt(filename, skip_header=skip_header,
                              max_rows=nrows2, filling_values=np.nan)
        data = np.hstack((data, data2))
        nrows += nrows2

    # apply the correction factor we're given in the header
    data *= calib

    # fill out the rest of the standard dictionary
    standard['horizontal_orientation'] = np.nan
    standard['instrument_period'] = np.nan
    standard['instrument_damping'] = np.nan
    standard['process_time'] = ''
    standard['process_level'] = PROCESS_LEVELS['V1']
    standard['sensor_serial_number'] = ''
    standard['instrument'] = ''
    standard['comments'] = ''
    standard['structure_type'] = ''
    if dir_string in ['1', '2', '3']:
        standard['structure_type'] = 'borehole'

    standard['corner_frequency'] = np.nan
    standard['units'] = 'acc'
    standard['source'] = SRC
    standard['source_format'] = 'knet'
    head, tail = os.path.split(filename)
    standard['source_file'] = tail or os.path.basename(head)

    hdr['coordinates'] = coordinates
    hdr['standard'] = standard

    # create a Trace from the data and metadata
    trace = StationTrace(data.copy(), Stats(hdr.copy()))
    response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
    trace.setProvenance('remove_response', response)

    stream = StationStream(traces=[trace])
    return [stream]
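For reference, the scale-factor arithmetic above reduces to a one-liner; the string below is invented but follows the K-NET shape the code parses:

scalestr = '3920(gal)/6170965'   # invented example value
num, den = scalestr.split('/')
calib = float(num.replace('(gal)', '')) / float(den)  # gal (cm/s^2) per count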
Example #21
def _read_header(lines, filename, table):
    header = {}
    standard = {}
    coords = {}
    format_specific = {}

    # fill out the standard dictionary
    standard['source'] = SOURCE
    standard['source_format'] = SOURCE_FORMAT
    standard['instrument'] = ''
    standard['sensor_serial_number'] = ''
    standard['process_level'] = PROCESS_LEVELS['V1']
    standard['process_time'] = lines[0].split(':')[1].strip()
    # station name line can look like this:
    # VIÑA DEL MAR CENTRO S/N 675
    sparts = lines[5].split()
    station_name = ' '.join(sparts[0:sparts.index('S/N')])
    standard['station_name'] = station_name

    # this table gives us station coordinates and structure type
    station_row = table[table['Name'] == station_name]
    if not len(station_row):
        logging.warning('Unknown structure type.')
        standard['structure_type'] = ''
    else:
        row = station_row.iloc[0]
        standard['structure_type'] = row['Structure Type']
    standard['corner_frequency'] = np.nan
    standard['units'] = 'cm/s^2'
    standard['units_type'] = 'acc'

    inst_dict = {}
    for part in lines[9].split(','):
        key, value = part.split('=')
        fvalue_str = re.search(FLOATRE, value.strip()).group()
        inst_dict[key.strip()] = float(fvalue_str)

    standard['instrument_period'] = inst_dict['INSTR PERIOD']
    standard['instrument_damping'] = inst_dict['DAMPING']
    standard['horizontal_orientation'] = np.nan
    standard['comments'] = ' '.join(lines[11:13]).replace('\n', '')
    head, tail = os.path.split(filename)
    standard['source_file'] = tail or os.path.basename(head)

    # this field can be used for instrument correction
    # when data is in counts
    standard['instrument_sensitivity'] = inst_dict['SENSITIVITY']

    # fill out the stats stuff
    try:
        stimestr = re.search(TIME_RE, lines[3]).group()
    except AttributeError:
        try:
            stimestr = re.search(TIME_RE2, lines[3]).group()
        except AttributeError:
            logging.warning('Setting time to epoch.')
            stimestr = '01/01/1970 00:00:00.000'

    # 2/27/2010 2:45:46.000 GMT
    stime = datetime.strptime(stimestr, TIMEFMT)

    # it appears that sometimes the trigger time is set to Jan 1, 1980
    # by default.
    if stime.year == 1980 and stime.month == 1 and stime.day == 1:
        fmt = 'Trigger time set to %s in file %s'
        logging.warning(fmt % (str(stime), standard['source_file']))

    header['starttime'] = stime
    npts, duration = re.findall(FLOATRE, lines[10])
    npts = int(npts)
    duration = float(duration)
    header['npts'] = npts
    header['delta'] = duration / (npts - 1)
    header['sampling_rate'] = (npts - 1) / duration
    header['duration'] = duration
    raw_channel = lines[6][9:11].strip()
    if raw_channel in NORTH_CHANNELS:
        channel = get_channel_name(header['sampling_rate'], True, False, True)
    elif raw_channel in WEST_CHANNELS:
        channel = get_channel_name(header['sampling_rate'], True, False, False)
    elif raw_channel in VERTICAL_CHANNELS:
        channel = get_channel_name(header['sampling_rate'], True, True, False)
    else:
        raise KeyError('Channel name %s not defined' % raw_channel)

    header['channel'] = channel
    header['station'] = lines[5].split()[-1]
    header['location'] = '--'
    header['network'] = NETWORK

    # these files seem to have all zeros for station coordinates!
    if not len(station_row):
        logging.warning('Could not find station match for %s' % station_name)
        coordparts = lines[4].split()
        lat = float(re.search(FLOATRE, coordparts[2]).group())
        lon = float(re.search(FLOATRE, coordparts[3]).group())
        if lon == 0 or lat == 0:
            logging.warning('Latitude or Longitude values are 0')
        if 'S' in coordparts[2]:
            lat = -1 * lat
        if 'W' in coordparts[3]:
            lon = -1 * lon
    else:
        row = station_row.iloc[0]
        lat = row['Lat']
        lon = row['Lon']

    altitude = 0.0
    logging.warning('Setting elevation to 0.0')
    coords = {'latitude': lat,
              'longitude': lon,
              'elevation': altitude}

    header['coordinates'] = coords
    header['standard'] = standard
    header['format_specific'] = format_specific

    return header
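The key=value header line parsed into inst_dict above can be exercised standalone (the line text and FLOATRE pattern here are assumptions for illustration):

import re
FLOATRE = r'[-+]?\d*\.?\d+'  # assumed float-matching pattern
line = 'INSTR PERIOD = 0.020 SEC, DAMPING = 0.700, SENSITIVITY = 2.20 V/G'
inst_dict = {}
for part in line.split(','):
    key, value = part.split('=')
    inst_dict[key.strip()] = float(re.search(FLOATRE, value.strip()).group())
# inst_dict == {'INSTR PERIOD': 0.02, 'DAMPING': 0.7, 'SENSITIVITY': 2.2}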
Example #22
def read_cwb(filename, **kwargs):
    """Read Taiwan Central Weather Bureau strong motion file.

    Args:
        filename (str): Path to possible CWB data file.
        kwargs (ref): Other arguments will be ignored.

    Returns:
        Stream: Obspy Stream containing three channels of acceleration
        data (cm/s**2).
    """
    logging.debug("Starting read_cwb.")
    if not is_cwb(filename):
        raise Exception('%s is not a valid CWB strong motion data file.' %
                        filename)
    f = open(filename, 'rt')
    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two character field.  Most data providers,
    # including CWB here, don't provide this.  We'll flag it as "--".
    data = np.genfromtxt(filename,
                         skip_header=HDR_ROWS,
                         delimiter=[COLWIDTH] * NCOLS)  # time, Z, NS, EW

    hdr = _get_header_info(f, data)
    f.close()

    head, tail = os.path.split(filename)
    hdr['standard']['source_file'] = tail or os.path.basename(head)

    hdr_z = copy.deepcopy(hdr)
    hdr_z['channel'] = get_channel_name(hdr['sampling_rate'],
                                        is_acceleration=True,
                                        is_vertical=True,
                                        is_north=False)
    hdr_z['standard']['horizontal_orientation'] = np.nan
    hdr_z['standard']['vertical_orientation'] = np.nan
    hdr_z['standard']['units_type'] = get_units_type(hdr_z['channel'])

    hdr_h1 = copy.deepcopy(hdr)
    hdr_h1['channel'] = get_channel_name(hdr['sampling_rate'],
                                         is_acceleration=True,
                                         is_vertical=False,
                                         is_north=True)
    hdr_h1['standard']['horizontal_orientation'] = np.nan
    hdr_h1['standard']['vertical_orientation'] = np.nan
    hdr_h1['standard']['units_type'] = get_units_type(hdr_h1['channel'])

    hdr_h2 = copy.deepcopy(hdr)
    hdr_h2['channel'] = get_channel_name(hdr['sampling_rate'],
                                         is_acceleration=True,
                                         is_vertical=False,
                                         is_north=False)
    hdr_h2['standard']['horizontal_orientation'] = np.nan
    hdr_h2['standard']['vertical_orientation'] = np.nan
    hdr_h2['standard']['units_type'] = get_units_type(hdr_h2['channel'])

    stats_z = Stats(hdr_z)
    stats_h1 = Stats(hdr_h1)
    stats_h2 = Stats(hdr_h2)

    response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
    trace_z = StationTrace(data=data[:, 1], header=stats_z)
    trace_z.setProvenance('remove_response', response)

    trace_h1 = StationTrace(data=data[:, 2], header=stats_h1)
    trace_h1.setProvenance('remove_response', response)

    trace_h2 = StationTrace(data=data[:, 3], header=stats_h2)
    trace_h2.setProvenance('remove_response', response)

    stream = StationStream([trace_z, trace_h1, trace_h2])
    return [stream]
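np.genfromtxt accepts a list of field widths as its delimiter, which is how the fixed-width CWB records are split; a self-contained illustration with invented widths and data:

import io
import numpy as np
text = '  1.00  2.00  3.00  4.00\n  5.00  6.00  7.00  8.00\n'
data = np.genfromtxt(io.StringIO(text), delimiter=[6] * 4)
# data.shape == (2, 4); each field is a fixed six-character slice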
Example #23
def read_knet(filename, **kwargs):
    """Read Japanese KNET strong motion file.

    Args:
        filename (str): Path to possible KNET data file.
        kwargs (ref): Other arguments will be ignored.
    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
            (cm/s**2).
    """
    logging.debug("Starting read_knet.")
    if not is_knet(filename):
        raise Exception('%s is not a valid KNET file' % filename)

    # Parse the header portion of the file
    with open(filename, 'rt') as f:
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    hdr = {}
    coordinates = {}
    standard = {}
    hdr['network'] = 'BO'
    hdr['station'] = lines[5].split()[2]
    logging.debug('station: %s' % hdr['station'])
    standard['station_name'] = ''

    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two character field.  Most data providers,
    # including KNET here, don't provide this.  We'll flag it as "--".
    hdr['location'] = '--'

    coordinates['latitude'] = float(lines[6].split()[2])
    coordinates['longitude'] = float(lines[7].split()[2])
    coordinates['elevation'] = float(lines[8].split()[2])

    hdr['sampling_rate'] = float(
        re.search('\\d+', lines[10].split()[2]).group())
    hdr['delta'] = 1 / hdr['sampling_rate']
    standard['units'] = 'acc'

    dir_string = lines[12].split()[1].strip()
    # knet files have directions listed as N-S, E-W, or U-D,
    # whereas in kiknet those directions are '4', '5', or '6'.
    if dir_string in ['N-S', '1', '4']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=False,
            is_north=True)
    elif dir_string in ['E-W', '2', '5']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=False,
            is_north=False)
    elif dir_string in ['U-D', '3', '6']:
        hdr['channel'] = get_channel_name(
            hdr['sampling_rate'],
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        raise Exception('KNET: Could not parse direction %s' %
                        lines[12].split()[1])

    logging.debug('channel: %s' % hdr['channel'])
    scalestr = lines[13].split()[2]
    parts = scalestr.split('/')
    num = float(parts[0].replace('(gal)', ''))
    den = float(parts[1])
    calib = num / den
    hdr['calib'] = calib

    duration = float(lines[11].split()[2])

    hdr['npts'] = int(duration * hdr['sampling_rate'])

    timestr = ' '.join(lines[9].split()[2:4])
    # The K-NET and KiK-Net data logger adds a 15s time delay
    # this is removed here
    sttime = datetime.strptime(timestr, TIMEFMT) - timedelta(seconds=15.0)
    # Shift the time to utc (Japanese time is 9 hours ahead)
    sttime = sttime - timedelta(seconds=9 * 3600.)
    hdr['starttime'] = sttime

    # read in the data - there is a max of 8 columns per line
    # the code below handles the case when last line has
    # less than 8 columns
    if hdr['npts'] % COLS_PER_LINE != 0:
        nrows = int(np.floor(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 1
    else:
        nrows = int(np.ceil(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 0
    data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
                         max_rows=nrows, filling_values=np.nan)
    data = data.flatten()
    if nrows2:
        skip_header = TEXT_HDR_ROWS + nrows
        data2 = np.genfromtxt(filename, skip_header=skip_header,
                              max_rows=nrows2, filling_values=np.nan)
        data = np.hstack((data, data2))
        nrows += nrows2

    # apply the correction factor we're given in the header
    data *= calib

    # fill out the rest of the standard dictionary
    standard['horizontal_orientation'] = np.nan
    standard['instrument_period'] = np.nan
    standard['instrument_damping'] = np.nan
    standard['process_time'] = ''
    standard['process_level'] = PROCESS_LEVELS['V1']
    standard['sensor_serial_number'] = ''
    standard['instrument'] = ''
    standard['comments'] = ''
    standard['structure_type'] = ''
    if dir_string in ['1', '2', '3']:
        standard['structure_type'] = 'borehole'

    standard['corner_frequency'] = np.nan
    standard['units'] = 'acc'
    standard['source'] = SRC
    standard['source_format'] = 'knet'

    hdr['coordinates'] = coordinates
    hdr['standard'] = standard

    # create a Trace from the data and metadata
    trace = StationTrace(data.copy(), Stats(hdr.copy()))
    response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
    trace.setProvenance('remove_response', response)

    stream = StationStream(traces=[trace])
    return [stream]
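The 15-second logger delay and the JST-to-UTC shift above are easy to verify by hand. Below is a self-contained sketch with a hypothetical header time string; the TIMEFMT value is an assumption about the K-NET header layout, not taken from the module:

from datetime import datetime, timedelta

TIMEFMT = '%Y/%m/%d %H:%M:%S'    # assumed format of lines[9]
timestr = '2011/03/11 14:46:40'  # hypothetical header value, in JST

# remove the 15 s data-logger delay, then shift JST (UTC+9) to UTC
sttime = datetime.strptime(timestr, TIMEFMT) - timedelta(seconds=15.0)
sttime = sttime - timedelta(seconds=9 * 3600.0)
print(sttime)  # 2011-03-11 05:46:25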
Example #24
def _get_header_info(int_data, flt_data, lines, volume, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): 'LA'
     - station (str)
     - channel (str): Determined using USC_ORIENTATIONS
     - location (str): Default is '--'
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always usc
    - format_specific
      - fractional_unit (float): Units of digitized acceleration
            in file (fractions of g)

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        volume (str): Volume flag (e.g. 'V1')
        location (str): Optional location code; default is ''

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    if volume == 'V1':
        hdr['duration'] = flt_data[2]
        hdr['npts'] = int_data[27]
        hdr['sampling_rate'] = (hdr['npts'] - 1) / hdr['duration']

        # Get required parameter number
        hdr['network'] = 'LA'
        hdr['station'] = str(int_data[8])
        logging.debug('station: %s' % hdr['station'])
        horizontal_angle = int_data[26]
        logging.debug('horizontal: %s' % horizontal_angle)
        if (horizontal_angle in USC_ORIENTATIONS
                or (horizontal_angle >= 0 and horizontal_angle <= 360)):
            if horizontal_angle in USC_ORIENTATIONS:
                channel = USC_ORIENTATIONS[horizontal_angle][1].upper()
                if channel == 'UP' or channel == 'DOWN' or channel == 'VERT':
                    channel = get_channel_name(hdr['sampling_rate'],
                                               is_acceleration=True,
                                               is_vertical=True,
                                               is_north=False)
                horizontal_angle = 0.0
            elif (horizontal_angle > 315 or horizontal_angle < 45
                  or (horizontal_angle > 135 and horizontal_angle < 225)):
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=True)
            else:
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=False)
            horizontal_orientation = horizontal_angle
            hdr['channel'] = channel
            logging.debug('channel: %s' % hdr['channel'])
        else:
            errstr = ('USC: Not enough information to distinguish horizontal '
                      'from vertical channels.')
            raise Exception(errstr)

        if location == '':
            hdr['location'] = '--'
        else:
            hdr['location'] = location
        month = str(int_data[21])
        day = str(int_data[22])
        year = str(int_data[23])
        time = str(int_data[24])
        tstr = month + '/' + day + '/' + year + '_' + time
        starttime = datetime.strptime(tstr, '%m/%d/%Y_%H%M')
        hdr['starttime'] = starttime

        # Get coordinates
        lat_deg = int_data[9]
        lat_min = int_data[10]
        lat_sec = int_data[11]
        lon_deg = int_data[12]
        lon_min = int_data[13]
        lon_sec = int_data[14]
        # Check for southern hemisphere, default is northern
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'S' in lines[4][idx:]:
                lat_sign = -1
            else:
                lat_sign = 1
        else:
            lat_sign = 1
        # Check for western hemisphere, default is western
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'W' in lines[4][idx:]:
                lon_sign = -1
            else:
                lon_sign = 1
        else:
            lon_sign = -1
        latitude = lat_sign * _dms2dd(lat_deg, lat_min, lat_sec)
        longitude = lon_sign * _dms2dd(lon_deg, lon_min, lon_sec)
        # Since sometimes longitudes are positive in this format for data in
        # the western hemisphere, we "fix" it here. Hopefully no one in the
        # eastern hemisphere uses this format!
        if longitude > 0:
            longitude = -longitude
        coordinates['latitude'] = latitude
        coordinates['longitude'] = longitude
        logging.warning('Setting elevation to 0.0')
        coordinates['elevation'] = 0.0
        # Get standard parameters
        standard['units_type'] = get_units_type(hdr['channel'])
        standard['horizontal_orientation'] = float(horizontal_orientation)
        standard['vertical_orientation'] = np.nan
        standard['instrument_period'] = flt_data[0]
        standard['instrument_damping'] = flt_data[1]
        standard['process_time'] = ''
        station_line = lines[5]
        station_length = int(lines[5][72:74])
        name = station_line[:station_length]
        standard['station_name'] = name
        standard['sensor_serial_number'] = ''
        standard['instrument'] = ''
        standard['comments'] = ''
        standard['units'] = 'acc'
        standard['structure_type'] = ''
        standard['process_level'] = PROCESS_LEVELS['V1']
        standard['corner_frequency'] = np.nan
        standard['source'] = ('Los Angeles Basin Seismic Network, University '
                              'of Southern California')
        standard['source_format'] = 'usc'

        # this field can be used for instrument correction
        # when data is in counts
        standard['instrument_sensitivity'] = np.nan

        # Get format specific
        format_specific['fractional_unit'] = flt_data[4]

    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr
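_dms2dd is called above but not shown. Given how it is used (degrees, minutes, seconds in; signed decimal degrees out after the sign is applied by the caller), a plausible implementation is the usual conversion. This is a sketch, not the library's actual helper:

def _dms2dd(degrees, minutes, seconds):
    # decimal degrees = degrees + minutes/60 + seconds/3600
    return degrees + minutes / 60.0 + seconds / 3600.0

print(_dms2dd(34, 3, 0))     # 34.05
print(_dms2dd(118, 15, 36))  # 118.26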
Example #25
def read_cwb(filename, **kwargs):
    """Read Taiwan Central Weather Bureau strong motion file.

    Args:
        filename (str): Path to possible CWB data file.
        kwargs (ref): Other arguments will be ignored.

    Returns:
        list: Sequence of one StationStream object containing three
            channels of acceleration data (cm/s**2).
    """
    logging.debug("Starting read_cwb.")
    if not is_cwb(filename):
        raise Exception('%s is not a valid CWB strong motion data file.'
                        % filename)
    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two character field.  Most data providers,
    # including CWB here, don't provide this.  We'll flag it as "--".
    data = np.genfromtxt(filename, skip_header=HDR_ROWS,
                         delimiter=[COLWIDTH] * NCOLS)  # time, Z, NS, EW

    with open(filename, 'rt') as f:
        hdr = _get_header_info(f, data)

    head, tail = os.path.split(filename)
    hdr['standard']['source_file'] = tail or os.path.basename(head)

    hdr_z = hdr.copy()
    hdr_z['channel'] = get_channel_name(
        hdr['sampling_rate'],
        is_acceleration=True,
        is_vertical=True,
        is_north=False)
    hdr_z['standard']['horizontal_orientation'] = np.nan

    hdr_h1 = hdr.copy()
    hdr_h1['channel'] = get_channel_name(
        hdr['sampling_rate'],
        is_acceleration=True,
        is_vertical=False,
        is_north=True)
    hdr_h1['standard']['horizontal_orientation'] = np.nan

    hdr_h2 = hdr.copy()
    hdr_h2['channel'] = get_channel_name(
        hdr['sampling_rate'],
        is_acceleration=True,
        is_vertical=False,
        is_north=False)
    hdr_h2['standard']['horizontal_orientation'] = np.nan

    stats_z = Stats(hdr_z)
    stats_h1 = Stats(hdr_h1)
    stats_h2 = Stats(hdr_h2)

    response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
    trace_z = StationTrace(data=data[:, 1], header=stats_z)
    trace_z.setProvenance('remove_response', response)

    trace_h1 = StationTrace(data=data[:, 2], header=stats_h1)
    trace_h1.setProvenance('remove_response', response)

    trace_h2 = StationTrace(data=data[:, 3], header=stats_h2)
    trace_h2.setProvenance('remove_response', response)

    stream = StationStream([trace_z, trace_h1, trace_h2])
    return [stream]
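Hypothetical usage of read_cwb, assuming 'example.dat' is a valid CWB strong-motion file on disk; like the other readers in this collection it returns a list holding a single StationStream:

streams = read_cwb('example.dat')   # hypothetical file path
stream = streams[0]
for trace in stream:
    print(trace.stats.channel, trace.stats.npts, trace.stats.sampling_rate)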
Example #26
def _get_header_info(int_data, flt_data, lines, volume, location=""):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): 'LA'
     - station (str)
     - channel (str): Determined using USC_ORIENTATIONS
     - location (str): Default is '--'
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always usc
    - format_specific
      - fractional_unit (float): Units of digitized acceleration
            in file (fractions of g)

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        volume (str): Volume flag (e.g. "V1")
        location (str): Optional location code; default is ""

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    if volume == "V1":
        hdr["duration"] = flt_data[2]
        hdr["npts"] = int_data[27]
        hdr["sampling_rate"] = (hdr["npts"] - 1) / hdr["duration"]

        # Get required parameter number
        hdr["network"] = "LA"
        hdr["station"] = str(int_data[8])
        logging.debug(f"station: {hdr['station']}")
        horizontal_angle = int_data[26]
        logging.debug(f"horizontal: {horizontal_angle}")
        if horizontal_angle in USC_ORIENTATIONS or (horizontal_angle >= 0 and
                                                    horizontal_angle <= 360):
            if horizontal_angle in USC_ORIENTATIONS:
                channel = USC_ORIENTATIONS[horizontal_angle][1].upper()
                if channel == "UP" or channel == "DOWN" or channel == "VERT":
                    channel = get_channel_name(
                        hdr["sampling_rate"],
                        is_acceleration=True,
                        is_vertical=True,
                        is_north=False,
                    )
                horizontal_angle = 0.0
            elif (horizontal_angle > 315 or horizontal_angle < 45
                  or (horizontal_angle > 135 and horizontal_angle < 225)):
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=True,
                )
            else:
                channel = get_channel_name(
                    hdr["sampling_rate"],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=False,
                )
            horizontal_orientation = horizontal_angle
            hdr["channel"] = channel
            logging.debug(f"channel: {hdr['channel']}")
        else:
            errstr = ("USC: Not enough information to distinguish horizontal "
                      "from vertical channels.")
            raise Exception(errstr)

        if location == "":
            hdr["location"] = "--"
        else:
            hdr["location"] = location
        month = str(int_data[21])
        day = str(int_data[22])
        year = str(int_data[23])
        time = str(int_data[24])
        tstr = month + "/" + day + "/" + year + "_" + time
        starttime = datetime.strptime(tstr, "%m/%d/%Y_%H%M")
        hdr["starttime"] = starttime

        # Get coordinates
        lat_deg = int_data[9]
        lat_min = int_data[10]
        lat_sec = int_data[11]
        lon_deg = int_data[12]
        lon_min = int_data[13]
        lon_sec = int_data[14]
        # Check for southern hemisphere, default is northern
        if lines[4].find("STATION USC#") >= 0:
            idx = lines[4].find("STATION USC#") + 12
            if "S" in lines[4][idx:]:
                lat_sign = -1
            else:
                lat_sign = 1
        else:
            lat_sign = 1
        # Check for western hemisphere, default is western
        if lines[4].find("STATION USC#") >= 0:
            idx = lines[4].find("STATION USC#") + 12
            if "W" in lines[4][idx:]:
                lon_sign = -1
            else:
                lon_sign = 1
        else:
            lon_sign = -1
        latitude = lat_sign * _dms2dd(lat_deg, lat_min, lat_sec)
        longitude = lon_sign * _dms2dd(lon_deg, lon_min, lon_sec)
        # Since sometimes longitudes are positive in this format for data in
        # the western hemisphere, we "fix" it here. Hopefully no one in the
        # eastern hemisphere uses this format!
        if longitude > 0:
            longitude = -longitude
        coordinates["latitude"] = latitude
        coordinates["longitude"] = longitude
        logging.warning("Setting elevation to 0.0")
        coordinates["elevation"] = 0.0
        # Get standard parameters
        standard["units_type"] = get_units_type(hdr["channel"])
        standard["horizontal_orientation"] = float(horizontal_orientation)
        standard["vertical_orientation"] = np.nan
        standard["instrument_period"] = flt_data[0]
        standard["instrument_damping"] = flt_data[1]
        standard["process_time"] = ""
        station_line = lines[5]
        station_length = int(lines[5][72:74])
        name = station_line[:station_length]
        standard["station_name"] = name
        standard["sensor_serial_number"] = ""
        standard["instrument"] = ""
        standard["comments"] = ""
        standard["units"] = "cm/s/s"
        standard["structure_type"] = ""
        standard["process_level"] = PROCESS_LEVELS["V1"]
        standard["corner_frequency"] = np.nan
        standard["source"] = ("Los Angeles Basin Seismic Network, "
                              "University of Southern California")
        standard["source_format"] = "usc"

        # these fields can be used for instrument correction
        # when data is in counts
        standard["instrument_sensitivity"] = np.nan
        standard["volts_to_counts"] = np.nan

        # Get format specific
        format_specific["fractional_unit"] = flt_data[4]

    # Set dictionary
    hdr["standard"] = standard
    hdr["coordinates"] = coordinates
    hdr["format_specific"] = format_specific
    return hdr
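The azimuth test above (greater than 315, less than 45, or between 135 and 225) assigns any horizontal channel within 45 degrees of due north or due south to the north component. A standalone restatement of that rule, for reference:

def is_north_like(azimuth):
    # within 45 degrees of north (0/360) or of south (180)
    return (azimuth > 315 or azimuth < 45
            or (135 < azimuth < 225))

for az in (0, 30, 90, 180, 270, 330):
    print(az, is_north_like(az))
# 0 True, 30 True, 90 False, 180 True, 270 False, 330 True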
Example #27
def read_knet(filename, config=None, **kwargs):
    """Read Japanese KNET strong motion file.

    Args:
        filename (str):
            Path to possible KNET data file.
        config (dict):
            Dictionary containing configuration.
        kwargs (ref):
            Other arguments will be ignored.

    Returns:
        list: Sequence of one StationStream object containing three
            channels of acceleration data (cm/s**2).
    """
    logging.debug("Starting read_knet.")
    if not is_knet(filename, config):
        raise Exception(f"{filename} is not a valid KNET file")

    # Parse the header portion of the file
    with open(filename, "rt") as f:
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    hdr = {}
    coordinates = {}
    standard = {}
    hdr["network"] = "BO"
    hdr["station"] = lines[5].split()[2]
    logging.debug(f"station: {hdr['station']}")
    standard["station_name"] = ""

    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two character field.  Most data providers,
    # including KNET here, don't provide this.  We'll flag it as "--".
    hdr["location"] = "--"

    coordinates["latitude"] = float(lines[6].split()[2])
    coordinates["longitude"] = float(lines[7].split()[2])
    coordinates["elevation"] = float(lines[8].split()[2])

    hdr["sampling_rate"] = float(
        re.search("\\d+", lines[10].split()[2]).group())
    hdr["delta"] = 1 / hdr["sampling_rate"]
    standard["units_type"] = "acc"
    standard["units_type"] = "cm/s/s"

    dir_string = lines[12].split()[1].strip()
    # knet files have directions listed as N-S, E-W, or U-D,
    # whereas in kiknet those directions are '4', '5', or '6'.
    if dir_string in ["N-S", "1", "4"]:
        hdr["channel"] = get_channel_name(hdr["sampling_rate"],
                                          is_acceleration=True,
                                          is_vertical=False,
                                          is_north=True)
    elif dir_string in ["E-W", "2", "5"]:
        hdr["channel"] = get_channel_name(
            hdr["sampling_rate"],
            is_acceleration=True,
            is_vertical=False,
            is_north=False,
        )
    elif dir_string in ["U-D", "3", "6"]:
        hdr["channel"] = get_channel_name(hdr["sampling_rate"],
                                          is_acceleration=True,
                                          is_vertical=True,
                                          is_north=False)
    else:
        raise Exception(
            f"KNET: Could not parse direction {lines[12].split()[1]}")

    logging.debug(f"channel: {hdr['channel']}")
    scalestr = lines[13].split()[2]
    parts = scalestr.split("/")
    num = float(parts[0].replace("(gal)", ""))
    den = float(parts[1])
    calib = num / den
    hdr["calib"] = calib

    duration = float(lines[11].split()[2])

    hdr["npts"] = int(duration * hdr["sampling_rate"])

    timestr = " ".join(lines[9].split()[2:4])
    # The K-NET and KiK-Net data logger adds a 15s time delay
    # this is removed here
    sttime = datetime.strptime(timestr, TIMEFMT) - timedelta(seconds=15.0)
    # Shift the time to utc (Japanese time is 9 hours ahead)
    sttime = sttime - timedelta(seconds=9 * 3600.0)
    hdr["starttime"] = sttime

    # read in the data - there is a max of 8 columns per line
    # the code below handles the case when last line has
    # less than 8 columns
    if hdr["npts"] % COLS_PER_LINE != 0:
        nrows = int(np.floor(hdr["npts"] / COLS_PER_LINE))
        nrows2 = 1
    else:
        nrows = int(np.ceil(hdr["npts"] / COLS_PER_LINE))
        nrows2 = 0
    data = np.genfromtxt(filename,
                         skip_header=TEXT_HDR_ROWS,
                         max_rows=nrows,
                         filling_values=np.nan)
    data = data.flatten()
    if nrows2:
        skip_header = TEXT_HDR_ROWS + nrows
        data2 = np.genfromtxt(filename,
                              skip_header=skip_header,
                              max_rows=nrows2,
                              filling_values=np.nan)
        data = np.hstack((data, data2))
        nrows += nrows2

    # apply the correction factor we're given in the header
    data *= calib

    # fill out the rest of the standard dictionary
    standard["units_type"] = get_units_type(hdr["channel"])
    standard["horizontal_orientation"] = np.nan
    standard["vertical_orientation"] = np.nan
    standard["instrument_period"] = np.nan
    standard["instrument_damping"] = np.nan
    standard["process_time"] = ""
    standard["process_level"] = PROCESS_LEVELS["V1"]
    standard["sensor_serial_number"] = ""
    standard["instrument"] = ""
    standard["comments"] = ""
    standard["structure_type"] = ""
    if dir_string in ["1", "2", "3"]:
        standard["structure_type"] = "borehole"

    standard["corner_frequency"] = np.nan
    standard["units"] = "acc"
    standard["source"] = SRC
    standard["source_format"] = "knet"
    head, tail = os.path.split(filename)
    standard["source_file"] = tail or os.path.basename(head)

    # these fields can be used for instrument correction
    # when data is in counts
    standard["instrument_sensitivity"] = np.nan
    standard["volts_to_counts"] = np.nan

    hdr["coordinates"] = coordinates
    hdr["standard"] = standard

    # create a Trace from the data and metadata
    trace = StationTrace(data.copy(), Stats(hdr.copy()))
    response = {"input_units": "counts", "output_units": "cm/s^2"}
    trace.setProvenance("remove_response", response)

    stream = StationStream(traces=[trace])
    return [stream]
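The two-pass np.genfromtxt pattern above (all full rows first, then the short final row appended separately) can be demonstrated without a real K-NET file; io.StringIO stands in for the data block, and the header rows are omitted:

import io

import numpy as np

text = ('1 2 3 4 5 6 7 8\n'
        '9 10 11\n')   # 11 samples: one full 8-column row plus a short row

data = np.genfromtxt(io.StringIO(text), max_rows=1).flatten()
data2 = np.genfromtxt(io.StringIO(text), skip_header=1, max_rows=1)
data = np.hstack((data, data2))
print(data)   # [ 1.  2.  3. ...  11.]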
Example #28
def _get_header_info(filename,
                     any_structure=False,
                     accept_flagged=False,
                     location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network
     - station
     - channel
     - location (str): Set to floor the sensor is located on. If not a
            multi-sensor array, default is '--'. Can be set manually by
            the user.
     - starttime
     - sampling_rate
     - npts
     - coordinates:
       - latitude
       - longitude
       - elevation
    - standard
      - horizontal_orientation
      - instrument_period
      - instrument_damping
      - process_level
      - station_name
      - sensor_serial_number
      - instrument
      - comments
      - structure_type
      - corner_frequency
      - units
      - source
      - source_format
    - format_specific
      - vertical_orientation
      - building_floor (0=basement, 1=floor above basement,
            -1=1st sub-basement, etc.)
      - bridge_number_spans
      - bridge_transducer_location ("free field",
                                    "at the base of a pier or abutment",
                                    "on an abutment",
                                    "on the deck at the top of a pier",
                                    "on the deck between piers or between
                                     an abutment and a pier")
      - dam_transducer_location ("upstream or downstream free field",
                                 "at the base of the dam",
                                 "on the crest of the dam",
                                 "on the abutment of the dam")
      - construction_type ("Reinforced concrete gravity",
                           "Reinforced concrete arch",
                           "earth fill",
                           "other")
      - filter_poles
      - data_source
    """
    stats = {}
    standard = {}
    format_specific = {}
    coordinates = {}
    # read the ascii header lines
    with open(filename) as f:
        ascheader = [next(f).strip() for x in range(ASCII_HEADER_LINES)]

    standard['process_level'] = PROCESS_LEVELS[VALID_HEADERS[ascheader[0]]]
    logging.debug("process_level: %s" % standard['process_level'])

    # station code is in the third line
    stats['station'] = ''
    if len(ascheader[2]) >= 4:
        stats['station'] = ascheader[2][0:4].strip()
    logging.debug('station: %s' % stats['station'])

    standard['process_time'] = ''
    standard['station_name'] = ascheader[5][10:40].strip()
    # sometimes the data source has nothing in it,
    # most of the time it seems to have USGS in it
    # sometimes it's something like JPL/USGS, CDOT/USGS, etc.
    # if it's got USGS in it, let's just say network=US, otherwise "--"
    stats['network'] = 'ZZ'
    if ascheader[7].find('USGS') > -1:
        stats['network'] = 'US'

    try:
        standard['source'] = ascheader[7].split('=')[2].strip()
    except IndexError:
        standard['source'] = 'USGS'
    if standard['source'] == '':
        standard['source'] = 'USGS'
    standard['source_format'] = 'smc'

    # read integer header data

    intheader = np.genfromtxt(filename,
                              dtype=np.int32,
                              max_rows=INTEGER_HEADER_LINES,
                              skip_header=ASCII_HEADER_LINES,
                              delimiter=INT_HEADER_WIDTHS)
    # 8 columns per line
    # first line is start time information, and then inst. serial number
    missing_data = intheader[0, 0]
    year = intheader[0, 1]
    jday = intheader[0, 2]
    hour = intheader[0, 3]
    minute = intheader[0, 4]
    if (year != missing_data and jday != missing_data and hour != missing_data
            and minute != missing_data):

        # Handle second if missing
        second = 0
        if not intheader[0, 5] == missing_data:
            second = intheader[0, 5]

        # Handle microsecond if missing and convert milliseconds to microseconds
        microsecond = 0
        if not intheader[0, 6] == missing_data:
            microsecond = intheader[0, 6] / 1e3
        datestr = '%i %00i %i %i %i %i' % (year, jday, hour, minute, second,
                                           microsecond)

        stats['starttime'] = datetime.strptime(datestr, '%Y %j %H %M %S %f')
    else:
        logging.warning('No start time provided. '
                        'This must be set manually for network/station: '
                        '%s/%s.' % (stats['network'], stats['station']))
        standard['comments'] = 'Missing start time.'

    standard['sensor_serial_number'] = ''
    if intheader[1, 3] != missing_data:
        standard['sensor_serial_number'] = str(intheader[1, 3])

    # we never get a two character location code so floor location is used
    if location == '':
        location = intheader.flatten()[24]
        if location != missing_data:
            location = str(location)
            if len(location) < 2:
                location = location.zfill(2)
            stats['location'] = location
        else:
            stats['location'] = '--'
    else:
        stats['location'] = location

    # second line is information about number of channels, orientations
    # we care about orientations
    format_specific['vertical_orientation'] = np.nan
    if intheader[1, 4] != missing_data:
        format_specific['vertical_orientation'] = int(intheader[1, 4])

    standard['horizontal_orientation'] = np.nan
    if intheader[1, 5] != missing_data:
        standard['horizontal_orientation'] = float(intheader[1, 5])

    if intheader[1, 6] == missing_data or intheader[1, 6] not in INSTRUMENTS:
        standard['instrument'] = ''
    else:
        standard['instrument'] = INSTRUMENTS[intheader[1, 6]]

    num_comments = intheader[1, 7]

    # third line contains number of data points
    stats['npts'] = intheader[2, 0]
    problem_flag = intheader[2, 1]
    if problem_flag == 1:
        if not accept_flagged:
            fmt = 'SMC: Record found in file %s has a problem flag!'
            raise Exception(fmt % filename)
        else:
            logging.warning(
                'SMC: Data contains a problem flag for network/station: '
                '%s/%s. See comments.' % (stats['network'], stats['station']))
    stype = intheader[2, 2]
    if stype == missing_data:
        stype = np.nan
    elif stype not in STRUCTURES:
        # structure type is not defined and will be considered 'other'
        stype = 4
    fmt = 'SMC: Record found in file %s is not a free-field sensor!'
    standard['structure_type'] = STRUCTURES[stype]
    if standard['structure_type'] == 'building' and not any_structure:
        raise Exception(fmt % filename)

    format_specific['building_floor'] = np.nan
    if intheader[3, 0] != missing_data:
        format_specific['building_floor'] = intheader[3, 0]

    format_specific['bridge_number_spans'] = np.nan
    if intheader[3, 1] != missing_data:
        format_specific['bridge_number_spans'] = intheader[3, 1]

    format_specific['bridge_transducer_location'] = BRIDGE_LOCATIONS[0]
    if intheader[3, 2] != missing_data:
        bridge_number = intheader[3, 2]
        format_specific['bridge_transducer_location'] = \
            BRIDGE_LOCATIONS[bridge_number]

    format_specific['dam_transducer_location'] = DAM_LOCATIONS[0]
    if intheader[3, 3] != missing_data:
        dam_number = intheader[3, 3]
        format_specific['dam_transducer_location'] = DAM_LOCATIONS[dam_number]

    c1 = format_specific['bridge_transducer_location'].find('free field') == -1
    c2 = format_specific['dam_transducer_location'].find('free field') == -1
    if (c1 or c2) and not any_structure:
        raise Exception(fmt % filename)

    format_specific['construction_type'] = CONSTRUCTION_TYPES[4]
    if intheader[3, 4] != missing_data:
        format_specific['construction_type'] = \
            CONSTRUCTION_TYPES[intheader[3, 4]]

    # station is repeated here if all numeric
    if not len(stats['station']):
        stats['station'] = '%i' % intheader[3, 5]

    # read float header data
    skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES
    floatheader = np.genfromtxt(filename,
                                max_rows=FLOAT_HEADER_LINES,
                                skip_header=skip,
                                delimiter=FLOAT_HEADER_WIDTHS)

    # float headers are 10 lines of 5 floats each
    missing_data = floatheader[0, 0]
    stats['sampling_rate'] = floatheader[0, 1]
    coordinates['latitude'] = floatheader[2, 0]
    # the documentation for SMC says that sometimes longitudes are
    # positive in the western hemisphere. Since it is very unlikely
    # any of these files exist for the eastern hemisphere, check for
    # positive longitudes and fix them.
    lon = floatheader[2, 1]
    if lon > 0:
        lon = -1 * lon
    coordinates['longitude'] = lon
    coordinates['elevation'] = 0
    if floatheader[2, 2] != missing_data:
        coordinates['elevation'] = floatheader[2, 2]

    # figure out the channel code
    if format_specific['vertical_orientation'] in [0, 180]:
        stats['channel'] = get_channel_name(stats['sampling_rate'],
                                            is_acceleration=True,
                                            is_vertical=True,
                                            is_north=False)
    else:
        ho = standard['horizontal_orientation']
        quad1 = ho > 315 and ho <= 360
        quad2 = ho > 0 and ho <= 45
        quad3 = ho > 135 and ho <= 225
        if quad1 or quad2 or quad3:
            stats['channel'] = get_channel_name(stats['sampling_rate'],
                                                is_acceleration=True,
                                                is_vertical=False,
                                                is_north=True)
        else:
            stats['channel'] = get_channel_name(stats['sampling_rate'],
                                                is_acceleration=True,
                                                is_vertical=False,
                                                is_north=False)

    logging.debug('channel: %s' % stats['channel'])
    sensor_frequency = floatheader[4, 1]
    standard['instrument_period'] = 1 / sensor_frequency
    standard['instrument_damping'] = floatheader[4, 2]

    standard['corner_frequency'] = floatheader[3, 4]
    format_specific['filter_poles'] = floatheader[4, 0]
    standard['units'] = 'acc'

    # read in the comment lines
    with open(filename) as f:
        skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES + FLOAT_HEADER_LINES
        _ = [next(f) for x in range(skip)]
        standard['comments'] = [
            next(f).strip().lstrip('|') for x in range(num_comments)
        ]

    standard['comments'] = ' '.join(standard['comments'])
    stats['coordinates'] = coordinates
    stats['standard'] = standard
    stats['format_specific'] = format_specific

    head, tail = os.path.split(filename)
    stats['standard']['source_file'] = tail or os.path.basename(head)

    return (stats, num_comments)
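When np.genfromtxt receives a sequence of integers as its delimiter, it treats them as fixed column widths, which is how the integer and float header blocks above are sliced. A tiny illustration with made-up widths (the real INT_HEADER_WIDTHS are defined elsewhere in the module):

import io

import numpy as np

line = '   1  23 456\n'
widths = [4, 4, 4]   # hypothetical field widths
row = np.genfromtxt(io.StringIO(line), dtype=np.int32, delimiter=widths)
print(row)   # [  1  23 456]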
Example #29
def add_channel_metadata(tr, inv, client):
    """
    Adds the channel metadata for each channel in the stream.

    Args:
        tr (obspy.core.trace.Trace):
            Trace of requested data.
        inv (obspy.core.inventory):
            Inventory object corresponding to the stream.
        client (str):
            FDSN client indicator.

    Returns:
        trace (obspy.core.trace.Trace): Trace with metadata added.
    """

    time = tr.stats.starttime
    id_string = tr.stats.network + '.' + tr.stats.station + '.'
    id_string += tr.stats.location + '.' + tr.stats.channel
    if tr.stats.location == '':
        tr.stats.location = '--'
    metadata = inv.get_channel_metadata(id_string, time)

    coordinates = {
        'latitude': metadata['latitude'],
        'longitude': metadata['longitude'],
        'elevation': metadata['elevation']
    }

    standard = {
        'horizontal_orientation': metadata['azimuth'],
        'instrument_period': np.nan,
        'instrument_damping': np.nan,
        'process_level': 'V0',
        'station_name': tr.stats.station,
        'sensor_serial_number': '',
        'instrument': '',
        'comments': '',
        'structure_type': '',
        'corner_frequency': np.nan,
        'units': 'raw',
        'source': client,
        'source_format': 'fdsn'
    }

    tr.stats['coordinates'] = coordinates
    tr.stats['standard'] = standard

    if metadata['dip'] in [90, -90, 180, -180]:
        tr.stats['channel'] = get_channel_name(
            tr.stats['sampling_rate'],
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        ho = metadata['azimuth']
        quad1 = ho > 315 and ho <= 360
        quad2 = ho >= 0 and ho <= 45
        quad3 = ho > 135 and ho <= 225
        if quad1 or quad2 or quad3:
            tr.stats['channel'] = get_channel_name(
                tr.stats['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=True)
        else:
            tr.stats['channel'] = get_channel_name(
                tr.stats['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=False)
    return tr
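The dip test above is the FDSN-metadata counterpart of the azimuth-quadrant rule used by the file readers: a dip of +/-90 (or +/-180 in some metadata) marks a vertical component, and otherwise the azimuth picks north versus east. Restated as a standalone sketch:

def component_from_metadata(dip, azimuth):
    if dip in [90, -90, 180, -180]:
        return 'vertical'
    if azimuth > 315 or (0 <= azimuth <= 45) or (135 < azimuth <= 225):
        return 'north'
    return 'east'

print(component_from_metadata(-90, 0))   # vertical
print(component_from_metadata(0, 200))   # north
print(component_from_metadata(0, 90))    # east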
Example #30
def _read_header(filename):
    # read in first 88 lines
    with open(filename, "rt") as myfile:
        header = [next(myfile) for x in range(HEADER_LINES)]

    header_dict = {}
    lastkey = ""
    for line in header:
        if not len(line.strip()):
            continue
        if ":" in line:
            colidx = line.find(":")
            key = line[0:colidx].strip()
            value = line[colidx + 1:].strip()
            if not len(key):
                key = lastkey + "$"
            lastkey = key
            header_dict[key] = value

    # create a list of dictionaries for channel spec
    channels = [{}, {}, {}]
    channels[0]["standard"] = {}
    channels[1]["standard"] = {}
    channels[2]["standard"] = {}

    station = header_dict["CLAVE DE LA ESTACION"]
    channels[0]["station"] = station
    channels[1]["station"] = station
    channels[2]["station"] = station

    channels[0]["network"] = NETWORK
    channels[1]["network"] = NETWORK
    channels[2]["network"] = NETWORK

    # unam provides the start *time* of the record, but not the date.
    # it is up to us to determine whether midnight has occurred between
    # eq time and record start time.

    # the hour/min/sec of trigger time
    (rhour, rminute,
     rsecond) = header_dict["HORA DE LA PRIMERA MUESTRA (GMT)"].split(":")
    dtsecs = (int(rhour) * 3600) + (int(rminute) * 60) + (float(rsecond))
    startdt = timedelta(seconds=dtsecs)
    eqdatestr = header_dict["FECHA DEL SISMO [GMT]"]
    eqdate = datetime.strptime(eqdatestr, "%Y/%m/%d")
    # the hour, minute and second of origin
    eqtimestr = header_dict["HORA EPICENTRO (GMT)"]
    try:
        eqtime = datetime.strptime(f"{eqdatestr} {eqtimestr}", TIMEFMT1)
    except ValueError:
        eqtime = datetime.strptime(f"{eqdatestr} {eqtimestr}", TIMEFMT2)

    # if the origin time and record start time are more than 10 minutes
    # apart (in either direction), then assume that we need to add 1 day
    # to the record start time.
    starttime = eqdate + startdt
    dt = np.abs((starttime - eqtime).total_seconds())
    if dt > MAX_TIME_DIFF:
        starttime = eqdate + timedelta(days=1) + startdt

    channels[0]["starttime"] = starttime
    channels[1]["starttime"] = starttime
    channels[2]["starttime"] = starttime

    # get record durations for each channel
    durstr = header_dict["DURACION DEL REGISTRO (s), C1-C6"].lstrip("/")
    durations = [float(dur) for dur in durstr.split("/")]

    channels[0]["duration"] = durations[0]
    channels[1]["duration"] = durations[1]
    channels[2]["duration"] = durations[2]

    # get deltas
    delta_strings = header_dict["INTERVALO DE MUESTREO, C1-C6 (s)"].split("/")
    deltas = [float(delta) for delta in delta_strings[1:]]
    channels[0]["delta"] = deltas[0]
    channels[1]["delta"] = deltas[1]
    channels[2]["delta"] = deltas[2]

    # get sampling rates
    channels[0]["sampling_rate"] = 1 / deltas[0]
    channels[1]["sampling_rate"] = 1 / deltas[1]
    channels[2]["sampling_rate"] = 1 / deltas[2]

    # get channel orientations
    azstrings = header_dict["ORIENTACION C1-C6 (rumbo;orientacion)"].split("/")
    az1, az1_vert = _get_azimuth(azstrings[1])
    az2, az2_vert = _get_azimuth(azstrings[2])
    az3, az3_vert = _get_azimuth(azstrings[3])
    channels[0]["standard"]["horizontal_orientation"] = az1
    channels[1]["standard"]["horizontal_orientation"] = az2
    channels[2]["standard"]["horizontal_orientation"] = az3
    channels[0]["standard"]["vertical_orientation"] = np.nan
    channels[1]["standard"]["vertical_orientation"] = np.nan
    channels[2]["standard"]["vertical_orientation"] = np.nan
    az1_north = is_channel_north(az1)
    az2_north = is_channel_north(az2)
    az3_north = is_channel_north(az3)
    channels[0]["channel"] = get_channel_name(channels[0]["sampling_rate"],
                                              True, az1_vert, az1_north)
    channels[1]["channel"] = get_channel_name(channels[1]["sampling_rate"],
                                              True, az2_vert, az2_north)
    channels[2]["channel"] = get_channel_name(channels[2]["sampling_rate"],
                                              True, az3_vert, az3_north)

    # get channel npts
    npts_strings = header_dict["NUM. TOTAL DE MUESTRAS, C1-C6"].split("/")
    npts_list = [float(npts) for npts in npts_strings[1:]]
    channels[0]["npts"] = npts_list[0]
    channels[1]["npts"] = npts_list[1]
    channels[2]["npts"] = npts_list[2]

    # locations
    channels[0]["location"] = "--"
    channels[1]["location"] = "--"
    channels[1]["location"] = "--"

    # get station coordinates
    coord1 = header_dict["COORDENADAS DE LA ESTACION"]
    coord2 = header_dict["COORDENADAS DE LA ESTACION$"]
    if "LAT" in coord1:
        latitude = float(re.search(FLOATRE, coord1).group())
        longitude = float(re.search(FLOATRE, coord2).group())
        if coord1.strip().endswith("S"):
            latitude *= -1
        if coord2.strip().endswith("W"):
            longitude *= -1
    else:
        latitude = float(re.search(FLOATRE, coord2).group())
        longitude = float(re.search(FLOATRE, coord1).group())
        if coord1.strip().endswith("W"):
            longitude *= -1
        if coord2.strip().endswith("S"):
            latitude *= -1
    elevation = float(header_dict["ALTITUD (msnm)"])
    cdict = {
        "latitude": latitude,
        "longitude": longitude,
        "elevation": elevation
    }
    channels[0]["coordinates"] = cdict
    channels[1]["coordinates"] = cdict
    channels[2]["coordinates"] = cdict

    # fill in other standard stuff
    standard0 = channels[0]["standard"]
    standard1 = channels[1]["standard"]
    standard2 = channels[2]["standard"]
    standard0["units_type"] = "acc"
    standard1["units_type"] = "acc"
    standard2["units_type"] = "acc"

    standard0["source_format"] = SOURCE_FORMAT
    standard1["source_format"] = SOURCE_FORMAT
    standard2["source_format"] = SOURCE_FORMAT

    standard0["instrument"] = header_dict["MODELO DEL ACELEROGRAFO"]
    standard1["instrument"] = header_dict["MODELO DEL ACELEROGRAFO"]
    standard2["instrument"] = header_dict["MODELO DEL ACELEROGRAFO"]

    standard0["sensor_serial_number"] = header_dict[
        "NUMERO DE SERIE DEL ACELEROGRAFO"]
    standard1["sensor_serial_number"] = header_dict[
        "NUMERO DE SERIE DEL ACELEROGRAFO"]
    standard2["sensor_serial_number"] = header_dict[
        "NUMERO DE SERIE DEL ACELEROGRAFO"]

    standard0["process_level"] = PROCESS_LEVELS["V1"]
    standard1["process_level"] = PROCESS_LEVELS["V1"]
    standard2["process_level"] = PROCESS_LEVELS["V1"]

    standard0["process_time"] = ""
    standard1["process_time"] = ""
    standard2["process_time"] = ""

    standard0["station_name"] = header_dict["NOMBRE DE LA ESTACION"]
    standard1["station_name"] = header_dict["NOMBRE DE LA ESTACION"]
    standard2["station_name"] = header_dict["NOMBRE DE LA ESTACION"]

    standard0["structure_type"] = ""
    standard1["structure_type"] = ""
    standard2["structure_type"] = ""

    standard0["corner_frequency"] = np.nan
    standard1["corner_frequency"] = np.nan
    standard2["corner_frequency"] = np.nan

    standard0["units"] = "cm/s/s"
    standard1["units"] = "cm/s/s"
    standard2["units"] = "cm/s/s"

    periods = _get_periods(header_dict["FREC. NAT. DE SENSORES, C1-C6, (Hz)"])
    standard0["instrument_period"] = periods[0]
    standard1["instrument_period"] = periods[1]
    standard2["instrument_period"] = periods[2]

    dampings = _get_dampings(header_dict["AMORTIGUAMIENTO DE SENSORES, C1-C6"])
    standard0["instrument_damping"] = dampings[0]
    standard1["instrument_damping"] = dampings[1]
    standard2["instrument_damping"] = dampings[2]

    with open(filename, "rt") as myfile:
        header = [next(myfile) for x in range(HEADER_PLUS_COMMENT)]
    clines = header[89:102]
    comments = " ".join(clines).strip()
    standard0["comments"] = comments
    standard1["comments"] = comments
    standard2["comments"] = comments

    head, tail = os.path.split(filename)
    source_file = tail or os.path.basename(head)
    standard0["source_file"] = source_file
    standard1["source_file"] = source_file
    standard2["source_file"] = source_file

    standard0["source"] = SOURCE
    standard1["source"] = SOURCE
    standard2["source"] = SOURCE

    decfactor = float(header_dict["FACTOR DE DECIMACION"])
    standard0["instrument_sensitivity"] = decfactor
    standard1["instrument_sensitivity"] = decfactor
    standard2["instrument_sensitivity"] = decfactor

    standard0["volts_to_counts"] = np.nan
    standard1["volts_to_counts"] = np.nan
    standard2["volts_to_counts"] = np.nan

    return channels
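The midnight-rollover logic above deserves a worked example. The values below are hypothetical, and MAX_TIME_DIFF is assumed to be 600 seconds, matching the "10 minutes" in the comment:

from datetime import datetime, timedelta

import numpy as np

MAX_TIME_DIFF = 600   # assumed value (10 minutes)

eqdate = datetime(2020, 6, 23)                 # FECHA DEL SISMO [GMT]
eqtime = datetime(2020, 6, 23, 23, 59, 30)     # HORA EPICENTRO (GMT)
startdt = timedelta(seconds=90)                # record starts at 00:01:30

starttime = eqdate + startdt                   # naively 2020-06-23 00:01:30
if np.abs((starttime - eqtime).total_seconds()) > MAX_TIME_DIFF:
    starttime = eqdate + timedelta(days=1) + startdt
print(starttime)   # 2020-06-24 00:01:30, i.e. 120 s after the origin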
Example #31
def _read_header(filename):
    # read in first 88 lines
    with open(filename, 'rt') as myfile:
        header = [next(myfile) for x in range(HEADER_LINES)]

    header_dict = {}
    lastkey = ''
    for line in header:
        if not len(line.strip()):
            continue
        if ':' in line:
            colidx = line.find(':')
            key = line[0:colidx].strip()
            value = line[colidx + 1:].strip()
            if not len(key):
                key = lastkey + '$'
            lastkey = key
            header_dict[key] = value

    # create a list of dictionaries for channel spec
    channels = [{}, {}, {}]
    channels[0]['standard'] = {}
    channels[1]['standard'] = {}
    channels[2]['standard'] = {}

    station = header_dict['CLAVE DE LA ESTACION']
    channels[0]['station'] = station
    channels[1]['station'] = station
    channels[2]['station'] = station

    channels[0]['network'] = NETWORK
    channels[1]['network'] = NETWORK
    channels[2]['network'] = NETWORK

    # unam provides the start *time* of the record, but not the date.
    # it is up to us to determine whether midnight has occurred between
    # eq time and record start time.

    # the hour/min/sec of trigger time
    (rhour, rminute,
     rsecond) = header_dict['HORA DE LA PRIMERA MUESTRA (GMT)'].split(':')
    dtsecs = (int(rhour) * 3600) + (int(rminute) * 60) + (float(rsecond))
    startdt = timedelta(seconds=dtsecs)
    eqdatestr = header_dict['FECHA DEL SISMO [GMT]']
    eqdate = datetime.strptime(eqdatestr, '%Y/%m/%d')
    # the hour, minute and second of origin
    eqtimestr = header_dict['HORA EPICENTRO (GMT)']
    try:
        eqtime = datetime.strptime('%s %s' % (eqdatestr, eqtimestr), TIMEFMT1)
    except ValueError:
        eqtime = datetime.strptime('%s %s' % (eqdatestr, eqtimestr), TIMEFMT2)

    # if the origin time and record start time are more than 10 minutes
    # apart (in either direction), then assume that we need to add 1 day
    # to the record start time.
    starttime = eqdate + startdt
    dt = np.abs((starttime - eqtime).total_seconds())
    if dt > MAX_TIME_DIFF:
        starttime = eqdate + timedelta(days=1) + startdt

    channels[0]['starttime'] = starttime
    channels[1]['starttime'] = starttime
    channels[2]['starttime'] = starttime

    # get record durations for each channel
    durstr = header_dict['DURACION DEL REGISTRO (s), C1-C6'].lstrip('/')
    durations = [float(dur) for dur in durstr.split('/')]

    channels[0]['duration'] = durations[0]
    channels[1]['duration'] = durations[1]
    channels[2]['duration'] = durations[2]

    # get deltas
    delta_strings = header_dict['INTERVALO DE MUESTREO, C1-C6 (s)'].split('/')
    deltas = [float(delta) for delta in delta_strings[1:]]
    channels[0]['delta'] = deltas[0]
    channels[1]['delta'] = deltas[1]
    channels[2]['delta'] = deltas[2]

    # get sampling rates
    channels[0]['sampling_rate'] = 1 / deltas[0]
    channels[1]['sampling_rate'] = 1 / deltas[1]
    channels[2]['sampling_rate'] = 1 / deltas[2]

    # get channel orientations
    azstrings = header_dict['ORIENTACION C1-C6 (rumbo;orientacion)'].split('/')
    az1, az1_vert = _get_azimuth(azstrings[1])
    az2, az2_vert = _get_azimuth(azstrings[2])
    az3, az3_vert = _get_azimuth(azstrings[3])
    channels[0]['standard']['horizontal_orientation'] = az1
    channels[1]['standard']['horizontal_orientation'] = az2
    channels[2]['standard']['horizontal_orientation'] = az3
    channels[0]['standard']['vertical_orientation'] = np.nan
    channels[1]['standard']['vertical_orientation'] = np.nan
    channels[2]['standard']['vertical_orientation'] = np.nan
    az1_north = is_channel_north(az1)
    az2_north = is_channel_north(az2)
    az3_north = is_channel_north(az3)
    channels[0]['channel'] = get_channel_name(channels[0]['sampling_rate'],
                                              True, az1_vert, az1_north)
    channels[1]['channel'] = get_channel_name(channels[1]['sampling_rate'],
                                              True, az2_vert, az2_north)
    channels[2]['channel'] = get_channel_name(channels[2]['sampling_rate'],
                                              True, az3_vert, az3_north)

    # get channel npts
    npts_strings = header_dict['NUM. TOTAL DE MUESTRAS, C1-C6'].split('/')
    npts_list = [float(npts) for npts in npts_strings[1:]]
    channels[0]['npts'] = npts_list[0]
    channels[1]['npts'] = npts_list[1]
    channels[2]['npts'] = npts_list[2]

    # locations
    channels[0]['location'] = '--'
    channels[1]['location'] = '--'
    channels[2]['location'] = '--'

    # get station coordinates
    coord1 = header_dict['COORDENADAS DE LA ESTACION']
    coord2 = header_dict['COORDENADAS DE LA ESTACION$']
    if 'LAT' in coord1:
        latitude = float(re.search(FLOATRE, coord1).group())
        longitude = float(re.search(FLOATRE, coord2).group())
        if coord1.strip().endswith('S'):
            latitude *= -1
        if coord2.strip().endswith('W'):
            longitude *= -1
    else:
        latitude = float(re.search(FLOATRE, coord2).group())
        longitude = float(re.search(FLOATRE, coord1).group())
        if coord1.strip().endswith('W'):
            longitude *= -1
        if coord2.strip().endswith('S'):
            latitude *= -1
    elevation = float(header_dict['ALTITUD (msnm)'])
    cdict = {
        'latitude': latitude,
        'longitude': longitude,
        'elevation': elevation
    }
    channels[0]['coordinates'] = cdict
    channels[1]['coordinates'] = cdict
    channels[2]['coordinates'] = cdict

    # fill in other standard stuff
    channels[0]['standard']['units_type'] = 'acc'
    channels[1]['standard']['units_type'] = 'acc'
    channels[2]['standard']['units_type'] = 'acc'

    channels[0]['standard']['source_format'] = SOURCE_FORMAT
    channels[1]['standard']['source_format'] = SOURCE_FORMAT
    channels[2]['standard']['source_format'] = SOURCE_FORMAT

    channels[0]['standard']['instrument'] = header_dict[
        'MODELO DEL ACELEROGRAFO']
    channels[1]['standard']['instrument'] = header_dict[
        'MODELO DEL ACELEROGRAFO']
    channels[2]['standard']['instrument'] = header_dict[
        'MODELO DEL ACELEROGRAFO']

    channels[0]['standard']['sensor_serial_number'] = header_dict[
        'NUMERO DE SERIE DEL ACELEROGRAFO']
    channels[1]['standard']['sensor_serial_number'] = header_dict[
        'NUMERO DE SERIE DEL ACELEROGRAFO']
    channels[2]['standard']['sensor_serial_number'] = header_dict[
        'NUMERO DE SERIE DEL ACELEROGRAFO']

    channels[0]['standard']['process_level'] = PROCESS_LEVELS['V1']
    channels[1]['standard']['process_level'] = PROCESS_LEVELS['V1']
    channels[2]['standard']['process_level'] = PROCESS_LEVELS['V1']

    channels[0]['standard']['process_time'] = ''
    channels[1]['standard']['process_time'] = ''
    channels[2]['standard']['process_time'] = ''

    channels[0]['standard']['station_name'] = header_dict[
        'NOMBRE DE LA ESTACION']
    channels[1]['standard']['station_name'] = header_dict[
        'NOMBRE DE LA ESTACION']
    channels[2]['standard']['station_name'] = header_dict[
        'NOMBRE DE LA ESTACION']

    channels[0]['standard']['structure_type'] = ''
    channels[1]['standard']['structure_type'] = ''
    channels[2]['standard']['structure_type'] = ''

    channels[0]['standard']['corner_frequency'] = np.nan
    channels[1]['standard']['corner_frequency'] = np.nan
    channels[2]['standard']['corner_frequency'] = np.nan

    channels[0]['standard']['units'] = 'cm/s/s'
    channels[1]['standard']['units'] = 'cm/s/s'
    channels[2]['standard']['units'] = 'cm/s/s'

    periods = _get_periods(header_dict['FREC. NAT. DE SENSORES, C1-C6, (Hz)'])
    channels[0]['standard']['instrument_period'] = periods[0]
    channels[1]['standard']['instrument_period'] = periods[1]
    channels[2]['standard']['instrument_period'] = periods[2]

    dampings = _get_dampings(header_dict['AMORTIGUAMIENTO DE SENSORES, C1-C6'])
    channels[0]['standard']['instrument_damping'] = dampings[0]
    channels[1]['standard']['instrument_damping'] = dampings[1]
    channels[2]['standard']['instrument_damping'] = dampings[2]

    with open(filename, 'rt') as myfile:
        header = [next(myfile) for x in range(HEADER_PLUS_COMMENT)]
    clines = header[89:102]
    comments = ' '.join(clines).strip()
    channels[0]['standard']['comments'] = comments
    channels[1]['standard']['comments'] = comments
    channels[2]['standard']['comments'] = comments

    head, tail = os.path.split(filename)
    source_file = tail or os.path.basename(head)
    channels[0]['standard']['source_file'] = source_file
    channels[1]['standard']['source_file'] = source_file
    channels[2]['standard']['source_file'] = source_file

    channels[0]['standard']['source'] = SOURCE
    channels[1]['standard']['source'] = SOURCE
    channels[2]['standard']['source'] = SOURCE

    decfactor = float(header_dict['FACTOR DE DECIMACION'])
    channels[0]['standard']['instrument_sensitivity'] = decfactor
    channels[1]['standard']['instrument_sensitivity'] = decfactor
    channels[2]['standard']['instrument_sensitivity'] = decfactor

    return channels
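The lastkey + '$' trick in _read_header keeps continuation lines with an empty key, such as the second coordinate line, from overwriting the first. Extracted as a standalone sketch with hypothetical header text:

header = ['COORDENADAS DE LA ESTACION: 19.33 LAT. N\n',
          '                          : 99.18 LONG. W\n']
header_dict = {}
lastkey = ''
for line in header:
    colidx = line.find(':')
    key = line[0:colidx].strip()
    value = line[colidx + 1:].strip()
    if not len(key):
        key = lastkey + '$'   # continuation line: reuse previous key + '$'
    lastkey = key
    header_dict[key] = value
print(header_dict)
# {'COORDENADAS DE LA ESTACION': '19.33 LAT. N',
#  'COORDENADAS DE LA ESTACION$': '99.18 LONG. W'}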
Example #32
def _get_header_info(int_data, flt_data, lines, volume, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): 'LA'
     - station (str)
     - channel (str): Determined using USC_ORIENTATIONS
     - location (str): Default is '--'
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
    - standard (Defaults are either np.nan or '')
      - horizontal_orientation (float): Rotation from north (degrees)
      - instrument_period (float): Period of sensor (s)
      - instrument_damping (float): Fraction of critical
      - process_time (datetime): Reported date of processing
      - process_level: Either 'V0', 'V1', 'V2', or 'V3'
      - station_name (str): Long form station description
      - sensor_serial_number (str): Reported sensor serial
      - instrument (str): See SENSOR_TYPES
      - comments (str): Processing comments
      - structure_type (str): See BUILDING_TYPES
      - corner_frequency (float): Sensor corner frequency (Hz)
      - units (str): See UNITS
      - source (str): Network source description
      - source_format (str): Always usc
    - format_specific
      - fractional_unit (float): Units of digitized acceleration
            in file (fractions of g)

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        volume (str): Volume flag (e.g. 'V1')
        location (str): Optional location code; default is ''

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    if volume == 'V1':
        hdr['duration'] = flt_data[2]
        hdr['npts'] = int_data[27]
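        # the duration spans npts - 1 sample intervals, hence the rate below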
        hdr['sampling_rate'] = (hdr['npts'] - 1) / hdr['duration']

        # Get required parameter number
        hdr['network'] = 'LA'
        hdr['station'] = str(int_data[8])
        logging.debug('station: %s' % hdr['station'])
        horizontal_angle = int_data[26]
        logging.debug('horizontal: %s' % horizontal_angle)
        if (horizontal_angle in USC_ORIENTATIONS or
                (0 <= horizontal_angle <= 360)):
            if horizontal_angle in USC_ORIENTATIONS:
                channel = USC_ORIENTATIONS[horizontal_angle][1].upper()
                if channel in ('UP', 'DOWN', 'VERT'):
                    channel = get_channel_name(
                        hdr['sampling_rate'],
                        is_acceleration=True,
                        is_vertical=True,
                        is_north=False)
                horizontal_angle = 0.0
            elif (
                horizontal_angle > 315 or
                horizontal_angle < 45 or
                (horizontal_angle > 135 and horizontal_angle < 225)
            ):
                channel = get_channel_name(
                    hdr['sampling_rate'],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=True)
            else:
                channel = get_channel_name(
                    hdr['sampling_rate'],
                    is_acceleration=True,
                    is_vertical=False,
                    is_north=False)
            horizontal_orientation = horizontal_angle
            hdr['channel'] = channel
            logging.debug('channel: %s' % hdr['channel'])
        else:
            errstr = ('USC: Not enough information to distinguish horizontal from '
                      'vertical channels.')
            raise GMProcessException(errstr)

        if location == '':
            hdr['location'] = '--'
        else:
            hdr['location'] = location
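        # Assemble the start time from the integer header fields; e.g. a
        # (hypothetical) 7/21/1952 11:52 record yields tstr '7/21/1952_1152'.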
        month = str(int_data[21])
        day = str(int_data[22])
        year = str(int_data[23])
        time = str(int_data[24])
        tstr = month + '/' + day + '/' + year + '_' + time
        starttime = datetime.strptime(tstr, '%m/%d/%Y_%H%M')
        hdr['starttime'] = starttime

        # Get coordinates
        lat_deg = int_data[9]
        lat_min = int_data[10]
        lat_sec = int_data[11]
        lon_deg = int_data[12]
        lon_min = int_data[13]
        lon_sec = int_data[14]
        # Check for southern hemisphere, default is northern
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'S' in lines[4][idx:]:
                lat_sign = -1
            else:
                lat_sign = 1
        else:
            lat_sign = 1
        # Check for western hemisphere, default is western
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'W' in lines[4][idx:]:
                lon_sign = -1
            else:
                lon_sign = 1
        else:
            lon_sign = -1
        latitude = lat_sign * _dms2dd(lat_deg, lat_min, lat_sec)
        longitude = lon_sign * _dms2dd(lon_deg, lon_min, lon_sec)
        # Since sometimes longitudes are positive in this format for data in
        # the western hemisphere, we "fix" it here. Hopefully no one in the
        # eastern hemisphere uses this format!
        if longitude > 0:
            longitude = -longitude
        coordinates['latitude'] = latitude
        coordinates['longitude'] = longitude
        coordinates['elevation'] = np.nan
        # Get standard parameters
        standard['horizontal_orientation'] = float(horizontal_orientation)
        standard['instrument_period'] = flt_data[0]
        standard['instrument_damping'] = flt_data[1]
        standard['process_time'] = ''
        station_line = lines[5]
        station_length = int(station_line[72:74])
        name = station_line[:station_length]
        standard['station_name'] = name
        standard['sensor_serial_number'] = ''
        standard['instrument'] = ''
        standard['comments'] = ''
        standard['units'] = 'acc'
        standard['structure_type'] = ''
        standard['process_level'] = PROCESS_LEVELS['V1']
        standard['corner_frequency'] = np.nan
        standard['source'] = ('Los Angeles Basin Seismic Network, University '
                              'of Southern California')
        standard['source_format'] = 'usc'
        # Get format specific
        format_specific['fractional_unit'] = flt_data[4]

    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr
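
The _dms2dd helper called above is not shown in this excerpt; a plausible implementation, assuming the standard degrees/minutes/seconds conversion, would be:

def _dms2dd(degrees, minutes, seconds):
    # Convert degrees/minutes/seconds to decimal degrees,
    # e.g. _dms2dd(34, 3, 36) == 34.06.
    return degrees + minutes / 60.0 + seconds / 3600.0
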
def _get_header_info(filename, any_structure=False, accept_flagged=False,
                     location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network
     - station
     - channel
     - location (str): Set to the floor the sensor is located on. If not a
            multi-sensor array, the default is '--'. Can be set manually by
            the user.
     - starttime
     - sampling_rate
     - npts
     - coordinates:
       - latitude
       - longitude
       - elevation
    - standard
      - horizontal_orientation
      - instrument_period
      - instrument_damping
      - process_level
      - station_name
      - sensor_serial_number
      - instrument
      - comments
      - structure_type
      - corner_frequency
      - units
      - source
      - source_format
    - format_specific
      - vertical_orientation
      - building_floor (0=basement, 1=floor above basement,
            -1=1st sub-basement, etc.)
      - bridge_number_spans
      - bridge_transducer_location ("free field",
                                    "at the base of a pier or abutment",
                                    "on an abutment",
                                    "on the deck at the top of a pier",
                                    "on the deck between piers or between
                                    an abutment and a pier")
      - dam_transducer_location ("upstream or downstream free field",
                                 "at the base of the dam",
                                 "on the crest of the dam",
                                 "on the abutment of the dam")
      - construction_type ("Reinforced concrete gravity",
                           "Reinforced concrete arch",
                           "earth fill",
                           "other")
      - filter_poles
      - data_source

    Args:
        filename (str): Path to the SMC data file.
        any_structure (bool): If True, accept records from sensors mounted
            on structures; otherwise non-free-field records raise an error.
        accept_flagged (bool): If True, accept records with problem flags.
        location (str): Optional location code override (default '').

    Returns:
        tuple: (stats dictionary, number of comment lines in the header)
    """
    stats = {}
    standard = {}
    format_specific = {}
    coordinates = {}
    # read the ascii header lines
    with open(filename) as f:
        ascheader = [next(f).strip() for x in range(ASCII_HEADER_LINES)]

    standard['process_level'] = PROCESS_LEVELS[VALID_HEADERS[ascheader[0]]]
    logging.debug("process_level: %s" % standard['process_level'])

    # station code is in the third line
    stats['station'] = ''
    if len(ascheader[2]) >= 4:
        stats['station'] = ascheader[2][0:4].strip()
    logging.debug('station: %s' % stats['station'])

    standard['process_time'] = ''
    standard['station_name'] = ascheader[5][10:40].strip()
    # sometimes the data source has nothing in it,
    # but most of the time it seems to have USGS in it;
    # sometimes it's something like JPL/USGS, CDOT/USGS, etc.
    # if it's got USGS in it, we say network=US, otherwise 'ZZ'
    stats['network'] = 'ZZ'
    if ascheader[7].find('USGS') > -1:
        stats['network'] = 'US'

    try:
        standard['source'] = ascheader[7].split('=')[2].strip()
    except IndexError:
        standard['source'] = 'USGS'
    if standard['source'] == '':
        standard['source'] = 'USGS'
    standard['source_format'] = 'smc'

    # read integer header data

    intheader = np.genfromtxt(filename, dtype=np.int32,
                              max_rows=INTEGER_HEADER_LINES,
                              skip_header=ASCII_HEADER_LINES,
                              delimiter=INT_HEADER_WIDTHS)
    # 8 columns per line
    # first line is start time information, and then inst. serial number
    missing_data = intheader[0, 0]
    year = intheader[0, 1]
    jday = intheader[0, 2]
    hour = intheader[0, 3]
    minute = intheader[0, 4]
    if (year != missing_data
            and jday != missing_data and hour != missing_data
            and minute != missing_data):

        # Handle second if missing
        second = 0
        if intheader[0, 5] != missing_data:
            second = intheader[0, 5]

        # Handle microsecond if missing; the header stores milliseconds, so
        # convert to the microseconds that the '%f' directive expects below.
        microsecond = 0
        if intheader[0, 6] != missing_data:
            microsecond = intheader[0, 6] * 1e3
        datestr = '%i %i %i %i %i %i' % (
            year, jday, hour, minute, second, microsecond)

        stats['starttime'] = datetime.strptime(datestr, '%Y %j %H %M %S %f')
    else:
        logging.warning('No start time provided. '
                        'This must be set manually for network/station: '
                        '%s/%s.' % (stats['network'], stats['station']))
        standard['comments'] = 'Missing start time.'

    standard['sensor_serial_number'] = ''
    if intheader[1, 3] != missing_data:
        standard['sensor_serial_number'] = str(intheader[1, 3])

    # we never get a two character location code so floor location is used
    if location == '':
        location = intheader.flatten()[24]
        if location != missing_data:
            location = str(location)
            if len(location) < 2:
                location = location.zfill(2)
            stats['location'] = location
        else:
            stats['location'] = '--'
    else:
        stats['location'] = location

    # second line is information about number of channels, orientations
    # we care about orientations
    format_specific['vertical_orientation'] = np.nan
    if intheader[1, 4] != missing_data:
        format_specific['vertical_orientation'] = int(intheader[1, 4])

    standard['horizontal_orientation'] = np.nan
    if intheader[1, 5] != missing_data:
        standard['horizontal_orientation'] = float(intheader[1, 5])

    if intheader[1, 6] == missing_data or intheader[1, 6] not in INSTRUMENTS:
        standard['instrument'] = ''
    else:
        standard['instrument'] = INSTRUMENTS[intheader[1, 6]]

    num_comments = intheader[1, 7]

    # third line contains number of data points
    stats['npts'] = intheader[2, 0]
    problem_flag = intheader[2, 1]
    if problem_flag == 1:
        if not accept_flagged:
            fmt = 'SMC: Record found in file %s has a problem flag!'
            raise Exception(fmt % filename)
        else:
            logging.warning(
                'SMC: Data contains a problem flag for network/station: '
                '%s/%s. See comments.' % (stats['network'], stats['station']))
    stype = intheader[2, 2]
    if stype == missing_data:
        stype = np.nan
    elif stype not in STRUCTURES:
        # structure type is not defined and will be considered 'other'
        stype = 4
    fmt = 'SMC: Record found in file %s is not a free-field sensor!'
    standard['structure_type'] = STRUCTURES[stype]
    if standard['structure_type'] == 'building' and not any_structure:
        raise Exception(fmt % filename)

    format_specific['building_floor'] = np.nan
    if intheader[3, 0] != missing_data:
        format_specific['building_floor'] = intheader[3, 0]

    format_specific['bridge_number_spans'] = np.nan
    if intheader[3, 1] != missing_data:
        format_specific['bridge_number_spans'] = intheader[3, 1]

    format_specific['bridge_transducer_location'] = BRIDGE_LOCATIONS[0]
    if intheader[3, 2] != missing_data:
        bridge_number = intheader[3, 2]
        format_specific['bridge_transducer_location'] = \
            BRIDGE_LOCATIONS[bridge_number]

    format_specific['dam_transducer_location'] = DAM_LOCATIONS[0]
    if intheader[3, 3] != missing_data:
        dam_number = intheader[3, 3]
        format_specific['dam_transducer_location'] = DAM_LOCATIONS[dam_number]

    c1 = format_specific['bridge_transducer_location'].find('free field') == -1
    c2 = format_specific['dam_transducer_location'].find('free field') == -1
    if (c1 or c2) and not any_structure:
        raise Exception(fmt % filename)

    format_specific['construction_type'] = CONSTRUCTION_TYPES[4]
    if intheader[3, 4] != missing_data:
        format_specific['construction_type'] = \
            CONSTRUCTION_TYPES[intheader[3, 4]]

    # station is repeated here if all numeric
    if not len(stats['station']):
        stats['station'] = '%i' % intheader[3, 5]

    # read float header data
    skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES
    floatheader = np.genfromtxt(
        filename,
        max_rows=FLOAT_HEADER_LINES,
        skip_header=skip,
        delimiter=FLOAT_HEADER_WIDTHS)

    # float headers are 10 lines of 5 floats each
    missing_data = floatheader[0, 0]
    stats['sampling_rate'] = floatheader[0, 1]
    coordinates['latitude'] = floatheader[2, 0]
    # the documentation for SMC says that sometimes longitudes are
    # positive in the western hemisphere. Since it is very unlikely
    # any of these files exist for the eastern hemisphere, check for
    # positive longitudes and fix them.
    lon = floatheader[2, 1]
    if lon > 0:
        lon = -1 * lon
    coordinates['longitude'] = lon
    coordinates['elevation'] = 0
    if floatheader[2, 2] != missing_data:
        coordinates['elevation'] = floatheader[2, 2]

    # figure out the channel code
    if format_specific['vertical_orientation'] in [0, 180]:
        stats['channel'] = get_channel_name(
            stats['sampling_rate'],
            is_acceleration=True,
            is_vertical=True,
            is_north=False)
    else:
        ho = standard['horizontal_orientation']
        quad1 = 315 < ho <= 360
        quad2 = 0 < ho <= 45
        quad3 = 135 < ho <= 225
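        # Worked example (hypothetical): ho = 350 gives quad1 True, so the
        # north channel name is used; ho = 90 leaves all three False, so
        # the east channel name is used below.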
        if quad1 or quad2 or quad3:
            stats['channel'] = get_channel_name(
                stats['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=True)
        else:
            stats['channel'] = get_channel_name(
                stats['sampling_rate'],
                is_acceleration=True,
                is_vertical=False,
                is_north=False)

    logging.debug('channel: %s' % stats['channel'])
    sensor_frequency = floatheader[4, 1]
    standard['instrument_period'] = 1 / sensor_frequency
    standard['instrument_damping'] = floatheader[4, 2]

    standard['corner_frequency'] = floatheader[3, 4]
    format_specific['filter_poles'] = floatheader[4, 0]
    standard['units'] = 'acc'

    # read in the comment lines
    with open(filename) as f:
        skip = ASCII_HEADER_LINES + INTEGER_HEADER_LINES + FLOAT_HEADER_LINES
        _ = [next(f) for x in range(skip)]
        standard['comments'] = [next(f).strip().lstrip('|')
                                for x in range(num_comments)]

    standard['comments'] = ' '.join(standard['comments'])
    stats['coordinates'] = coordinates
    stats['standard'] = standard
    stats['format_specific'] = format_specific

    head, tail = os.path.split(filename)
    stats['standard']['source_file'] = tail or os.path.basename(head)

    return (stats, num_comments)
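
A hypothetical call of the SMC header reader above (the file name and keyword values are illustrative, and the module-level SMC constants are assumed to be defined):

stats, num_comments = _get_header_info('example.smc', any_structure=True,
                                       accept_flagged=True)
print('%s.%s.%s' % (stats['network'], stats['station'], stats['channel']))
print('sampling rate: %.1f Hz, %i comment lines'
      % (stats['sampling_rate'], num_comments))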