def read_data(filename, read_format=None, **kwargs):
    """
    Read strong motion data from a file.

    Args:
        filename (str): Path to file
        read_format (str): Format of file

    Returns:
        obspy.core.stream.Stream: Stream read from file
    """
    # Refuse anything that is not an existing path on disk.
    if not os.path.exists(filename):
        raise AmptoolsException('Not a file %r' % filename)

    # Resolve the format: sniff it from the file when not supplied,
    # otherwise validate the caller's (lower-cased) choice.
    if read_format is None:
        read_format = _get_format(filename)
    else:
        read_format = _validate_format(filename, read_format.lower())

    # Import the matching amptools.io.<format>.core module and dispatch
    # to its read_<format>() entry point.
    module_name = 'amptools.io.' + read_format + '.core'
    format_module = importlib.import_module(module_name)
    read_method = getattr(format_module, 'read_' + read_format)
    return read_method(filename, **kwargs)
def read_dmg(filename, **kwargs):
    """Read DMG strong motion file.

    Notes:
        CSMIP is synonymous to as DMG in this reader.

    Args:
        filename (str): Path to possible DMG data file.
        kwargs (ref):
            units (str): String determining which timeseries is return. Valid
                    options include 'acc', 'vel', 'disp'. Default is 'acc'.
            Other arguments will be ignored.

    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
        (cm/s**2).

    Raises:
        Exception: If the file is not DMG format or units are invalid.
        AmptoolsException: If the volume type is not supported (only V2 is).
    """
    if not is_dmg(filename):
        raise Exception('Not a DMG file format.')
    # Check for units and location
    units = kwargs.get('units', 'acc')
    location = kwargs.get('location', '')
    if units not in UNITS:
        raise Exception('Not a valid choice of units.')

    # Determine the volume type from the first header line.
    # FIX: close the file handle via a context manager (the original leaked
    # it) and drop the redundant second is_dmg() check.
    with open(filename, 'rt') as f:
        line = f.readline()
    # FIX: initialize reader so an unrecognized header falls through to the
    # "Not a supported volume" error instead of a NameError below.
    # NOTE: 'uncorrected' must be tested before 'corrected' because the
    # former contains the latter as a substring.
    reader = None
    header = line.lower()
    if header.find('uncorrected') >= 0:
        reader = 'V1'
    elif header.find('corrected') >= 0:
        reader = 'V2'
    elif header.find('response') >= 0:
        reader = 'V3'

    # Count the number of lines in the file
    with open(filename) as f:
        line_count = sum(1 for _ in f)

    # Read as many channels as are present in the file
    line_offset = 0
    trace_list = []
    while line_offset < line_count:
        if reader == 'V2':
            traces, line_offset = _read_volume_two(filename, line_offset,
                                                   location=location)
            trace_list += traces
        else:
            # V1 and V3 files are recognized but not yet readable here.
            raise AmptoolsException('Not a supported volume.')

    # Keep only the traces recorded in the requested units.
    stream = Stream([])
    for trace in trace_list:
        if trace.stats['standard']['units'] == units:
            stream.append(trace)
    return stream
def _get_format(filename):
    """
    Get the format of the file.

    Args:
        filename (str): Path to file

    Returns:
        string: Format of file.
    """
    # Candidate formats are the subdirectories of amptools/io (entries with
    # no '.' in the name) that are not explicitly excluded.
    home = os.path.dirname(os.path.abspath(__file__))
    io_directory = os.path.abspath(os.path.join(home, '..', 'io'))
    valid_formats = [entry for entry in os.listdir(io_directory)
                     if entry.find('.') < 0 and entry not in EXCLUDED]

    # Ask each format's is_<format>() checker whether it recognizes the file.
    formats = []
    for candidate in valid_formats:
        module = importlib.import_module('amptools.io.' + candidate + '.core')
        checker = getattr(module, 'is_' + candidate)
        if checker(filename):
            formats.append(candidate)

    formats = np.asarray(formats)
    if len(formats) == 1:
        return formats[0]
    if len(formats) == 2 and 'obspy' in formats:
        # obspy acts as a generic fallback reader; prefer the specific format.
        return formats[formats != 'obspy'][0]
    if len(formats) == 0:
        raise AmptoolsException('No format found for file %r.' % filename)
    raise AmptoolsException('Multiple formats passing: %r. Please retry file %r '
                            'with a specified format.' % (formats.tolist(), filename))
def read_usc(filename, **kwargs):
    """Read USC V1 strong motion file.

    Args:
        filename (str): Path to possible USC V1 data file.
        kwargs (ref): Ignored by this function.

    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
        (cm/s**2).
    """
    if not is_usc(filename):
        raise Exception('%s is not a valid USC file' % filename)
    # Check for Location
    location = kwargs.get('location', '')

    # Only the first header line is needed to identify the volume type.
    with open(filename, 'rt') as f:
        first_line = f.readline()

    # Guard clause: only uncorrected (V1) files are supported here.
    if first_line.find('OF UNCORRECTED ACCELEROGRAM DATA OF') < 0:
        raise AmptoolsException('Not a supported volume.')
    return read_volume_one(filename, location=location)
def _get_header_info(int_data, flt_data, lines, volume, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): 'LA'
     - station (str)
     - channel (str): Determined using COSMOS_ORIENTATIONS
     - location (str): Default is '--'
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
     - standard (Defaults are either np.nan or '')
       - horizontal_orientation (float): Rotation from north (degrees)
       - instrument_period (float): Period of sensor (Hz)
       - instrument_damping (float): Fraction of critical
       - process_time (datetime): Reported date of processing
       - process_level: Either 'V0', 'V1', 'V2', or 'V3'
       - station_name (str): Long form station description
       - sensor_serial_number (str): Reported sensor serial
       - instrument (str): See SENSOR_TYPES
       - comments (str): Processing comments
       - structure_type (str): See BUILDING_TYPES
       - corner_frequency (float): Sensor corner frequency (Hz)
       - units (str): See UNITS
       - source (str): Network source description
       - source_format (str): Always cosmos
     - format_specific
       - fractional_unit (float): Units of digitized acceleration
             in file (fractions of g)

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        volume (str): Volume code; only 'V1' populates the header fields.
        location (str): Optional location code override; '--' when empty.

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    # NOTE(review): only V1 is handled; for any other volume the sub-dicts
    # are returned empty — confirm callers never pass other volumes.
    if volume == 'V1':
        hdr['duration'] = flt_data[2]
        hdr['npts'] = int_data[27]
        hdr['sampling_rate'] = hdr['npts'] / hdr['duration']
        # Get required parameter number
        hdr['network'] = 'LA'
        hdr['station'] = str(int_data[8])
        # Channel is derived from the sensor orientation code: named
        # orientations come from USC_ORIENTATIONS; numeric azimuths are
        # classified as north-ish (within 45 deg of N or S) or east-ish.
        horizontal_angle = int_data[26]
        if (horizontal_angle in USC_ORIENTATIONS or
                (horizontal_angle >= 0 and horizontal_angle <= 360)):
            if horizontal_angle in USC_ORIENTATIONS:
                channel = USC_ORIENTATIONS[horizontal_angle][1].upper()
                if channel == 'UP' or channel == 'DOWN' or channel == 'VERT':
                    channel = get_channel_name(hdr['sampling_rate'],
                                               is_acceleration=True,
                                               is_vertical=True,
                                               is_north=False)
            elif (horizontal_angle > 315 or horizontal_angle < 45 or
                  (horizontal_angle > 135 and horizontal_angle < 225)):
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=True)
            else:
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=False)
            horizontal_orientation = horizontal_angle
            hdr['channel'] = channel
        else:
            errstr = ('Not enough information to distinguish horizontal from '
                      'vertical channels.')
            raise AmptoolsException(errstr)
        if location == '':
            hdr['location'] = '--'
        else:
            hdr['location'] = location
        # Start time is encoded as separate integer header fields.
        month = str(int_data[21])
        day = str(int_data[22])
        year = str(int_data[23])
        time = str(int_data[24])
        tstr = month + '/' + day + '/' + year + '_' + time
        starttime = datetime.strptime(tstr, '%m/%d/%Y_%H%M')
        hdr['starttime'] = starttime
        # Get coordinates (degrees/minutes/seconds fields)
        lat_deg = int_data[9]
        lat_min = int_data[10]
        lat_sec = int_data[11]
        lon_deg = int_data[12]
        lon_min = int_data[13]
        lon_sec = int_data[14]
        # Check for southern hemisphere, default is northern
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'S' in lines[4][idx:]:
                lat_sign = -1
            else:
                lat_sign = 1
        else:
            lat_sign = 1
        # Check for western hemisphere, default is western
        # NOTE(review): the defaults disagree — when the 'STATION USC#' tag
        # is present but no 'W' is found, lon_sign is +1 (eastern), yet when
        # the tag is absent the default is -1 (western). Confirm which
        # default is intended; the comment above suggests western.
        if lines[4].find('STATION USC#') >= 0:
            idx = lines[4].find('STATION USC#') + 12
            if 'W' in lines[4][idx:]:
                lon_sign = -1
            else:
                lon_sign = 1
        else:
            lon_sign = -1
        latitude = lat_sign * _dms2dd(lat_deg, lat_min, lat_sec)
        longitude = lon_sign * _dms2dd(lon_deg, lon_min, lon_sec)
        coordinates['latitude'] = latitude
        coordinates['longitude'] = longitude
        coordinates['elevation'] = np.nan
        # Get standard paramaters
        standard['horizontal_orientation'] = horizontal_orientation
        standard['instrument_period'] = flt_data[0]
        standard['instrument_damping'] = flt_data[1]
        standard['process_time'] = ''
        # Station name length is encoded in columns 72-74 of header line 5.
        station_line = lines[5]
        station_length = int(lines[5][72:74])
        name = station_line[:station_length]
        standard['station_name'] = name
        standard['sensor_serial_number'] = ''
        standard['instrument'] = ''
        standard['comments'] = ''
        standard['units'] = 'acc'
        standard['structure_type'] = ''
        standard['process_level'] = 'V1'
        standard['corner_frequency'] = np.nan
        standard[
            'source'] = 'Los Angeles Basin Seismic Network, University of Southern California'
        standard['source_format'] = 'usc'
        # Get format specific
        format_specific['fractional_unit'] = flt_data[4]
    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr
def _get_header_info(int_data, flt_data, lines, level, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): Default is 'ZZ'. Determined using COSMOS_NETWORKS
     - station (str)
     - channel (str)
     - location (str): Default is '--'
     - starttime (datetime)
     - sampling_rate (float)
     - delta (float)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float): Default is np.nan
     - standard (Defaults are either np.nan or '')
       - horizontal_orientation (float): Rotation from north (degrees)
       - instrument_period (float): Period of sensor (Hz)
       - instrument_damping (float): Fraction of critical
       - process_time (datetime): Reported date of processing
       - process_level: Either 'V0', 'V1', 'V2', or 'V3'
       - station_name (str): Long form station description
       - sensor_serial_number (str): Reported sensor serial
       - instrument (str)
       - comments (str): Processing comments
       - structure_type (str)
       - corner_frequency (float): Sensor corner frequency (Hz)
       - units (str)
       - source (str): Network source description
       - source_format (str): Always dmg
     - format_specific
       - sensor_sensitivity (float): Transducer sensitivity (cm/g)
       - time_sd (float): Standard deviaiton of time steop (millisecond)
       - fractional_unit (float): Units of digitized acceleration
             in file (fractions of g)
       - scaling_factor (float): Scaling used for converting acceleration
             from g/10 to cm/sec/sec
       - low_filter_corner (float): Filter corner for low frequency
             V2 filtering (Hz)
       - high_filter_corner (float): Filter corner for high frequency
             V2 filtering (Hz)

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        level (str): Process level code (V0, V1, V2, V3)
        location (str): Optional location code override; '--' when empty.

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}

    # Required metadata: network code sits after the station name on line 1.
    name_length = int_data[29]
    station_name = re.sub(' +', ' ', lines[6][:name_length]).strip()
    code = re.sub(' +', ' ', lines[1][name_length:]).strip().split(' ')[-1][:2]
    if code.upper() in CODES:
        network = code.upper()
        idx = np.argwhere(CODES == network)[0][0]
        source = SOURCES1[idx].decode('utf-8') + ', ' + SOURCES2[idx].decode(
            'utf-8')
    else:
        network = 'ZZ'
        source = ''
    hdr['network'] = network
    station_line = lines[5]
    station = station_line[12:17].strip()
    hdr['station'] = station
    angle = int_data[26]
    hdr['delta'] = flt_data[60]
    hdr['sampling_rate'] = 1 / hdr['delta']
    # 500/600 are sentinel codes for vertical components; numeric azimuths
    # are classified as north-ish (within 45 deg of N or S) or east-ish.
    if angle == 500 or angle == 600 or (angle >= 0 and angle <= 360):
        if angle == 500 or angle == 600:
            hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                              is_acceleration=True,
                                              is_vertical=True,
                                              is_north=False)
        elif angle > 315 or angle < 45 or (angle > 135 and angle < 225):
            hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                              is_acceleration=True,
                                              is_vertical=False,
                                              is_north=True)
        else:
            hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                              is_acceleration=True,
                                              is_vertical=False,
                                              is_north=False)
    else:
        errstr = ('Not enough information to distinguish horizontal from '
                  'vertical channels.')
        raise AmptoolsException(errstr)
    if location == '':
        hdr['location'] = '--'
    else:
        hdr['location'] = location

    # Parse the trigger (start) time; the date may use '-' or '/' delimiters.
    trigger_line = lines[4][35:77]
    if trigger_line.find('-') >= 0 or trigger_line.find('/') >= 0:
        if trigger_line.find('-') >= 0:
            delimeter = '-'
        else:
            delimeter = '/'
        date = trigger_line.split(delimeter)
        # look for dates
        try:
            month = int(date[0][-2:])
            day = int(date[1])
            time = trigger_line.split(':')
            hour = int(time[1][-2:])
            minute = int(time[2])
            second = float(time[3][:2])
            microsecond = int((second - int(second)) * 1e6)
            year = int(date[2][:4])
            if len(str(year)) < 4:
                # Two-digit year: recover the full year from the earthquake
                # description line.
                # FIX: the original passed the matched *string* straight to
                # datetime() (TypeError) and left a debug print() behind.
                earthquake_line = lines[21]
                match = re.search(r'\d{4}', earthquake_line)
                if match is None:
                    # Fall through to the integer-header fallback below.
                    raise ValueError('No four-digit year found.')
                year = int(match[0])
            hdr['starttime'] = datetime(year, month, day, hour, minute,
                                        int(second), microsecond)
        except ValueError:
            # Looking for full year in integer header
            try:
                month = int(date[0][-2:])
                day = int(date[1])
                time = trigger_line.split(':')
                hour = int(time[1][-2:])
                minute = int(time[2])
                second = float(time[3][:2])
                microsecond = int((second - int(second)) * 1e6)
                year = int_data[23]
                hdr['starttime'] = datetime(year, month, day, hour, minute,
                                            int(second), microsecond)
            except ValueError:
                warnings.warn('No start time provided on trigger line. '
                              'This must be set manually for network/station: '
                              '%s/%s.' % (hdr['network'], hdr['station']))
                standard['comments'] = 'Missing start time.'
    else:
        warnings.warn('No start time provided on trigger line. '
                      'This must be set manually for network/station: '
                      '%s/%s.' % (hdr['network'], hdr['station']))
        standard['comments'] = 'Missing start time.'
    hdr['npts'] = int_data[52]

    # Coordinates: trailing hemisphere letter selects the sign.
    latitude_str = station_line[20:27].strip()
    longitude_str = station_line[29:37].strip()
    try:
        latitude = float(latitude_str[:-1])
        if latitude_str.upper().find('S') >= 0:
            latitude = -1 * latitude
    except Exception:
        warnings.warn('No latitude or invalid latitude format provided. '
                      'Setting to np.nan.', Warning)
        latitude = np.nan
    try:
        longitude = float(longitude_str[:-1])
        if longitude_str.upper().find('W') >= 0:
            longitude = -1 * longitude
    except Exception:
        # FIX: the two message halves were separated by a comma, which made
        # the second half the 'category' argument of warnings.warn and
        # raised a TypeError whenever this branch ran.
        warnings.warn('No longitude or invalid longitude format provided. '
                      'Setting to np.nan.', Warning)
        longitude = np.nan
    coordinates['latitude'] = latitude
    coordinates['longitude'] = longitude
    coordinates['elevation'] = np.nan

    # Standard metadata
    standard['horizontal_orientation'] = angle
    standard['instrument_period'] = flt_data[0]
    standard['instrument_damping'] = flt_data[1]
    process_line = lines[1].lower()
    if process_line.find('processed:') >= 0:
        process_info = process_line[process_line.find('processed:'):]
        try:
            date = process_info.split('/')
            month = int(date[0][-2:])
            day = int(date[1])
            try:
                process_year = int(date[2][:4])
            except ValueError:
                process_year = date[2][:2]
            # FIX: compare the string form — the original called len() on
            # the int branch, which raised TypeError and silently blanked
            # process_time whenever a four-digit year parsed cleanly.
            if len(str(process_year)) == 2 and str(process_year) == str(year)[-2:]:
                process_year = year
            standard['process_time'] = datetime(int(process_year), month, day)
        except Exception:
            standard['process_time'] = ''
    else:
        standard['process_time'] = ''
    standard['process_level'] = level
    if 'comments' not in standard:
        standard['comments'] = ''
    standard['structure_type'] = lines[7][46:80].strip()
    standard['instrument'] = station_line[39:47].strip()
    standard['sensor_serial_number'] = station_line[53:57].strip()
    standard['corner_frequency'] = ''
    standard['units'] = 'acc'
    standard['source'] = source
    standard['source_format'] = 'dmg'
    standard['station_name'] = station_name

    # Format specific metadata (-999 is the missing-value sentinel).
    format_specific['fractional_unit'] = flt_data[4]
    format_specific['sensor_sensitivity'] = flt_data[5]
    if flt_data[13] == -999:
        format_specific['time_sd'] = np.nan
    else:
        format_specific['time_sd'] = flt_data[13]
    format_specific['scaling_factor'] = flt_data[51]
    format_specific['low_filter_corner'] = flt_data[61]
    format_specific['high_filter_corner'] = flt_data[72]

    # Set dictionary
    hdr['coordinates'] = coordinates
    hdr['standard'] = standard
    hdr['format_specific'] = format_specific
    return hdr
def _get_header_info(int_data, flt_data, lines, cmt_data, location=''):
    """Return stats structure from various headers.

    Output is a dictionary like this:
     - network (str): Default is '--'. Determined using COSMOS_NETWORKS
     - station (str)
     - channel (str): Determined using COSMOS_ORIENTATIONS
     - location (str): Set to location index of sensor site at station.
            If not a multi-site array, default is '--'.
     - starttime (datetime)
     - duration (float)
     - sampling_rate (float)
     - delta (float)
     - npts (int)
     - coordinates:
       - latitude (float)
       - longitude (float)
       - elevation (float)
     - standard (Defaults are either np.nan or '')
       - horizontal_orientation (float): Rotation from north (degrees)
       - instrument_period (float): Period of sensor (Hz)
       - instrument_damping (float): Fraction of critical
       - process_time (datetime): Reported date of processing
       - process_level: Either 'V0', 'V1', 'V2', or 'V3'
       - station_name (str): Long form station description
       - sensor_serial_number (str): Reported sensor serial
       - instrument (str): See SENSOR_TYPES
       - comments (str): Processing comments
       - structure_type (str): See BUILDING_TYPES
       - corner_frequency (float): Sensor corner frequency (Hz)
       - units (str): See UNITS
       - source (str): Network source description
       - source_format (str): Always cosmos
     - format_specific
       - physical_units (str): See PHYSICAL_UNITS
       - v30 (float): Site geology V30 (km/s)
       - least_significant_bit: Recorder LSB in micro-volts (uv/count)
       - low_filter_type (str): Filter used for low frequency
             V2 filtering (see FILTERS)
       - low_filter_corner (float): Filter corner for low frequency
             V2 filtering (Hz)
       - low_filter_decay (float): Filter decay for low frequency
             V2 filtering (dB/octabe)
       - high_filter_type (str): Filter used for high frequency
             V2 filtering (see FILTERS)
       - high_filter_corner (float): Filter corner for high frequency
             V2 filtering (Hz)
       - high_filter_decay (float): Filter decay for high frequency
             V2 filtering (dB/octabe)
       - maximum (float): Maximum value
       - maximum_time (float): Time at which maximum occurs
       - station_code (int): Code for structure_type
       - record_flag (str): Either 'No problem', 'Fixed', 'Unfixed problem'.
             Should be described in more depth in comments.
       - scaling_factor (float): Scaling used for converting acceleration
             from g/10 to cm/sec/sec
       - sensor_sensitivity (float): Sensitvity in volts/g

    Args:
        int_data (ndarray): Array of integer data
        flt_data (ndarray): Array of float data
        lines (list): List of text headers (str)
        cmt_data (ndarray): Array of comments (str)
        location (str): Optional location code override; derived from the
            integer header when empty.

    Returns:
        dictionary: Dictionary of header/metadata information
    """
    hdr = {}
    coordinates = {}
    standard = {}
    format_specific = {}
    # Get unknown parameter number (the file's missing-value sentinel);
    # fall back to -999 when it cannot be parsed.
    try:
        unknown = int(lines[12][64:71])
    except ValueError:
        unknown = -999
    # required metadata
    network_num = int_data[10]
    # Get network from cosmos table or fdsn code sheet
    if network_num in COSMOS_NETWORKS:
        network = COSMOS_NETWORKS[network_num][0]
        source = COSMOS_NETWORKS[network_num][1]
        if network == '':
            network = COSMOS_NETWORKS[network_num][2]
    else:
        network_code = lines[4][25:27].upper()
        if network_code in CODES:
            network = network_code
            idx = np.argwhere(CODES == network_code)[0][0]
            source = SOURCES1[idx].decode(
                'utf-8') + ', ' + SOURCES2[idx].decode('utf-8')
        else:
            network = 'ZZ'
            source = ''
    hdr['network'] = network
    hdr['station'] = lines[4][28:34].strip()
    horizontal_angle = int_data[53]
    # Store delta and duration. Use them to calculate npts and sampling_rate
    # NOTE(review): when delta equals the unknown sentinel, 'sampling_rate'
    # is never set, yet get_channel_name() below reads it — this would raise
    # a KeyError. Confirm delta is always present in practice.
    delta = flt_data[33]
    if delta != unknown:
        hdr['delta'] = delta
        hdr['sampling_rate'] = 1 / delta
    # Determine the angle based upon the cosmos table
    # Set horizontal angles other than N,S,E,W to H1 and H2
    # Missing angle results in the channel number
    # NOTE(review): if horizontal_angle is known but matches neither
    # COSMOS_ORIENTATIONS nor the 0-360 range, 'channel' is never assigned
    # and the hdr['channel'] line below raises NameError — confirm whether
    # that case can occur.
    if horizontal_angle != unknown:
        if horizontal_angle in COSMOS_ORIENTATIONS:
            channel = COSMOS_ORIENTATIONS[horizontal_angle][1].upper()
            if channel == 'UP' or channel == 'DOWN' or channel == 'VERT':
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=True,
                                           is_north=False)
        elif horizontal_angle >= 0 and horizontal_angle <= 360:
            if (horizontal_angle > 315 or horizontal_angle < 45 or
                    (horizontal_angle > 135 and horizontal_angle < 225)):
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=True)
            else:
                channel = get_channel_name(hdr['sampling_rate'],
                                           is_acceleration=True,
                                           is_vertical=False,
                                           is_north=False)
        horizontal_orientation = horizontal_angle
    else:
        errstr = ('Not enough information to distinguish horizontal from '
                  'vertical channels.')
        raise AmptoolsException(errstr)
    hdr['channel'] = channel
    # Location defaults to the sensor-site index, zero-padded to two chars.
    if location == '':
        location = int_data[55]
        location = str(_check_assign(location, unknown, '--'))
        if len(location) < 2:
            location = location.zfill(2)
        hdr['location'] = location
    else:
        hdr['location'] = location
    year = int_data[39]
    month = int_data[41]
    day = int_data[42]
    hour = int_data[43]
    minute = int_data[44]
    second = flt_data[29]
    # If anything more than seconds is excluded
    # It is considered inadequate time information
    if second == unknown:
        try:
            hdr['starttime'] = datetime(year, month, day, hour, minute)
        except Exception:
            raise AmptoolsException('Inadequate start time information.')
    else:
        second = second  # NOTE(review): no-op self-assignment; likely vestigial
        microsecond = int((second - int(second)) * 1e6)
        try:
            hdr['starttime'] = datetime(year, month, day, hour, minute,
                                        int(second), microsecond)
        except Exception:
            raise AmptoolsException('Inadequate start time information.')
    duration = flt_data[34]
    if duration != unknown:
        hdr['duration'] = duration
    if duration != unknown and delta != unknown:
        hdr['npts'] = int(hdr['sampling_rate'] * duration)
    # coordinate information
    coordinates['latitude'] = flt_data[0]
    coordinates['longitude'] = flt_data[1]
    coordinates['elevation'] = flt_data[2]
    for key in coordinates:
        if coordinates[key] == unknown:
            warnings.warn('Missing %r. Setting to np.nan.' % key, Warning)
            coordinates[key] = np.nan
    hdr['coordinates'] = coordinates
    # standard metadata
    standard['source'] = source
    standard['horizontal_orientation'] = horizontal_orientation
    station_name = lines[4][40:-1].strip()
    standard['station_name'] = station_name
    # Period is the reciprocal of the reported natural frequency.
    instrument_frequency = flt_data[39]
    standard['instrument_period'] = 1.0 / _check_assign(
        instrument_frequency, unknown, np.nan)
    instrument_damping = flt_data[40]
    standard['instrument_damping'] = _check_assign(instrument_damping,
                                                   unknown, np.nan)
    # Process time may use '-' or '/' as the date delimiter; any parse
    # failure leaves it empty.
    process_line = lines[10][10:40]
    if process_line.find('-') >= 0 or process_line.find('/') >= 0:
        if process_line.find('-') >= 0:
            delimeter = '-'
        elif process_line.find('/') >= 0:
            delimeter = '/'
        try:
            date = process_line.split(delimeter)
            month = int(date[0][-2:])
            day = int(date[1])
            year = int(date[2][:4])
            time = process_line.split(':')
            hour = int(time[0][-2:])
            minute = int(time[1])
            second = float(time[2][:2])
            microsecond = int((second - int(second)) * 1e6)
            standard['process_time'] = datetime(year, month, day, hour,
                                                minute, int(second),
                                                microsecond)
        except Exception:
            standard['process_time'] = ''
    else:
        standard['process_time'] = ''
    # Numeric process level code maps to the V0..V3 labels.
    process_level = int_data[0]
    if process_level == 0:
        standard['process_level'] = 'V0'
    elif process_level == 1:
        standard['process_level'] = 'V1'
    elif process_level == 2:
        standard['process_level'] = 'V2'
    elif process_level == 3:
        standard['process_level'] = 'V3'
    else:
        standard['process_level'] = ''
    serial = int_data[52]
    if serial != unknown:
        standard['sensor_serial_number'] = str(
            _check_assign(serial, unknown, ''))
    instrument = int_data[51]
    if instrument != unknown and instrument in SENSOR_TYPES:
        standard['instrument'] = SENSOR_TYPES[instrument]
    else:
        # Fall back to the free-text instrument description on line 6.
        standard['instrument'] = lines[6][57:-1].strip()
    structure_type = int_data[18]
    if structure_type != unknown and structure_type in BUILDING_TYPES:
        standard['structure_type'] = BUILDING_TYPES[structure_type]
    else:
        standard['structure_type'] = ''
    frequency = flt_data[25]
    standard['corner_frequency'] = _check_assign(frequency, unknown, np.nan)
    # Units come from the units code when known; otherwise inferred from
    # the physical-parameter code.
    physical_parameter = int_data[2]
    units = int_data[1]
    if units != unknown and units in UNITS:
        standard['units'] = UNITS[units]
    else:
        if physical_parameter in [2, 4, 7, 10, 11, 12, 23]:
            standard['units'] = 'acc'
        elif physical_parameter in [5, 8, 24]:
            standard['units'] = 'vel'
        elif physical_parameter in [6, 9, 25]:
            standard['units'] = 'disp'
    standard['source_format'] = 'cosmos'
    standard['comments'] = ', '.join(cmt_data)
    # format specific metadata
    if physical_parameter in PHYSICAL_UNITS:
        physical_parameter = PHYSICAL_UNITS[physical_parameter][0]
    format_specific['physical_units'] = physical_parameter
    v30 = flt_data[3]
    format_specific['v30'] = _check_assign(v30, unknown, np.nan)
    least_significant_bit = flt_data[21]
    format_specific['least_significant_bit'] = _check_assign(
        least_significant_bit, unknown, np.nan)
    low_filter_type = int_data[60]
    if low_filter_type in FILTERS:
        format_specific['low_filter_type'] = FILTERS[low_filter_type]
    else:
        format_specific['low_filter_type'] = ''
    low_filter_corner = flt_data[53]
    format_specific['low_filter_corner'] = _check_assign(
        low_filter_corner, unknown, np.nan)
    low_filter_decay = flt_data[54]
    format_specific['low_filter_decay'] = _check_assign(
        low_filter_decay, unknown, np.nan)
    high_filter_type = int_data[61]
    if high_filter_type in FILTERS:
        format_specific['high_filter_type'] = FILTERS[high_filter_type]
    else:
        format_specific['high_filter_type'] = ''
    high_filter_corner = flt_data[56]
    format_specific['high_filter_corner'] = _check_assign(
        high_filter_corner, unknown, np.nan)
    high_filter_decay = flt_data[57]
    format_specific['high_filter_decay'] = _check_assign(
        high_filter_decay, unknown, np.nan)
    maximum = flt_data[63]
    format_specific['maximum'] = _check_assign(maximum, unknown, np.nan)
    maximum_time = flt_data[64]
    format_specific['maximum_time'] = _check_assign(maximum_time, unknown,
                                                    np.nan)
    format_specific['station_code'] = _check_assign(structure_type, unknown,
                                                    np.nan)
    record_flag = int_data[75]
    if record_flag == 0:
        format_specific['record_flag'] = 'No problem'
    elif record_flag == 1:
        format_specific['record_flag'] = 'Fixed'
    elif record_flag == 2:
        format_specific['record_flag'] = 'Unfixed problem'
    else:
        format_specific['record_flag'] = ''
    scaling_factor = flt_data[87]
    format_specific['scaling_factor'] = _check_assign(scaling_factor,
                                                      unknown, np.nan)
    scaling_factor = flt_data[41]
    format_specific['sensor_sensitivity'] = _check_assign(
        scaling_factor, unknown, np.nan)
    # Set dictionary
    hdr['standard'] = standard
    hdr['coordinates'] = coordinates
    hdr['format_specific'] = format_specific
    return hdr