def detect_format(filename):
    '''Sniff the leading bytes of *filename* for format detection.

    :param filename: path of the file to inspect
    :raises FileLoadError: if the file cannot be opened or read

    NOTE(review): this variant only reads the first 512 bytes; the data
    read is unused in the visible code (compare the fuller variant of
    ``detect_format`` elsewhere in this file).
    '''
    try:
        # ``with`` guarantees the handle is closed even if read() raises,
        # unlike the previous open()/read()/close() sequence.
        with open(filename, 'r') as f:
            data = f.read(512)
    except OSError as e:
        raise FileLoadError(e)
def iload(filename, load_data=True):
    '''Load traces from a Kan file (iterator version).

    :param filename: path of the file to read
    :param load_data: if True, sample data is read, otherwise only headers
    :raises FileLoadError: wrapping any OSError or KanError

    NOTE(review): ``load_data`` previously had no default; ``=True`` was
    added for consistency with the sibling ``iload`` implementations.
    '''
    try:
        kanf = KanFile(filename, load_data=load_data)
        tr = kanf.to_trace()
        yield tr
    except (OSError, KanError) as e:
        raise FileLoadError(e)
def iload(filename, load_data=True):
    '''Load traces from a SAC file (iterator version).

    :param filename: path of the file to read
    :param load_data: if True, sample data is read, otherwise only headers
    :raises FileLoadError: wrapping any OSError or SacError
    '''
    try:
        sacf = SacFile(filename, load_data=load_data)
        tr = sacf.to_trace()
        yield tr
    except (OSError, SacError) as e:
        # ``as`` syntax for consistency with the rest of the file
        raise FileLoadError(e)
def iload(filename, load_data=True):
    '''Load traces from a YAFF file (iterator version).

    :param filename: path of the file to read
    :param load_data: if True, sample data is read, otherwise only headers
    :raises FileLoadError: wrapping any OSError or FileError

    The file stays open while traces are being yielded and is closed when
    the generator finishes (previously the handle was never explicitly
    closed).
    '''
    try:
        with open(filename, 'r') as f:
            tf = TracesFileIO(f)
            for tr in tf.load(load_data=load_data):
                yield tr
    except (OSError, FileError) as e:
        raise FileLoadError(e)
def iload(filename, load_data=True):
    '''Load traces from a GSE2.0 file via the IMS reader (iterator version).

    :param filename: path of the file to read
    :param load_data: if True, sample data is read, otherwise only headers
    :raises FileLoadError: wrapping OSError or ims.DeserializeError; the
        offending filename is attached via ``set_context``
    '''
    from pyrocko import ims

    try:
        with open(filename, 'r') as f:
            r = ims.Reader(
                f, load_data=load_data, version='GSE2.0', dialect=None)

            # only waveform sections are converted to traces; checksum
            # mismatches are reported as warnings rather than errors
            for sec in r:
                if isinstance(sec, ims.WID2Section):
                    tr = sec.pyrocko_trace(checksum_error='warn')
                    yield tr

    except (OSError, ims.DeserializeError) as e:
        fle = FileLoadError(e)
        fle.set_context('filename', filename)
        raise fle
# --- fragment: beginning of the top-level ``iload`` dispatcher; the
# function continues beyond this chunk (``mtime`` is computed but not yet
# used in the visible code).
def iload(filename, format='mseed', getdata=True, substitutions=None):
    '''Load traces from file (iterator version).

    This function works like :py:func:`load`, but returns an iterator which
    yields the loaded traces.
    '''
    load_data = getdata

    # "format.subformat" notation is split here; a plain "format" string
    # leaves subformat as None
    toks = format.split('.', 1)
    if len(toks) == 2:
        format, subformat = toks
    else:
        subformat = None

    try:
        # index 8 of os.stat() is st_mtime (modification time)
        mtime = os.stat(filename)[8]
    except OSError, e:
        raise FileLoadError(e)
def _iload(filename, load_data=True, want=('traces', 'stations')):
    '''Iterate over the contents of a SUDS file.

    Yields station objects and/or trace objects, depending on *want*.

    :param filename: path of the SUDS file to read
    :param load_data: if True, sample data is read; otherwise traces are
        yielded with their data set to None
    :param want: which kinds of objects to yield ('traces', 'stations')
    :raises FileLoadError: wrapping any OSError or SudsError

    NOTE(review): the file handle is never explicitly closed; it is
    released only by garbage collection.
    '''
    try:
        f = open(filename, 'rb')
        while True:
            # every block starts with a struct tag; None signals clean EOF
            tag = read_suds_struct(f, SudsStructtag, end_ok=True)
            if tag is None:
                break

            if tag.struct_type in struct_classes:
                cls = struct_classes[tag.struct_type]
                # sanity check: length advertised in the tag must match the
                # size of the struct we are about to parse
                if tag.struct_length != struct.calcsize(cls.fmt):
                    raise SudsError(
                        'expected and reported struct lengths differ')

                s = read_suds_struct(f, cls)
                if isinstance(s, SudsStationcomp) and 'stations' in want:
                    station = s.to_station()
                    yield station

                if tag.data_length > 0:
                    if isinstance(s, SudsDescriptrace) and 'traces' in want:
                        if load_data:
                            data = f.read(tag.data_length)
                            if tag.data_length != len(data):
                                raise SudsError('premature end of file')

                            tr = s.to_trace(data)
                        else:
                            # headers only: skip over the payload
                            f.seek(tag.data_length, 1)
                            tr = s.to_trace(None)

                        yield tr
                    else:
                        # payload of a struct we don't convert: skip it
                        f.seek(tag.data_length, 1)
            else:
                logger.warn('skipping unsupported SUDS struct type %s (%s)' % (
                    tag.struct_type, struct_names.get(tag.struct_type, '?')))

                # skip the unsupported struct and any trailing payload
                f.seek(tag.struct_length, 1)

                if tag.data_length > 0:
                    f.seek(tag.data_length, 1)

    except (OSError, SudsError) as e:
        raise FileLoadError(e)
def detect_format(filename):
    '''Determine trace file format by sniffing the first 512 bytes.

    :param filename: path of the file to inspect
    :returns: short format name, e.g. ``'mseed'``, ``'sac'``, ``'yaff'``
    :raises FileLoadError: if the file cannot be read, or wrapping
        :py:exc:`UnknownFormat` if no detector matches
    '''
    try:
        # ``with`` ensures the handle is closed even if read() raises
        # (previously an exception in read() leaked the handle).
        with open(filename, 'r') as f:
            data = f.read(512)
    except OSError as e:
        raise FileLoadError(e)

    # ask each format module's detector in turn
    for mod, fmt in ((yaff, 'yaff'), (mseed, 'mseed'), (sac, 'sac'),
                     (gse1, 'gse1'), (gse2_io_wrap, 'gse2'),
                     (datacube, 'datacube')):
        if mod.detect(data):
            return fmt

    raise FileLoadError(UnknownFormat(filename))


# --- fragment: beginning of the ``iload`` dispatcher; the function
# continues beyond this chunk.
def iload(filename, format='mseed', getdata=True, substitutions=None):
    '''Load traces from file (iterator version).

    This function works like :py:func:`load`, but returns an iterator
    which yields the loaded traces.
    '''
    load_data = getdata

    # "format.subformat" notation is split here; a plain "format" string
    # leaves subformat as None
    toks = format.split('.', 1)
    if len(toks) == 2:
        format, subformat = toks
    else:
        subformat = None
# --- fragment: tail of an mseed ``iload`` implementation; the enclosing
# ``try``/loop structure begins before this chunk. Indentation below is
# reconstructed -- confirm against the full file.
                deltat = reuse(float(1.0) / float(tr[7]))
            except ZeroDivisionError, e:
                # sampling rate of zero: remember and skip this trace
                have_zero_rate_traces = True
                continue

            ydata = tr[8]
            traces.append(
                trace.Trace(network, station, location, channel, tmin, tmax,
                            deltat, ydata))

        for tr in traces:
            yield tr

    except (OSError, mseed_ext.MSeedError), e:
        raise FileLoadError(str(e) + ' (file: %s)' % filename)

    if have_zero_rate_traces:
        # presumably LOG/state-of-health channels -- they carry no usable
        # sampling rate
        logger.warn('Ignoring traces with sampling rate of zero in file %s '
                    '(maybe LOG traces)' % filename)


def as_tuple(tr):
    # Convert a trace object to the flat tuple layout used by mseed_ext:
    # times expressed as integer high-precision timestamps (HPTMODULUS
    # ticks), sampling rate in Hz, followed by the sample data.
    from pyrocko import mseed_ext
    itmin = int(round(tr.tmin * mseed_ext.HPTMODULUS))
    itmax = int(round(tr.tmax * mseed_ext.HPTMODULUS))
    srate = 1.0 / tr.deltat
    return (tr.network, tr.station, tr.location, tr.channel, itmin, itmax,
            srate, tr.get_ydata())
def iload(filename, load_data, endianness='>'):
    '''Read SEGY file (iterator version).

    :param filename: name of SEGY file
    :param load_data: if True, the data is read, otherwise only headers
    :param endianness: struct byte-order prefix, big-endian ``'>'`` default
    :raises FileLoadError: wrapping any OSError or SEGYError

    Yields one trace object per SEGY data trace.
    '''
    nbth = 3200   # textual file header size [bytes]
    nbbh = 400    # binary file header size [bytes]
    nbthx = 3200  # extended textual header size [bytes]
    nbtrh = 240   # trace header size [bytes]

    try:
        f = open(filename, 'rb')
    except OSError as e:
        raise FileLoadError(e)

    try:
        textual_file_header = f.read(nbth)
        if len(textual_file_header) != nbth:
            raise SEGYError('incomplete textual file header')

        binary_file_header = f.read(nbbh)
        if len(binary_file_header) != nbbh:
            raise SEGYError('incomplete binary file header')

        # line_number is a 1-tuple; '%05i' % tuple formats its element below
        line_number = struct.unpack(
            endianness+'1I', binary_file_header[4:8])

        hvals = struct.unpack(
            endianness+'24H', binary_file_header[12:12+24*2])

        (ntraces, nauxtraces, deltat_us, deltat_us_orig, nsamples,
            nsamples_orig, format, ensemble_fold) = hvals[0:8]

        (segy_revision, fixed_length_traces, nextended_headers) = \
            struct.unpack(endianness+'3H', binary_file_header[100:100+3*2])

        # sample format code -> (dtype or unpacker, bytes per sample, name)
        formats = {
            1: (unpack_ibm_f4, 4, "4-byte IBM floating-point"),
            2: (endianness+'i4', 4, "4-byte, two's complement integer"),
            # BUGFIX: dtype was 'i4' but a 2-byte integer is 'i2'
            3: (endianness+'i2', 2, "2-byte, two's complement integer"),
            4: (None, 4, "4-byte fixed-point with gain (obolete)"),
            5: ('f4', 4, "4-byte IEEE floating-point"),
            6: (None, 0, "not currently used"),
            7: (None, 0, "not currently used"),
            8: ('i1', 1, "1-byte, two's complement integer")}

        dtype = formats[format][0]
        sample_size = formats[format][1]
        if dtype is None:
            raise SEGYError('unsupported sample data format %i: %s' % (
                format, formats[format][2]))

        # skip extended textual headers
        for ihead in xrange(nextended_headers):
            f.read(nbthx)

        atend = False
        while not atend:
            for itrace in xrange((ntraces+nauxtraces)):
                trace_header = f.read(nbtrh)
                if len(trace_header) == 0:
                    # clean end of file between trace headers
                    atend = True
                    break

                if len(trace_header) != nbtrh:
                    raise SEGYError('incomplete trace header')

                # BUGFIX: struct formats '4f4'/'2f4' are invalid (trailing
                # repeat count raises struct.error); '4f'/'2f' match the
                # 16-byte and 8-byte slices being unpacked.
                (scoordx, scoordy, gcoordx, gcoordy) = \
                    struct.unpack(endianness+'4f', trace_header[72:72+4*4])

                (ensemblex, ensembley) = \
                    struct.unpack(endianness+'2f', trace_header[180:180+2*4])
                (ensemble_num,) = \
                    struct.unpack(endianness+'1I', trace_header[20:24])
                (trensemble_num,) = \
                    struct.unpack(endianness+'1I', trace_header[24:28])
                (trace_number,) = \
                    struct.unpack(endianness+'1I', trace_header[0:4])
                (trace_numbersegy,) = \
                    struct.unpack(endianness+'1I', trace_header[4:8])
                (orfield_num,) = \
                    struct.unpack(endianness+'1I', trace_header[8:12])
                (ortrace_num,) = \
                    struct.unpack(endianness+'1I', trace_header[12:16])

                tvals = struct.unpack(
                    endianness+'12H', trace_header[94:94+12*2])

                (nsamples_this, deltat_us_this) = tvals[-2:]

                # scalar applied to the preceding header values; negative
                # values act as divisors, zero means "no scaling"
                tscalar = struct.unpack(
                    endianness+'1H', trace_header[214:216])[0]

                if tscalar == 0:
                    tscalar = 1.
                elif tscalar < 0:
                    tscalar = 1.0 / tscalar
                else:
                    tscalar = float(tscalar)

                tvals = [x * tscalar for x in tvals[:-2]]

                (year, doy, hour, minute, second) = \
                    struct.unpack(endianness+'5H', trace_header[156:156+2*5])

                try:
                    # two-digit years are assumed to be 20xx
                    if year < 100:
                        year += 2000
                    tmin = calendar.timegm(
                        (year, 1, doy, hour, minute, second))
                except Exception:
                    # BUGFIX: was a bare ``except:`` which also swallowed
                    # KeyboardInterrupt/SystemExit
                    raise SEGYError('invalid start date/time')

                if fixed_length_traces:
                    if (nsamples_this, deltat_us_this) \
                            != (nsamples, deltat_us):
                        # BUGFIX: '%' binds tighter than '+', so the
                        # original "... % itrace+1" raised TypeError
                        raise SEGYError(
                            'trace of incorrect length or sampling '
                            'rate (trace=%i)' % (itrace+1))

                if load_data:
                    datablock = f.read(nsamples_this*sample_size)
                    if len(datablock) != nsamples_this*sample_size:
                        raise SEGYError('incomplete trace data')

                    if isinstance(dtype, basestring):
                        data = num.fromstring(datablock, dtype=dtype)
                    else:
                        data = dtype(datablock)

                    tmax = None
                else:
                    # headers only: seek past the samples and compute the
                    # end time from the header instead
                    f.seek(nsamples_this*sample_size, 1)
                    tmax = tmin + deltat_us_this/1000000.*(nsamples_this-1)
                    data = None

                tr = trace.Trace(
                    '', '%05i' % (line_number), '%02i' % (ensemble_num),
                    '%03i' % (ortrace_num),
                    tmin=tmin, tmax=tmax,
                    deltat=deltat_us_this/1000000.,
                    ydata=data,
                    meta=dict(orfield_num=orfield_num))

                yield tr

    except (OSError, SEGYError) as e:
        raise FileLoadError(e)
    finally:
        # BUGFIX: close the file even when parsing fails or the consumer
        # abandons the generator; previously only closed on success
        f.close()
def read_header(f, endianness='>'):
    '''Read and decode a 16-byte GCF block header from file object *f*.

    Returns a :py:class:`Header` describing either a status/information
    block (sample rate 0.0) or a data block.

    :param f: open binary file object positioned at a block boundary
    :param endianness: struct byte-order prefix, big-endian by default
    :raises FileLoadError: on malformed or unexpected header fields
    :raises GCFLoadError: on an unsupported compression code
    '''
    e = endianness
    data = read(f, 16, eof_ok=True)

    isystem_id, istream_id = struct.unpack(e + 'II', data[:8])

    # top bit set marks an "extended" system id carrying instrument type
    # and gain in the upper bits
    ex = isystem_id & 0x80000000
    if ex:
        ex2 = isystem_id & (1 << 30)
        if ex2:
            # doubly-extended: only the low 21 bits hold the system id
            system_id = util.base36encode(isystem_id & (2**21 - 1))
        else:
            system_id = util.base36encode(isystem_id & (2**26 - 1))

        instrument_type = (isystem_id >> 26) & 0b1
        gain = [None, 1, 2, 4, 8, 16, 32, 64][(isystem_id >> 27) & 0b111]
    else:
        system_id = util.base36encode(isystem_id)
        instrument_type = None
        gain = None

    stream_id = util.base36encode(istream_id)

    # packed time: upper 15 bits are days, lower 17 bits seconds of day
    i_day_second = struct.unpack(e + 'I', data[8:12])[0]
    iday = i_day_second >> 17
    isecond = i_day_second & 0x1ffff
    # guralp_zero is presumably the epoch offset of the GCF day count --
    # confirm against its definition elsewhere in this module
    time = (iday * 24 * 60 * 60) + guralp_zero + isecond

    ittl, israte, compression, nrecords = struct.unpack(e + 'BBBB', data[12:])
    if nrecords > 250:
        raise FileLoadError('Header indicates too many records in block.')

    if israte == 0:
        # sample rate zero: this is one of the non-waveform block types,
        # identified by the last two characters of the stream id
        if compression == 4 and stream_id[-2:] == '00':
            block_type = 'status_block'
        elif compression == 4 and stream_id[-2:] == '01':
            block_type = 'unified_status_block'
        elif compression == 4 and stream_id[-2:] == 'SM':
            block_type = 'strong_motion_block'
        elif stream_id[-2:] == 'CD':
            block_type = 'cd_status_block'
        elif compression == 4 and stream_id[-2:] == 'BP':
            block_type = 'byte_pipe_block'
        elif stream_id[-2:] == 'IB':
            block_type = 'information_block'
        else:
            raise FileLoadError('Unexpected block type found.')

        return Header(block_type, system_id, stream_id, instrument_type,
                      time, gain, ittl, 0.0, compression, nrecords)

    else:
        block_type = 'data_block'

        # data stream ids end in component + orientation/number codes
        if not re.match(r'^([ZNEXC][0-9A-CG-S]|M[0-9A-F])$', stream_id[-2:]):
            raise FileLoadError('Unexpected data stream ID')

        # special rate codes: (actual rate [Hz], time-offset denominator)
        sample_rate_tab = {
            157: (0.1, None),
            161: (0.125, None),
            162: (0.2, None),
            164: (0.25, None),
            167: (0.5, None),
            171: (400., 8),
            174: (500., 2),
            176: (1000., 4),
            179: (2000., 8),
            181: (4000., 16)}

        if israte in sample_rate_tab:
            sample_rate, tfod = sample_rate_tab[israte]
        else:
            sample_rate = float(israte)
            tfod = None

        if tfod is not None:
            # for high sample rates the upper nibble of the compression
            # byte encodes a fractional-second start-time offset
            toff = (compression >> 4) / tfod
            compression = compression & 0b1111
            time += toff

        if compression not in (1, 2, 4):
            raise GCFLoadError(
                'Unsupported compression code: %i' % compression)

        return Header(block_type, system_id, stream_id, instrument_type,
                      time, gain, ittl, sample_rate, compression, nrecords)
# --- fragment: tail of a second mseed ``iload`` variant; the enclosing
# ``try``/loop structure begins before this chunk. Indentation below is
# reconstructed -- confirm against the full file.
                deltat = reuse(float(1.0) / float(tr[7]))
            except ZeroDivisionError, e:
                # sampling rate of zero: remember and skip this trace
                have_zero_rate_traces = True
                continue

            ydata = tr[8]
            traces.append(
                trace.Trace(network, station, location, channel, tmin, tmax,
                            deltat, ydata))

        for tr in traces:
            yield tr

    except (OSError, mseed_ext.MSeedError), e:
        raise FileLoadError(str(e))

    if have_zero_rate_traces:
        # presumably LOG/state-of-health channels -- they carry no usable
        # sampling rate
        logger.warn(
            'Ignoring traces with sampling rate of zero in file %s (maybe LOG traces)' % filename)


def as_tuple(tr):
    # Convert a trace object to the flat tuple layout used by mseed_ext:
    # times expressed as integer high-precision timestamps (HPTMODULUS
    # ticks), sampling rate in Hz, followed by the sample data.
    from pyrocko import mseed_ext
    itmin = int(round(tr.tmin * mseed_ext.HPTMODULUS))
    itmax = int(round(tr.tmax * mseed_ext.HPTMODULUS))
    srate = 1.0 / tr.deltat
    return (tr.network, tr.station, tr.location, tr.channel, itmin, itmax,
            srate, tr.get_ydata())
# --- fragment: tail of the SEISAN ``iload`` implementation; the enclosing
# function, its loops and the start of the trace constructor call begin
# before this chunk. Indentation below is reconstructed -- confirm against
# the full file.
                    loc, cha, tmin=tmin, tmax=tmax, deltat=deltat,
                    ydata=data)

                yield t

            except util.UnpackError, e:
                raise SeisanFileError(
                    'Error loading trace %i from file %s: %s' % (
                        itrace, filename, str(e)))

            itrace += 1

    except EOF:
        # EOF is the normal termination signal of the record reader
        pass

    except (OSError, SeisanFileError), e:
        raise FileLoadError(e)

    finally:
        # the file handle is always closed, even on error
        f.close()


if __name__ == '__main__':
    # simple command-line smoke test: print all traces of the given file
    fn = sys.argv[1]
    for tr in iload(fn):
        print tr