def test_valid_sac_from_minimal_existing_sac_header(self):
    """
    An incomplete manually-produced SAC header should still produce a
    valid SAC file, including values from the ObsPy header. Issue 1204.
    """
    reftime = UTCDateTime()
    tr = Trace(np.arange(100))
    tr.stats.starttime = reftime
    # ObsPy-level station metadata that must survive the round trip.
    for key, value in (('station', 'AAA'), ('network', 'XX'),
                       ('channel', 'BHZ'), ('location', '00')):
        setattr(tr.stats, key, value)
    # A deliberately minimal, manually built SAC header.
    sac_fields = {'iztype': 9, 'nvhdr': 6, 'leven': 1, 'lovrok': 1,
                  'iftype': 1, 'stla': 1.0, 'stlo': 2.0}
    tr.stats.sac = AttribDict(sac_fields)

    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            tr.write(tempfile, format='SAC')
        # Exactly one warning, complaining about the missing reftime.
        self.assertEqual(len(w), 1)
        self.assertIn('reftime', str(w[-1].message))
        tr1 = read(tempfile)[0]

        # starttime made its way into the SAC reference-time headers.
        nztimes, microsecond = utcdatetime_to_sac_nztimes(reftime)
        for field in ('nzyear', 'nzjday', 'nzhour', 'nzmin', 'nzsec',
                      'nzmsec'):
            self.assertEqual(tr1.stats.sac[field], nztimes[field])
        # ObsPy metadata was mapped onto the SAC string headers.
        for field, expected in (('kstnm', 'AAA'), ('knetwk', 'XX'),
                                ('kcmpnm', 'BHZ'), ('khole', '00')):
            self.assertEqual(tr1.stats.sac[field], expected)
        # The manually supplied header values came back unchanged.
        for field, expected in sac_fields.items():
            self.assertEqual(tr1.stats.sac[field], expected)
# NOTE(review): this method is an exact duplicate of the
# ``test_valid_sac_from_minimal_existing_sac_header`` defined immediately
# before it.  At class-creation time this second definition shadows the
# first, so only one copy is ever collected and run by the test runner.
# One of the two definitions should be deleted.
def test_valid_sac_from_minimal_existing_sac_header(self):
    """
    An incomplete manually-produced SAC header should still produce a
    valid SAC file, including values from the ObsPy header. Issue 1204.
    """
    tr = Trace(np.arange(100))
    t = UTCDateTime()
    tr.stats.starttime = t
    tr.stats.station = 'AAA'
    tr.stats.network = 'XX'
    tr.stats.channel = 'BHZ'
    tr.stats.location = '00'
    # Minimal, manually built SAC header (no reference time fields).
    tr.stats.sac = AttribDict()
    tr.stats.sac.iztype = 9
    tr.stats.sac.nvhdr = 6
    tr.stats.sac.leven = 1
    tr.stats.sac.lovrok = 1
    tr.stats.sac.iftype = 1
    tr.stats.sac.stla = 1.
    tr.stats.sac.stlo = 2.
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            tr.write(tempfile, format='SAC')
        # Writing must warn exactly once about the missing reftime.
        self.assertEqual(len(w), 1)
        self.assertIn('reftime', str(w[-1].message))
        tr1 = read(tempfile)[0]
        # starttime made its way to SAC file
        nztimes, microsecond = utcdatetime_to_sac_nztimes(t)
        self.assertEqual(tr1.stats.sac.nzyear, nztimes['nzyear'])
        self.assertEqual(tr1.stats.sac.nzjday, nztimes['nzjday'])
        self.assertEqual(tr1.stats.sac.nzhour, nztimes['nzhour'])
        self.assertEqual(tr1.stats.sac.nzmin, nztimes['nzmin'])
        self.assertEqual(tr1.stats.sac.nzsec, nztimes['nzsec'])
        self.assertEqual(tr1.stats.sac.nzmsec, nztimes['nzmsec'])
        # ObsPy metadata was mapped onto the SAC string headers.
        self.assertEqual(tr1.stats.sac.kstnm, 'AAA')
        self.assertEqual(tr1.stats.sac.knetwk, 'XX')
        self.assertEqual(tr1.stats.sac.kcmpnm, 'BHZ')
        self.assertEqual(tr1.stats.sac.khole, '00')
        # Manually supplied header values survived the round trip.
        self.assertEqual(tr1.stats.sac.iztype, 9)
        self.assertEqual(tr1.stats.sac.nvhdr, 6)
        self.assertEqual(tr1.stats.sac.leven, 1)
        self.assertEqual(tr1.stats.sac.lovrok, 1)
        self.assertEqual(tr1.stats.sac.iftype, 1)
        self.assertEqual(tr1.stats.sac.stla, 1.0)
        self.assertEqual(tr1.stats.sac.stlo, 2.0)
def _validate_and_write_waveforms(
    st, starttime, endtime, scale, source, receiver, db, label, format
):
    """
    Validate extracted waveforms against the request window, attach SAC
    metadata, and serialize them.

    :param st: Stream with the extracted seismograms.
    :param starttime: Requested start time (used for trimming and as the
        SAC reference time).
    :param endtime: Requested end time.
    :param scale: Scalar amplitude factor applied to every trace.
    :param source: Event description; a ``FiniteSource`` or a point
        source (possibly a ``ForceSource``).
    :param receiver: Receiver with latitude/longitude/depth.
    :param db: Instaseis database object (used for provenance headers).
    :param label: Optional filename prefix for the SAC archive members.
    :param format: Either ``"miniseed"`` or ``"saczip"`` (checked).

    Returns a 2-tuple.  On success: ``(payload, mu)`` where ``payload``
    is raw MiniSEED bytes for ``"miniseed"`` or a list of
    ``(filename, bytes)`` pairs for ``"saczip"``.  On an internal
    consistency failure an ``(HTTPError, None)`` tuple is *returned*
    (not raised) — callers must check for it.
    """
    if not label:
        label = ""
    else:
        label += "_"

    for tr in st:
        # Half the filesize but definitely sufficiently accurate.
        tr.data = np.require(tr.data, dtype=np.float32)

    if scale != 1.0:
        for tr in st:
            tr.data *= scale

    # Sanity checks. Raise internal server errors in case something fails.
    # This should not happen and should have been caught before.
    if endtime > st[0].stats.endtime:
        msg = (
            "Endtime larger than the extracted endtime: endtime=%s, "
            "largest db endtime=%s"
            % (
                _format_utc_datetime(endtime),
                _format_utc_datetime(st[0].stats.endtime),
            )
        )
        return tornado.web.HTTPError(500, log_message=msg, reason=msg), None
    if starttime < st[0].stats.starttime - 3600.0:
        msg = (
            "Starttime more than one hour before the starttime of the "
            "seismograms."
        )
        return tornado.web.HTTPError(500, log_message=msg, reason=msg), None

    # A finite source has no single shear modulus at the source.
    if isinstance(source, FiniteSource):
        mu = None
    else:
        mu = st[0].stats.instaseis.mu

    # Trim, potentially pad with zeroes.
    st.trim(starttime, endtime, pad=True, fill_value=0.0, nearest_sample=False)

    # Checked in another function and just a sanity check.
    assert format in ("miniseed", "saczip")

    if format == "miniseed":
        with io.BytesIO() as fh:
            st.write(fh, format="mseed")
            fh.seek(0, 0)
            binary_data = fh.read()
        return binary_data, mu
    # Write a number of SAC files into an archive.
    elif format == "saczip":
        byte_strings = []
        for tr in st:
            # Write SAC headers.
            tr.stats.sac = obspy.core.AttribDict()
            # Write WGS84 coordinates to the SAC files.
            tr.stats.sac.stla = geocentric_to_elliptic_latitude(
                receiver.latitude
            )
            tr.stats.sac.stlo = receiver.longitude
            tr.stats.sac.stdp = receiver.depth_in_m
            tr.stats.sac.stel = 0.0
            if isinstance(source, FiniteSource):
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.hypocenter_latitude
                )
                tr.stats.sac.evlo = source.hypocenter_longitude
                tr.stats.sac.evdp = source.hypocenter_depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.hypocenter_latitude
                src_lng = source.hypocenter_longitude
            else:
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.latitude
                )
                tr.stats.sac.evlo = source.longitude
                tr.stats.sac.evdp = source.depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.latitude
                src_lng = source.longitude
            # Thats what SPECFEM uses for a moment magnitude....
            tr.stats.sac.imagtyp = 55
            # The event origin time relative to the reference which I'll
            # just assume to be the starttime here?
            tr.stats.sac.o = source.origin_time - starttime
            # Sac coordinates are elliptical thus it only makes sense to
            # have elliptical distances.
            dist_in_m, az, baz = gps2dist_azimuth(
                lat1=tr.stats.sac.evla,
                lon1=tr.stats.sac.evlo,
                lat2=tr.stats.sac.stla,
                lon2=tr.stats.sac.stlo,
            )
            tr.stats.sac.dist = dist_in_m / 1000.0
            tr.stats.sac.az = az
            tr.stats.sac.baz = baz
            # XXX: Is this correct? Maybe better use some function in
            # geographiclib?
            tr.stats.sac.gcarc = locations2degrees(
                lat1=src_lat,
                long1=src_lng,
                lat2=receiver.latitude,
                long2=receiver.longitude,
            )
            # Set two more headers. See #45.
            tr.stats.sac.lpspol = 1
            tr.stats.sac.lcalda = 0
            # Add cmpinc and cmpaz headers.
            #
            # From the SAC format manual:
            # CMPAZ: Component azimuth (degrees clockwise from north).
            # CMPINC: Component incident angle (degrees from vertical).
            #
            # I guess "degrees from vertical" means degrees from vertical up.
            # So the vertical channel would have a CMPINC of 0 and all
            # others a CMPINC of 90. This is different from the dip used in
            # SEED.
            _c = tr.stats.channel[-1]
            # Special case handling for the green's function route. Don't
            # assign it here as we don't operate in geographical coordinates.
            # NOTE(review): a 10-trace stream is assumed to identify that
            # route — presumably the ten Green's function components;
            # confirm against the caller.
            if len(st) == 10:
                pass
            elif _c == "Z":
                tr.stats.sac.cmpinc = 0.0
                # Zero seems reasonable.
                tr.stats.sac.cmpaz = 0.0
            # Explicitly handle the other cases to not run into surprises.
            elif _c in ["E", "N", "R", "T"]:
                tr.stats.sac.cmpinc = 90.0
                if _c == "E":
                    tr.stats.sac.cmpaz = 90.0
                elif _c == "N":
                    tr.stats.sac.cmpaz = 0.0
                elif _c == "R":
                    tr.stats.sac.cmpaz = (baz - 180.0) % 360.0
                elif _c == "T":
                    tr.stats.sac.cmpaz = (baz - 90.0) % 360.0
                # Cannot really happen
                else:  # pragma: no cover
                    raise NotImplementedError
            else:  # pragma: no cover
                raise NotImplementedError
            # Some provenance.
            tr.stats.sac.kuser0 = "InstSeis"
            tr.stats.sac.kuser1 = db.info.velocity_model[:8]
            tr.stats.sac.user0 = scale
            # Prefix version numbers to identify them at a glance.
            tr.stats.sac.kt7 = "A" + db.info.axisem_version[:7]
            tr.stats.sac.kt8 = "I" + __version__[:7]
            # Times have to be set by hand.
            t, _ = utcdatetime_to_sac_nztimes(tr.stats.starttime)
            for key, value in t.items():
                tr.stats.sac[key] = value
            with io.BytesIO() as temp:
                tr.write(temp, format="sac")
                temp.seek(0, 0)
                filename = "%s%s.sac" % (label, tr.id)
                byte_strings.append((filename, temp.read()))
        return byte_strings, mu
def _validate_and_write_waveforms(st, callback, starttime, endtime, scale,
                                  source, receiver, db, label, format):
    """
    Validate extracted waveforms against the request window, attach SAC
    metadata, serialize them, and hand the result to ``callback``.

    Callback-style variant: instead of returning, it always invokes
    ``callback`` exactly once with a 2-tuple — ``(payload, mu)`` on
    success (MiniSEED bytes or a list of ``(filename, bytes)`` pairs),
    or ``(tornado.web.HTTPError, None)`` on an internal consistency
    failure.  Unlike the tuple-return variant, this one does not write
    CMPINC/CMPAZ headers.

    :param st: Stream with the extracted seismograms.
    :param callback: Callable receiving the result tuple.
    :param starttime: Requested start time (trimming / SAC reference).
    :param endtime: Requested end time.
    :param scale: Scalar amplitude factor applied to every trace.
    :param source: ``FiniteSource`` or point source (possibly a
        ``ForceSource``).
    :param receiver: Receiver with latitude/longitude/depth.
    :param db: Instaseis database object (provenance headers).
    :param label: Optional filename prefix for SAC archive members.
    :param format: Either ``"miniseed"`` or ``"saczip"`` (checked).
    """
    if not label:
        label = ""
    else:
        label += "_"

    for tr in st:
        # Half the filesize but definitely sufficiently accurate.
        tr.data = np.require(tr.data, dtype=np.float32)

    if scale != 1.0:
        for tr in st:
            tr.data *= scale

    # Sanity checks. Raise internal server errors in case something fails.
    # This should not happen and should have been caught before.
    if endtime > st[0].stats.endtime:
        msg = ("Endtime larger than the extracted endtime: endtime=%s, "
               "largest db endtime=%s" % (
                   _format_utc_datetime(endtime),
                   _format_utc_datetime(st[0].stats.endtime)))
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return
    if starttime < st[0].stats.starttime - 3600.0:
        msg = ("Starttime more than one hour before the starttime of the "
               "seismograms.")
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return

    # A finite source has no single shear modulus at the source.
    if isinstance(source, FiniteSource):
        mu = None
    else:
        mu = st[0].stats.instaseis.mu

    # Trim, potentially pad with zeroes.
    st.trim(starttime, endtime, pad=True, fill_value=0.0,
            nearest_sample=False)

    # Checked in another function and just a sanity check.
    assert format in ("miniseed", "saczip")

    if format == "miniseed":
        with io.BytesIO() as fh:
            st.write(fh, format="mseed")
            fh.seek(0, 0)
            binary_data = fh.read()
        callback((binary_data, mu))
    # Write a number of SAC files into an archive.
    elif format == "saczip":
        byte_strings = []
        for tr in st:
            # Write SAC headers.
            tr.stats.sac = obspy.core.AttribDict()
            # Write WGS84 coordinates to the SAC files.
            tr.stats.sac.stla = geocentric_to_elliptic_latitude(
                receiver.latitude)
            tr.stats.sac.stlo = receiver.longitude
            tr.stats.sac.stdp = receiver.depth_in_m
            tr.stats.sac.stel = 0.0
            if isinstance(source, FiniteSource):
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.hypocenter_latitude)
                tr.stats.sac.evlo = source.hypocenter_longitude
                tr.stats.sac.evdp = source.hypocenter_depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.hypocenter_latitude
                src_lng = source.hypocenter_longitude
            else:
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.latitude)
                tr.stats.sac.evlo = source.longitude
                tr.stats.sac.evdp = source.depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.latitude
                src_lng = source.longitude
            # Thats what SPECFEM uses for a moment magnitude....
            tr.stats.sac.imagtyp = 55
            # The event origin time relative to the reference which I'll
            # just assume to be the starttime here?
            tr.stats.sac.o = source.origin_time - starttime
            # Sac coordinates are elliptical thus it only makes sense to
            # have elliptical distances.
            dist_in_m, az, baz = gps2dist_azimuth(
                lat1=tr.stats.sac.evla, lon1=tr.stats.sac.evlo,
                lat2=tr.stats.sac.stla, lon2=tr.stats.sac.stlo)
            tr.stats.sac.dist = dist_in_m / 1000.0
            tr.stats.sac.az = az
            tr.stats.sac.baz = baz
            # XXX: Is this correct? Maybe better use some function in
            # geographiclib?
            tr.stats.sac.gcarc = locations2degrees(
                lat1=src_lat, long1=src_lng,
                lat2=receiver.latitude, long2=receiver.longitude)
            # Set two more headers. See #45.
            tr.stats.sac.lpspol = 1
            tr.stats.sac.lcalda = 0
            # Some provenance.
            tr.stats.sac.kuser0 = "InstSeis"
            tr.stats.sac.kuser1 = db.info.velocity_model[:8]
            tr.stats.sac.user0 = scale
            # Prefix version numbers to identify them at a glance.
            tr.stats.sac.kt7 = "A" + db.info.axisem_version[:7]
            tr.stats.sac.kt8 = "I" + __version__[:7]
            # Times have to be set by hand.
            t, _ = utcdatetime_to_sac_nztimes(tr.stats.starttime)
            for key, value in t.items():
                tr.stats.sac[key] = value
            with io.BytesIO() as temp:
                tr.write(temp, format="sac")
                temp.seek(0, 0)
                filename = "%s%s.sac" % (label, tr.id)
                byte_strings.append((filename, temp.read()))
        callback((byte_strings, mu))