Example #1
def get_tensor(cmt_file=None, quake_file=None, timedelta=None):
    """From a cmt tensor file we get tensor information in a dictionary.
    
    :param cmt_file: location of text file to be used
    :param timedelta: difference between O.T. and run time
    :type cmt_file: string, optional
    :type timedelta: float, optional
    """
    if cmt_file:
        tensor_info = read_gcmt_file(cmt_file)
        tensor_info = modify_tensor(tensor_info)
        delta = UTCDateTime.utcnow() - tensor_info['date_origin']
        tensor_info['timedelta'] = delta  # seconds; UTCDateTime subtraction yields a float
    if quake_file:
        tensor_info = read_quake_file(quake_file)
        tensor_info = modify_tensor(tensor_info)
        delta = UTCDateTime.utcnow() - tensor_info['date_origin']
        tensor_info['timedelta'] = delta  # seconds, as above
    if not cmt_file and not quake_file:
        if not os.path.isfile('tensor_info.json'):
            raise RuntimeError('No file named tensor_info.json located in '
                               'folder {}'.format(os.getcwd()))
        with open('tensor_info.json') as f:
            tensor_info = json.load(f)
        tensor_info = modify_tensor(tensor_info)
    return tensor_info
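A brief usage sketch, assuming get_tensor and its helpers (read_gcmt_file, modify_tensor) above are importable; the CMT file name is hypothetical. It also shows the fact the code relies on: subtracting two UTCDateTime objects yields the elapsed time as a plain float of seconds.

from obspy import UTCDateTime

# Hypothetical file name; substitute a real CMT text file.
tensor_info = get_tensor(cmt_file='CMTSOLUTION.txt')
print('Seconds since origin:', tensor_info['timedelta'])

# UTCDateTime arithmetic works in plain seconds:
origin = UTCDateTime('2016-11-13T11:02:56')
print(UTCDateTime.utcnow() - origin)  # float seconds elapsed since origin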
Example #2
def make_SA(config, ot):
    """Build a minimal ShakeAlert event_message XML string for playback.

    :param config: dict providing 'lat', 'lon', and 'dep' values
    :param ot: event origin time (e.g. an obspy UTCDateTime)
    """
    # Placeholder values for the synthetic playback event
    evid = '1111'
    mag = 6.0
    lat_uncer = .05
    lon_uncer = .05
    mag_uncer = .1
    dep_uncer = 5
    time_uncer = 1
    version = 1
    timestamp = UTCDateTime.utcnow()

    lat = float(config['lat'])
    lon = float(config['lon'])
    dep = float(config['dep'])

    SA = '<?xml version="1.0" encoding="UTF-8" standalone="no" ?>\n'
    SA += '<event_message alg_vers="3.1.4-2018-11-08" category="live" ' \
          'instance="epic@eew-bk-prod1" message_type="update" orig_sys="PDL" ref_id="0" ref_src="" ' \
          'timestamp="%s" version="%s">\n' % (timestamp, version)
    SA += '  <core_info id="%s">\n' % evid
    SA += '    <mag units="Mw">%f</mag>\n' % mag
    SA += '    <mag_uncer units="Mw">%f</mag_uncer>\n' % mag_uncer
    SA += '    <lat units="deg">%f</lat>\n' % lat
    SA += '    <lat_uncer units="deg">%f</lat_uncer>\n' % lat_uncer
    SA += '    <lon units="deg">%f</lon>\n' % lon
    SA += '    <lon_uncer units="deg">%f</lon_uncer>\n' % lon_uncer

    SA += '    <depth units="km">%f</depth>\n' % dep
    SA += '    <depth_uncer units="km">%f</depth_uncer>\n' % dep_uncer

    SA += '    <orig_time units="UTC">%s</orig_time>\n' % ot
    SA += '    <orig_time_uncer units="UTC">%s</orig_time_uncer>\n' % time_uncer
    #SA += '    <likelihood>1.0000</likelihood>\n'
    #SA += '    <num_stations>%d</num_stations>\n' % num_stations
    SA += '  </core_info>\n'
    SA += '</event_message>'

    return SA
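A quick check of the generator above, assuming make_SA is in scope; the coordinates below are made up for illustration. Parsing the result back with ElementTree confirms the message is well-formed.

import xml.etree.ElementTree as ET
from obspy import UTCDateTime

config = {'lat': '-42.69', 'lon': '173.02', 'dep': '15.0'}  # hypothetical event
xml_text = make_SA(config, UTCDateTime.utcnow())

root = ET.fromstring(xml_text)
core = root.find('core_info')
print(core.find('mag').text, core.find('orig_time').text)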
Example #3
def main():
    '''
        Simple script that rewrites a playback event's ShakeAlert SA.xml with
        time = now and starts tankplayer to release tankplayer packets onto
        the wave_ring.
    '''

    params_dir = os.environ.get('EW_PARAMS')
    cwd = os.getcwd()

    if params_dir is None:
        print("You must source an EW environment before running this!")
        sys.exit(2)

    usage = "python start_it.py kaikoura (or some other event in known_events)"
    if len(sys.argv) != 2 or sys.argv[1] not in known_events:
        print(usage)
        sys.exit(2)

    event = sys.argv[1]
    event_path = os.path.join('test_data', event)
    configFile = os.path.join(event_path, 'config.yml')

    with open(configFile, 'r') as ymlfile:
        config = yaml.load(ymlfile, Loader=yaml.FullLoader)

    offset_time = float(config['offset_time'])
    tnkfile = config['tankfile']
    SAfile = os.path.join(event_path, os.path.basename(config['SA_file']))

    # Wait this many seconds before dropping the SA.xml file in the events dir to trigger gfast
    delay_trigger = offset_time + TIME_FOR_EEW_SOLUTION

    # Where SA.xml file will be dropped:
    gfast_events_dir = os.path.join(GFAST_DIR, event, 'events')

    # Copy tankplayer.d template to EW_PARAMS/tankplayer.d.gfast with WaveFile set to find this tankfile
    path = os.path.join(cwd, event_path)
    tnkfile = os.path.join(path, tnkfile)

    tankplayer_file = os.path.join(params_dir, 'tankplayer.d.gfast')

    template = 'resources/tankplayer.d.template'
    with open(template, 'r') as f:
        lines = f.readlines()
    for i, line in enumerate(lines):
        if line.startswith('WaveFile'):
            lines[i] = line.replace('WaveFile', 'WaveFile %s' % tnkfile)

    with open(tankplayer_file, 'w') as f:
        f.writelines(lines)

    # Build a fresh SA.xml with make_SA() (rather than editing the original
    # SA file in place with ElementTree) and drop it in the GFAST events dir.
    # The tankplayer stamps the first packets to NOW, so shift the origin
    # time by offset_time relative to the actual first packet time:
    timestamp = UTCDateTime.utcnow()
    otime = timestamp + offset_time
    print("         Now time:%s" % timestamp)
    print("Stamp origin time:%s" % otime)
    print("    delay trigger:%.2f secs" % delay_trigger)
    SA = make_SA(config, otime)

    os.makedirs('tmp', exist_ok=True)  # make sure the scratch dir exists
    with open('tmp/SA.xml', 'w') as f:
        f.write(SA)

    # Start tankplayer
    thread = myThread(1, "Thread-1", 1, tankplayer_file)
    thread.start()


    time.sleep(delay_trigger)
    print("*********** STOP SLEEPING AND DROP SA.xml FILE *********")
    shutil.copy('tmp/SA.xml', gfast_events_dir)

    return
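A self-contained sketch of the WaveFile substitution step above; the template content and paths are made up for illustration, and real tankplayer.d templates carry more keys.

import os
import tempfile

template_text = 'MsgType      TYPE_TRACEBUF2\nWaveFile\nSendLate     5.0\n'
tnkfile = '/data/test_data/kaikoura/kaikoura.tnk'  # hypothetical tank file

lines = template_text.splitlines(keepends=True)
for i, line in enumerate(lines):
    if line.startswith('WaveFile'):
        # Keep the rest of the line (here, the newline) intact
        lines[i] = line.replace('WaveFile', 'WaveFile %s' % tnkfile)

out = os.path.join(tempfile.gettempdir(), 'tankplayer.d.gfast')
with open(out, 'w') as f:
    f.writelines(lines)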
Example #4
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Uses the event information already stored on the fetcher (the best
        match returned by getMatchingEvents()).

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Bail out if FDSNFetcher not configured
        if 'FDSNFetcher' not in self.config['fetchers']:
            return
        rawdir = self.rawdir
        if rawdir is None:
            rawdir = tempfile.mkdtemp()
        elif not os.path.isdir(rawdir):
            os.makedirs(rawdir)

        # use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The Obspy mass downloader has its own logger - grab that stream
        # and write it to our own log file
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        min_dist = self.minimum_interstation_distance_in_m
        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace shorter than minimum_length times the desired total
            # duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=min_dist,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use default initialization.
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                fdsn_config = self.config['fetchers']['FDSNFetcher']
                if provider_str in fdsn_config:
                    client = Client(
                        provider_str,
                        user=fdsn_config[provider_str]['user'],
                        password=fdsn_config[provider_str]['password'])
                else:
                    client = Client(provider_str)
                client_list.append(client)
            # If the FDSN service is down, an FDSNException is raised
            except (FDSNException, KeyError):
                logging.warning('Unable to initialize client %s' % provider_str)

        if len(client_list):
            # Pass off the initialized clients to the Mass Downloader
            mdl = MassDownloader(providers=client_list)

            logging.info('Downloading new MiniSEED files...')
            # The data will be downloaded to rawdir with automatically
            # chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)

            seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
            streams = []
            for seed_file in seed_files:
                try:
                    tstreams = read_obspy(seed_file, self.config)
                except Exception as e:
                    tstreams = None
                    fmt = 'Could not read seed file %s - "%s"'
                    logging.info(fmt % (seed_file, str(e)))
                if tstreams is None:
                    continue
                else:
                    streams += tstreams

            stream_collection = StreamCollection(
                streams=streams, drop_non_free=self.drop_non_free)
            return stream_collection
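For reference, a minimal standalone version of the mass-download pattern used above; the epicenter, time window, and channel priorities are placeholders, but the calls follow ObsPy's documented mass_downloader API.

from obspy import UTCDateTime
from obspy.clients.fdsn.mass_downloader import (
    CircularDomain, MassDownloader, Restrictions)

origin_time = UTCDateTime(2016, 11, 13, 11, 2, 56)  # hypothetical origin
domain = CircularDomain(latitude=-42.69, longitude=173.02,
                        minradius=0.0, maxradius=2.0)

restrictions = Restrictions(
    starttime=origin_time - 30,    # seconds of data before the origin
    endtime=origin_time + 300,     # seconds of data after the origin
    reject_channels_with_gaps=True,
    minimum_length=0.95,           # discard traces shorter than 95% of the window
    channel_priorities=['HN[ZNE]', 'BN[ZNE]'])

mdl = MassDownloader()             # defaults to all known FDSN providers
mdl.download(domain, restrictions,
             mseed_storage='waveforms', stationxml_storage='stations')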
Example #5
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Uses the event information already stored on the fetcher (the best
        match returned by getMatchingEvents()).

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Bail out if FDSNFetcher not configured
        if "FDSNFetcher" not in self.config["fetchers"]:
            return

        fdsn_conf = self.config["fetchers"]["FDSNFetcher"]
        rawdir = self.rawdir

        if rawdir is None:
            rawdir = tempfile.mkdtemp()
        elif not os.path.isdir(rawdir):
            os.makedirs(rawdir)

        # use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The Obspy mass downloader has its own logger - grab that stream
        # and write it to our own log file
        root = logging.getLogger()  # needed later even if the ObsPy logger is absent
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            try:
                obspy_stream_handler = obspy_logger.handlers[0]
                obspy_logger.removeHandler(obspy_stream_handler)
            except IndexError:
                pass

            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter. Copy the config sub-dicts so
        # that pop() below does not mutate self.config between calls.
        if fdsn_conf["domain"]["type"] == "circular":
            dconf = dict(fdsn_conf["domain"]["circular"])
            if dconf["use_epicenter"]:
                dconf["latitude"] = self.lat
                dconf["longitude"] = self.lon
            dconf.pop("use_epicenter")
            domain = CircularDomain(**dconf)
        elif fdsn_conf["domain"]["type"] == "rectangular":
            dconf = dict(fdsn_conf["domain"]["rectangular"])
            domain = RectangularDomain(**dconf)
        else:
            raise ValueError(
                'Domain type must be either "circular" or "rectangular".')

        # Copy here as well, so pop() leaves the config untouched.
        rconf = dict(fdsn_conf["restrictions"])
        rconf["starttime"] = origin_time - rconf.pop("time_before")
        rconf["endtime"] = origin_time + rconf.pop("time_after")

        restrictions = Restrictions(**rconf)

        # For each provider, check if a username and password are provided in
        # the config. If so, initialize the client with them; otherwise, use
        # the default initialization.
        providers = dict(URL_MAPPINGS)  # copy so the shared mapping is not mutated
        if "IRISPH5" in providers:
            del providers["IRISPH5"]

        client_list = []
        for provider_str in providers.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                if provider_str in fdsn_conf:
                    if logging.getLevelName(root.level) == "DEBUG":
                        client = Client(
                            provider_str,
                            user=fdsn_conf[provider_str]["user"],
                            password=fdsn_conf[provider_str]["password"],
                            debug=True,
                        )
                    else:
                        client = Client(
                            provider_str,
                            user=fdsn_conf[provider_str]["user"],
                            password=fdsn_conf[provider_str]["password"],
                        )
                else:
                    if logging.getLevelName(root.level) == "DEBUG":
                        client = Client(provider_str, debug=True)
                    else:
                        client = Client(provider_str)

                client_list.append(client)
            # If the FDSN service is down, an FDSNException is raised
            except (FDSNException, KeyError):
                logging.warning(f"Unable to initialize client {provider_str}")

        if len(client_list):
            # Pass off the initialized clients to the Mass Downloader
            if logging.getLevelName(root.level) == "DEBUG":
                log_file = None
                for handler in root.handlers:
                    if hasattr(handler, "baseFilename"):
                        log_file = getattr(handler, "baseFilename")
                if log_file is not None:
                    # Route the downloader's stdout chatter to the log file
                    sys.stdout = open(log_file, "a")
                mdl = MassDownloader(providers=client_list, debug=True)
            else:
                try:
                    # Need to turn off built-in logging for ObsPy>=1.3.0
                    mdl = MassDownloader(providers=client_list,
                                         configure_logging=False)
                except TypeError:
                    # For ObsPy<1.3.0 the configure_logging parameter doesn't exist
                    mdl = MassDownloader(providers=client_list)

            logging.info("Downloading new MiniSEED files...")
            # The data will be downloaded to rawdir with automatically
            # chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)
            # Only close and restore stdout if it was redirected above
            if sys.stdout is not sys.__stdout__:
                sys.stdout.close()
                sys.stdout = sys.__stdout__

            if self.stream_collection:
                seed_files = glob.glob(os.path.join(rawdir, "*.mseed"))
                streams = []
                for seed_file in seed_files:
                    try:
                        tstreams = read_obspy(seed_file, self.config)
                    except Exception as e:
                        tstreams = None
                        fmt = 'Could not read seed file %s - "%s"'
                        logging.info(fmt % (seed_file, str(e)))
                    if tstreams is None:
                        continue
                    else:
                        streams += tstreams

                stream_collection = StreamCollection(
                    streams=streams, drop_non_free=self.drop_non_free)
                return stream_collection
            else:
                return None
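The GeoNet branch above routes recent events to the real-time service and older ones to the archive; below is a standalone sketch of that decision, with hypothetical constants standing in for the module's GEONET_ARCHIVE_DAYS and URL definitions.

from obspy import UTCDateTime

GEONET_ARCHIVE_DAYS = 7 * 86400  # assumed threshold, in seconds
GEONET_ARCHIVE_URL = 'http://service.geonet.org.nz'       # assumed
GEONET_REALTIME_URL = 'http://service-nrt.geonet.org.nz'  # assumed

def pick_geonet_provider(event_time):
    """Return the GeoNet FDSN base URL appropriate for an event's age."""
    age_seconds = UTCDateTime.utcnow() - UTCDateTime(event_time)
    if age_seconds < GEONET_ARCHIVE_DAYS:
        return GEONET_REALTIME_URL
    return GEONET_ARCHIVE_URL

print(pick_geonet_provider('2016-11-13T11:02:56'))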