Beispiel #1
0
def kmean(Config, inputCentroid, FilterMeta, counter, Folder, Origin, flag):
    """One iteration of the k-means style station clustering.

    Assigns stations to the given centroids, unassigns members farther
    than 'initialstationdistance' from their centroid, recomputes the
    centroids and recurses.

    NOTE(review): this function never returns normally -- both exit
    paths call endcheck() followed by sys.exit(), terminating the whole
    process.
    """

    counter += 1
    Logfile.add('COUNTER ' + str(counter) + ' CUTOFF ' + Config['cutoff'])

    cfg = ConfigObj(dict=Config)

    # current assignment of stations to the input centroids
    scl = stationBelongTocluster(Config, inputCentroid, FilterMeta)

    # Unassign (member = -1) any station lying farther than the
    # configured 'initialstationdistance' from its centroid.
    # acounter tracks the 1-based centroid index in step with 'a'.
    acounter = 1
    for a in inputCentroid:
        for i in scl:
            if acounter == i.member:
                delta = loc2degrees(i, a)
                if delta > cfg.Float('initialstationdistance'):
                    i.member = -1

        acounter += 1

    # Hard stop once the configured number of iterations is reached.
    if counter == cfg.UInt('cutoff'):
        endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
        sys.exit()

    nsc = calculateclusterCentre(Config, scl)
    t = compareclusterCentre(inputCentroid, nsc, Config)

    Logfile.add('ITERATIONSTEP: ---> ' + str(counter) +
                ' <-----------------------------')

    # NOTE(review): 't' is never recomputed inside this loop, so the
    # loop alone cannot terminate -- it relies on the recursive call
    # eventually hitting the cutoff above and calling sys.exit().
    # Confirm that an 'if' was not intended here.
    while t < cfg.UInt('maxcluster'):
        Logfile.add('number of arrays in KMEAN: ' + str(t))
        kmean(Config, nsc, FilterMeta, counter, Folder, Origin, flag)

    endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
    sys.exit()
Beispiel #2
0
def calculateclusterCentre(Config, clusterStationList):
    """Compute the centroid (mean lat/lon) of every cluster.

    For each cluster id 1..maxcluster, the mean latitude/longitude of
    its member stations becomes a new Centroid.  A cluster with no
    members yields the placeholder Centroid(0.0, 0.0, -1).
    """
    cfg = ConfigObj(dict=Config)
    centroids = []

    for cluster_id in range(1, cfg.Int('maxcluster') + 1):
        # collect the stations assigned to this cluster
        members = [s for s in clusterStationList if cluster_id == s.member]

        if not members:
            # empty cluster -> placeholder centroid with rank -1
            centroids.append(Centroid(0.0, 0.0, -1))
        else:
            mean_lat = sum(float(s.lat) for s in members) / len(members)
            mean_lon = sum(float(s.lon) for s in members) / len(members)
            centroids.append(Centroid(mean_lat, mean_lon, cluster_id))

    return centroids
def filterBestSolution(solution):
    """Read the station file of *solution* and report intra-cluster
    distance statistics for cluster '8'.

    Parses <solution.path>/event.stations (lines of
    "net.sta.loc.comp lat lon member"), logs the number of distinct
    clusters and stations, then, for every station of cluster '8',
    counts how many other cluster-8 stations lie closer than the
    configured 'intraclusterdistance' and tracks the overall maximum
    separation found.

    NOTE(review): the cluster id '8' is hard-coded -- confirm intended.
    """
    evp = os.path.join('/', *solution.path.split('/')[:-2])
    C = Config(evp)
    Conf = C.parseConfig('config')
    cfg = ConfigObj(dict=Conf)

    SL = []
    M = []

    # 'with' replaces the original open()/close() pair so the file is
    # closed even if parsing raises.
    with open(os.path.join(solution.path, 'event.stations'), 'r') as fobj:
        for s in fobj:
            try:
                line = s.split()
                net, sta, loc, comp = line[0].split('.')

                slat = line[1]
                slon = line[2]
                smember = line[3]

                M.append(smember)
                SL.append(Station(net, sta, loc, comp,
                                  lat=slat, lon=slon, member=smember))
            except Exception:
                # narrowed from a bare 'except:' so SystemExit and
                # KeyboardInterrupt are no longer swallowed
                Logfile.exception('filterBestSolution', '<' + s + '>')
                continue

    M = list(set(M))  # distinct cluster ids

    Logfile.add('number of clusters ' + str(len(M)),
                 'number of stations ' + str(len(SL)))

    kd = obs_kilometer2degrees(cfg.Distance('intraclusterdistance'))
    Logfile.add('icd ' + str(kd))

    maxdist = -1

    for i in SL:
        counter = 0

        # hoisted: the membership test on 'i' is invariant over the
        # inner loop (the original re-tested it for every pair)
        if i.member == '8':
            for k in SL:
                if k.member == '8' and i.getName() != k.getName():
                    delta = loc2degrees(i, k)

                    if delta > maxdist:
                        maxdist = delta

                    if delta < kd:
                        counter += 1

        print(i, 'less then allowd ', counter)

    print('masxdist ', maxdist)
Beispiel #4
0
def checkStationAroundInitialCentroid(station, Config, StationMetaList):
    """Count the stations in StationMetaList that lie closer than the
    configured 'initialstationdistance' (degrees) to *station*."""
    cfg = ConfigObj(dict=Config)
    max_delta = cfg.Float('initialstationdistance')

    return sum(1 for meta in StationMetaList
               if loc2degrees(station, meta) < max_delta)
Beispiel #5
0
def deleteFarStations(CentroidList, StationclusterList, Config):
    """Remove stations lying farther than 'stationdistance' (degrees)
    from their assigned centroid.

    Stations whose distance to the centroid matching their member id
    exceeds the configured limit are first marked (member = -1) and
    then removed.  The list is modified in place and also returned.
    """
    cfg = ConfigObj(dict=Config)
    stationdistance = int(cfg.Distance('stationdistance'))

    # Mark stations that are too far from their assigned centroid.
    for centroid in CentroidList:
        for station in StationclusterList:
            if centroid.rank == station.member:
                if loc2degrees(centroid, station) > stationdistance:
                    station.member = -1

    # BUG FIX: the original deleted items while enumerating the same
    # list, which skips the element following every deletion (marked
    # stations could survive).  Rebuild the list in place instead.
    StationclusterList[:] = [s for s in StationclusterList
                             if s.member != -1]

    return StationclusterList
    def readWaveformsPicker_pyrocko(self, station, tw, Origin, ttime, cfg_yaml):
        """Load the pyrocko-downloaded trace for *station*, filter it and
        trim it to the cross-correlation window *tw*.

        Depending on the configured quantity, rotated (falling back to
        restituted) displacement traces or velocity traces are read from
        the event's data directory.  Returns the trimmed obspy stream,
        or None when no trace matches the station.
        """

        obspy_compat.plant()
        cfg = ConfigObj(dict=self.Config)
        if cfg_yaml.config_data.quantity == 'displacement':
            # Prefer rotated traces, fall back to restituted ones.
            # NOTE(review): the bare 'except:' also hides unrelated
            # errors (e.g. a corrupt rotated file) -- confirm intended.
            try:
                traces = io.load(self.EventPath+'/data/traces_rotated.mseed')
            except:
                traces = io.load(self.EventPath+'/data/traces_restituted.mseed')
        else:
            traces = io.load(self.EventPath+'/data/traces_velocity.mseed')
        for tr in traces:
            tr_name = str(tr.network+'.'+tr.station+'.'+tr.location+'.'
                                    + tr.channel[:3])
            # match the station name with and without its trailing
            # two characters (component suffix)
            if tr_name == str(station)[:-2] or tr_name == str(station)[:]:
                traces_station = tr
                es = obspy_compat.to_obspy_trace(traces_station)

                st = obspy.Stream()
                st.extend([es])
                stream = ''

                # '--' is a placeholder for an empty location code
                if station.loc == '--':
                    station.loc = ''

                # close any gaps before filtering
                if len(st.get_gaps()) > 0:
                    st.merge(method=0, fill_value='interpolate',
                             interpolation_samples=0)
                stream = self.filterWaveform(st, cfg_yaml)

                stream.trim(tw['xcorrstart'], tw['xcorrend'])
                return stream

        else:
            # for-else: reached only when no trace matched; no action
            pass
Beispiel #7
0
    def readWaveformsCross_colesseo(self, station, tw, ttime):
        """Load the colosseo scenario trace for *station* for use in
        cross correlation.

        Returns (stream, snr): the filtered stream trimmed to the
        cross-correlation window, and the variance of the pre-arrival
        window (presumably a noise/SNR proxy -- confirm).  Implicitly
        returns None when no trace matches the station.  The full
        filtered stream is also written to <AF>/<streamData>_all.mseed.
        """
        obspy_compat.plant()
        Config = self.Config
        cfg = ConfigObj(dict=Config)
        t2 = UTCDateTime(self.Origin.time)

        traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')

        for tr in traces:
            tr_name = str(tr.network + '.' + tr.station + '.' + tr.location +
                          '.' + tr.channel[:3])
            if tr_name == str(station):
                traces_station = tr

                es = obspy_compat.to_obspy_trace(traces_station)
                # output file stem: net.sta.loc.comp.D.year.julianday
                streamData = station.net + '.' + station.sta + '.'\
                                         + station.loc + '.'\
                                         + station.comp + '.D.'\
                                         + str(t2.year) + '.'\
                                         + str("%03d" % t2.julday)

                st = obspy.Stream()
                st.extend([es])
                stream = ''
                snr = ''

                # '--' is a placeholder for an empty location code
                if station.loc == '--':
                    station.loc = ''

                # close any gaps before filtering
                if len(st.get_gaps()) > 0:
                    st.merge(method=0,
                             fill_value='interpolate',
                             interpolation_samples=0)
                # window from trace start until 20 s before the
                # predicted arrival time 'ttime'
                snr_trace = traces_station.chop(tmin=traces_station.tmin,
                                                tmax=traces_station.tmin +
                                                ttime - 20.,
                                                inplace=False)
                snr = num.var(snr_trace.ydata)
                stream = self.filterWaveform(st)

                xname = os.path.join(self.AF, (streamData + '_all.mseed'))
                stream.write(xname, format='MSEED')
                stream.trim(tw['xcorrstart'], tw['xcorrend'])
                return stream, snr

            else:
                pass
Beispiel #8
0
def filterStations(StationList, Config, Origin):
    """Return new Station objects for every station whose distance from
    the origin lies strictly within the configured (mindist, maxdist)
    range (degrees)."""
    cfg = ConfigObj(dict=Config)

    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = DataTypes.dictToLocation(Origin)

    Logfile.red('Filter stations with configured parameters')

    kept = []
    for candidate in StationList:
        delta = loc2degrees(origin, candidate)
        if minDist < delta < maxDist:
            kept.append(Station(candidate.net, candidate.sta, candidate.loc,
                                candidate.comp, candidate.lat, candidate.lon,
                                candidate.ele, candidate.dip, candidate.azi,
                                candidate.gain))

    Logfile.red('%d STATIONS LEFT IN LIST' % len(kept))
    return kept
    def traveltimes(self, phase, traces, cfg_yaml):
        """Compute theoretical arrival times for *phase* at every station
        and collect the corresponding waveform windows.

        For each station the cake travel-time model is queried; if no
        arrival is found, a slightly shallower source depth is retried
        before giving up.  Returns (Wdict, SNR): per-station waveform
        windows and signal-to-noise values, keyed by station name.

        Raises an Exception when a station yields no arrival at all
        (ptime stays 0).
        """

        Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
        Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n ')
        T = []
        Wdict = OrderedDict()
        SNR = OrderedDict()
        Config = self.Config
        cfg = ConfigObj(dict=Config)

        for i in self.StationMeta:
            Logfile.red('read in %s ' % (i))
            # epicentral distance origin -> station, in degrees
            de = loc2degrees(self.Origin, i)
            Phase = cake.PhaseDef(phase)
            traveltime_model = cfg_yaml.config.traveltime_model
            path = palantiri.__path__
            model = cake.load_model(path[0]+'/data/'+traveltime_model)
            # colesseo scenarios store depth in km already; otherwise
            # convert with the km factor
            if cfg_yaml.config_data.colesseo_input is True:
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=self.Origin.depth, zstop=0.)
            else:
                arrivals = model.arrivals([de, de], phases=Phase,
                                          zstart=self.Origin.depth*km,
                                          zstop=0.)
            try:
                ptime = arrivals[0].t
            except Exception:
                # no arrival at the nominal depth: retry slightly
                # shallower (2.1 m above) before falling back to 0
                try:
                    arrivals = model.arrivals([de, de], phases=Phase,
                                              zstart=self.Origin.depth*km-2.1)
                    ptime = arrivals[0].t
                except Exception:
                    ptime = 0
            T.append(ptime)
            if ptime == 0:
                Logfile.red('Available phases for station %s in\
                            range %f deegree' % (i, de))
                Logfile.red('you tried phase %s' % (phase))
                raise Exception("ILLEGAL: phase definition")
            else:
                tw = self.calculateTimeWindows(ptime)
                # dispatch on the configured data source
                if cfg_yaml.config_data.pyrocko_download is True:
                    w, snr, found = self.readWaveformsCross_pyrocko(i, tw,
                                                                    ptime,
                                                                    traces,
                                                                    cfg_yaml)
                elif cfg_yaml.config_data.colesseo_input is True:
                    w, snr = self.readWaveformsCross_colesseo(i, tw, ptime,
                                                              cfg_yaml)
                else:
                    w, snr = self.readWaveformsCross(i, tw, ptime, cfg_yaml)
                Wdict[i.getName()] = w
                SNR[i.getName()] = snr

            Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++ ')

        Logfile.red('Exit AUTOMATIC FILTER ')
        return Wdict, SNR
Beispiel #10
0
def readWaveforms_colesseo(stationlist, w, EventPath, Origin, C):
    """Load the colosseo scenario traces and return them as obspy
    streams keyed by station name.

    Every trace whose net.sta.loc.cha name matches a station in
    *stationlist* is converted to an obspy Stream and stored under that
    station's name.
    """
    Wdict = OrderedDict()
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    traces_dict = []
    traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')

    for trace in traces:
        for station in stationlist:
            name = str(trace.network + '.' + trace.station + '.' +
                       trace.location + '.' + trace.channel[:3])
            if name != str(station):
                continue
            stream = obspy.Stream()
            stream.extend([obspy_compat.to_obspy_trace(trace)])
            traces_dict.append(trace)
            Wdict[station.getName()] = stream

    return Wdict
Beispiel #11
0
def semblance(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY, mint,
              new_frequence, minSampleCount, latv_1, lonv_1, traveltime_1,
              trace_1, calcStreamMap, time, Config, Origin, refshifts, nstats,
              bs_weights=None, flag_rpe=False):
    """Dispatch to the semblance back-end selected by the configuration.

    Reads 'dynamic_filter', 'bp_freq', 'bp_coh', 'dimz' and 'bp_music'
    from Config and forwards all arguments to the matching
    implementation; the first matching option wins.  Returns whatever
    the selected implementation returns.
    """
    cfg = ConfigObj(dict=Config)
    origin = OriginCfg(Origin)
    cfg_f = FilterCfg(Config)

    if cfg.Bool('dynamic_filter') is False:
        if cfg.Bool('bp_freq') is True:
            return semblance_py_freq(ncpus, nostat, nsamp, ntimes, nstep,
                                     dimX, dimY, mint, new_frequence,
                                     minSampleCount, latv_1, lonv_1,
                                     traveltime_1, trace_1, calcStreamMap,
                                     time, cfg, refshifts, nstats,
                                     bs_weights=bs_weights)
        if cfg.Bool('bp_coh') is True:
            return semblance_py_coherence(ncpus, nostat, nsamp, ntimes, nstep,
                                          dimX, dimY, mint, new_frequence,
                                          minSampleCount, latv_1, lonv_1,
                                          traveltime_1, trace_1,
                                          calcStreamMap, time, cfg, refshifts,
                                          nstats, bs_weights=bs_weights)
        if cfg.Int('dimz') != 0:
            # 3-D (cube) grid search; nstats is not forwarded here,
            # matching the original call signature
            return semblance_py_cube(ncpus, nostat, nsamp, ntimes, nstep,
                                     dimX, dimY, mint, new_frequence,
                                     minSampleCount, latv_1, lonv_1,
                                     traveltime_1, trace_1, calcStreamMap,
                                     time, cfg, refshifts,
                                     bs_weights=bs_weights)
        if cfg.Bool('bp_music') is True:
            return music_wrapper(ncpus, nostat, nsamp, ntimes, nstep, dimX,
                                 dimY, mint, new_frequence, minSampleCount,
                                 latv_1, lonv_1, traveltime_1, trace_1,
                                 calcStreamMap, time, cfg, refshifts,
                                 nstats,
                                 bs_weights=bs_weights)

        # BUG FIX: the original branched on flag_rpe but both branches
        # made the identical semblance_py call (flag_rpe=flag_rpe in
        # both); the redundant if/else is collapsed into one return.
        return semblance_py(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY,
                            mint, new_frequence, minSampleCount, latv_1,
                            lonv_1, traveltime_1, trace_1, calcStreamMap,
                            time, cfg, refshifts, nstats,
                            bs_weights=bs_weights, flag_rpe=flag_rpe)
    else:
        return semblance_py_dynamic_cf(ncpus, nostat, nsamp, ntimes, nstep,
                                       dimX, dimY, mint, new_frequence,
                                       minSampleCount, latv_1, lonv_1,
                                       traveltime_1, trace_1, calcStreamMap,
                                       time, origin, cfg_f)
Beispiel #12
0
    def readWaveformsPicker_colos(self, station, tw, Origin, ttime):
        """Load the colosseo scenario waveform for *station*, filter it
        and trim it to the cross-correlation window *tw*.

        Returns the trimmed obspy stream, or None when no trace in the
        scenario file matches the station.
        """
        obspy_compat.plant()
        Config = self.Config
        cfg = ConfigObj(dict=Config)

        traces = io.load(cfg.colosseo_scenario_yml()[:-12] + 'scenario.mseed')

        # BUG FIX: the original iterated the traces twice -- the first
        # loop only computed tr_name and discarded it. The dead loop
        # (and the no-op for-else at the end) have been removed.
        for tr in traces:
            tr_name = str(tr.network + '.' + tr.station + '.' + tr.location +
                          '.' + tr.channel[:3])
            if tr_name == str(station):
                traces_station = tr

                es = obspy_compat.to_obspy_trace(traces_station)

                st = obspy.Stream()
                st.extend([es])
                stream = ''

                # '--' is a placeholder for an empty location code
                if station.loc == '--':
                    station.loc = ''

                # close any gaps before filtering
                if len(st.get_gaps()) > 0:
                    st.merge(method=0,
                             fill_value='interpolate',
                             interpolation_samples=0)
                stream = self.filterWaveform(st)

                stream.trim(tw['xcorrstart'], tw['xcorrend'])
                return stream
Beispiel #13
0
def addOK(station, stationList, Config, MetaList):
    """Decide whether *station* may be added to the cluster.

    Returns 1 when the station is farther than minDist (0 degrees) from
    every station in stationList AND has at least
    'minstationaroundinitialcluster' stations of MetaList nearby;
    returns 0 otherwise (also for an empty stationList).
    """
    cfg = ConfigObj(dict=Config)
    minDist = 0
    minAround = cfg.UInt('minstationaroundinitialcluster')

    # Hoisted out of the loop: this count depends only on (station,
    # Config, MetaList), yet the original recomputed it for every
    # element of stationList.
    aroundcounter = checkStationAroundInitialCentroid(station, Config,
                                                      MetaList)

    for i in stationList:
        sdelta = loc2degrees(station, i)

        # too close to an existing station -> reject
        if sdelta <= minDist:
            return 0

        # not enough neighbouring stations -> reject
        if aroundcounter < minAround:
            return 0

    # all checks passed for a non-empty list; empty list yields 0
    return 1 if stationList else 0
Beispiel #14
0
    def readWaveformsCross_pyrocko(self, station, tw, ttime, traces):
        """Find the pyrocko trace matching *station*, filter it and trim
        it to the cross-correlation window *tw*.

        Returns (stream, snr, found) on success; the full filtered
        stream is also written to <AF>/<streamData>_all.mseed.  When no
        trace matches, a diagnostic is printed and None is returned
        implicitly.
        """
        obspy_compat.plant()

        cfg = ConfigObj(dict=self.Config)

        t2 = UTCDateTime(self.Origin.time)

        found = False

        for tr in traces:
            tr_name = str(tr.network + '.' + tr.station + '.' + tr.location +
                          '.' + tr.channel[:3])
            # match with and without the trailing component characters
            if tr_name == str(station)[:-2] or tr_name == str(station)[:]:
                traces_station = tr
                es = obspy_compat.to_obspy_trace(traces_station)
                # output file stem: net.sta.loc.comp.D.year.julianday
                streamData = station.net + '.' + station.sta + '.'\
                                         + station.loc + '.'\
                                         + station.comp\
                                         + '.D.'\
                                         + str(t2.year) + '.'\
                                         + str("%03d" % t2.julday)

                st = obspy.Stream()
                st.extend([es])
                stream = ''
                snr = ''
                # '--' is a placeholder for an empty location code
                if station.loc == '--':
                    station.loc = ''

                # close any gaps before filtering
                if len(st.get_gaps()) > 0:
                    st.merge(method=0,
                             fill_value='interpolate',
                             interpolation_samples=0)
                # window from trace start until 20 s before the
                # predicted arrival -- noise estimate for the SNR
                snr_trace = traces_station.chop(tmin=traces_station.tmin,
                                                tmax=traces_station.tmin +
                                                ttime - 20.,
                                                inplace=False)
                snr = num.var(snr_trace.ydata)
                stream = self.filterWaveform(st)

                xname = os.path.join(self.AF, (streamData + '_all.mseed'))
                stream.write(xname, format='MSEED')
                stream.trim(tw['xcorrstart'], tw['xcorrend'])
                found = True
                return stream, snr, found
        # NOTE(review): if 'traces' is empty, tr_name is unbound here
        # and this print raises NameError -- confirm traces is never
        # empty or guard the reference.
        if found is False:
            print('Waveform missing!', tr_name, str(station))
Beispiel #15
0
def filterStations(StationList, Config, Origin, network, cfg_yaml):
    """Return the unique stations from StationList that match an entry
    of *network* and lie strictly within the configured
    (minDist, maxDist) range from the origin (degrees)."""
    selected = []

    cfg = ConfigObj(dict=Config)
    minDist = cfg_yaml.config_cluster.minDist
    maxDist = cfg_yaml.config_cluster.maxDist
    origin = Location(Origin['lat'], Origin['lon'])
    Logfile.red('Filter stations with configured parameters...')

    for net_entry in network:
        for meta in StationList:
            # match the component name with and without its trailing
            # two characters
            matches = (str(meta.getcmpName()[:-2]) == str(net_entry) or
                       str(meta.getcmpName()[:]) == str(net_entry))
            if not matches:
                continue

            delta = loc2degrees(origin, Location(meta.lat, meta.lon))
            if minDist < delta < maxDist:
                candidate = Station(meta.net, meta.sta, meta.loc, meta.comp,
                                    meta.lat, meta.lon, meta.ele, meta.dip,
                                    meta.azi, meta.gain)
                # avoid duplicates when several network entries match
                if candidate not in selected:
                    selected.append(candidate)

    Logfile.red('%d STATIONS LEFT IN LIST' % len(selected))

    return selected
Beispiel #16
0
    def process(self):
        """Read the event configuration, load station metadata from the
        configured source, filter the stations and run the k-means
        clustering.  Always returns True.
        """

        # start time; also passed to km() as its last argument
        t = time.time()
        C = config.Config(self.eventpath)

        Config = C.parseConfig('config')
        cfg = ConfigObj(dict=Config)
        Origin = C.parseConfig('origin')
        # Station metadata source: pyrocko download, colosseo scenario,
        # or a plain meta-info file.
        if cfg.pyrocko_download() is True:
            if cfg.quantity() == 'displacement':
                disp = True
            else:
                disp = False
            Meta = readpyrockostations(self.eventpath, disp, cfg)
        elif cfg.colesseo_input() is True:
            scenario = guts.load(filename=cfg.colosseo_scenario_yml())
            scenario_path = cfg.colosseo_scenario_yml()[:-12]
            Meta = readcolosseostations(scenario_path)
            events = scenario.get_events()
            ev = events[0]
            # overwrite the origin with the scenario's first event
            Origin['strike'] = str(ev.moment_tensor.strike1)
            Origin['rake'] = str(ev.moment_tensor.rake1)
            Origin['dip'] = str(ev.moment_tensor.dip1)
            Origin['lat'] = str(ev.lat)
            Origin['lon'] = str(ev.lon)
            Origin['depth'] = str(ev.depth / 1000.)

        else:
            Meta = readMetaInfoFile(self.eventpath)
        Folder = createFolder(self.eventpath)

        FilterMeta = filterStations(Meta, Config, Origin)

        km(Config, FilterMeta, Folder, Origin, t)

        return True
Beispiel #17
0
def optimization(*params, **args):
    """Objective function for the source-parameter optimization.

    Unpacks the optimizer's parameter vector, recomputes synthetic
    semblance for every configured array, compares it against the
    observed data trace with a pyrocko misfit setup and returns a
    single normalized global misfit value.

    NOTE(review): params[0] is the trial model parameter passed on to
    doCalc_syn; positions 1..21 carry pre-bound state -- keep the
    ordering in sync with the caller.
    """
    counter = params[1]
    Config = params[2]
    Wdf = params[3]
    FilterMeta = params[4]
    mint = params[5]
    maxt = params[6]
    TTTGridMap = params[7]
    Folder = params[8]
    Origin = params[9]
    ntimes = params[10]
    switch = params[11]
    ev = params[12]
    arrayfolder = params[13]
    syn_in = params[14]
    data = params[15]
    evpath = params[16]
    XDict = params[17]
    RefDict = params[18]
    workdepth = params[19]
    filterindex = params[20]
    Wdfs = params[21]

    networks = Config['networks'].split(',')
    # keep the raw parameter vector as a plain list for doCalc_syn
    params = num.asarray(params)
    parameter = num.ndarray.tolist(params)
    ASL_syn = []


    C  = config.Config (evpath)
    Config = C.parseConfig ('config')
    cfg = ConfigObj (dict=Config)
    # station metadata source: pyrocko download, colosseo, or meta file
    if cfg.pyrocko_download() == True:
        Meta = C.readpyrockostations()#

    elif cfg.colesseo_input() == True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = C.readcolosseostations(scenario_path)
    else:
        Meta = C.readMetaInfoFile()
    # l indexes the per-array waveform dictionaries in Wdfs
    l = 0
    for i in networks:

        arrayname = i
        arrayfolder = os.path.join (Folder['semb'],arrayname)

        network = Config[i].split('|')

        FilterMeta = ttt.filterStations (Meta,Config,Origin,network)

        # arrays with fewer than 3 stations are skipped
        if len(FilterMeta)  < 3: continue

        W = XDict[i]
        refshift = RefDict[i]

        FilterMeta = cmpFilterMetavsXCORR (W, FilterMeta)

        Logfile.add ('BOUNDING BOX DIMX: %s  DIMY: %s  GRIDSPACING: %s \n'
                 % (Config['dimx'],Config['dimy'],Config['gridspacing']))

        # NOTE(review): relative pickle path -- depends on the current
        # working directory; confirm callers always run from the
        # expected location.
        f = open('../tttgrid/tttgrid_%s_%s_%s.pkl' % (ev.time, arrayname, workdepth), 'rb')
        TTTGridMap,mint,maxt = pickle.load(f)
        f.close()


        switch = filterindex

        tw  = times.calculateTimeWindows (mint,maxt,Config,ev, switch)
        Wdf = Wdfs[l]
        # synthetic semblance for this array with the trial parameter
        semb_syn = doCalc_syn (counter,Config,Wdf,FilterMeta,mint,maxt,TTTGridMap,
                                     Folder,Origin,ntimes,switch, ev,arrayfolder, syn_in,
                                      parameter[0])
        ASL_syn.append(semb_syn)
        counter += 1
        l += 1

    sembmax_syn = sembCalc.collectSemb(ASL_syn,Config,Origin,Folder,ntimes,len(networks),switch)

    misfit_list = []  # init a list for a all the singular misfits
    norm_list = []  # init a list for a all the singular normalizations
    taper = trace.CosFader(xfade=2.0)  # Cosine taper with fade in and out of 2s.
    bw_filter = trace.ButterworthResponse(corner=0.000055,  # in Hz
                                      order=4,
                                      type='high')  # "low"pass or "high"pass
    setup = trace.MisfitSetup(description='Misfit Setup',
                              norm=2,  # L1 or L2 norm
                              taper=taper,
                              filter=bw_filter,
                              domain='time_domain')
    nsamples = len(data)
    # NOTE(review): hard-coded reference time and deltat=0.5 -- confirm
    # they match the semblance sampling of this project.
    tmin = util.str_to_time('2010-02-20 15:15:30.100')
    tr = trace.Trace(station='TEST', channel='Z',
                     deltat=0.5, tmin=tmin, ydata=data)
    syn = trace.Trace(station='TEST', channel='Z',
                     deltat=0.5, tmin=tmin, ydata=sembmax_syn)
    misfit, norm = tr.misfit(candidate=syn, setup=setup) # calculate the misfit of a single observed trace with its synthetics
    # with the setup from above
    misfit_list.append(misfit), norm_list.append(norm)  # append the misfit into a list
    global_misfit_normed = num.sqrt(num.nansum((num.asarray(misfit_list))**2) / # sum all the misfits and normalize to get a single minimizable value
                                    num.nansum((num.asarray(norm_list))**2))
    return global_misfit_normed
Beispiel #18
0
def load(filter, step=None, path=None):
    """Load semblance .ASC grid files from an event's work directory.

    Scans the semblance folder for ASCII grid files (optionally limited
    to one time *step*), tracks the file with the global maximum, and --
    depending on sys.argv[3] ('max' or 'combined') and sys.argv[4]
    ('boot') -- builds per-cell maximum or summed grids, optionally for
    bootstrap results as well.

    Returns (data, data_int, data_boot, data_int_boot, path_in_str,
    maxs, datamax, count).

    NOTE(review): the parameter 'filter' shadows the Python builtin.
    NOTE(review): several glob patterns below rely on sys.argv indices
    3/4/5 -- this function is CLI-coupled.
    """
    if path is None:
        rel = 'events/' + str(sys.argv[1]) + '/work/semblance/'
    else:
        rel = path
    boot = False
    if path is not None:
        evpath = path
    else:
        evpath = 'events/' + str(sys.argv[1])
    C = config.Config(evpath)
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    dimx = int(Config['dimx'])
    dimy = int(Config['dimy'])
    data_int = None
    data = None
    data_boot = None
    data_int_boot = None
    datamax = 0
    phase = "P"
    # phase selection from the command line; empty string = all phases
    for argv in sys.argv:
        if argv == "--phases:S":
            phase = "S"
        if argv == "--phases:all":
            phase = ""
        if argv == "--phases:P,S":
            phase = ""
        if argv == "--phases:P":
            phase = "P"
    if step is None:
        try:
            pathlist = Path(rel).glob('*.ASC')
        except:
            pathlist = Path(rel).glob('%s-*%s.ASC' % (filter, phase))
    else:
        try:
            try:
                pathlist = Path(rel).glob('*0%s.ASC' % step)
            except:
                pathlist = Path(rel).glob('*%s.ASC' % step)
        except:
            pathlist = Path(rel).glob('%s-*00%s_*%s.ASC' %
                                      (filter, step, phase))
    # first pass: find the file holding the global semblance maximum
    maxs = 0.
    count = 0
    for path in sorted(pathlist):
        path_in_str = str(path)
        data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
        maxd = num.max(data[:, 2])
        count = count + 1
        if maxs < maxd:
            maxs = maxd
            datamax = data[:, 2]
    if sys.argv[3] == 'max':
        # NOTE(review): '%' binds tighter than '+', so the format below
        # applies only to the last string literal and raises TypeError;
        # the bare 'except' then always takes the fallback pattern.
        # Left unchanged because the fallback is what actually runs.
        if step is None:
            try:
                pathlist = Path(rel).glob('%s-' + str(sys.argv[5]) +
                                          '*.ASC' % filter)
            except:
                pathlist = Path(rel).glob('%s-*%s.ASC' % (filter, phase))
        else:
            try:
                pathlist = Path(rel).glob('%s-' + str(sys.argv[5]) +
                                          '00%s_*.ASC' % (filter, step))
            except:
                pathlist = Path(rel).glob('%s-*00%s_*%s.ASC' %
                                          (filter, step, phase))
        # per-cell maximum over all matching files
        data_int = num.zeros(num.shape(data[:, 2]))
        for path in sorted(pathlist):
            path_in_str = str(path)
            data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
            i = 0
            for k in num.nan_to_num(data[:, 2]):
                if k > data_int[i]:
                    data_int[i] = k
                if num.max(datamax) == 0:
                    data_int[i] = 0
                i = i + 1
        try:
            if sys.argv[4] == 'boot':
                boot = True
                # same precedence issue as above -- fallback pattern runs
                if step is None:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*' +
                                                  str(sys.argv[5]) +
                                                  '*.ASC' % filter)
                    except:
                        pathlist = Path(rel).glob('%s-*boot*%s.ASC' %
                                                  (filter, phase))
                else:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*' +
                                                  str(sys.argv[5]) +
                                                  '00%s_*.ASC' %
                                                  (filter, step))
                    except:
                        pathlist = Path(rel).glob('%s-*boot00%s_*%s.ASC' %
                                                  (filter, step, phase))
                data_int_boot = num.zeros(num.shape(data[:, 2]))
                for path in sorted(pathlist):
                    path_in_str = str(path)
                    data_boot = num.loadtxt(path_in_str,
                                            delimiter=' ',
                                            skiprows=5)
                    i = 0
                    # NOTE(review): iterates 'data', not 'data_boot' --
                    # confirm the bootstrap grid is intended here.
                    for k in num.nan_to_num(data[:, 2]):
                        if k > data_int_boot[i]:
                            data_int_boot[i] = k
                        if num.max(datamax) == 0:
                            data_int[i] = 0
                        i = i + 1
        except IndexError:
            # no sys.argv[4] given -> no bootstrap handling
            pass

    if sys.argv[3] == 'combined':
        # same precedence issue as above -- fallback pattern runs
        if step is None:
            try:
                pathlist = Path(rel).glob('%s-' + str(sys.argv[5]) +
                                          '*.ASC' % filter)
            except:
                pathlist = Path(rel).glob('%s*-%s*.ASC' % (filter, phase))
        else:
            try:
                pathlist = Path(rel).glob('%s-' + str(sys.argv[5]) +
                                          '00%s_*.ASC' % (filter, step))
            except:
                pathlist = Path(rel).glob('%s-*00%s_*%s.ASC' %
                                          (filter, step, phase))
        # per-cell sum over all matching (non-boot) files
        data_int = num.zeros(num.shape(data[:, 2]))
        for path in sorted(pathlist):
            path_in_str = str(path)
            # NOTE(review): 'is not' compares object identity, not
            # equality -- '!=' was almost certainly intended; behavior
            # depends on CPython string interning.
            if path_in_str[-14] is not "o":
                data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
                data_int += num.nan_to_num(data[:, 2])

        try:
            if sys.argv[4] == 'boot':
                boot = True

                if step is None:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*' +
                                                  str(sys.argv[5]) +
                                                  '*.ASC' % filter)
                    except:
                        pathlist = Path(rel).glob('%s-*boot*.ASC' % filter)
                else:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*' +
                                                  str(sys.argv[5]) +
                                                  '00%s_*.ASC' %
                                                  (filter, step))
                    except:
                        pathlist = Path(rel).glob('%s-*boot*00%s_*.ASC' %
                                                  (filter, step))
                data_int_boot = num.zeros(num.shape(data[:, 2]))
                for path in sorted(pathlist):
                    path_in_str = str(path)
                    data_boot = num.loadtxt(path_in_str,
                                            delimiter=' ',
                                            skiprows=5)
                    data_int_boot += num.nan_to_num(data_boot[:, 2])
        except IndexError:
            # no sys.argv[4] given -> no bootstrap handling
            pass
    return data, data_int, data_boot, data_int_boot, path_in_str, maxs, datamax, count
Beispiel #19
0
def from_palantiri():
    """Convert palantiri semblance output into a pyrocko ``Geometry``.

    Loads the per-timestep semblance .ASC files of one event (via the
    module-level ``load`` helper), builds a rectangular patch geometry
    around every grid node, attaches the semblance time series as a
    'semblance' property and dumps the result to 'geom.yaml'.

    Command-line arguments used:
      sys.argv[1] -- event directory name under 'events/'
      sys.argv[3] -- optional explicit path to the semblance files
    """
    km = 1000.
    try:
        path = sys.argv[3]
        evpath = path
    except IndexError:
        # No explicit path supplied on the command line -- fall back to
        # the conventional events/<name> layout.  (The original bare
        # ``except`` would also have masked unrelated errors.)
        path = None
        evpath = 'events/' + str(sys.argv[1])

    C = config.Config(evpath)
    Origin = C.parseConfig('origin')
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    step = cfg.UInt('step')
    step2 = cfg.UInt('step_f2')
    duration = cfg.UInt('duration')
    forerun = cfg.UInt('forerun')
    deltat = step
    deltat2 = step2
    rel = 'events/' + str(sys.argv[1]) + '/work/semblance/'

    dimx = int(Config['dimx'])
    dimy = int(Config['dimy'])

    origin = OriginCfg(Origin)
    depth = origin.depth() * 1000.
    ev = event.Event(lat=origin.lat(),
                     lon=origin.lon(),
                     depth=depth,
                     time=util.str_to_time(origin.time()))
    data, data_int, data_boot, data_int_boot, path_in_str, maxs, datamax, n_files = load(
        0, path=path)
    values_orig = data[:, 2]
    values_orig = num.append(values_orig, num.array([0., 0.]))

    lat_orig = data[:, 1]
    lon_orig = data[:, 0]

    ncorners = 4
    # NOTE(review): lat/lon look swapped here (the "lon" grid is built from
    # latitude values and vice versa) -- kept as-is to preserve behaviour;
    # confirm against the .ASC column layout.
    lon_grid_orig = num.linspace(num.min(lat_orig), num.max(lat_orig), (dimy))
    lat_grid_orig = num.linspace(num.min(lon_orig), num.max(lon_orig), dimx)

    if path is None:
        # number of timesteps from the configured window
        ntimes = int((forerun + duration) / step)
    else:
        # explicit path: one timestep per discovered file
        ntimes = n_files

    verts = []
    lon_diff = ((lon_orig)[dimy + 1] - (lon_orig)[0]) / 4.
    lat_diff = ((lat_orig)[1] - (lat_orig)[0]) / 4.

    # edge length of one grid cell, in metres
    dist = orthodrome.distance_accurate50m(lat_grid_orig[1], lon_grid_orig[1],
                                           lat_grid_orig[0], lon_grid_orig[0])

    # one square patch (4 corners) centred on every grid node
    for x, y in zip(lon_orig, lat_orig):

        xyz = ([dist / 2., dist / 2.,
                depth], [-dist / 2., dist / 2.,
                         depth], [-dist / 2., -dist / 2.,
                                  depth], [dist / 2., -dist / 2., depth])
        latlon = ([x, y], [x, y], [x, y], [x, y])
        patchverts = num.hstack((latlon, xyz))
        verts.append(patchverts)

    vertices = num.vstack(verts)

    npatches = int(len(vertices))  #*2?
    faces1 = num.arange(ncorners * npatches,
                        dtype='int64').reshape(npatches, ncorners)
    # both orientations so patches are visible from either side
    faces2 = num.fliplr(faces1)
    faces = num.vstack((faces2, faces1))
    srf_semblance_list = []
    for i in range(0, ntimes):
        if len(sys.argv) < 4:
            print("missing input arrayname")
        else:
            data, data_int, data_boot, data_int_boot, path_in_str, maxsb, datamaxb, n_files = load(
                0, step=i, path=path)
            srf_semblance = data[:, 2]
            srf_semblance = num.append(srf_semblance, num.array([0., 0.]))
            srf_semblance = duplicate_property(srf_semblance)
            srf_semblance_list.append(srf_semblance)

    srf_semblance = num.asarray(srf_semblance_list).T
    srf_times = num.linspace(0, forerun + duration, ntimes)
    geom = Geometry(times=srf_times, event=ev)
    geom.setup(vertices, faces)
    sub_headers = tuple([str(i) for i in srf_times])
    geom.add_property((('semblance', 'float64', sub_headers)), srf_semblance)
    dump(geom, filename='geom.yaml')
Beispiel #20
0
def createRandomInitialCentroids(Config, StationMetaList):
    """Randomly pick initial cluster centroids from the station list.

    Draws random stations while enforcing a minimum inter-centroid
    distance ('centroidmindistance', in degrees).  The search gives up
    after MAX_TIME_ALLOWED seconds without progress, so the returned
    list may hold fewer than 'maxcluster' centroids.

    :param Config: configuration dict (uses 'maxcluster',
        'centroidmindistance', 'initialstationdistance')
    :param StationMetaList: list of station objects with lat/lon
    :returns: list of stations chosen as initial centroids (empty if the
        input list is empty)
    """
    Logfile.red('Begin initial centroid search')
    cfg = ConfigObj(dict=Config)

    initialCentroids = []
    usedIndexes = []
    # time.clock() was removed in Python 3.8 -- seed from wall-clock time
    random.seed(time.time())

    if len(StationMetaList) == 0:
        Logfile.red('Empty station list')
        return initialCentroids

    MAX_TIME_ALLOWED = 350
    start = time.time()
    # maxcluster == 0 means "use (almost) as many clusters as stations"
    if int(Config['maxcluster']) == 0:
        to = len(StationMetaList) - 1
    else:
        to = int(Config['maxcluster'])
    while len(initialCentroids) < to:
        dist_centroids = float(Config['centroidmindistance'])

        randomIndex = random.randint(0, len(StationMetaList) - 1)
        redraw = True
        # redraw until we hit an unused index that is far enough from
        # every previously used one
        while redraw is True:
            if randomIndex in usedIndexes:
                randomIndex = random.randint(0, len(StationMetaList) - 1)
            else:
                if len(usedIndexes) > 2:
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            redraw = False
                else:
                    redraw = False
        usedIndexes.append(randomIndex)

        around = checkStationAroundInitialCentroid(
            StationMetaList[randomIndex], Config, StationMetaList)
        found = False

        if len(initialCentroids) == 0:
            # first centroid is accepted unconditionally
            initialCentroids.append(StationMetaList[randomIndex])
            found = True
            start = time.time()

        else:
            t = addOK(StationMetaList[randomIndex], initialCentroids, Config,
                      StationMetaList)
            if (time.time() - start) > MAX_TIME_ALLOWED:
                # time budget exhausted -- return what we have so far
                break
            if t == 1:
                if len(usedIndexes) > 1:
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            initialCentroids.append(
                                StationMetaList[randomIndex])
                            found = True
                else:
                    initialCentroids.append(StationMetaList[randomIndex])
                    found = True

            else:
                continue

        if found:
            initDist = cfg.Float('initialstationdistance')
            Logfile.red('found initial cluster %d' % (len(initialCentroids)))
            Logfile.red('centroid %s with %d stations around %s deegree' %
                        (StationMetaList[randomIndex], around, initDist))

    Logfile.red('Initial centroid search finished')
    return initialCentroids
Beispiel #21
0
def  doCalc_syn (flag,Config,WaveformDict,FilterMetaData,Gmint,Gmaxt,TTTGridMap,
                Folder,Origin, ntimes, switch, ev,arrayfolder, syn_in, parameter):
    '''
    Calculate the semblance of one station array against synthetic
    waveforms.

    Synthetic traces are forward-modelled with a pyrocko LocalEngine from
    the source description in ``syn_in`` and substituted for the observed
    data before the semblance grid search.  Optional pre-processing stages
    (noise addition, phase-weighted stacking, beam-forming shifts, noise
    weighting) are controlled by the configuration.

    :param flag: process id, only used for logging
    :param Config: configuration dict (dimx/dimy/winlen/step/...)
    :param WaveformDict: mapping trace-id -> obspy trace for this array
    :param FilterMetaData: station metadata matching WaveformDict
    :param Gmint/Gmaxt: min/max travel time of the grid
    :param TTTGridMap: mapping trace-id -> travel-time grid
    :param syn_in: synthetic-test configuration object
    :param parameter: unused, kept for call compatibility
    :returns: partial semblance array of shape (ntimes, dimX*dimY)
    '''
    Logfile.add ('PROCESS %d %s' % (flag,' Enters Semblance Calculation') )
    Logfile.add ('MINT  : %f  MAXT: %f Traveltime' % (Gmint,Gmaxt))

    cfg = ConfigObj (dict=Config)

    dimX   = cfg.dimX()         # ('dimx')
    dimY   = cfg.dimY()         # ('dimy')
    winlen = cfg.winlen ()      # ('winlen')
    step   = cfg.step()         # ('step')

    new_frequence   = cfg.newFrequency()          #('new_frequence')
    forerun= cfg.Int('forerun')
    duration= cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len (WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount  = 999999999

    # number of semblance timesteps from the configured time window
    if cfg.UInt ('forerun')>0:
        ntimes = int ((cfg.UInt ('forerun') + cfg.UInt ('duration') ) / cfg.UInt ('step') )
    else:
        ntimes = int ((cfg.UInt ('duration') ) / cfg.UInt ('step') )
    nsamp  = int (winlen * new_frequence)
    nstep  = int (step   * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    # build pyrocko station objects for the traces we actually have
    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                        szo = model.Station(lat=il.lat, lon=il.lon,
                                            station=il.sta, network=il.net,
                                            channels=py_tr.channel,
                                            elevation=il.ele, location=il.loc)
                        stations.append(szo) #right number of stations?

    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])

    targets = []
    for st in stations:
        target = Target(
                lat=st.lat,
                lon=st.lon,
                store_id=store_id,
                codes=(st.network, st.station, st.location, 'BHZ'),
                tmin=-1900,
                tmax=3900,
                interpolation='multilinear',
                quantity=cfg.quantity())
        targets.append(target)

    # build the synthetic source(s) from the syn_in configuration
    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            # NOTE(review): exec of a config-supplied string -- only safe
            # for trusted configuration files.
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
                source = RectangularSource(
                    lat=float(syn_in.lat_0()),
                    lon=float(syn_in.lon_0()),
                    depth=syn_in.depth_syn_0()*1000.,
                    strike=syn_in.strike_0(),
                    dip=syn_in.dip_0(),
                    rake=syn_in.rake_0(),
                    width=syn_in.width_0()*1000.,
                    length=syn_in.length_0()*1000.,
                    nucleation_x=syn_in.nucleation_x_0(),
                    slip=syn_in.slip_0(),
                    nucleation_y=syn_in.nucleation_y_0(),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
                source = DCSource(
                    lat=float(syn_in.lat_0()),
                    lon=float(syn_in.lon_0()),
                    depth=syn_in.depth_syn_0()*1000.,
                    strike=syn_in.strike_0(),
                    dip=syn_in.dip_0(),
                    rake=syn_in.rake_0(),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_0()),
                    magnitude=syn_in.magnitude_0())

    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)

            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                    sources.append(RectangularSource(
                        lat=float(syn_in.lat_1(i)),
                        lon=float(syn_in.lon_1(i)),
                        depth=syn_in.depth_syn_1(i)*1000.,
                        strike=syn_in.strike_1(i),
                        dip=syn_in.dip_1(i),
                        rake=syn_in.rake_1(i),
                        width=syn_in.width_1(i)*1000.,
                        length=syn_in.length_1(i)*1000.,
                        nucleation_x=syn_in.nucleation_x_1(i),
                        slip=syn_in.slip_1(i),
                        nucleation_y=syn_in.nucleation_y_1(i),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_1(i))))

            if syn_in.source() == 'DCSource':
                    sources.append(DCSource(
                        lat=float(syn_in.lat_1(i)),
                        lon=float(syn_in.lon_1(i)),
                        depth=syn_in.depth_1(i)*1000.,
                        strike=syn_in.strike_1(i),
                        dip=syn_in.dip_1(i),
                        rake=syn_in.rake_1(i),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_1(i)),
                        magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)
    response = engine.process(source, targets)

    synthetic_traces = response.pyrocko_traces()
    if cfg.Bool('synthetic_test_add_noise') is True:
        # optionally contaminate the synthetics with noise derived from
        # the (downsampled) observed traces
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        #from pyrocko import trace
        for tracex in calcStreamMapsyn.keys():
                for trl in synthetic_traces:
                    if str(trl.name()[4:12]) == str(tracex[4:]):
                        tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine, source.pyrocko_event(),
                                     stations,
                                     store_id, phase_def='P')
    # replace the observed traces with the synthetics, chopped to the
    # observed record windows
    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    #from pyrocko import trace
    for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12])== str(tracex[4:]):
                    mod = trl

                    recordstarttime = calcStreamMapsyn[tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapsyn[tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapsyn[tracex])
                    trs_orgs.append(tr_org)

                    tr_org_add = mod.chop(recordstarttime, recordendtime, inplace=False)
                    synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapsyn[tracex] = synthetic_obs_tr
                    trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') == True:
        # phase-weighted stack of all traces; every trace is replaced by
        # the stack result
        calcStreamMapshifted= calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                    calcStreamMapshifted[trace]=tr
        calcStreamMap = calcStreamMapshifted


    if cfg.Bool('shift_by_phase_onset') == True:
        # beam-form the traces relative to the first P onset
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime, inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace]=shifted_obs_tr
            i = i+1

        calcStreamMap = calcStreamMapshifted


    weight = 0.
    if cfg.Bool('weight_by_noise') == True:
        # derive an array weight from pre-event noise analysis
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
         100., store_id, nwindows=1,
         check_events=True, phase_def='P')

    # shortest trace determines the usable sample count
    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    traces = num.ndarray (shape=(len(calcStreamMap), minSampleCount), dtype=float)
    traveltime = num.ndarray (shape=(len(calcStreamMap), dimX*dimY), dtype=float)
    latv   = num.ndarray (dimX*dimY, dtype=float)
    lonv   = num.ndarray (dimX*dimY, dtype=float)
    ############################################################################


    c=0
    streamCounter = 0

    # flatten trace samples and travel-time grids into the 2-D arrays
    # expected by the semblance kernel
    for key in calcStreamMap.keys():
        streamID = key
        c2   = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o

                c2 += 1


        for key in TTTGridMap.keys():

            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                "NEIN", streamID, key  # no-op: leftover debug expression


        if not streamCounter in traveltimes :
           continue                              #hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ  = g.dimZ
        mint  = g.mint
        maxt  = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime [c][x * dimY + y] = elem.tt
                latv [x * dimY + y] = elem.lat
                lonv [x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor


    ############################## CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ##################
    nsamp = winlen * new_frequence

    nstep = int (step*new_frequence)
    migpoints = dimX * dimY

    dimZ = 0
    new_frequence = cfg.newFrequency ()              # ['new_frequence']
    maxp = int (Config['ncore'])


    Logfile.add ('PROCESS %d  NTIMES: %d' % (flag,ntimes))

    if False :
       print ('nostat ',nostat,type(nostat))
       print ('nsamp ',nsamp,type(nsamp))
       print ('ntimes ',ntimes,type(ntimes))
       print ('nstep ',nstep,type(nstep))
       print ('dimX ',dimX,type(dimX))
       print ('dimY ',dimY,type(dimY))
       print ('mint ',Gmint,type(mint))
       print ('new_freq ',new_frequence,type(new_frequence))
       print ('minSampleCount ',minSampleCount,type(minSampleCount))
       print ('latv ',latv,type(latv))
       print ('traces',traces,type(traces))
       print ('traveltime',traveltime,type(traveltime))


#==================================semblance calculation========================================

    t1 = time.time()
    traces = traces.reshape   (1,nostat*minSampleCount)
    traveltime = traveltime.reshape (1,nostat*dimX*dimY)
    USE_C_CODE = True
    try:
        if USE_C_CODE :
            import Cm
            import CTrig
            start_time = time.time()
            k  = Cm.otest (maxp,nostat,nsamp,ntimes,nstep,dimX,dimY,Gmint,new_frequence,
                          minSampleCount,latv,lonv,traveltime,traces)
            print("--- %s seconds ---" % (time.time() - start_time))
        else :
            start_time = time.time()
            k = otest (maxp,nostat,nsamp,ntimes,nstep,dimX,dimY,Gmint,new_frequence,
                      minSampleCount,latv,lonv,traveltime,traces)                       #hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except Exception:
        # The original bare ``except`` swallowed the error and fell
        # through with ``k`` undefined, crashing below with a confusing
        # NameError.  Re-raise after printing the hint instead.
        print("loaded tttgrid has probably wrong dimensions or stations, delete\
                ttgrid or exchange")
        raise

    t2 = time.time()


    partSemb = k

    partSemb_syn  = partSemb.reshape (ntimes,migpoints)


    return partSemb_syn
Beispiel #22
0
def  doCalc (flag,Config,WaveformDict,FilterMetaData,Gmint,Gmaxt,TTTGridMap,Folder,Origin, ntimes, switch, ev,arrayfolder, syn_in):
    '''
    Calculate the semblance of one station array from observed waveforms.

    Optional pre-processing stages (phase-weighted stacking, beam-forming
    shifts, noise weighting) are controlled by the configuration; the
    semblance itself is computed either over the travel-time grid
    (``TTTGrid``) or by manual beam shifting.

    :param flag: process id, only used for logging
    :param Config: configuration dict (dimx/dimy/winlen/step/...)
    :param WaveformDict: mapping trace-id -> obspy trace for this array
    :param FilterMetaData: station metadata matching WaveformDict
    :param Gmint/Gmaxt: min/max travel time of the grid
    :param TTTGridMap: mapping trace-id -> travel-time grid
    :returns: partial semblance array of shape (ntimes, dimX*dimY)
    '''
    Logfile.add ('PROCESS %d %s' % (flag,' Enters Semblance Calculation') )
    Logfile.add ('MINT  : %f  MAXT: %f Traveltime' % (Gmint,Gmaxt))

    cfg = ConfigObj (dict=Config)

    dimX   = cfg.dimX()         # ('dimx')
    dimY   = cfg.dimY()         # ('dimy')
    winlen = cfg.winlen ()      # ('winlen')
    step   = cfg.step()         # ('step')

    new_frequence   = cfg.newFrequency()          #('new_frequence')
    forerun= cfg.Int('forerun')
    duration= cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len (WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount  = 999999999

    # These three were referenced further down but never assigned in the
    # original version of this function (guaranteed NameError on the
    # default code path); define them the same way doCalc_syn does.
    maxp = int(Config['ncore'])
    migpoints = dimX * dimY
    timeev = util.str_to_time(ev.time)

    if cfg.UInt ('forerun')>0:
        ntimes = int ((cfg.UInt ('forerun') + cfg.UInt ('duration') ) / cfg.UInt ('step') )
    else:
        ntimes = int ((cfg.UInt ('duration') ) / cfg.UInt ('step') )
    nsamp  = int (winlen * new_frequence)
    nstep  = int (step   * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    # build pyrocko station objects for the traces we actually have
    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                        szo = model.Station(lat=il.lat, lon=il.lon,
                                            station=il.sta, network=il.net,
                                            channels=py_tr.channel,
                                            elevation=il.ele, location=il.loc)
                        stations.append(szo) #right number of stations?


#==================================synthetic BeamForming=======================================

    if cfg.Bool('shift_by_phase_pws') == True:
        # phase-weighted stack of all traces; every trace is replaced by
        # the stack result
        calcStreamMapshifted= calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                    calcStreamMapshifted[trace]=tr
        calcStreamMap = calcStreamMapshifted


    if cfg.Bool('shift_by_phase_onset') == True:
        # beam-form the traces relative to the first P onset
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime, inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace]=shifted_obs_tr
            i = i+1

        calcStreamMap = calcStreamMapshifted


    weight = 0.
    if cfg.Bool('weight_by_noise') == True:
        # derive an array weight from pre-event noise analysis
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
         100., store_id, nwindows=1,
         check_events=True, phase_def='P')

    # shortest trace determines the usable sample count
    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    traces = num.ndarray (shape=(len(calcStreamMap), minSampleCount), dtype=float)
    traveltime = num.ndarray (shape=(len(calcStreamMap), dimX*dimY), dtype=float)
    latv   = num.ndarray (dimX*dimY, dtype=float)
    lonv   = num.ndarray (dimX*dimY, dtype=float)
    ############################################################################


    c=0
    streamCounter = 0

    # flatten trace samples and travel-time grids into the 2-D arrays
    # expected by the semblance kernel
    for key in calcStreamMap.keys():
        streamID = key
        c2   = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o

                c2 += 1


        for key in TTTGridMap.keys():

            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            else:
                "NEIN", streamID, key  # no-op: leftover debug expression


        if not streamCounter in traveltimes :
           continue                              #hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ  = g.dimZ
        mint  = g.mint
        maxt  = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime [c][x * dimY + y] = elem.tt
                latv [x * dimY + y] = elem.lat
                lonv [x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor


# ==================================semblance calculation=======

    t1 = time.time()
    traces = traces.reshape(1, nostat*minSampleCount)

    traveltimes = traveltime.reshape(1, nostat*dimX*dimY)
    TTTGrid = True
    manual_shift = False

    if manual_shift:

        # alternative (debug) path: beam-form every grid point explicitly
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        backSemb = num.ndarray(shape=(ntimes, dimX*dimY), dtype=float)
        bf = BeamForming(stations, traces, normalize=True)

        for i in range(ntimes):
            sembmax = 0
            sembmaxX = 0
            sembmaxY = 0
            for j in range(dimX * dimY):
                event = model.Event(lat=float(latv[j]), lon=float(lonv[j]),
                                    depth=ev.depth*1000., time=timeev)
                directory = arrayfolder
                shifted_traces, stack = bf.process(event=event,
                                                   timing=timing,
                                                   fn_dump_center=pjoin(
                                                                directory,
                                                         'array_center.pf'),
                                                   fn_beam=pjoin(directory,
                                                                 'beam.mseed'))
                tmin = stack.tmin+(i*nstep)+20
                tmax = stack.tmin+(i*nstep)+60
                stack.chop(tmin, tmax)
                backSemb[i][j] = abs(sum(stack.ydata))

        k = backSemb
        TTTGrid = False

    if TTTGrid:
        start_time = time.time()
        if cfg.UInt('forerun') > 0:
            ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))/step)
        else:
            ntimes = int((cfg.UInt('duration')) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')

        k = semblance(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                      new_frequence, minSampleCount, latv, lonv, traveltimes,
                      traces, calcStreamMap, timeev, Config, Origin)
        print("--- %s seconds ---" % (time.time() - start_time))

    t2 = time.time()

    Logfile.add('%s took %0.3f s' % ('CALC:',(t2-t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb
Beispiel #23
0
def collectSembweighted(SembList,Config,Origin,Folder,ntimes,arrays,switch, weights):
    '''
    Combine the semblance matrices gathered from all array processes into a
    single grid (element-wise product over arrays) and write one ASCII grid
    file per time step, plus a summary file holding the semblance maximum of
    every time step.

    :param SembList: iterable of per-array semblance matrices, each of
        shape (ntimes, dimX*dimY).
    :param Config: processing configuration dict (wrapped in ConfigObj).
    :param Origin: event origin dict with 'lat', 'lon', 'depth', 'time'.
    :param Folder: dict of output folders; results go to Folder['semb'].
    :param ntimes: number of time steps per semblance matrix.
    :param arrays: unused here; kept for call compatibility.
    :param switch: filter index embedded in the output file names.
    :param weights: per-array weights; NOTE(review): currently ignored in
        the combination loop below -- confirm intended behaviour.
    '''
    Logfile.add ('start collect in collectSemb')

    cfg= ConfigObj (dict=Config)
    origin = ConfigObj (dict=Origin)

    # Grid geometry from the configuration.
    dimX   = cfg.dimX()         # number of grid nodes in latitude
    dimY   = cfg.dimY()         # number of grid nodes in longitude
    winlen = cfg.winlen ()      # analysis window length [s]
    step   = cfg.step()         # time step between windows [s]

    latv= []
    lonv= []

    gridspacing = cfg.Float ('gridspacing')
    migpoints   = dimX * dimY
    o_lat   = origin.lat()         # event latitude
    o_lon   = origin.lon()         # event longitude
    oLatul  = 0
    oLonul  = 0

    z=0

    # Build the flat, row-major lists of grid-node coordinates centred on
    # the origin; Latul/Lonul keep the south-west corner for the headers.
    # NOTE(review): uses py2-only xrange -- verify target interpreter.
    for i in xrange(dimX):
         oLatul = o_lat - ((dimX-1)/2) * gridspacing + i*gridspacing

         if z == 0 and i == 0 :
             Latul = oLatul
         o=0

         for j in xrange (dimY):
               oLonul = o_lon - ((dimY-1)/2) * gridspacing + j*gridspacing

               if o==0 and j==0:
                    Lonul = oLonul

               latv.append (oLatul)
               lonv.append (oLonul)


    # Element-wise product of all array semblance matrices.
    # NOTE(review): the weight 'w' is never applied despite the function
    # name -- presumably 'tmp *= a * w' was intended; verify with caller.
    tmp=1
    for a, w in zip(SembList, weights):
        tmp *= a
    #sys.exit()

    sembmaxvaluev = num.ndarray (ntimes,dtype=float)
    sembmaxlatv   = num.ndarray (ntimes,dtype=float)
    sembmaxlonv   = num.ndarray (ntimes,dtype=float)

    rc= UTCDateTime(Origin['time'])
    rcs= '%s-%s-%s_%02d:%02d:%02d'% (rc.day,rc.month,rc.year, rc.hour,rc.minute,rc.second)
    d = rc.timestamp
    usedarrays = 5                 # NOTE(review): hard-coded array count

    folder  = Folder['semb']
    fobjsembmax = open (os.path.join (folder,'sembmax_%s.txt' % (switch)),'w')

    # One ASCII grid file per time step 'a'; 'i' is that step's flat grid.
    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)


        fobj  = open (os.path.join (folder,'%s-%s_%03d._weighted_semblance.ASC' % (switch,Origin['depth'],a)),'w')
        #fobj = open (os.path.join (folder, '%03d.ASC'    % a),'w')

        # Header: reference time, window parameters and grid geometry.
        fobj.write ('# %s , %s\n' % (d,rcs))
        fobj.write ('# step %ds| ntimes %d| winlen: %ds\n' % (step,ntimes,winlen))
        fobj.write ('# \n')
        fobj.write ('# southwestlat: %.2f dlat: %f nlat: %f \n'%(Latul,gridspacing,dimX))
        fobj.write ('# southwestlon: %.2f dlon: %f nlon: %f \n'%(Lonul,gridspacing,dimY))
        fobj.write ('# ddepth: 0 ndepth: 1 \n')


        sembmax  = 0
        sembmaxX = 0
        sembmaxY = 0

        origin = DataTypes.dictToLocation (Origin)
        uncert = num.std(i) #maybe not std?
        for j in range(migpoints):
            x= latv[j]
            y= lonv[j]
            semb = i[j]

            fobj.write ('%.2f %.2f %.20f\n' % (x,y,semb))

            if  semb > sembmax:
                sembmax  = semb;# search for maximum and position of maximum on semblance grid for given time step
                sembmaxX = x;
                sembmaxY = y;

        # Distance and azimuth from the origin to the semblance maximum.
        delta = loc2degrees (Location (sembmaxX, sembmaxY), origin)
        azi   = toAzimuth (float(Origin['lat']), float(Origin['lon']),float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a]   = sembmaxX
        sembmaxlonv[a]   = sembmaxY

        fobjsembmax.write ('%d %.2f %.2f %.20f %.20f %d %03f %f %03f\n' % (a*step,sembmaxX,sembmaxY,sembmax,uncert,usedarrays,delta,float(azi),delta*119.19))
        fobj.close()


    fobjsembmax.close()

    # NOTE(review): 'durationpath' is computed but never used.
    durationpath  = os.path.join (folder, "duration.txt")
    trigger.writeSembMaxValue (sembmaxvaluev,sembmaxlatv,sembmaxlonv,ntimes,Config,Folder)
    trigger.semblancestalta (sembmaxvaluev,sembmaxlatv,sembmaxlonv)
Beispiel #24
0
def calcTTTAdvTauP(Config,
                   station,
                   Origin,
                   flag,
                   Xcorrshift=None,
                   Refshift=None,
                   flag_rpe=False):
    '''
    Build a 2-D travel-time grid for one station by shelling out to the
    TauP toolkit (``taup_time``, model ak135, phase P) and dump the result
    to pickle files for later use by the semblance calculation.

    :param Config: processing configuration dict (wrapped in ConfigObj).
    :param station: station object providing ``lat``, ``lon``, ``getName()``.
    :param Origin: event origin dict with 'lat', 'lon', 'depth'.
    :param flag: integer worker id; disambiguates scratch/pickle file names
        of parallel processes.
    :param Xcorrshift: mapping station name -> object with a ``shift``
        attribute; the shift is subtracted from every travel time.
    :param Refshift: unused; kept for call compatibility.
    :param flag_rpe: if True, use the ``*_emp`` grid dimensions and the
        ``*-ttt_emp.pkl`` output naming scheme.
    '''
    cfg = ConfigObj(dict=Config)
    # Empirical (RPE) runs use their own, separately configured grid.
    if flag_rpe is False:
        dimX = cfg.Int('dimx')
        dimY = cfg.Int('dimy')
    else:
        dimX = cfg.Int('dimx_emp')
        dimY = cfg.Int('dimy_emp')
    gridspacing = cfg.config_geometry.gridspacing

    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    o_depth = float(Origin['depth'])

    # NOTE(review): grid-point counts are added to degrees here; presumably
    # this should involve gridspacing (cf. the south-west corner formula
    # below) -- kept unchanged to stay compatible with existing grid files.
    oLator = o_lat + dimX / 2
    oLonor = o_lon + dimY / 2

    TTTGridMap = {}
    LMINMAX = []
    GridArray = {}
    locStation = Location(station.lat, station.lon)

    sdelta = loc2degrees(Location(o_lat, o_lon), locStation)
    Logfile.add('TTT PROCESS %d STATION: %s --> DELTA: %f' %
                (flag, station.getName(), sdelta))

    # Per-worker scratch files for the external TauP run.
    inputpath = str(flag) + '-' + station.getName() + ".input"
    outputpath = str(flag) + '-' + station.getName() + ".output"
    errorpath = str(flag) + '-' + station.getName() + '.error'

    # Write the interactive taup_time command script: one station ('s') and
    # depth ('h') header, then one event ('e') entry per grid node,
    # row-major (i over lat rows, j over lon columns).
    with open(inputpath, 'w') as fobjinput:
        fobjinput.write('s\n')
        fobjinput.write(('%s %s\n') % (station.lat, station.lon))
        fobjinput.write('h\n')
        fobjinput.write(('%s\n') % (o_depth))

        for i in range(dimX):
            oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

            for j in range(dimY):
                oLonul = o_lon - ((dimY - 1) / 2) * gridspacing \
                    + j * gridspacing

                fobjinput.write('e\n')
                fobjinput.write(('%s %s\n') % (oLatul, oLonul))

    # HACK: shell=True with interpolated file names; safe only while
    # station names contain no shell metacharacters.
    cmd = ('taup_time -ph P -mod ak135 -time -o %s < %s > %s') % (
        outputpath, inputpath, errorpath)
    p = subprocess.Popen(cmd,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    p.wait()

    Logfile.add('OUTPUT: ' + outputpath)

    # taup_time emits one travel time per single-token line; subtract the
    # per-station cross-correlation shift from every value.
    L = []
    with open(outputpath, 'r') as output:
        for k in output:
            k = k.split()

            if len(k) == 1:
                tt = k[0].replace('\n', '')
                tt = float(tt) - float(Xcorrshift[station.getName()].shift)
                L.append(tt)

    # South-west corner of the grid (first node of the row-major sweep).
    Latul = o_lat - ((dimX - 1) / 2) * gridspacing
    Lonul = o_lon - ((dimY - 1) / 2) * gridspacing

    for i in range(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

        for j in range(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing

            de = loc2degrees(Location(oLatul, oLonul), locStation)
            # Row-major index: each lat row holds dimY entries.  The
            # original 'L[i * dimX + j]' mis-addressed the list whenever
            # dimX != dimY (and the local name shadowed the time module).
            ttime = L[i * dimY + j]

            GridArray[(i, j)] = GridElem(oLatul, oLonul, o_depth, ttime, de)
            LMINMAX.append(ttime)

    mint = float(min(LMINMAX))
    maxt = float(max(LMINMAX))
    k = MinTMaxT(mint, maxt)

    TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, Latul, Lonul,
                                            oLator, oLonor, GridArray)

    # Persist results for the parent process; empirical runs get their own
    # file-name scheme.
    if flag_rpe is True:
        Basic.dumpToFile(str(flag) + '-ttt_emp.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-emp' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-emp' + str(flag) + '.pkl', station)
    else:
        Basic.dumpToFile(str(flag) + '-ttt.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-' + str(flag) + '.pkl', station)

    # Best-effort scratch-file cleanup; missing files are only logged.
    try:
        os.remove(inputpath)
        os.remove(outputpath)
        os.remove(errorpath)
    except OSError:
        Logfile.exception('cannot delete files')
Beispiel #25
0
def calcTTTAdv_cube(Config,
                    station,
                    Origin,
                    flag,
                    arrayname,
                    Xcorrshift,
                    Refshift,
                    phase,
                    flag_rpe=False):
    '''
    Build a 3-D (lat, lon, depth) travel-time cube for one station with the
    pyrocko ``cake`` ray tracer and dump it to pickle files.

    :param Config: processing configuration dict (wrapped in ConfigObj).
    :param station: station object providing ``lat``, ``lon``, ``getName()``.
    :param Origin: event origin dict with 'lat', 'lon', 'depth'.
    :param flag: integer worker id used in the pickle file names.
    :param arrayname: unused; kept for call compatibility.
    :param Xcorrshift: unused here; kept for call compatibility.
    :param Refshift: unused; kept for call compatibility.
    :param phase: phase name for :class:`cake.PhaseDef` (e.g. 'P').
    :param flag_rpe: if True, use the ``*_emp`` grid dimensions and the
        ``*-ttt_emp.pkl`` output naming scheme.
    :raises Exception: if any computed travel time is exactly zero,
        indicating an unusable phase definition.
    '''
    cfg = ConfigObj(dict=Config)
    # Empirical (RPE) runs use their own, separately configured grid.
    if flag_rpe is True:
        dimX = cfg.Int('dimx_emp')
        dimY = cfg.Int('dimy_emp')
        dimZ = cfg.Int('dimz_emp')
    else:
        dimX = cfg.Int('dimx')
        dimY = cfg.Int('dimy')
        dimZ = cfg.Int('dimz')

    orig_depth = float(Origin['depth'])

    # 'depths' config holds relative start,stop(,step) offsets around the
    # origin depth; only start/stop are used, dimZ fixes the sampling.
    start, stop, step = cfg.String('depths').split(',')
    start = orig_depth + float(start)
    stop = orig_depth + float(stop)
    depths = np.linspace(start, stop, num=dimZ)

    gridspacing = cfg.config_geometry.gridspacing
    # NOTE(review): 'cfg_yaml' is not defined in this function -- it must
    # be a module-level global; confirm before refactoring further.
    traveltime_model = cfg_yaml.config.traveltime_model

    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])

    # NOTE(review): grid-point counts added to degrees (cf. the corner
    # formula below, which uses gridspacing) -- kept for compatibility.
    oLator = o_lat + dimX / 2
    oLonor = o_lon + dimY / 2

    TTTGridMap = {}
    LMINMAX = []
    GridArray = {}
    locStation = Location(station.lat, station.lon)
    Phase = cake.PhaseDef(phase)
    path = palantiri.__path__
    model = cake.load_model(path[0] + '/data/' + traveltime_model)

    # South-west corner of the horizontal grid.
    Latul = o_lat - ((dimX - 1) / 2) * gridspacing
    Lonul = o_lon - ((dimY - 1) / 2) * gridspacing

    for depth in depths:
        o_depth = depth
        for i in range(dimX):
            oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

            for j in range(dimY):
                oLonul = o_lon - (
                    (dimY - 1) / 2) * gridspacing + j * gridspacing

                de = loc2degrees(Location(oLatul, oLonul), locStation)
                arrivals = model.arrivals([de, de],
                                          phases=Phase,
                                          zstart=o_depth * km)
                # Fallback chain: retry with refinement, then with a small
                # depth bracket, if cake finds no arrival.
                try:
                    ttime = arrivals[0].t
                except Exception:
                    try:
                        arrivals = model.arrivals([de, de],
                                                  phases=Phase,
                                                  zstart=o_depth * km,
                                                  zstop=o_depth * km,
                                                  refine=True)
                        ttime = arrivals[0].t
                    except Exception:
                        arrivals = model.arrivals([de, de],
                                                  phases=Phase,
                                                  zstart=o_depth * km - 2.5,
                                                  zstop=o_depth * km + 2.5,
                                                  refine=True)
                        ttime = arrivals[0].t

                GridArray[(i, j, depth)] = GridElem(oLatul, oLonul, o_depth,
                                                    ttime, de)
                LMINMAX.append(ttime)

                if ttime == 0:
                    raise Exception("\033[31mILLEGAL: phase definition\033[0m")

    mint = min(LMINMAX)
    maxt = max(LMINMAX)
    # NOTE(review): o_depth is the LAST depth of the loop when stored here.
    TTTGridMap[station.getName()] = TTTGrid(o_depth, mint, maxt, Latul, Lonul,
                                            oLator, oLonor, GridArray)
    k = MinTMaxT(mint, maxt)

    # Persist results; empirical runs get their own file-name scheme.
    if flag_rpe is True:
        Basic.dumpToFile(str(flag) + '-ttt_emp.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-emp' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-emp' + str(flag) + '.pkl', station)
    else:
        Basic.dumpToFile(str(flag) + '-ttt.pkl', TTTGridMap)
        Basic.dumpToFile('minmax-' + str(flag) + '.pkl', k)
        Basic.dumpToFile('station-' + str(flag) + '.pkl', station)
    def refTrigger(self, RefWaveform, phase, cfg_yaml):
        '''
        Pick the onset of *phase* on a reference waveform and return the
        time correction between the theoretical and the triggered (or
        manually picked) arrival.

        Workflow: compute the theoretical travel time with cake, read and
        band-pass filter the reference-station waveform, run a recursive
        STA/LTA trigger on it and, if 'autoxcorrcorrectur' is enabled,
        let the user verify/re-pick the onset interactively via snuffler.

        :param RefWaveform: obspy-style stream; stats of trace 0 provide
            the NET.STA.LOC.CHA id.
        :param phase: phase name for cake.PhaseDef (e.g. 'P').
        :param cfg_yaml: parsed YAML configuration object.
        :returns: tuple (tdiff, To) -- onset correction in seconds and a
            Trigger bookkeeping object.
        '''
        Config = self.Config
        cfg = ConfigObj(dict=Config)
        name = ('%s.%s.%s.%s') % (RefWaveform[0].stats.network,
                                  RefWaveform[0].stats.station,
                                  RefWaveform[0].stats.location,
                                  RefWaveform[0].stats.channel)

        # Station metadata and epicentral distance in degrees.
        i = self.searchMeta(name, self.StationMeta)
        de = loc2degrees(self.Origin, i)

        ptime = 0

        Phase = cake.PhaseDef(phase)
        model = cake.load_model()
        # Colosseo input skips the '*km' depth scaling applied to real
        # data (NOTE(review): inferred from the branch -- confirm units).
        if cfg_yaml.config_data.colesseo_input is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km, zstop=0.)
        # Fallback: retry slightly above the source if no arrival found.
        try:
            ptime = arrivals[0].t
        except Exception:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km-0.1)
            ptime = arrivals[0].t

        if ptime == 0:
                raise Exception("\033[31mILLEGAL: phase definition\033[0m")

        tw = self.calculateTimeWindows(ptime)

        # Read the waveform with the reader matching the data source.
        if cfg_yaml.config_data.pyrocko_download is True:
            stP = self.readWaveformsPicker_pyrocko(i, tw, self.Origin, ptime,
                                                   cfg_yaml)
        elif cfg_yaml.config_data.colesseo_input is True:
            stP = self.readWaveformsPicker_colos(i, tw, self.Origin, ptime,
                                                 cfg_yaml)
        else:
            stP = self.readWaveformsPicker(i, tw, self.Origin, ptime, cfg_yaml)

        # Keep an untouched copy on disk, then band-pass for the trigger.
        refuntouchname = os.path.basename(self.AF)+'-refstation-raw.mseed'
        stP.write(os.path.join(self.EventPath, refuntouchname), format='MSEED',
                                                                byteorder='>')
        stP.filter("bandpass",
                   freqmin=float(cfg_yaml.config_xcorr.refstationfreqmin),
                   freqmax=float(cfg_yaml.config_xcorr.refstationfreqmax))

        stP.trim(tw['xcorrstart'], tw['xcorrend'])
        trP = stP[0]

        # Re-base the trace on a fixed arbitrary epoch for relative timing.
        trP.stats.starttime = UTCDateTime(3600)
        refname = os.path.basename(self.AF)+'-refstation-filtered.mseed'
        trP.write(os.path.join(self.EventPath, refname), format='MSEED',
                                                         byteorder='>')

        # Recursive STA/LTA characteristic function and onset trigger.
        sta = float(cfg_yaml.config_xcorr.refsta)
        lta = float(cfg_yaml.config_xcorr.reflta)
        cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate),
                        int(lta * trP.stats.sampling_rate))

        # NOTE(review): thresholds passed as (lta, sta) -- unusual order
        # for triggerOnset; confirm intended.
        t = triggerOnset(cft, lta, sta)

        try:
            onset = t[0][0] / trP.stats.sampling_rate

        except Exception:
            # No trigger found: fall back to the minimum forerun.
            onset = self.mintforerun

        trigger = trP.stats.starttime+onset

        tdiff = (trP.stats.starttime + onset)-(UTCDateTime(3600)
                                               + self.mintforerun)

        # Theoretical arrival and the trigger onset in absolute time.
        refp = UTCDateTime(self.Origin.time)+ptime
        reftriggeronset = refp+onset-self.mintforerun

        # Optional interactive verification: write snuffler markers, plot
        # the trigger and let the user type a manual pick.
        if cfg_yaml.config_xcorr.autoxcorrcorrectur is True:
                refmarkername = os.path.join(self.EventPath,
                                             ('%s-marker') % (os.path.basename(
                                              self.AF)))
                fobjrefmarkername = open(refmarkername, 'w')
                fobjrefmarkername.write('# Snuffler Markers File Version\
                                         0.2\n')
                fobjrefmarkername.write(('phase: %s 0 %s    None           None         None         XWStart        None False\n') % (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
                fobjrefmarkername.write(('phase: %s 0 %s    None           None         None         XWEnd        None False\n') % (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
                fobjrefmarkername.write(('phase: %s 1 %s    None           None         None         TheoP        None False\n') % (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
                fobjrefmarkername.write(('phase: %s 3 %s    None           None         None         XTrig        None False') % (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
                fobjrefmarkername.close()

                cmd = 'snuffler %s --markers=%s&' % (os.path.join(
                                                    self.EventPath,
                                                    refuntouchname),
                                                    refmarkername)
                os.system(cmd)

                thrOn = float(self.Config['reflta'])
                thrOff = float(self.Config['refsta'])
                plotTrigger(trP, cft, thrOn, thrOff)

                selection = float(input('Enter self picked phase in seconds: '))
                tdiff = selection-self.mintforerun
                refname = os.path.basename(self.AF)+'-shift.mseed'
                trP.stats.starttime = trP.stats.starttime - selection
                trP.write(os.path.join(self.EventPath, refname),
                                       format='MSEED')

        '''
        tdiff = 0
        trigger = trP.stats.starttime
        '''
        To = Trigger(name, trigger, os.path.basename(self.AF), tdiff)

        return tdiff, To
Beispiel #27
0
def writeSembMatricesSingleArray (SembList,Config,Origin,arrayfolder,ntimes,switch):
    '''
    Write the semblance matrix of a single array process to disk: one ASCII
    grid file per time step in *arrayfolder*.

    :param SembList: per-timestep semblance values, shape (ntimes, dimX*dimY).
    :param Config: processing configuration dict (wrapped in ConfigObj).
    :param Origin: event origin dict with 'lat', 'lon', 'depth', 'time'.
    :param arrayfolder: destination directory for the .ASC files.
    :param ntimes: number of time steps (written into the file header).
    :param switch: filter index embedded in the output file names.
    '''
    logger.info ('start write semblance matrices')

    cfg= ConfigObj (dict=Config)
    origin = OriginCfg (Origin)

    # Grid geometry from the configuration.
    dimX   = cfg.dimX()         # number of grid nodes in latitude
    dimY   = cfg.dimY()         # number of grid nodes in longitude
    winlen = cfg.winlen ()      # analysis window length [s]
    step   = cfg.step()         # time step between windows [s]

    latv   = []
    lonv   = []

    gridspacing = cfg.Float ('gridspacing')
    migpoints   = dimX * dimY

    o_lat   = origin.lat()         # event latitude
    o_lon   = origin.lon()         # event longitude
    oLatul  = 0
    oLonul  = 0

    z=0

    # Build the flat, row-major lists of grid-node coordinates centred on
    # the origin; Latul/Lonul keep the south-west corner for the headers.
    # NOTE(review): uses py2-only xrange -- verify target interpreter.
    for i in xrange(dimX):
         oLatul = o_lat - ((dimX-1)/2) * gridspacing + i*gridspacing

         if z == 0 and i == 0:
             Latul = oLatul
         o=0

         for j in xrange (dimY):
               oLonul = o_lon - ((dimY-1)/2) * gridspacing + j * gridspacing

               if o==0 and j==0:  Lonul = oLonul

               latv.append (oLatul)
               lonv.append (oLonul)
    #endfor

    rc  = UTCDateTime (Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d'% (rc.day,rc.month,rc.year, rc.hour,rc.minute,rc.second)
    d   = rc.timestamp

    # One ASCII grid file per time step 'a'; 'i' is that step's flat grid.
    for a, i in enumerate(SembList):

        fobj = open (os.path.join (arrayfolder,'%s-%s_%03d.ASC' % (switch,Origin['depth'],a)),'w')
        # Header: reference time, window parameters and grid geometry.
        fobj.write ('# %s , %s\n' % (d,rcs))
        fobj.write ('# step %ds| ntimes %d| winlen: %ds\n' % (step,ntimes,winlen))
        fobj.write ('# \n')
        fobj.write ('# southwestlat: %.2f dlat: %f nlat: %f \n'%(Latul,gridspacing,dimX))
        fobj.write ('# southwestlon: %.2f dlon: %f nlon: %f \n'%(Lonul,gridspacing,dimY))
        fobj.write ('# ddepth: 0 ndepth: 1 \n')

        # One 'lat lon depth semblance' line per grid node.
        for j in range (migpoints):
            x= latv[j]
            y= lonv[j]
            z= origin.depth()         # event depth (constant per node)
            semb = i[j]

            fobj.write ('%.2f %.2f %.2f %.20f\n' % (x,y,z,semb))
        #endfor

        fobj.close()