Example #1
    def convert_time(self, r):
        """
        given a row in the result,
         if have epoch times on 2,3: convert them to Fdsn
         if have Fdsn times on 2,3: return row as it is
         otherwise return -1
        """
        fmt = ("%Y-%m-%dT%H:%M:%S.%fZ")
        try:
            # If both values parse as FDSN strings, no conversion is needed
            dt.datetime.strptime(r[f_id['earliest']], fmt)
            dt.datetime.strptime(r[f_id['latest']], fmt)
            return r
        except TypeError:
            pass
        try:
            # availability/availability_extent
            earliest = timedoy.TimeDOY(epoch=r[f_id['earliest']])
            earliest = earliest.getFdsnTime() + "Z"
            latest = timedoy.TimeDOY(epoch=r[f_id['latest']])
            latest = latest.getFdsnTime() + "Z"
            r[f_id['earliest']] = earliest
            r[f_id['latest']] = latest
        except TypeError:
            raise PH5AvailabilityError(
                "convert_time requires a list as its parameter.")
        except Exception:
            errmsg = "The list sent to convert_time does not have epoch "\
                "times at %s and %s" % (f_id['earliest'], f_id['latest'])
            raise PH5AvailabilityError(errmsg)

        return r
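
# A minimal sketch of the type-based dispatch convert_time relies on:
# strptime() raises TypeError for a non-string (an epoch float) and only
# parses FDSN-style strings. The values below are illustrative.
import datetime as dt

fmt = "%Y-%m-%dT%H:%M:%S.%fZ"
print(dt.datetime.strptime("2019-06-01T12:30:00.000000Z", fmt))
try:
    dt.datetime.strptime(1559392200.0, fmt)
except TypeError:
    print("epoch value detected; would convert via timedoy.TimeDOY")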
Example #2
def read_json():
    '''
       Read all 3 json files.
          Inputs:
             ARGS.array_json
             ARGS.event_json
             ARGS.data_json
          Sets:
             ARRAY - A global object containing the contents of ARGS.array_json
             EVENT - A global object containing the contents of ARGS.event_json
             DATA - A global object containing the contents of ARGS.data_json
    '''
    global EVENT, ARRAY, DATA

    nope = []
    if not os.path.exists(ARGS.event_json):
        nope.append(ARGS.event_json)
    if not os.path.exists(ARGS.array_json):
        nope.append(ARGS.array_json)
    if not os.path.exists(ARGS.data_json):
        nope.append(ARGS.data_json)

    if len(nope) != 0:
        for n in nope:
            sys.stderr.write("Error: {0} not found.\n".format(n))
        sys.exit()

    EVENT = _read_json(ARGS.event_json)
    ARRAY = _read_json(ARGS.array_json)
    DATA = {}
    #   Organize DATA by DAS SN for easy lookup
    D = _read_json(ARGS.data_json)
    Datas = D['Data']
    for Data in Datas:
        if Data['das'] not in DATA:
            DATA[Data['das']] = []
        yr, doy, hr, mn, sc = Data['first_sample'].split(':')
        window_start = timedoy.TimeDOY(year=int(yr),
                                       doy=int(doy),
                                       hour=int(hr),
                                       minute=int(mn),
                                       second=float(sc))
        yr, doy, hr, mn, sc = Data['last_sample'].split(':')
        window_stop = timedoy.TimeDOY(year=int(yr),
                                      doy=int(doy),
                                      hour=int(hr),
                                      minute=int(mn),
                                      second=float(sc))
        #   Save window_start and window_stop as timedoy object
        Data['window_start'] = window_start
        Data['window_stop'] = window_stop
        DATA[Data['das']].append(Data)
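
# Stand-in for the 'YYYY:DOY:HH:MM:SS.s' parsing above using only the
# stdlib (an assumption -- timedoy.TimeDOY is the real dependency and may
# differ in edge cases such as leap handling):
import datetime as dt

def parse_doy_time(s):
    yr, doy, hr, mn, sc = s.split(':')
    day0 = dt.datetime(int(yr), 1, 1) + dt.timedelta(days=int(doy) - 1)
    return day0 + dt.timedelta(hours=int(hr), minutes=int(mn),
                               seconds=float(sc))

print(parse_doy_time('2019:152:12:30:05.5'))  # 2019-06-01 12:30:05.500000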
Example #3
def backup(table_type, table_path, table):
    '''   Create a backup in kef format. File has year and doy in name.
    '''
    if NO_BACKUP or table.rows == []:
        return
    tdoy = timedoy.TimeDOY(epoch=time.time())
    # Use day-of-year (tm_yday), not day-of-month, so the file name
    # matches the docstring
    tt = "{0:04d}{1:03d}".format(tdoy.dtobject.year,
                                 tdoy.dtobject.timetuple().tm_yday)
    prefix = "{0}_{1}".format(table_type, tt)
    outfile = "{0}_00.kef".format(prefix)
    #   Do not overwrite existing file
    i = 1
    while os.path.exists(outfile):
        outfile = "{0}_{1:02d}.kef".format(prefix, i)
        i += 1
    #   Exit if we can't write backup kef
    if os.access(os.getcwd(), os.W_OK):
        print("Writing table backup: {0}.".format(outfile))
    else:
        sys.stderr.write("Can't write: {0}.\nExiting!\n".format(
            os.path.join(os.getcwd(), outfile)))
        sys.exit(-3)
    #
    try:
        fh = open(outfile, 'w')
        T2K.table_print(table_path, table, fh=fh)
        fh.close()
    except Exception as e:
        sys.stderr.write("Failed to save {0}.\n{1}\nExiting!\n".format(
            outfile, e))
        sys.exit(-4)
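
# The collision-avoidance naming above, isolated (same logic, no I/O side
# effects; the example file name is illustrative):
import os

def unique_kef_name(prefix):
    outfile = "{0}_00.kef".format(prefix)
    i = 1
    while os.path.exists(outfile):
        outfile = "{0}_{1:02d}.kef".format(prefix, i)
        i += 1
    return outfile  # e.g. Event_t_2019152_00.kef, then _01.kef, ...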
Example #4
def process_all():
    '''
       Process each shot line, shot, array, station, and component
       (channel), and print matches to stdout
    '''
    Events = EVENT['Events']
    for Event in Events:
        shot_line = Event['shot_line']
        shot_line_name = "Event_t_{0:03d}".format(int(shot_line))
        for event in Event['Events']:
            yr, doy, hr, mn, sc = event['time'].split(':')
            shot_time = timedoy.TimeDOY(year=int(yr),
                                        doy=int(doy),
                                        hour=int(hr),
                                        minute=int(mn),
                                        second=float(sc))
            if ARGS.offset_secs:
                shot_time = shot_time + ARGS.offset_secs
            shot_id = event['id']
            Arrays = ARRAY['Arrays']
            for Array in Arrays:
                array_name = "Array_t_{0:03d}".format(int(Array['array']))
                sample_rate = Array['sample_rate']
                length = 65536. / sample_rate
                cut_end = shot_time + length
                for station in Array['Stations']:
                    chan = station['chan']
                    das = station['das']
                    station_id = station['id']
                    seed_id = station['seed_station_name']
                    fs, ls, gaps = _is_in(das, shot_time, length,
                                          1. / sample_rate)
                    if fs is None:
                        fs = 'NA'
                    if ls is None:
                        ls = 'NA'
                    if ARGS.epoch:
                        if fs != 'NA':
                            fs = str(timedoy.passcal2epoch(fs, fepoch=True))
                        if ls != 'NA':
                            ls = str(timedoy.passcal2epoch(ls, fepoch=True))
                        line = [
                            shot_line_name, shot_id,
                            str(shot_time.epoch(fepoch=True)),
                            str(cut_end.epoch(fepoch=True)), array_name,
                            station_id, seed_id, das,
                            str(chan), fs, ls
                        ]
                    else:
                        line = [
                            shot_line_name, shot_id,
                            shot_time.getPasscalTime(ms=True),
                            cut_end.getPasscalTime(ms=True), array_name,
                            station_id, seed_id, das,
                            str(chan), fs, ls
                        ]
                    if ARGS.csv:
                        print(','.join(line))
                    else:
                        print(' '.join(line))
                    if len(gaps) != 0:
                        for g in gaps:
                            print("\t{0} {1}".format(g[0], g[1]))
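
# The cut window is a fixed 65536 samples, so its length in seconds
# depends on the sample rate (rates below are illustrative):
for sample_rate in (100., 250., 500.):
    print(sample_rate, 65536. / sample_rate)  # 655.36, 262.144, 131.072 s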
Example #5
    def get_json_report(self, result):
        today = dt.datetime.now()
        now_tdoy = timedoy.TimeDOY(epoch=time.mktime(today.timetuple()))
        header = '"created":"%s","datasources":' \
            % now_tdoy.getFdsnTime()

        arow = '"net":"%(net)s","sta":"%(sta)s","loc":"%(loc)s",'\
            '"cha":"%(chan)s","quality":"",'
        if self.SR_included:
            arow += '"sample_rate":%(sRate)s,'
        arow += '"timespans":[%(tspan)s]'
        rows = []
        tspan = []
        try:
            for i in range(len(result)):
                if i != 0 and result[i - 1][:3] != result[i][:3]:
                    # emit a row and reset tspan for the previous
                    # sta, loc, chan whenever any of them changes
                    r = result[i - 1]
                    v = {
                        "net": self.netcode,
                        "sta": r[f_id['sta']],
                        "loc": r[f_id['loc']],
                        "chan": r[f_id['chan']],
                        "tspan": ','.join(tspan)
                    }
                    if self.SR_included:
                        v['sRate'] = r[f_id['sRate']]

                    rows.append("{%s}" % (arow % v))

                    tspan = []
                # add timespan for current processed row
                r = list(result[i])
                r = self.convert_time(r)

                tspan.append('["%s","%s"]' %
                             (r[f_id['earliest']], r[f_id['latest']]))
        except Exception:
            raise PH5AvailabilityError(
                "Wrong format result sent to get_json_report.")

        if tspan != []:
            r = result[-1]
            v = {
                "net": self.netcode,
                "sta": r[f_id['sta']],
                "loc": r[f_id['loc']],
                "chan": r[f_id['chan']],
                "tspan": ','.join(tspan)
            }
            if self.SR_included:
                v['sRate'] = r[f_id['sRate']]

            rows.append("{%s}" % arow % v)

        ret = '{%s[\n%s\n]}' % (header, ',\n'.join(rows))

        return ret
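
# Hedged sketch of the JSON document get_json_report assembles, pieced
# together from the header/arow templates above (all values invented):
#
# {"created":"2019-06-01T12:30:05","datasources":[
# {"net":"XX","sta":"100","loc":"","cha":"DPZ","quality":"",
#  "sample_rate":250,"timespans":[
#    ["2019-06-01T00:00:00.000000Z","2019-06-01T01:00:00.000000Z"]]}
# ]}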
Example #6
def read_windows_file(f):
    '''   Window start time   Window length
          YYYY:JJJ:HH:MM:SS   SSSS   '''
    w = []
    try:
        fh = open(f)
    except IOError:
        return w

    while True:
        line = fh.readline()
        if not line:
            break
        line = line.strip()
        if not line or line[0] == '#':
            continue
        flds = line.split()
        if len(flds) != 2:
            sys.stderr.write("Error in window file: %s\n" % line)
            continue

        ttuple = flds[0].split(':')
        if len(ttuple) != 5:
            sys.stderr.write("Error in window file: %s\n" % flds[0])
            continue

        try:
            tDOY = timedoy.TimeDOY(year=int(ttuple[0]),
                                   month=None,
                                   day=None,
                                   hour=int(ttuple[2]),
                                   minute=int(ttuple[3]),
                                   second=int(ttuple[4]),
                                   microsecond=0,
                                   doy=int(ttuple[1]),
                                   epoch=None)
            start_secs = tDOY.epoch()
            stop_secs = int(flds[1]) + start_secs
        except Exception as e:
            sys.stderr.write("Error in window file: %s\n" % line)
            sys.stderr.write("%s\n" % e)
            continue

        w.append([start_secs, stop_secs])

    return w
Example #7
def window_contained(e):
    '''   Is this event in the data we want to keep?   '''
    global WINDOWS

    #   We want to keep all the data
    if WINDOWS is None:
        return True

    if not e:
        return False

    sample_rate = e.sampleRate
    sample_count = e.sampleCount
    tDOY = timedoy.TimeDOY(year=e.year,
                           month=None,
                           day=None,
                           hour=e.hour,
                           minute=e.minute,
                           second=int(e.seconds),
                           microsecond=0,
                           doy=e.doy,
                           epoch=None)

    event_start_epoch = tDOY.epoch()
    event_stop_epoch = int((float(sample_count) / float(sample_rate)) +
                           event_start_epoch)

    for w in WINDOWS:
        window_start_epoch = w[0]
        window_stop_epoch = w[1]

        #   Window start in event KEEP
        if event_start_epoch <= window_start_epoch and \
           event_stop_epoch >= window_start_epoch:
            return True
        #   Entire event in window KEEP
        if event_start_epoch >= window_start_epoch and \
           event_stop_epoch <= window_stop_epoch:
            return True
        #   Event stop in window KEEP
        if event_start_epoch <= window_stop_epoch and \
           event_stop_epoch >= window_stop_epoch:
            return True

    return False
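
# The three KEEP tests above, taken together, reduce to a plain
# interval-intersection test; a compact equivalent (sketch):
def overlaps(ev_start, ev_stop, win_start, win_stop):
    return ev_start <= win_stop and ev_stop >= win_start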
Example #8
def str2tdoy(t):
    '''   Convert a colon separated string to a TimeDOY object   '''
    yr, jd, hr, mn, sc, ms = map(int, t.split(":"))
    us = ms * 1000
    tdoy = timedoy.TimeDOY(year=yr,
                           hour=hr,
                           minute=mn,
                           second=sc,
                           microsecond=us,
                           doy=jd)

    return tdoy
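
# Illustrative call (input format assumed to be YYYY:DOY:HH:MM:SS:MS):
# tdoy = str2tdoy("2019:152:12:30:05:500")   # 500 ms -> 500000 us
# print(tdoy.epoch(fepoch=True))             # float epoch seconds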
Example #9
    def __init__(self):
        # Sentinels: self.min starts late (2019) and self.max starts
        # early (1970), so the first real time compared replaces both
        self.min = timedoy.TimeDOY(year=2019,
                                   month=1,
                                   day=1,
                                   hour=0,
                                   minute=0,
                                   second=0,
                                   microsecond=0,
                                   doy=None,
                                   epoch=None,
                                   dtobject=None)
        self.max = timedoy.TimeDOY(year=1970,
                                   month=1,
                                   day=1,
                                   hour=0,
                                   minute=0,
                                   second=0,
                                   microsecond=0,
                                   doy=None,
                                   epoch=None,
                                   dtobject=None)
        self.bar_info = []
        self.shots = []
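
    # Sketch of how such sentinels are typically consumed (an assumed
    # helper, not part of the original class):
    def note_time(self, tdoy):
        if tdoy.epoch() < self.min.epoch():
            self.min = tdoy
        if tdoy.epoch() > self.max.epoch():
            self.max = tdoy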
Example #10
    def _passcal(self, passcal_s):
        flds = passcal_s.split(':')
        for i in range(5):
            try:
                flds[i]
            except IndexError:
                # pad missing fields (hour/minute/second) with zero
                flds.append(0)

        tdoy = timedoy.TimeDOY(year=int(flds[0]),
                               hour=int(flds[2]),
                               minute=int(flds[3]),
                               second=int(flds[4]),
                               microsecond=0,
                               doy=int(flds[1]))
        epoch_l = tdoy.epoch()
        self._epoch(epoch_l)
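
# The padding idiom above, shown standalone -- indexing past the end
# raises IndexError, so short strings gain zero-valued fields:
flds = '2019:152'.split(':')
for i in range(5):
    try:
        flds[i]
    except IndexError:
        flds.append(0)
print(flds)  # ['2019', '152', 0, 0, 0]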
Example #11
def read_windows_file(f):
    '''   Window start time   Window length
          YYYY:JJJ:HH:MM:SS   SSSS   '''
    w = []
    try:
        fh = open(f)
    except BaseException:
        return w

    while True:
        line = fh.readline()
        if not line:
            break
        line = line.strip()
        if not line or line[0] == '#':
            continue
        flds = line.split()
        if len(flds) != 2:
            LOGGER.error("Error in window file: {0}".format(line))
            continue

        ttuple = flds[0].split(':')
        if len(ttuple) != 5:
            LOGGER.error("Error in window file: {0}".format(flds[0]))
            continue

        tDOY = timedoy.TimeDOY(year=int(ttuple[0]),
                               month=None,
                               day=None,
                               hour=int(ttuple[2]),
                               minute=int(ttuple[3]),
                               second=int(ttuple[4]),
                               microsecond=0,
                               doy=int(ttuple[1]),
                               epoch=None)
        try:
            start_secs = tDOY.epoch()
            stop_secs = int(flds[1]) + start_secs
        except Exception as e:
            LOGGER.error("Error in window file: {0}\n{1}".format(line, e))
            continue

        w.append([start_secs, stop_secs])

    return w
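
# Hypothetical window file accepted by read_windows_file (two columns,
# start time and length in seconds; '#' lines are skipped):
#
#   # start               length
#   2019:152:00:00:00     3600
#   2019:153:12:00:00     7200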
Example #12
def writeEvent(trace, page):
    global EX, EXREC, RESP, SR
    p_das_t = {}
    p_response_t = {}

    if SR is not None:
        if trace.sampleRate != int(SR):
            return

    das_number = str(page.unitID)

    # The gain and bit weight
    p_response_t['gain/value_i'] = trace.gain
    p_response_t['bit_weight/units_s'] = 'volts/count'
    p_response_t['bit_weight/value_d'] = 10.0 / trace.gain / trace.fsd

    n_i = RESP.match(p_response_t['bit_weight/value_d'],
                     p_response_t['gain/value_i'])
    if n_i < 0:
        n_i = RESP.next_i()
        p_response_t['n_i'] = n_i
        EX.ph5_g_responses.populateResponse_t(p_response_t)
        RESP.update()

    # Check to see if group exists for this das, if not build it
    das_g, das_t, receiver_t, time_t = EXREC.ph5_g_receivers.newdas(das_number)
    # Fill in das_t
    p_das_t['raw_file_name_s'] = os.path.basename(F)
    p_das_t['array_name_SOH_a'] = EXREC.ph5_g_receivers.nextarray('SOH_a_')
    p_das_t['response_table_n_i'] = n_i
    p_das_t['channel_number_i'] = trace.channel_number
    p_das_t['event_number_i'] = trace.event
    p_das_t['sample_count_i'] = trace.sampleCount
    p_das_t['sample_rate_i'] = trace.sampleRate
    p_das_t['sample_rate_multiplier_i'] = 1
    p_das_t['stream_number_i'] = trace.stream_number
    tDOY = timedoy.TimeDOY(year=trace.year,
                           month=None,
                           day=None,
                           hour=trace.hour,
                           minute=trace.minute,
                           second=int(trace.seconds),
                           microsecond=0,
                           doy=trace.doy,
                           epoch=None)

    p_das_t['time/epoch_l'] = tDOY.epoch()
    # XXX   need to cross check here   XXX
    p_das_t['time/ascii_s'] = time.asctime(time.gmtime(
        p_das_t['time/epoch_l']))
    p_das_t['time/type_s'] = 'BOTH'
    # XXX   Should this get set????   XXX
    p_das_t['time/micro_seconds_i'] = 0
    # XXX   Need to check if array name exists and generate unique name.   XXX
    p_das_t['array_name_data_a'] = EXREC.ph5_g_receivers.nextarray('Data_a_')
    des = "Epoch: " + str(p_das_t['time/epoch_l']) + \
        " Channel: " + str(trace.channel_number)
    # XXX   This should be changed to handle exceptions   XXX
    EXREC.ph5_g_receivers.populateDas_t(p_das_t)
    # Write out array data (it would be nice if we had int24) we use int32!
    EXREC.ph5_g_receivers.newarray(p_das_t['array_name_data_a'],
                                   trace.trace,
                                   dtype='int32',
                                   description=des)
    update_index_t_info(
        p_das_t['time/epoch_l'] +
        (float(p_das_t['time/micro_seconds_i']) / 1000000.),
        p_das_t['sample_count_i'],
        p_das_t['sample_rate_i'] / p_das_t['sample_rate_multiplier_i'])
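
# Worked example of the bit-weight formula above, assuming trace.fsd is
# the full-scale count value (the numbers are illustrative):
gain, fsd = 1, 2 ** 23
bit_weight = 10.0 / gain / fsd  # volts/count
print(bit_weight)               # ~1.19e-06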
Example #13
def gather():
    '''   Create event gather   '''
    if not ARGS.stations_to_gather:
        ARGS.stations_to_gather = P5.Array_t[ARGS.station_array]['order']
    if ARGS.all_events:
        ARGS.evt_list = P5.Event_t[ARGS.shot_line]['order']

    for evt in ARGS.evt_list:
        try:
            if not ARGS.start_time:
                event_t = P5.Event_t[ARGS.shot_line]['byid'][evt]
            else:
                event_t = None

            logging.info("Extracting receivers for event {0:s}.".format(evt))
        except Exception:
            logging.warn("Warning: Event {0} not found.".format(evt))
            continue
        #
        fh = None
        #   Initialize
        sf = segyfactory.Ssegy(None, event_t, utm=ARGS.use_utm)
        #   Allow length of traces to be up to 2^16 samples long
        sf.set_break_standard(ARGS.break_standard)
        #   Set external header type
        sf.set_ext_header_type(ARGS.ext_header)
        #   Set event information
        if event_t:
            sf.set_event_t(event_t)
            #   Event time
            event_tdoy = timedoy.TimeDOY(
                microsecond=event_t['time/micro_seconds_i'],
                epoch=event_t['time/epoch_l'])
            Offset_t = P5.read_offsets_shot_order(ARGS.station_array, evt,
                                                  ARGS.shot_line)
        else:
            event_tdoy = evt
            Offset_t = None
            logging.warn("Warning: No shot to receiver distances found.")
        if ARGS.seconds_offset_from_shot:
            event_tdoy += ARGS.seconds_offset_from_shot
        end_tdoy = event_tdoy + ARGS.length
        #   Event start time
        start_fepoch = event_tdoy.epoch(fepoch=True)
        #   Trace cut end time
        stop_fepoch = end_tdoy.epoch(fepoch=True)
        Array_t = P5.Array_t[ARGS.station_array]['byid']
        #   All channels (components) available for this array
        chans_available = P5.channels_Array_t(ARGS.station_array)
        #   The trace sequence
        i = 0
        skipped_chans = 0
        for sta in ARGS.stations_to_gather:
            logging.info("-=" * 20)
            logging.info(
                "Attempting to find data for station {0}.".format(sta))
            #   Shot to station information
            if Offset_t and sta in Offset_t:
                offset_t = Offset_t[sta]
                sf.set_offset_t(offset_t)
            #   Array geometry
            if sta not in Array_t:
                logging.info(
                    "Warning: The station {0} is not in array {1}.".format(
                        sta, ARGS.station_array))
                continue
            array_t = Array_t[sta]
            #   Filter out unwanted channels
            chans = []
            for c in ARGS.channels:
                if c in chans_available:
                    chans.append(c)
            #   Create channel name for output file name
            chan_name = ''
            for c in chans:
                chan_name += "{0}".format(c)
            num_traces = len(chans) * len(ARGS.stations_to_gather)
            #   Loop through channels
            for c in chans:
                if c not in array_t:
                    logging.warn(
                        "Warning: No channel information for {0} in array {1}."
                        .format(c, ARGS.station_array))
                    skipped_chans += 1
                    continue
                try:
                    #   Filter out unwanted seed loc codes
                    if ARGS.seed_location and array_t[c][0][
                            'seed_location_code_s'] != ARGS.seed_location:
                        logging.info(
                            "Location code mismatch: {0}/{1}/{2}".format(
                                array_t[c][0]['seed_location_code_s'],
                                ARGS.seed_location, c))
                        continue
                    #   Filter out unwanted seed channels
                    seed_channel_code_s = ph5api.seed_channel_code(
                        array_t[c][0])
                    if ARGS.seed_channel and \
                       seed_channel_code_s != ARGS.seed_channel:
                        logging.info(
                            "Channel code mismatch: {0}/{1}/{2}".format(
                                array_t[c][0]['seed_channel_code_s'],
                                ARGS.seed_channel, c))
                        continue
                except Exception:
                    pass
                #   Loop for each array_t per id_s and channel
                for t in range(len(array_t[c])):
                    #   DAS
                    das = array_t[c][t]['das/serial_number_s']
                    #   Deploy time
                    start_epoch = array_t[c][t]['deploy_time/epoch_l']
                    #   Pickup time
                    stop_epoch = array_t[c][t]['pickup_time/epoch_l']
                    #   Is this shot within the deploy and pickup times
                    if not ph5api.is_in(start_epoch, stop_epoch,
                                        event_tdoy.epoch(), end_tdoy.epoch()):
                        logging.info(
                            "Data logger {0} not deployed between {1} to {2} at {3}."
                            .format(array_t[c][t]['das/serial_number_s'],
                                    event_tdoy, end_tdoy, sta))
                        if ARGS.deploy_pickup:
                            logging.info("Skipping.")
                            continue
                    #   Read Das table, may already be read so don't reread it
                    #   XXX   Debug only
                    try:
                        das_or_fail = P5.read_das_t(das,
                                                    start_epoch=start_fepoch,
                                                    stop_epoch=stop_fepoch,
                                                    reread=False)
                    except Exception:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    if das_or_fail is None:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    #   Sample rate
                    if array_t[c][t]['das/serial_number_s'] in P5.Das_t:
                        sr = float(
                            P5.Das_t[array_t[c][t]['das/serial_number_s']]
                            ['rows'][0]['sample_rate_i']) / float(
                                P5.Das_t[array_t[c][t]['das/serial_number_s']]
                                ['rows'][0]['sample_rate_multiplier_i'])
                    else:
                        sr = 0.  #   Oops! No data for this DAS
                    #   Check v4 sample rate from array_t
                    try:
                        if sr != array_t[c][0]['sample_rate_i'] / float(
                                array_t[c][0]['sample_rate_multiplier_i']):
                            continue
                    except Exception:
                        pass
                    ###   Need to check against command line sample rate here
                    if ARGS.sample_rate and ARGS.sample_rate != sr:
                        logging.warn(
                            "Warning: Sample rate for {0} is {1}, not {2}. "
                            "Skipping.".format(das, sr, ARGS.sample_rate))
                        continue
                    sf.set_length_points(int(
                        (stop_fepoch - start_fepoch) * sr))

                    ###   Need to apply reduction velocity here
                    #   Set cut start and stop times
                    cut_start_fepoch = start_fepoch
                    cut_stop_fepoch = stop_fepoch
                    if ARGS.red_vel > 0.:

                        try:
                            secs, errs = segyfactory.calc_red_vel_secs(
                                offset_t, ARGS.red_vel)
                        except Exception as e:
                            secs = 0.
                            errs = ["Can not calculate reduction velocity: "
                                    "{0}.".format(e)]
                        for err in errs:
                            logging.info(err)
                        cut_start_fepoch += secs
                        cut_stop_fepoch += secs
                    #
                    sf.set_cut_start_epoch(cut_start_fepoch)
                    sf.set_array_t(array_t[c][t])
                    #
                    ###   Cut trace
                    #     Need to pad iff multiple traces
                    traces = P5.cut(das,
                                    cut_start_fepoch,
                                    cut_stop_fepoch,
                                    chan=c,
                                    sample_rate=sr,
                                    apply_time_correction=ARGS.do_time_correct)
                    if len(traces[0].data) == 0:
                        logging.warn(
                            "Warning: No data found for {0} for station {1}.".
                            format(das, sta))
                        continue
                    trace = ph5api.pad_traces(traces)
                    if ARGS.do_time_correct:
                        logging.info(
                            "Applied time drift correction by shifting trace by {0} samples."
                            .format(-1 * sr *
                                    (trace.time_correction_ms / 1000.)))
                        logging.info("Correction is {0} ms.".format(
                            trace.time_correction_ms))
                        logging.info(
                            "Clock drift (seconds/second): {0}".format(
                                trace.clock.slope))
                        for tccomment in trace.clock.comment:
                            tccmt = tccomment.split('\n')
                            for tcc in tccmt:
                                logging.info("Clock comment: {0}".format(tcc))
                    if trace.padding != 0:
                        logging.warn(
                            "Warning: There were {0} samples of padding added to fill gap at middle or end of trace."
                            .format(trace.padding))
                    #   Applying the bit weight (trace.data *
                    #   response_t['bit_weight/value_d']) may become a
                    #   command line option later
                    ###   Need to apply decimation here
                    if ARGS.decimation:
                        #   Decimate
                        shift, data = decimate.decimate(
                            DECIMATION_FACTORS[ARGS.decimation], trace.data)
                        #   Set new sample rate
                        wsr = int(sr / int(ARGS.decimation))
                        sf.set_sample_rate(wsr)
                        trace.sample_rate = wsr
                        #   Set length of trace in samples
                        sf.set_length_points(len(data))
                        sf.length_points_all = len(data)
                        trace.nsamples = len(data)
                        trace.data = data
                    #   Did we read any data?
                    if trace.nsamples == 0:
                        #   Failed to read any data
                        logging.warning(
                            "Warning: No data for data logger {2}/{0} starting at {1}."
                            .format(das, trace.start_time, sta))
                        continue
                    #   Read receiver and response tables
                    receiver_t = trace.receiver_t
                    if receiver_t:
                        sf.set_receiver_t(receiver_t)
                    else:
                        logging.warning(
                            "No sensor orientation found in ph5 file. Contact PIC."
                        )
                    #   Read gain and bit weight

                    if 'response_table_n_i' in array_t[c][t] and array_t[
                            c][t]['response_table_n_i'] != -1:
                        response_t = P5.get_response_t_by_n_i(
                            int(array_t[c][t]['response_table_n_i']))
                    else:
                        response_t = trace.response_t

                    if response_t:
                        sf.set_response_t(response_t)
                    else:
                        logging.warning(
                            "No gain or bit weight found in ph5 file. Contact PIC."
                        )
                    #   Increment line sequence
                    i += 1
                    sf.set_line_sequence(i)
                    sf.set_das_t(trace.das_t[0])
                    logging.info("=-" * 20)
                    logging.info("trace: {0}".format(i))
                    logging.info("Extracted: Station ID {0}".format(sta))
                    logging.info("Chan: {2} Start: {0:s}, Stop: {1:s}.".format(
                        event_tdoy, end_tdoy, c))
                    logging.info("Lat: %f Lon: %f Elev: %f %s" %
                                 (array_t[c][t]['location/Y/value_d'],
                                  array_t[c][t]['location/X/value_d'],
                                  array_t[c][t]['location/Z/value_d'],
                                  array_t[c][t]['location/Z/units_s'].strip()))
                    logging.info("{0}".format(array_t[c][t]['description_s']))
                    #
                    ###   Open SEG-Y file
                    #
                    if not fh:
                        if ARGS.write_stdout:
                            try:
                                fh = sys.stdout
                            except Exception as e:
                                logging.error("{0}".format(e))
                                logging.error(
                                    "Failed to open STDOUT. Can not continue.")
                                sys.exit(-1)
                        else:
                            #
                            ###   Set up file naming
                            #
                            try:
                                nickname = P5.Experiment_t['rows'][-1][
                                    'nickname_s']
                            except Exception:
                                nickname = "X"
                            #
                            base = "{0}_{1}_{2}_{3}".format(
                                nickname, ARGS.station_array[-3:], evt,
                                chan_name)
                            outfilename = "{1:s}/{0:s}_0001.SGY".format(
                                base, ARGS.out_dir)
                            #   Make sure that the name is unique
                            j = 1
                            while os.path.exists(outfilename):
                                j += 1
                                tmp = outfilename[:-8]
                                outfilename = "{0}{1:04d}.SGY".format(tmp, j)
                            #   Open SEG-Y file
                            try:
                                fh = open(outfilename, 'w+')
                                logging.info("Opened: {0}".format(outfilename))
                            except Exception as e:
                                logging.error(
                                    "Error: Failed to open {0}.\t{1}".format(
                                        outfilename, e))
                                sys.stderr.write(
                                    "Error: Failed to open {0}.\t{1}".format(
                                        outfilename, e))
                                sys.exit()
                        #   Write reel headers and first trace
                        logs = segyfactory.write_segy_hdr(
                            trace, fh, sf, num_traces)
                        #   Write any messages
                        for l in logs:
                            logging.info(l)
                    else:
                        #   Write trace
                        logs = segyfactory.write_segy(trace, fh, sf)
                        for l in logs:
                            logging.info(l)
            #   chan
        #   Traces found does not match traces expected
        if i != num_traces and fh:
            #   Need to update reel_header
            if (num_traces - skipped_chans) < i:
                logging.warn(
                    "Warning: Wrote {0} of {1} trace/channels listed in {2}.".
                    format(i, num_traces - skipped_chans, ARGS.station_array))
            sf.set_text_header(i)
            fh.seek(0, os.SEEK_SET)
            sf.write_text_header(fh)
            sf.set_reel_header(i)
            fh.seek(3200, os.SEEK_SET)
            sf.write_reel_header(fh)
        try:
            fh.close()
        except AttributeError:
            pass
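
# How the integer pair stored in Das_t encodes the sample rate used in
# gather() (values illustrative; sub-1-sps rates rely on the multiplier):
sample_rate_i, sample_rate_multiplier_i = 1, 10
sr = float(sample_rate_i) / float(sample_rate_multiplier_i)
print(sr)  # 0.1 sps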
Example #14
def gather(args, p5):
    '''   Create receiver gather   '''
    for sta in args.stations_to_gather:
        try:
            # Read the appropriate line from Array_t
            if args.station_array in p5.Array_t:
                array_t = p5.Array_t[args.station_array]['byid'][sta]
            else:
                p5.read_array_t(args.station_array)
                array_t = p5.Array_t[args.station_array]['byid'][sta]
            LOGGER.info("Extracting receiver(s) at station {0:s}.".format(sta))
            LOGGER.info("Found the following components:")
            for c in array_t.keys():
                LOGGER.info("DAS: {0} component: {1}".format(
                    array_t[c][0]['das/serial_number_s'], c))
                LOGGER.info("Lat: {0} Lon: {1} Elev: {2}".format(
                    array_t[c][0]['location/Y/value_d'],
                    array_t[c][0]['location/X/value_d'],
                    array_t[c][0]['location/Z/value_d']))
                LOGGER.info("{0}".format(array_t[c][0]['description_s']))
            # Read the appropriate line from Das_t and get the sample rate
            p5.read_das_t(array_t[c][0]['das/serial_number_s'],
                          array_t[c][0]['deploy_time/epoch_l'],
                          array_t[c][0]['pickup_time/epoch_l'])
            sr = float(p5.Das_t[array_t[c][0]['das/serial_number_s']]['rows']
                       [0]['sample_rate_i']) / float(
                           p5.Das_t[array_t[c][0]['das/serial_number_s']]
                           ['rows'][0]['sample_rate_multiplier_i'])
        except KeyError:
            LOGGER.warn(
                "Warning: Station {0} not found in the current array.".format(
                    sta))
            continue

        i = 0  # Number of traces found
        fh = None  # SEG-Y file
        # Get a mostly empty instance of segyfactory
        sf = segyfactory.Ssegy(None, None, utm=args.use_utm)
        # Set the type of extended header to write
        sf.set_ext_header_type(args.ext_header)
        # Should we allow traces that are 2^16 samples long
        sf.set_break_standard(args.break_standard)
        # Filter out un-wanted channels here
        chans_available = array_t.keys()
        chans = []
        # Put the desired channels in the desired order
        for c in args.channels:
            if c in chans_available:
                chans.append(c)
        # Channel name for output file name
        chan_name = ''
        for c in chans:
            chan_name += "{0}".format(c)

        # Read Event_t_xxx
        Event_t = p5.Event_t[args.shot_line]['byid']
        order = p5.Event_t[args.shot_line]['order']

        # Take a guess at the number of traces in this SEG-Y file based on
        # number of shots
        num_traces = len(order) * len(chans)
        # Try to read offset distances (keyed on shot id's)
        Offset_t = p5.read_offsets_receiver_order(args.station_array, sta,
                                                  args.shot_line)
        # Loop through each shot by shot id
        for o in order:
            # Check event list (and also shot_range), args.evt_list, here!
            if args.evt_list:
                if o not in args.evt_list:
                    continue

            # Appropriate line from Event_t
            event_t = Event_t[o]
            # Need to handle time offset here, args.seconds_offset_from_shot
            event_tdoy = timedoy.TimeDOY(
                microsecond=event_t['time/micro_seconds_i'],
                epoch=event_t['time/epoch_l'])
            # Adjust start time based on offset entered on command line
            if args.seconds_offset_from_shot:
                event_tdoy += args.seconds_offset_from_shot
            end_tdoy = event_tdoy + args.length

            start_fepoch = event_tdoy.epoch(fepoch=True)
            stop_fepoch = end_tdoy.epoch(fepoch=True)
            # Set start time in segyfactory
            sf.set_cut_start_epoch(start_fepoch)
            # Set event
            sf.set_event_t(event_t)
            # Set shot to receiver distance
            sf.set_offset_t(Offset_t[o])
            # Set number of samples in trace, gets reset if decimated
            sf.set_length_points(int((stop_fepoch - start_fepoch) * sr))
            # Loop through each channel (channel_number_i)
            for c in chans:
                if c not in array_t:
                    continue
                # Filter out unwanted seed loc codes
                if args.seed_location and\
                   array_t[c][0]['seed_location_code_s'] != args.seed_location:
                    LOGGER.info("Location code mismatch: {0}/{1}/{2}".format(
                        array_t[c][0]['seed_location_code_s'],
                        args.seed_location, c))
                    continue
                # Filter out unwanted seed channels
                seed_channel_code_s = ph5api.seed_channel_code(array_t[c][0])
                if args.seed_channel and\
                   seed_channel_code_s != args.seed_channel:
                    LOGGER.info("Channel code mismatch: {0}/{1}/{2}".format(
                        array_t[c][0]['seed_channel_code_s'],
                        args.seed_channel, c))
                    continue
                # DAS
                das = array_t[c][0]['das/serial_number_s']
                for t in range(len(array_t[c])):
                    # Deploy time
                    start_epoch = array_t[c][t]['deploy_time/epoch_l']
                    # Pickup time
                    stop_epoch = array_t[c][t]['pickup_time/epoch_l']
                    # Is this shot within the deploy and pickup times
                    if not ph5api.is_in(start_epoch, stop_epoch,
                                        event_tdoy.epoch(), end_tdoy.epoch()):
                        LOGGER.info("Data logger {0} not deployed between\
                        {1} to {2} at {3}.".format(
                            array_t[c][t]['das/serial_number_s'], event_tdoy,
                            end_tdoy, sta))
                        if args.deploy_pickup:
                            LOGGER.info("Skipping.")
                            continue

                    # Need to apply reduction velocity here
                    if args.red_vel > 0.:
                        try:
                            secs, errs = segyfactory.calc_red_vel_secs(
                                Offset_t[o], args.red_vel)
                        except Exception as e:

                            secs = 0.
                            errs = [
                                "Can not calculate "
                                "reduction velocity: {0}.".format(e)
                            ]
                        for e in errs:
                            LOGGER.info(e)
                        start_fepoch += secs
                        stop_fepoch += secs
                    # Set array_t in segyfactory
                    sf.set_array_t(array_t[c][t])
                    # Read Das table
                    p5.forget_das_t(das)
                    #
                    # Cut trace
                    #
                    traces = p5.cut(das,
                                    start_fepoch,
                                    stop_fepoch,
                                    chan=c,
                                    sample_rate=sr)
                    trace = ph5api.pad_traces(traces)
                    if args.do_time_correct:
                        LOGGER.info("Applied time drift correction by\
                        shifting trace by {0} samples.".format(
                            -1 * sr * (trace.time_correction_ms / 1000.)))
                        LOGGER.info("Correction is {0} ms.".format(
                            trace.time_correction_ms))
                        LOGGER.info("Clock drift (seconds/second): {0}".format(
                            trace.clock.slope))
                        for tccomment in trace.clock.comment:
                            tccmt = tccomment.split('\n')
                            for tcc in tccmt:
                                LOGGER.info("Clock comment: {0}".format(tcc))
                    if trace.nsamples == 0:
                        LOGGER.info("No data found for DAS "
                                    "{0} between {1} and {2}.".format(
                                        das, event_tdoy.getPasscalTime(),
                                        end_tdoy.getPasscalTime()))
                        continue
                    if trace.padding != 0:
                        LOGGER.warn(
                            "Warning: There were {0} samples of padding "
                            "added to fill gap(s) in original traces.".format(
                                trace.padding))
                    # Need to apply decimation here
                    if args.decimation:
                        # Decimate
                        shift, data = decimate.decimate(
                            DECIMATION_FACTORS[args.decimation], trace.data)
                        # Set new sample rate
                        wsr = int(sr / int(args.decimation))
                        sf.set_sample_rate(wsr)
                        trace.sample_rate = wsr
                        # Set length of trace in samples
                        sf.set_length_points(len(data))
                        trace.nsamples = len(data)

                    if trace.nsamples == 0:
                        # Failed to read any data
                        LOGGER.warning("Warning: No data for data\
                        logger {0} starting at {1}.".format(
                            das, trace.start_time))
                        continue
                    # Read receiver and response tables
                    receiver_t = trace.receiver_t
                    if 'response_table_n_i' in array_t[c][t] and\
                       array_t[c][t]['response_table_n_i'] != -1:
                        response_t = p5.get_response_t_by_n_i(
                            int(array_t[c][t]['response_table_n_i']))
                    else:
                        response_t = p5.Response_t['rows'][
                            trace.das_t[0]['response_table_n_i']]
                    # Set sort_t in segyfactory
                    sf.set_sort_t(
                        p5.get_sort_t(start_fepoch, args.station_array))
                    # Set das_t
                    sf.set_das_t(trace.das_t[0])
                    # Line sequence (trace number)
                    sf.set_line_sequence(i)
                    i += 1
                    if response_t:
                        sf.set_response_t(response_t)
                    else:
                        LOGGER.warning(
                            "No gain or bit weight found in ph5 file.")
                    if receiver_t:
                        sf.set_receiver_t(receiver_t)
                    else:
                        LOGGER.warning(
                            "No sensor orientation found in ph5 file.")
                    # Some informational logging
                    LOGGER.info("trace: {0}".format(i))
                    LOGGER.info("-=" * 20)
                    LOGGER.info("Extracting: Event ID %s" % event_t['id_s'])
                    LOGGER.info("Chan: {2} Start: {0:s}, Stop: {1:s}.".format(
                        event_tdoy, end_tdoy, c))
                    LOGGER.info("Lat: %f Lon: %f Elev:\
                    %f %s" % (event_t['location/Y/value_d'],
                              event_t['location/X/value_d'],
                              event_t['location/Z/value_d'],
                              event_t['location/Z/units_s'].strip()))
                    #
                    # Open SEG-Y file
                    #
                    if not fh:
                        if args.write_stdout:
                            try:
                                fh = sys.stdout
                            except Exception as e:
                                LOGGER.error("{0}".format(e.message))
                                LOGGER.error(
                                    "Failed to open STDOUT. Can not continue.")
                                sys.exit(-1)
                        else:
                            #
                            # Set up file naming
                            #
                            try:
                                nickname = p5.Experiment_t[
                                    'rows'][-1]['nickname_s']
                            except BaseException:
                                nickname = "X"
                            #
                            base = "{0}_{1}_{2}_{3}".format(
                                nickname, args.station_array[-3:], sta,
                                chan_name)
                            outfilename = "{1:s}/{0:s}_0001.SGY".format(
                                base, args.out_dir)
                            # Make sure that the name is unique
                            j = 1
                            while os.path.exists(outfilename):
                                j += 1
                                tmp = outfilename[:-8]
                                outfilename = "{0}{1:04d}.SGY".format(tmp, j)
                            # Open SEG-Y file
                            try:
                                fh = open(outfilename, 'w+')
                                LOGGER.info("Opened: {0}".format(outfilename))
                            except Exception as e:
                                LOGGER.error("Failed to open {0}.\t{1}".format(
                                    outfilename, e))
                                sys.exit()
                        # Write reel headers and first trace
                        try:
                            logs = segyfactory.write_segy_hdr(
                                trace, fh, sf, num_traces)
                            # Write any messages
                            for l in logs:
                                LOGGER.info(l)
                        except segyfactory.SEGYError:
                            LOGGER.error("Header write failure.")
                            sys.exit()
                    else:
                        # Write trace
                        try:
                            logs = segyfactory.write_segy(trace, fh, sf)
                            for l in logs:
                                LOGGER.info(l)
                            LOGGER.info('=-' * 40)
                        except segyfactory.SEGYError:
                            LOGGER.error("Trace write failure.")
                            sys.exit()
        # Traces found does not match traces expected
        if fh and i != num_traces:
            # Need to update reel_header
            LOGGER.warn("Wrote {0} of {1} traces listed in {2}.".format(
                i, num_traces, args.station_array))
            sf.set_text_header(i)
            fh.seek(0, os.SEEK_SET)
            sf.write_text_header(fh)
            sf.set_reel_header(i)
            fh.seek(3200, os.SEEK_SET)
            sf.write_reel_header(fh)

        if fh:
            fh.close()
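
# SEG-Y layout assumed by the header rewrite above:
#   bytes    0..3199   textual header     -> fh.seek(0,    os.SEEK_SET)
#   bytes 3200..3599   binary reel header -> fh.seek(3200, os.SEEK_SET)
# Seeking back and rewriting both headers updates the trace count once
# the true number of traces (i) is known.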
Example #15
def main():
    global RESP, INDEX_T, CURRENT_DAS, SIZE_GUESS, F

    get_args()
    initializeExperiment()
    logging.info("grao2ph5 {0}".format(PROG_VERSION))
    logging.info("{0}".format(sys.argv))

    if len(FILES) > 0:
        RESP = Resp(EX.ph5_g_responses)
        rows, keys = EX.ph5_g_receivers.read_index()
        INDEX_T = Rows_Keys(rows, keys)

    for f in FILES:
        F = f
        sys.stdout.write(":<Processing>: %s\n" % (f))
        sys.stdout.flush()
        logging.info("Processing: %s..." % f)

        if f[0] == '#':
            continue
        if f[:3] == 'WS,':
            flds = f.split(',')
            if len(flds) != 8:
                continue
            deploy_flds = [float(x) for x in flds[5].split(':')]
            pickup_flds = [float(x) for x in flds[6].split(':')]
            tdoy0 = timedoy.TimeDOY(year=int(deploy_flds[0]),
                                    hour=int(deploy_flds[2]),
                                    minute=int(deploy_flds[3]),
                                    second=deploy_flds[4],
                                    doy=int(deploy_flds[1]))
            tdoyN = timedoy.TimeDOY(year=int(pickup_flds[0]),
                                    hour=int(pickup_flds[2]),
                                    minute=int(pickup_flds[3]),
                                    second=pickup_flds[4],
                                    doy=int(pickup_flds[1]))
            SIZE_GUESS = (tdoyN.epoch() - tdoy0.epoch()) * LAST_SAMPLE_RATE
            # WS,net_code,station,location,channel,deploy_time,pickup_time,length
            start_time = tdoy0.getFdsnTime()
            while True:
                if timedoy.delta(tdoy0, tdoyN) <= 0:
                    break
                stream = get_ds(flds[1], flds[2], flds[3], flds[4], start_time,
                                int(flds[7]))
                if stream is not None:
                    logging.info(
                        "Adding stream for {0}:{3} starting at {1} and ending at {2} to PH5"
                        .format(stream[0].stats.station,
                                stream[0].stats.starttime,
                                stream[0].stats.endtime,
                                stream[0].stats.channel))
                    updatePH5(stream)
                else:
                    logging.info("No data found for {0} at {1}.".format(
                        flds[2], start_time))
                    time.sleep(3)
                e = tdoy0.epoch(fepoch=True) + int(flds[7])
                tdoy0 = timedoy.TimeDOY(epoch=e)
                start_time = tdoy0.getFdsnTime()

        update_external_references()
        sys.stdout.write(":<Finished>: {0}\n".format(f))
        sys.stdout.flush()
Example #16
def writeEvent(points, event):
    global EX, EXREC, RESP, SR
    p_das_t = {}
    p_response_t = {}

    if event is None:
        return

    def as_ints(v):
        if v >= 1:
            return int(v), 1

        mult = 10.0
        while mult < 10000:
            r = v * mult
            f, i = math.modf(r)
            if f * 1000.0 < 1.0:
                return int(i), int(mult)

            mult *= 10.0

        return None, None
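    # e.g. (illustrative): as_ints(250.0) -> (250, 1)
    #                      as_ints(0.1)   -> (1, 10)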

    for c in range(NUM_CHANNELS):
        if not event[c].unitID:
            continue
        iis = sorted(event[c].trace.keys())
        for ii in iis:
            if SR is not None and event[c].sampleRate is not None:
                if float(event[c].sampleRate) != float(SR):
                    continue

            das_number = event[c].unitID
            if das_number is None or event[c].sampleCount == 0:
                continue

            try:
                if event[c].gain[0] == 'x':
                    # Gain times
                    gain = int(event[c].gain[1:])
                else:
                    # Gain dB
                    gain = int(event[c].gain[:-2])
            except Exception as e:
                LOGGER.warning("Can't determine gain from gain value '{0:s}'. "
                               "Exception: {1:s}".format(event[c].gain, e))
                gain = 0

            # The gain and bit weight
            p_response_t['gain/value_i'] = gain
            try:
                p_response_t['bit_weight/units_s'] = '%s/count' % event[
                    c].bitWeight[-2:]
                p_response_t['bit_weight/value_d'] = float(
                    event[c].bitWeight[:-2])

                n_i = RESP.match(p_response_t['bit_weight/value_d'],
                                 p_response_t['gain/value_i'])
                if n_i < 0:
                    RESP.update()
                    n_i = RESP.next_i()
                    p_response_t['n_i'] = n_i
                    EX.ph5_g_responses.populateResponse_t(p_response_t)
                    RESP.update()
            except Exception as e:
                LOGGER.error("Bit weight undefined. Can't convert '{1:s}'. "
                             "Exception: {0:s}".format(e, event[c].bitWeight))

            # Check to see if group exists for this das, if not build it
            das_g, das_t, receiver_t, time_t = EXREC.ph5_g_receivers.newdas(
                das_number)
            # Fill in das_t
            p_das_t['raw_file_name_s'] = os.path.basename(F)
            p_das_t['array_name_SOH_a'] = EXREC.ph5_g_receivers.nextarray(
                'SOH_a_')
            p_das_t['array_name_log_a'] = EXREC.ph5_g_receivers.nextarray(
                'Log_a_')
            p_das_t['response_table_n_i'] = n_i
            p_das_t['receiver_table_n_i'] = c
            p_das_t['channel_number_i'] = event[c].channel_number + 1
            p_das_t['event_number_i'] = event[c].event
            # force sample rate to 1 sps or greater
            irate, mult = as_ints(float(event[c].sampleRate))
            p_das_t['sample_rate_i'] = irate
            p_das_t['sample_rate_multiplier_i'] = mult
            p_das_t['sample_count_i'] = int(event[c].sampleCount)
            p_das_t['stream_number_i'] = event[c].stream_number + 1
            # Note: We use the time of the first trace. This is because rtleap
            # fix only changes DT packets!
            tDOY = timedoy.TimeDOY(
                year=event[c].trace[ii][0].year,
                month=None,
                day=None,
                hour=event[c].trace[ii][0].hour,
                minute=event[c].trace[ii][0].minute,
                second=int(event[c].trace[ii][0].seconds),
                microsecond=event[c].trace[ii][0].milliseconds * 1000,
                doy=event[c].trace[ii][0].doy,
                epoch=None)
            p_das_t['time/epoch_l'] = tDOY.epoch(fepoch=False)
            # XXX   need to cross check here   XXX
            p_das_t['time/ascii_s'] = time.asctime(
                time.gmtime(p_das_t['time/epoch_l']))
            p_das_t['time/type_s'] = 'BOTH'
            # XXX   Should this get set????   XXX
            p_das_t['time/micro_seconds_i'] = event[c].trace[ii][
                0].milliseconds * 1000
            # XXX   Need to check if array name exists and generate unique
            # name.   XXX
            p_das_t['array_name_data_a'] = EXREC.ph5_g_receivers.nextarray(
                'Data_a_')
            # XXX   Write data   XXX
            t = event[c].trace[ii][0]
            if DEBUG:
                tcount = len(t.trace)
            earray = EXREC.ph5_g_receivers.newarray(
                p_das_t['array_name_data_a'], t.trace, dtype='int32')
            for t in event[c].trace[ii][1:]:
                if DEBUG:
                    tcount += len(t.trace)
                earray.append(t.trace)
            if DEBUG:
                LOGGER.debug(
                    "{0} SR: {1:12.2f}sps Channel: {2} Samples: {3}/{4}".
                    format(tDOY,
                           float(irate) / float(mult),
                           p_das_t['channel_number_i'],
                           p_das_t['sample_count_i'], tcount))
            # XXX   This should be changed to handle exceptions   XXX
            p_das_t['sample_count_i'] = earray.nrows
            EXREC.ph5_g_receivers.populateDas_t(p_das_t)
            if p_das_t['channel_number_i'] == 1:
                update_index_t_info(
                    p_das_t['time/epoch_l'] +
                    (float(p_das_t['time/micro_seconds_i']) / 1000000.),
                    p_das_t['sample_count_i'],
                    float(p_das_t['sample_rate_i']) /
                    float(p_das_t['sample_rate_multiplier_i']))
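
The gain-string convention handled above ('x' prefix for gain times, a
trailing 'dB' for gain in decibels) is easy to exercise on its own; a
minimal sketch with made-up values:

def parse_gain(gain_str):
    '''   'x32' -> 32 (gain times); '36dB' -> 36 (gain dB); 0 on failure   '''
    try:
        if gain_str[0] == 'x':
            return int(gain_str[1:])
        return int(gain_str[:-2])
    except (TypeError, ValueError, IndexError):
        return 0

assert parse_gain('x32') == 32
assert parse_gain('36dB') == 36
assert parse_gain('bogus') == 0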
Example #17
0
import os
import sys
import unittest
import logging
import time
import shutil

import tables
from mock import patch
from testfixtures import LogCapture

from ph5.utilities import fix_srm
from ph5.core import ph5api, timedoy, experiment
from ph5.core.tests.test_base import LogTestCase, TempDirTestCase

tdoy = timedoy.TimeDOY(epoch=time.time())
yeardoy = "{0:04d}{1:03d}".format(tdoy.dtobject.year, tdoy.dtobject.day)


def count_smr_0_1(filename):
    # count 'sample_rate_multiplier_i=0' and 'sample_rate_multiplier_i=1'
    # occurrences in filename
    with open(filename, 'r') as file:
        content = file.read()
        smr0_no = content.count('sample_rate_multiplier_i=0')
        smr1_no = content.count('sample_rate_multiplier_i=1')
        return smr0_no, smr1_no


class TestFixSRM(TempDirTestCase, LogTestCase):
    def test_set_logger(self):
Example #18
0
    def process_das():
        '''   Save trace data   '''
        p_response_t = {}
        # Make Data_a and fill in Das_t
        global EXREC, MINIPH5
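        # NOTE: p_das_t, rh, eh, Das, SR and tr are presumably bound in the
        # enclosing scope of the full source; this excerpt fills p_das_t
        # and reads the others.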

        EXREC = get_current_data_only(SIZE, Das)
        if EXREC.filename != MINIPH5:
            LOGGER.info("Opened: {0}...\n".format(EXREC.filename))
            MINIPH5 = EXREC.filename

        # This is gain in dB since it is from SEG-Y
        try:
            p_response_t['gain/value_i'] = rh['gainConst']
            p_response_t['gain/units_s'] = 'dB'
            p_response_t['bit_weight/value_d'] = rh['traceWeightingFactor']
            p_response_t['bit_weight/units_s'] = 'Unknown'
            n_i = RESP.match(
                p_response_t['bit_weight/value_d'],
                p_response_t['gain/value_i'])
            if n_i < 0:
                n_i = RESP.next_i()
                p_response_t['n_i'] = n_i
                EX.ph5_g_responses.populateResponse_t(p_response_t)
                RESP.update()
        except Exception as e:
            LOGGER.warn("Bit weight or gain improperly "
                        "defined in SEG-Y file - {0}"
                        .format(e))

        # Check to see if group exists for this das, if not build it
        das_g, das_t, receiver_t, time_t = EXREC.ph5_g_receivers.newdas(Das)
        # Build maps group (XXX)
        EXREC.ph5_g_maps.newdas('Das_g_', Das)
        p_das_t['array_name_log_a'] = EXREC.ph5_g_receivers.nextarray('Log_a_')
        p_das_t['response_table_n_i'] = n_i

        year = rh['year']
        doy = rh['day']
        hour = rh['hour']
        minute = rh['minute']
        seconds = rh['second']
        tdoy = timedoy.TimeDOY(year=year,
                               month=None,
                               day=None,
                               hour=hour,
                               minute=minute,
                               second=seconds,
                               microsecond=0,
                               doy=doy,
                               epoch=None,
                               dtobject=None)
        if SR.ext_hdr_type == 'U':
            # Menlo USGS
            p_das_t['time/micro_seconds_i'] = eh['start_usec']
        elif SR.ext_hdr_type == 'P':
            # PASSCAL
            p_das_t['time/micro_seconds_i'] = int(eh['m_secs'] / 1000.)
        else:
            p_das_t['time/micro_seconds_i'] = 0

        p_das_t['sample_count_i'] = rh['sampleLength']
        sample_rate = (1. / rh['deltaSample']) * 1000000.
        sample_rate, factor = as_ints(sample_rate)
        p_das_t['sample_rate_i'] = int(sample_rate)
        p_das_t['sample_rate_multiplier_i'] = factor

        p_das_t['time/epoch_l'] = tdoy.epoch()
        p_das_t['time/ascii_s'] = time.ctime(p_das_t['time/epoch_l'])
        p_das_t['time/type_s'] = 'BOTH'

        if rh['lineSeq'] == 0:
            rh['lineSeq'] = 1

        chan = 1
        if CHAN3:
            mm = rh['lineSeq'] % 3
            if mm == 0:
                chan = 3
            else:
                chan = mm

        p_das_t['channel_number_i'] = chan
        p_das_t['event_number_i'] = rh['event_number']
        p_das_t['array_name_data_a'] = EXREC.ph5_g_receivers.nextarray(
            'Data_a_')
        EXREC.ph5_g_receivers.populateDas_t(p_das_t)
        des = "Epoch: " + str(p_das_t['time/epoch_l']) + \
              " Channel: " + str(p_das_t['channel_number_i'])
        # Write trace data here
        EXREC.ph5_g_receivers.newarray(
            p_das_t['array_name_data_a'], tr, dtype=DTYPE[SR.trace_fmt],
            description=des)
        update_index_t_info(p_das_t['time/epoch_l'] + (
                    float(p_das_t['time/micro_seconds_i']) / 1000000.),
                            p_das_t['sample_count_i'],
                            float(p_das_t['sample_rate_i']) /
                            float(p_das_t['sample_rate_multiplier_i']))
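
Since rh['deltaSample'] holds the SEG-Y sample interval in microseconds, the
rate computation above is plain arithmetic; a minimal sketch with a made-up
interval:

delta_sample = 4000                       # sample interval in microseconds
sample_rate = (1. / delta_sample) * 1000000.
assert abs(sample_rate - 250.) < 1e-6     # 250 sps from a 4 ms interval
# as_ints(250.0) would then return (250, 1) under the helper shown earlier.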
Example #19
0
def gwriteEvent(points, event):
    '''   Create an event list with all the gaps and overlaps cleansed   '''
    def clone(event):
        # Clone event list but without the traces
        clean_event = []
        for i in range(pn130.NUM_CHANNELS):
            clean_event.append(pn130.Event130())
            clean_event[i].bitWeight = event[i].bitWeight
            clean_event[i].channel_number = event[i].channel_number
            clean_event[i].doy = event[i].doy
            clean_event[i].event = event[i].event
            clean_event[i].gain = event[i].gain
            clean_event[i].hour = event[i].hour
            clean_event[i].last_sample_time = event[i].last_sample_time
            clean_event[i].milliseconds = event[i].milliseconds
            clean_event[i].minute = event[i].minute
            clean_event[i].sampleCount = event[i].sampleCount
            clean_event[i].sampleRate = event[i].sampleRate
            clean_event[i].seconds = event[i].seconds
            clean_event[i].stream_number = event[i].stream_number
            clean_event[i].trace = {}
            clean_event[i].unitID = event[i].unitID
            clean_event[i].year = event[i].year

        return clean_event

    clean_event = clone(event)
    for c in range(NUM_CHANNELS):
        if not event[c].unitID:
            continue
        sample_rate = event[c].sampleRate
        sample_interval = 1. / float(sample_rate)
        tdoy1 = None
        # Prepare new trace structure that allows us to break it up on gaps and
        # overlaps
        i = 0
        clean_event[c].trace[i] = []
        for t in event[c].trace:
            tdoy0 = timedoy.TimeDOY(year=t.year,
                                    month=None,
                                    day=None,
                                    hour=t.hour,
                                    minute=t.minute,
                                    second=int(t.seconds),
                                    microsecond=t.milliseconds * 1000,
                                    doy=t.doy,
                                    epoch=None)
            if tdoy1 is not None:
                # Start of this DT packet
                fepoch0 = tdoy0.epoch(fepoch=True)
                # Calculated start of packet from last DT packet
                fepoch1 = tdoy1.epoch(fepoch=True)
                delta = fepoch1 - fepoch0
                if delta < 0.:
                    i += 1
                    clean_event[c].trace[i] = []
                elif delta > 0.:
                    i += 1
                    clean_event[c].trace[i] = []

            clean_event[c].trace[i].append(t)
            num_samples = len(t.trace)
            secs = float(num_samples) * sample_interval
            tdoy1 = tdoy0 + secs

        event[c].trace = []

    writeEvent(points, clean_event)
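
The boundary test above compares the packet start predicted from the
previous packet against the actual packet start; a minimal sketch with
made-up epochs:

fepoch1 = 1262304000.000   # start predicted from the previous packet
fepoch0 = 1262304000.500   # actual start of this packet
delta = fepoch1 - fepoch0
if delta < 0.:
    pass   # packet starts late: a gap, so open a new trace segment
elif delta > 0.:
    pass   # packet starts early: an overlap, also a new segment
else:
    pass   # contiguous: keep appending to the current segment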
Example #20
0
def get_args():
    '''   Read command line arguments   '''
    global ARGS, P5
    import argparse

    parser = argparse.ArgumentParser()

    parser.usage = "Version: %s\n" % PROG_VERSION
    parser.usage += "ph5toevt --eventnumber=shot --nickname=experiment_nickname --length=seconds [--path=ph5_directory_path] [options]\n"
    parser.usage += "\toptions:\n\t--array=array, --offset=seconds (float), --reduction_velocity=km-per-second (float) --format=['SEGY']\n\n"
    parser.usage += "ph5toevt --allevents --nickname=experiment_nickname --length=seconds [--path=ph5_directory_path] [options]\n"
    parser.usage += "\toptions:\n\t--array=array, --offset=seconds (float), --reduction_velocity=km-per-second (float) --format=['SEGY']\n\n"
    parser.usage += "ph5toevt --starttime=yyyy:jjj:hh:mm:ss[:.]sss --nickname=experiment_nickname --length=seconds [--path=ph5_directory_path] [options]\n"
    parser.usage += "\toptions:\n\t--stoptime=yyyy:jjj:hh:mm:ss[:.]sss, --array=array, --reduction_velocity=km-per-second (float) --format=['SEGY']\n\n"
    #parser.usage += "ph5toseg --all, --nickname=experiment_nickname [--path=ph5_directory_path] [--das=das_sn] [--station=station_id] [--doy=comma seperated doy list] [options]"
    parser.usage += "\n\n\tgeneral options:\n\t--channel=[1,2,3]\n\t--sample_rate_keep=sample_rate\n\t--notimecorrect\n\t--decimation=[2,4,5,8,10,20]\n\t--out_dir=output_directory"

    parser.description = "Generate SEG-Y gathers in shot order..."
    #   Usually master.ph5
    parser.add_argument("-n",
                        "--nickname",
                        dest="ph5_file_prefix",
                        help="The ph5 file prefix (experiment nickname).",
                        metavar="ph5_file_prefix",
                        required=True)
    #   Path to the directory that holds master.ph5
    parser.add_argument(
        "-p",
        "--path",
        dest="ph5_path",
        help="Path to ph5 files. Defaults to current directory.",
        metavar="ph5_path",
        default='.')
    #   SEED channel
    parser.add_argument("--channel",
                        dest="seed_channel",
                        help="Filter on SEED channel.",
                        metavar="seed_channel")
    #   SEED network code
    parser.add_argument("--network",
                        dest="seed_network",
                        help="Filter on SEED net code.",
                        metavar="seed_network")
    #   SEED loc code
    parser.add_argument("--location",
                        dest="seed_location",
                        help="Filter on SEED loc code.",
                        metavar="seed_location")
    #   Channels. Will extract in order listed here. 'Usually' 1 -> Z, 2 -> N, 3 -> E
    parser.add_argument(
        "-c",
        "--channels",
        action="store",
        help="List of comma seperated channels to extract. Default = 1,2,3.",
        type=str,
        dest="channels",
        metavar="channels",
        default='1,2,3')
    #   Extract a single event
    parser.add_argument("-e",
                        "--eventnumber",
                        action="store",
                        dest="event_number",
                        type=int,
                        metavar="event_number")
    #   Event id's in order, comma separated
    parser.add_argument(
        "--event_list",
        dest="evt_list",
        help=
        "Comma separated list of event id's to gather from defined or selected events.",
        metavar="evt_list")
    #   Extract all events in Event_t
    parser.add_argument("-E",
                        "--allevents",
                        action="store_true",
                        dest="all_events",
                        help="Extract all events in event table.",
                        default=False)
    #   The shot line number, 0 for Event_t
    parser.add_argument("--shot_line",
                        dest="shot_line",
                        action="store",
                        help="The shot line number that holds the shots.",
                        type=int,
                        metavar="shot_line")
    #   External shot line file
    parser.add_argument(
        "--shot_file",
        dest="shot_file",
        action="store",
        help=
        "Input an external kef file that contains event information, Event_t.kef.",
        type=str,
        metavar="shot_file")
    #   Extract data for all stations starting at this time
    parser.add_argument("-s",
                        "--starttime",
                        action="store",
                        dest="start_time",
                        type=str,
                        metavar="start_time")
    #   The array number
    parser.add_argument("-A",
                        "--station_array",
                        dest="station_array",
                        action="store",
                        help="The array number that holds the station(s).",
                        type=int,
                        metavar="station_array",
                        required=True)
    #   Length of traces to put in gather
    parser.add_argument("-l",
                        "--length",
                        action="store",
                        required=True,
                        type=int,
                        dest="length",
                        metavar="length")
    #   Start trace at time offset from shot time
    parser.add_argument(
        "-O",
        "--seconds_offset_from_shot",
        "--offset",
        metavar="seconds_offset_from_shot",
        help="Time in seconds from shot time to start the trace.",
        type=float,
        default=0.)
    #   Do not time correct texan data
    parser.add_argument("-N",
                        "--notimecorrect",
                        action="store_false",
                        default=True,
                        dest="do_time_correct")
    #   Output directory
    parser.add_argument("-o",
                        "--out_dir",
                        action="store",
                        dest="out_dir",
                        metavar="out_dir",
                        type=str,
                        default=".")
    #   Write to stdout
    parser.add_argument("--stream",
                        action="store_true",
                        dest="write_stdout",
                        help="Write to stdout instead of a file.",
                        default=False)
    #   Use deploy and pickup times to determine where an instrument was deployed
    parser.add_argument(
        "--use_deploy_pickup",
        action="store_true",
        default=False,
        help=
        "Use deploy and pickup times to determine if data exists for a station.",
        dest="deploy_pickup")
    #   Stations to gather, comma separated
    parser.add_argument(
        "-S",
        "--stations",
        "--station_list",
        dest="stations_to_gather",
        help="Comma separated list of stations to receiver gather.",
        metavar="stations_to_gather",
        required=False)
    #   Filter out all sample rates except the listed
    parser.add_argument("-r",
                        "--sample_rate_keep",
                        action="store",
                        dest="sample_rate",
                        metavar="sample_rate",
                        type=float)
    #   Apply a reduction velocity, km
    parser.add_argument("-V",
                        "--reduction_velocity",
                        action="store",
                        dest="red_vel",
                        help="Reduction velocity in km/sec.",
                        metavar="red_vel",
                        type=float,
                        default="-1.")
    #   Decimate data. Decimation factor
    parser.add_argument("-d",
                        "--decimation",
                        action="store",
                        choices=["2", "4", "5", "8", "10", "20"],
                        dest="decimation",
                        metavar="decimation")
    #   Convert geographic coordinates in ph5 to UTM before creating the gather
    parser.add_argument("-U",
                        "--UTM",
                        action="store_true",
                        dest="use_utm",
                        help="Fill SEG-Y headers with UTM instead of lat/lon.",
                        default=False)
    #   How to fill in the extended trace header
    parser.add_argument("-x",
                        "--extended_header",
                        action="store",
                        dest="ext_header",
                        help="Extended trace header style: \
                        'P' -> PASSCAL, \
                        'S' -> SEG, \
                        'U' -> Menlo USGS, \
                        default = U",
                        choices=["P", "S", "U", "I", "N"],
                        default="U",
                        metavar="extended_header_style")
    #   Ignore channel in Das_t. Only useful with texans
    parser.add_argument("--ic",
                        action="store_true",
                        dest="ignore_channel",
                        default=False)
    #   Allow traces to be 2^16 samples long vs 2^15
    parser.add_argument("--break_standard",
                        action="store_false",
                        dest="break_standard",
                        help="Force traces to be no longer than 2^15 samples.",
                        default=True)
    parser.add_argument("--debug",
                        dest="debug",
                        action="store_true",
                        default=False)

    ARGS = parser.parse_args()
    #print ARGS
    try:
        P5 = ph5api.PH5(path=ARGS.ph5_path, nickname=ARGS.ph5_file_prefix)
    except Exception as e:
        sys.stderr.write("Error: Can't open {0} at {1}.".format(
            ARGS.ph5_file_prefix, ARGS.ph5_path))
        sys.exit(-1)
    #
    if ARGS.shot_file:
        if not ARGS.shot_line:
            sys.stderr.write(
                "Error: Shot line switch, --shot_line, required when using external shot file."
            )
            sys.exit(-3)
        external = external_file.External(ARGS.shot_file)
        ARGS.shot_file = external.Event_t
        P5.Event_t_names = ARGS.shot_file.keys()
    else:
        P5.read_event_t_names()
    #
    if ARGS.event_number:
        ARGS.evt_list = list([str(ARGS.event_number)])
    elif ARGS.evt_list:
        ARGS.evt_list = map(str, ARGS.evt_list.split(','))
    elif ARGS.start_time:
        ARGS.start_time = timedoy.TimeDOY(
            epoch=timedoy.passcal2epoch(ARGS.start_time, fepoch=True))
        ARGS.evt_list = [ARGS.start_time]
    #
    if not ARGS.evt_list and not ARGS.all_events:
        sys.stderr.write(
            "Error: Required argument missing. event_number|evt_list|all_events.\n"
        )
        sys.exit(-1)
    #   Event or shot line
    if ARGS.shot_line is not None:
        if ARGS.shot_line == 0:
            ARGS.shot_line = "Event_t"
        else:
            ARGS.shot_line = "Event_t_{0:03d}".format(ARGS.shot_line)
        #
    elif not ARGS.start_time:
        sys.stderr.write("Error: Shot line or start time required.")
        sys.exit(-2)
    #   Array or station line
    ARGS.station_array = "Array_t_{0:03d}".format(ARGS.station_array)
    #   Order of channels in gather
    ARGS.channels = map(int, ARGS.channels.split(','))
    #   Stations to gather
    if ARGS.stations_to_gather:
        ARGS.stations_to_gather = map(int, ARGS.stations_to_gather.split(','))
        ARGS.stations_to_gather.sort()
        ARGS.stations_to_gather = map(str, ARGS.stations_to_gather)

    if not os.path.exists(ARGS.out_dir):
        os.mkdir(ARGS.out_dir)
        os.chmod(ARGS.out_dir, 0777)
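
A minimal sketch, with made-up values, of the post-processing above: shot
line and array numbers become table names, and the channel list is split
into ints:

shot_line = 2
shot_line = "Event_t" if shot_line == 0 else "Event_t_{0:03d}".format(shot_line)
assert shot_line == "Event_t_002"
assert "Array_t_{0:03d}".format(7) == "Array_t_007"
assert list(map(int, "1,2,3".split(','))) == [1, 2, 3]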
Example #21
0
def build_recv(order, line, n):
    ''' order => the keys we have used from FIELD
        line  => the fields of the line from the field file
        n     => the line number in the original file
    '''
    global RECVSTN
    vals = {
        'ID': '',
        'Station': '',
        'Line': '999',
        'Type': '',
        'Channel': '1',
        'Sensor': '',
        'Uphole': '',
        'Lat': '',
        'Y': '',
        'Lon': '',
        'X': '',
        'Elev': '',
        'DT': '',
        'DTime': '',
        'PUTime': '',
        'Shots': '',
        'Comment': '',
        'LED': '',
        'DorP': '',
        'n': ''
    }

    # Shot info in this file
    if 'Shot-ID' in order:
        return None

    if 'Receiver-ID' not in order:
        LOGGER.error("Receiver-ID needed to create dep file.\n")
        return None

    DTime = ''
    PTime = ''
    # Get deploy and pickup time
    if has_time(order):
        try:
            if 'DTimeY:J:H:M:S' not in order and is_deploy(order, line):
                yr = int(line[order['TimeYear']])
                if 'TimeH:M' in order:
                    hr, mn = map(int, line[order['TimeH:M']].split(':'))

                if 'TimeMo/Da' in order:
                    mo, da = map(int, line[order['TimeMo/Da']].split('/'))
                    tdoy = timedoy.TimeDOY(year=yr,
                                           month=mo,
                                           day=da,
                                           hour=hr,
                                           minute=mn,
                                           second=0,
                                           microsecond=0,
                                           doy=None,
                                           epoch=None,
                                           dtobject=None)
                    doy = tdoy.doy()

                sc = 0.0

                DTime = "{0:4d}:{1:03d}:{2:02d}:{3:02d}:{4:06.3f}".format(
                    yr, doy, hr, mn, sc)
            else:
                try:
                    DTime = line[order['DTimeY:J:H:M:S']]
                except BaseException:
                    DTime = None

            if 'PTimeY:J:H:M:S' not in order and not is_deploy(order, line):
                yr = int(line[order['TimeYear']])
                if 'TimeH:M' in order:
                    hr, mn = map(int, line[order['TimeH:M']].split(':'))

                if 'TimeMo/Da' in order:
                    mo, da = map(int, line[order['TimeMo/Da']].split('/'))
                    tdoy = timedoy.TimeDOY(year=yr,
                                           month=mo,
                                           day=da,
                                           hour=hr,
                                           minute=mn,
                                           second=0,
                                           microsecond=0,
                                           doy=None,
                                           epoch=None,
                                           dtobject=None)
                    doy = tdoy.doy()

                sc = 0.0

                PTime = "{0:4d}:{1:03d}:{2:02d}:{3:02d}:{4:06.3f}".format(
                    yr, doy, hr, mn, sc)
            else:
                try:
                    PTime = line[order['PTimeY:J:H:M:S']]
                except BaseException:
                    PTime = None
        except Exception as e:
            LOGGER.error("{1}:\n\tCan't convert time {0}\n".format(line, e))
            return

    keys = order.keys()
    # Look through rest of columns
    for k in keys:
        try:
            if k == 'Receiver-ID':
                try:
                    vals['ID'] = int(line[order[k]])
                    if USE_FACE_PLATE_SN and vals['ID'] < 10000:
                        vals['ID'] += 10000

                    vals['ID'] = str(vals['ID'])
                except BaseException:
                    vals['ID'] = line[order[k]]
            elif k == 'Station':
                try:
                    vals['Station'] = str(int(line[order[k]]))
                except BaseException:
                    vals['Station'] = line[order[k]]
                if vals['Station'] == '100':
                    pass
            elif k == 'Line':
                vals['Line'] = line[order[k]]
            elif k == 'Type':
                vals['Type'] = line[order[k]]
            elif k == 'Channel':
                vals['Channel'] = line[order[k]]
                try:
                    int(vals['Channel'])
                except ValueError:
                    vals['Channel'] = '1'
            elif k == 'Sensor':
                vals['Sensor'] = line[order[k]]
            elif k == 'Uphole':
                vals['Uphole'] = line[order[k]]
            elif k == 'Lat':
                if DEP:
                    vals['Lat'] = _sign(line[order[k]], 'lat')
                else:
                    vals['Lat'] = __sign(line[order[k]], 'lat')
            elif k == 'Y':
                vals['Y'] = line[order[k]]
            elif k == 'Lon':
                if DEP:
                    vals['Lon'] = _sign(line[order[k]], 'lon')
                else:
                    vals['Lon'] = __sign(line[order[k]], 'lon')
            elif k == 'X':
                vals['X'] = line[order[k]]
            elif k == 'Elev':
                vals['Elev'] = line[order[k]]
            elif k == 'Team':
                vals['DT'] = line[order[k]]
            elif k == 'Shots':
                vals['Shots'] = line[order[k]]
            elif k == 'Comment':
                vals['Comment'] = line[order[k]]
            elif k == 'LED':
                vals['LED'] = line[order[k]]
            elif k == 'DorP':
                vals['DorP'] = line[order[k]]
        except IndexError:
            pass

    vals['n'] = n
    vals['DTime'] = DTime
    vals['PUTime'] = PTime

    if int(vals['Line']) not in RECVSTN:
        RECVSTN[int(vals['Line'])] = {}

    tmpkey = vals['Station']
    if int(vals['Station']) not in RECVSTN[int(vals['Line'])]:
        RECVSTN[int(vals['Line'])][int(vals['Station'])] = {}

    RECVSTN[int(vals['Line'])][int(vals['Station'])][int(
        vals['Channel'])] = False

    i = 0
    while tmpkey in RECVQC:
        tmpkey = tmpkey.split(':')[0] + ":{0}".format(i)
        i += 1
    rkey = "{0}:{1}".format(vals['Station'], vals['Channel'])
    if rkey not in RECVKEY:
        RECVKEY[rkey] = []

    RECVKEY[rkey].append(tmpkey)
    RECVQC[tmpkey] = vals
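
A minimal sketch, with made-up entries, of the duplicate-station keying
above: a station already present in RECVQC is re-keyed with an increasing
':N' suffix so no earlier row is overwritten:

RECVQC = {'500': 'first entry'}
tmpkey, i = '500', 0
while tmpkey in RECVQC:
    tmpkey = tmpkey.split(':')[0] + ":{0}".format(i)
    i += 1
assert tmpkey == '500:0'
RECVQC[tmpkey] = 'second entry'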
Example #22
0
    def process_event():
        # Process channel 1 (Z)
        if rh['lineSeq'] != 1:
            return

        # Have we already seen this event?
        if rh['event_number'] in EVENT_T:
            return

        p_event_t = {}

        p_event_t['id_s'] = rh['event_number']
        year = rh['year']
        doy = rh['day']
        hour = rh['hour']
        minute = rh['minute']
        seconds = rh['second']
        delay_time_secs = rh['delay'] / 1000.
        if SR.ext_hdr_type == 'U':
            # Menlo USGS
            year = eh['shot_year']
            doy = eh['shot_doy']
            hour = eh['shot_hour']
            minute = eh['shot_minute']
            seconds = eh['shot_second']
            p_event_t['time/micro_seconds_i'] = eh['shot_us']
            delay_time_secs = 0.0
        elif SR.ext_hdr_type == 'P':
            # PASSCAL
            year = eh['trigyear']
            doy = eh['trigday']
            hour = eh['trighour']
            minute = eh['trigminute']
            seconds = eh['trigsecond']
            p_event_t['time/micro_seconds_i'] = int(eh['trigmills'] / 1000.)
            delay_time_secs = 0.0
        else:
            p_event_t['time/micro_seconds_i'] = 0

        tdoy = timedoy.TimeDOY(year=year,
                               month=None,
                               day=None,
                               hour=hour,
                               minute=minute,
                               second=seconds,
                               microsecond=0,
                               doy=doy,
                               epoch=None,
                               dtobject=None)

        tmp_epoch = tdoy.epoch() + delay_time_secs
        f, i = modf(tmp_epoch)
        p_event_t['time/epoch_l'] = int(i)
        p_event_t['time/micro_seconds_i'] = int(f * 1000000.)
        p_event_t['time/ascii_s'] = time.ctime(p_event_t['time/epoch_l'])
        p_event_t['time/type_s'] = 'BOTH'

        if SR.ext_hdr_type == 'S':
            # SEG
            if eh['Spn'] != 0:
                p_event_t['id_s'] = eh['Spn']
        elif SR.ext_hdr_type == 'I':
            # iNova
            if eh['ShotID'] != 0:
                p_event_t['id_s'] = eh['ShotID']
        else:
            # As used by PIC
            if rh['energySourcePt'] != 0:
                p_event_t['id_s'] = rh['energySourcePt']

        coordScale = rh['coordScale']
        if coordScale < 0:
            coordScale = -1. / coordScale

        if rh['coordUnits'] == 1:
            units = MFEET[bh['mfeet']]
        else:
            units = CUNITS[rh['coordUnits']]

        elevationScale = rh['elevationScale']
        if elevationScale < 0:
            elevationScale = -1. / elevationScale

        p_event_t['location/X/value_d'] = rh['sourceLongOrX'] * coordScale
        p_event_t['location/X/units_s'] = units

        p_event_t['location/Y/value_d'] = rh['sourceLatOrY'] * coordScale
        p_event_t['location/Y/units_s'] = units

        p_event_t['location/Z/value_d'] =\
            rh['sourceSurfaceElevation'] * elevationScale
        p_event_t['location/Z/units_s'] = MFEET[bh['mfeet']]

        p_event_t['depth/value_d'] = rh['sourceDepth'] * elevationScale
        p_event_t['depth/units_s'] = MFEET[bh['mfeet']]

        if p_event_t['id_s'] not in EVENT_T:
            EVENT_T[p_event_t['id_s']] = []

        EVENT_T[p_event_t['id_s']].append(p_event_t)
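
A minimal sketch, with a made-up header value, of the SEG-Y scale handling
above: negative scale words act as divisors, positive ones as multipliers:

coordScale = -100                # header value meaning "divide by 100"
if coordScale < 0:
    coordScale = -1. / coordScale
assert coordScale == 0.01
raw = 1234567                    # scaled integer coordinate from the header
assert abs(raw * coordScale - 12345.67) < 1e-6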
Example #23
0
def build_shot(order, line, n):
    vals = {
        'ID': '',
        'Station': '',
        'Line': '999',
        'Channel': '1',
        'Lat': '',
        'Y': '',
        'Lon': '',
        'X': '',
        'Elev': '',
        'Time': '',
        'Pre': '',
        'Post': '',
        'SR': '',
        'Depth': '',
        'Size': '',
        'RVel': '',
        'Radius': '',
        'Comment': ''
    }

    if 'Receiver-ID' in order:
        return None

    if 'Shot-ID' not in order:
        # XXX   Need an error dialog here   XXX
        LOGGER.error("Shot-ID needed to create dep file.\n")
        return None

    try:
        if 'STimeY:J:H:M:S.s' not in order:
            yr = int(line[order['STimeYear']])
            if 'STimeMo' in order:
                mo = int(line[order['STimeMo']])
                da = int(line[order['STimeDa']])
                tdoy = timedoy.TimeDOY(year=yr,
                                       month=mo,
                                       day=da,
                                       hour=0,
                                       minute=0,
                                       second=0,
                                       microsecond=0,
                                       doy=None,
                                       epoch=None,
                                       dtobject=None)
                doy = tdoy.doy()
            else:
                doy = int(line[order['STimeJd']])

            hr = int(line[order['STimeHr']])
            mn = int(line[order['STimeMn']])
            if 'STimeSc' in order:
                sc = float(line[order['STimeSc']])
            else:
                sc = 0.0

            if 'STimeMs' in order:
                sc += float(line[order['STimeMs']]) / 1000.

            STime = "{0:4d}:{1:03d}:{2:02d}:{3:02d}:{4:06.3f}".format(
                yr, doy, hr, mn, sc)
        else:
            STime = line[order['STimeY:J:H:M:S.s']]
    except Exception as e:
        LOGGER.error("{1}:\n\tCan't convert time {0}\n".format(line, e))
        return

    keys = order.keys()
    for k in keys:
        try:
            if k == 'Shot-ID':
                try:
                    vals['ID'] = str(int(line[order[k]]))
                except BaseException:
                    vals['ID'] = line[order[k]]
            elif k == 'Station':
                try:
                    vals['Station'] = str(int(line[order[k]]))
                except BaseException:
                    vals['Station'] = line[order[k]]
            elif k == 'Line':
                vals['Line'] = line[order[k]]
            elif k == 'Channel':
                vals['Channel'] = line[order[k]]
            elif k == 'Lat':
                if DEP:
                    vals['Lat'] = _sign(line[order[k]], 'lat')
                else:
                    vals['Lat'] = __sign(line[order[k]], 'lat')
            elif k == 'Y':
                vals['Y'] = line[order[k]]
            elif k == 'Lon':
                if DEP:
                    vals['Lon'] = _sign(line[order[k]], 'lon')
                else:
                    vals['Lon'] = __sign(line[order[k]], 'lon')
            elif k == 'X':
                vals['X'] = line[order[k]]
            elif k == 'Elev':
                vals['Elev'] = line[order[k]]
            elif k == 'PreSec':
                vals['Pre'] = line[order[k]]
            elif k == 'PostSec':
                vals['Post'] = line[order[k]]
            elif k == 'SR':
                vals['SR'] = line[order[k]]
            elif k == 'Depth':
                vals['Depth'] = line[order[k]]
            elif k == 'Size':
                vals['Size'] = line[order[k]]
            elif k == 'RVel':
                vals['RVel'] = line[order[k]]
            elif k == 'Radius':
                vals['Radius'] = line[order[k]]
            elif k == 'Comment':
                vals['Comment'] = line[order[k]]
        except IndexError:
            pass

    vals['Time'] = STime
    tmpkey = vals['Station']
    i = 0
    while tmpkey in SHOTQC:
        tmpkey = tmpkey.split(':')[0] + ":{0}".format(i)
        i += 1

    SHOTQC[tmpkey] = [
        vals['ID'], vals['Station'], vals['Line'], vals['Lat'], vals['Lon'],
        vals['Elev'], vals['Time'], vals['Pre'], vals['Post'], vals['SR'],
        vals['Depth'], vals['Size'], vals['RVel'], vals['Radius'],
        vals['Comment']
    ]

    if DEP:
        return "SHOT;{0};{1};{2};{3};{4};{5};{6};{7};{8};{9};{10};{11};{12};" \
               "{13};{14}"\
            .format(vals['ID'],
                    vals['Station'],
                    vals['Line'],
                    vals['Lat'],
                    vals['Lon'],
                    vals['Elev'],
                    vals['Time'],
                    vals['Pre'],
                    vals['Post'],
                    vals['SR'],
                    vals['Depth'],
                    vals['Size'],
                    vals['RVel'],
                    vals['Radius'],
                    vals['Comment'])
    else:
        return get_event_row(vals)
Example #24
0
def get_event_row(vals):
    #
    # Get epoch and us
    if vals['X'] and vals['Y']:
        X = vals['X']
        Y = vals['Y']
        units = 'meters'
        coordinate_system = 'arbitrary'
        ellipsoid = 'unknown'
    else:
        X = vals['Lon']
        Y = vals['Lat']
        units = 'degrees'
        coordinate_system = 'geodetic'
        ellipsoid = 'WGS84'

    yr, doy, hr, mn, sc = vals['Time'].split(':')
    yr, doy, hr, mn = map(int, [yr, doy, hr, mn])
    tdoy = timedoy.TimeDOY(year=yr,
                           month=None,
                           day=None,
                           hour=hr,
                           minute=mn,
                           second=float(sc),
                           microsecond=0,
                           doy=doy,
                           epoch=None,
                           dtobject=None)

    epoch = tdoy.epoch()
    us = tdoy.dtobject.microsecond

    event_t = '/Experiment_g/Sorts_g/Event_t\n'
    # id_s, description_s
    event_t += "\tid_s = {0}\n\tdescription_s = {1}\n".format(
        vals['ID'], vals['Comment'])
    # time/ascii_s, time/epoch_l, time/micro_seconds_i, time/type_s
    event_t += "\ttime/ascii_s = {0}\n\ttime/epoch_l = " \
               "{1}\n\ttime/micro_seconds_i = {2}\n\ttime/type_s = {3}\n" \
        .format(time.ctime(epoch),
                int(epoch),
                us,
                'BOTH')
    # location/X/value_d, location/X/units_s, location/Y/value_d,
    # location/Y/units_s, location/Z/value_d, location/Z/units_s
    event_t += "\tlocation/X/value_d = {0}\n\tlocation/X/units_s =" \
               " {1}\n\tlocation/Y/value_d = {2}\n\tlocation/Y/units_s = " \
               "{3}\n\tlocation/Z/value_d = {4}\n\tlocation/Z/units_s = {5}\n"\
        .format(X,
                units,
                Y,
                units,
                vals['Elev'],
                'meters')
    # location/coordinate_system_s, location/projection_s,
    # location/ellipsoid_s, location/description_s
    event_t += "\tlocation/coordinate_system_s =" \
               "{0}\n\tlocation/projection_s = {1}\n\tlocation/ellipsoid_s =" \
               " {2}\n\tlocation/description_s = {3}\n"\
        .format(coordinate_system,
                'none',
                ellipsoid,
                vals['ID'])
    # size/value_d, size/units_s, depth/value_d, depth/units_s
    event_t += "\tsize/value_d = {0}\n\tsize/units_s = {1}" \
               "\n\tdepth/value_d = {2}\n\tdepth/units_s = {3}"\
        .format(vals['Size'],
                'lbs',
                vals['Depth'],
                'meters')
    return event_t
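
A minimal sketch, with a hypothetical time string, of how vals['Time'] is
split above before being handed to timedoy.TimeDOY:

t = "2019:032:12:30:05.250"
yr, doy, hr, mn, sc = t.split(':')
yr, doy, hr, mn = map(int, [yr, doy, hr, mn])
assert (yr, doy, hr, mn) == (2019, 32, 12, 30)
assert float(sc) == 5.25
# TimeDOY(..., second=float(sc), ...) then supplies the epoch and the
# dtobject.microsecond (here 250000) written into the kef record.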
Example #25
0
    def get_recv_row(vals):
        # Build an Array_t_xxx kef file
        global RECVSTN
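        # NOTE: stripdeppu and the ret list are presumably bound in the
        # enclosing scope of the full source.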
        vals_dep, vals_pu, msg = stripdeppu(vals)
        if vals_dep['X'] and vals_dep['Y']:
            X = vals_dep['X']
            Y = vals_dep['Y']
            units = 'meters'
            coordinate_system = 'arbitrary'
            ellipsoid = 'unknown'
        else:
            X = vals_dep['Lon']
            Y = vals_dep['Lat']
            units = 'degrees'
            coordinate_system = 'geodetic'
            ellipsoid = 'WGS84'

        # Get deploy time epoch and us
        dyr, ddoy, dhr, dmn, dsc = vals_dep['DTime'].split(':')
        dyr, ddoy, dhr, dmn = map(int, [dyr, ddoy, dhr, dmn])
        dtdoy = timedoy.TimeDOY(year=dyr,
                                month=None,
                                day=None,
                                hour=dhr,
                                minute=dmn,
                                second=float(dsc),
                                microsecond=0,
                                doy=ddoy,
                                epoch=None,
                                dtobject=None)
        depoch = dtdoy.epoch()
        dus = dtdoy.millisecond()
        # Get pickup time epoch and us
        pyr, pdoy, phr, pmn, psc = vals_pu['PUTime'].split(':')
        pyr, pdoy, phr, pmn = map(int, [pyr, pdoy, phr, pmn])
        ptdoy = timedoy.TimeDOY(year=pyr,
                                month=None,
                                day=None,
                                hour=phr,
                                minute=pmn,
                                second=float(psc),
                                microsecond=0,
                                doy=pdoy,
                                epoch=None,
                                dtobject=None)
        pepoch = ptdoy.epoch()
        pus = ptdoy.millisecond()
        arrayID = int(vals_dep['Line'])
        stationID = int(vals_dep['Station'])
        chan = int(vals_dep['Channel'])
        comment = vals_dep['Comment'] + " " + vals_pu['Comment']

        array_t = '/Experiment_g/Sorts_g/Array_t_{0:03d}\n'.format(arrayID)
        array_t += '\tid_s = {0}\n\tdescription_s = {1}\n'.format(
            vals_dep['Station'], comment)
        # DAS information
        array_t += '\tdas/serial_number_s = {0}\n\tdas/model_s = {1}' \
                   '\n\tdas/manufacturer_s = {2}\n\tdas/notes_s = {3}\n' \
            .format(vals_dep['ID'],
                    vals_dep['Type'],
                    'RefTek',
                    vals_dep['LED'])

        # Deployment time
        array_t += '\tdeploy_time/ascii_s = {0}\n\tdeploy_time/epoch_l = {1}' \
                   '\n\tdeploy_time/micro_seconds_i = {2}' \
                   '\n\tdeploy_time/type_s = {3}\n'\
            .format(time.ctime(int(depoch)),
                    int(depoch),
                    int(dus),
                    'BOTH')
        # Pickup time
        array_t += '\tpickup_time/ascii_s = {0}' \
                   '\n\tpickup_time/epoch_l = {1}' \
                   '\n\tpickup_time/micro_seconds_i = {2}' \
                   '\n\tpickup_time/type_s = {3}\n'\
            .format(time.ctime(int(pepoch)),
                    int(pepoch),
                    int(pus),
                    'BOTH')
        # Longitude and Latitude
        array_t += '\tlocation/X/value_d = {0}' \
                   '\n\tlocation/X/units_s = {1}' \
                   '\n\tlocation/Y/value_d = {2}' \
                   '\n\tlocation/Y/units_s = {3}\n'\
            .format(X,
                    units,
                    Y,
                    units)
        # Elevation
        array_t += '\tlocation/Z/value_d = {0}' \
                   '\n\tlocation/Z/units_s = {1}' \
                   '\n\tlocation/coordinate_system_s = {2}' \
                   '\n\tlocation/projection_s = {3}' \
                   '\n\tlocation/ellipsoid_s = {4}' \
                   '\n\tlocation/description_s = {5}\n'\
            .format(vals_dep['Elev'],
                    'meters',
                    coordinate_system,
                    'none',
                    ellipsoid,
                    '')
        # Sensor information
        array_t += '\tsensor/serial_number_s = {0}' \
                   '\n\tsensor/model_s = {1}' \
                   '\n\tsensor/manufacturer_s = {2}' \
                   '\n\tsensor/notes_s = {3}' \
                   '\n\tchannel_number_i = {4}'\
            .format(vals_dep['Sensor'],
                    '',
                    '',
                    '',
                    vals_dep['Channel'])

        ret.append(array_t)
        RECVSTN[arrayID][stationID][chan] = array_t
Example #26
0
    def process_array():
        '''   Save station meta-data   '''
        global FIRST_TIME, LAST_TIME
        p_array_t = {}
        p_array_t['id_s'] = str(int(rh['channel_number']) & 0x7FFF)
        p_array_t['das/serial_number_s'] = Das
        p_array_t['channel_number_i'] = p_das_t['channel_number_i']
        coordScale = rh['coordScale']
        if coordScale < 0:
            coordScale = -1. / coordScale

        if rh['coordUnits'] == 1:
            units = MFEET[bh['mfeet']]
        else:
            units = CUNITS[rh['coordUnits']]

        p_array_t['location/X/value_d'] = rh['recLongOrX'] * coordScale
        p_array_t['location/X/units_s'] = units
        p_array_t['location/Y/value_d'] = rh['recLatOrY'] * coordScale
        p_array_t['location/Y/units_s'] = units
        elevationScale = rh['elevationScale']
        if elevationScale < 0:
            elevationScale = -1. / elevationScale

        p_array_t['location/Z/value_d'] = rh['datumElevRec'] * elevationScale
        p_array_t['location/Z/units_s'] = MFEET[bh['mfeet']]

        year = rh['year']
        doy = rh['day']
        hour = rh['hour']
        minute = rh['minute']
        seconds = rh['second']
        tdoy = timedoy.TimeDOY(year=year,
                               month=None,
                               day=None,
                               hour=hour,
                               minute=minute,
                               second=seconds,
                               microsecond=0,
                               doy=doy,
                               epoch=None,
                               dtobject=None)
        if SR.ext_hdr_type == 'U':
            # Menlo USGS
            p_array_t['deploy_time/micro_seconds_i'] = eh['start_usec']
            p_array_t['pickup_time/micro_seconds_i'] = eh['start_usec']
        elif SR.ext_hdr_type == 'P':
            # PASSCAL
            p_array_t['deploy_time/micro_seconds_i'] = int(
                eh['m_secs'] / 1000.)
            p_array_t['pickup_time/micro_seconds_i'] = int(
                eh['m_secs'] / 1000.)
        else:
            p_array_t['deploy_time/micro_seconds_i'] = 0
            p_array_t['pickup_time/micro_seconds_i'] = 0

        samples = rh['sampleLength']
        sample_rate = (1. / rh['deltaSample']) * 1000000.
        sample_rate, factor = as_ints(sample_rate)
        sample_rate = float(sample_rate) / float(factor)

        p_array_t['deploy_time/epoch_l'] = tdoy.epoch()
        p_array_t['deploy_time/ascii_s'] = time.ctime(
            p_array_t['deploy_time/epoch_l'])
        p_array_t['deploy_time/type_s'] = 'BOTH'
        if p_array_t['deploy_time/epoch_l'] < FIRST_TIME:
            FIRST_TIME = p_array_t['deploy_time/epoch_l'] + \
                         (p_array_t['deploy_time/micro_seconds_i'] / 1000000.)

        seconds = int(modf(samples / sample_rate)
                      [1]) + p_array_t['deploy_time/epoch_l']
        usec = int(modf(samples / sample_rate)[0] * 1000000.)
        p_array_t['pickup_time/micro_seconds_i'] += usec
        if p_array_t['pickup_time/micro_seconds_i'] > 1000000:
            x = p_array_t['pickup_time/micro_seconds_i'] / 1000000.
            seconds += int(modf(x)[1])
            p_array_t['pickup_time/micro_seconds_i'] = int(
                modf(x)[0] * 1000000.)

        p_array_t['pickup_time/epoch_l'] = seconds
        p_array_t['pickup_time/ascii_s'] = time.ctime(seconds)
        p_array_t['pickup_time/type_s'] = 'BOTH'
        if p_array_t['pickup_time/epoch_l'] > LAST_TIME:
            LAST_TIME = p_array_t['pickup_time/epoch_l'] + \
                        (p_array_t['pickup_time/micro_seconds_i'] / 1000000.)

        ffid = rh['event_number']
        if ffid not in ARRAY_T:
            ARRAY_T[ffid] = []

        ARRAY_T[ffid].append(p_array_t)
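
A minimal sketch of the pickup-time arithmetic above, with made-up numbers
chosen to be float-exact: whole seconds come from modf()[1], leftover
microseconds from modf()[0], and an overflowing microsecond field carries
into seconds:

from math import modf

deploy_epoch = 1262304000
deploy_usec = 750000
samples, sample_rate = 30075, 100.        # 300.75 s of data
frac, whole = modf(samples / sample_rate)
pickup_epoch = deploy_epoch + int(whole)
pickup_usec = deploy_usec + int(frac * 1000000.)
if pickup_usec > 1000000:
    x = pickup_usec / 1000000.
    pickup_epoch += int(modf(x)[1])
    pickup_usec = int(modf(x)[0] * 1000000.)
assert pickup_epoch == 1262304301
assert pickup_usec == 500000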