Example #1
    def get_sync_report(self, result):
        """
        Link for Format:
        http://www.iris.washington.edu/bud_stuff/goat/syncformat.html
        """
        # Header line: <source>|<year>,<day-of-year>
        today = dt.datetime.now()
        year = today.year
        day_of_year = (today - dt.datetime(today.year, 1, 1)).days + 1
        date = "%s,%s" % (year, day_of_year)
        s = "%s|%s" % ('PIC', date)

        # Time span lines: one 16-field record per result row; the
        # template list is reused (overwritten) on every iteration
        template = [self.netcode] + [''] * 15
        for r in result:
            template[1] = r[f_id['sta']]
            template[2] = r[f_id['loc']]
            template[3] = r[f_id['chan']]
            t = timedoy.epoch2passcal(r[f_id['earliest']])
            template[4] = t[:-4].replace(":", ",", 2)
            t = timedoy.epoch2passcal(r[f_id['latest']])
            template[5] = t[:-4].replace(":", ",", 2)
            if self.SR_included:
                if r[f_id['sRate']] == 0:
                    template[7] = "000.0"
                else:
                    template[7] = str(r[f_id['sRate']])
            # template[13] = 'primary'

            s += "\n" + "|".join(template)
        return s
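
For reference, output produced by this method looks roughly like the
following (values are illustrative; field positions follow the template
indices above, with field 7 carrying the sample rate):

PIC|2024,123
XX|STA01|00|DPZ|2024,100,00:00:00|2024,140,23:59:59||250.0||||||||
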
Example #2
def write_data():
    global INDEX_T, DASS
    L = {}
    L['Data'] = []
    fh = sys.stdout

    dass = sorted(DASS.keys())
    for das_group in dass:
        # strip the das group-name prefix to get the serial number
        d = das_group[6:]
        for i in INDEX_T.rows:
            if i['serial_number_s'] == d:
                try:
                    D = {'das': d,
                         'first_sample': timedoy.epoch2passcal(
                             i['start_time/epoch_l'] +
                             (i['start_time/micro_seconds_i'] / 1000000.)),
                         'last_sample': timedoy.epoch2passcal(
                             i['end_time/epoch_l'] +
                             (i['end_time/micro_seconds_i'] / 1000000.)),
                         'first_epoch': i['start_time/epoch_l'],
                         'last_epoch': i['end_time/epoch_l']}
                except timedoy.TimeError as e:
                    LOGGER.warning(str(e))
                    continue

                L['Data'].append(D)

    fh.write(json.dumps(L, sort_keys=True, indent=4))
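
The JSON written to stdout has this general shape (values are
illustrative; json.dumps sorts the keys):

{
    "Data": [
        {
            "das": "12345",
            "first_epoch": 1549042737,
            "first_sample": "2019:032:17:38:57.000",
            "last_epoch": 1550275199,
            "last_sample": "2019:046:23:59:59.000"
        }
    ]
}
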
Example #3
def write_arrays():
    global ARRAY_T

    fh = sys.stdout
    A = {}
    for k in ARRAY_T.keys():
        a = ARRAY_T[k]
        start, stop = array_start_stop(a)
        array_i = int(k[-3:])
        A[array_i] = (start, stop)

    arrays = sorted(ARRAY_T.keys())

    AR = {}
    AR['Arrays'] = []
    for a in arrays:
        stations = []
        start, stop = A[int(a[-3:])]
        sample_rate = get_sample_rate(a, start, stop)
        try:
            deploy_time = timedoy.epoch2passcal(start)
        except timedoy.TimeError as e:
            LOGGER.error("Time conversion error {0}".format(str(e)))
            deploy_time = ""

        try:
            pickup_time = timedoy.epoch2passcal(stop)
        except timedoy.TimeError as e:
            LOGGER.error("Time conversion error {0}".format(str(e)))
            pickup_time = ""

        this_array = {'array': str(a[-3:]), 'sample_rate': sample_rate,
                      'deploy_time': deploy_time, 'pickup_time': pickup_time,
                      'Stations': None}
        for e in ARRAY_T[a].rows:
            S = {'id': e['id_s'], 'das': e['das/serial_number_s'],
                 'lat': e['location/Y/value_d'],
                 'lon': e['location/X/value_d'],
                 'elev': e['location/Z/value_d'],
                 'chan': e['channel_number_i'],
                 'seed_band_code': e['seed_band_code_s'],
                 'seed_instrument_code': e['seed_instrument_code_s'],
                 'seed_orientation_code': e['seed_orientation_code_s'],
                 'seed_station_name': e['seed_station_name_s']}
            stations.append(S)

        this_array['Stations'] = stations
        AR['Arrays'].append(this_array)

    fh.write(json.dumps(AR, sort_keys=True, indent=4))
Example #4
def write_events():
    global EVENT_T

    L = {}
    L['Events'] = []
    fh = sys.stdout
    shot_lines = sorted(EVENT_T.keys())
    for sl in shot_lines:
        events = []
        this_line = {'shot_line': str(sl[-3:])}
        for e in EVENT_T[sl].rows:
            pictime = timedoy.epoch2passcal(
                e['time/epoch_l'] + (e['time/micro_seconds_i'] / 1000000.))

            E = {'id': e['id_s'], 'time': pictime,
                 'lat': e['location/Y/value_d'],
                 'lon': e['location/X/value_d'],
                 'elev': e['location/Z/value_d'], 'mag': e['size/value_d'],
                 'depth': e['depth/value_d']}
            events.append(E)

        this_line['Events'] = events
        L['Events'].append(this_line)

    fh.write(json.dumps(L, sort_keys=True, indent=4))
Example #5
def doy_breakup(start_fepoch, length=86400):
    """
    Given a start time epoch returns a next days equivalent epoch time and the
    difference in seconds between the start and stop epoch times.
    :param: start_fepoch
    :type: float
    :returns: stop_fepoch : next days stop epoch :type: float
              seconds: difference in seconds between the start and end
              epoch times :type: float
    """
    # round-trip through the passcal string so the arithmetic is done on
    # a real datetime (strptime handles day-of-year and rollover)
    passcal_start = epoch2passcal(float(start_fepoch))
    start_year, start_doy, start_hour, start_minute, start_second = \
        passcal_start.split(":")[:5]

    datestr = "{0}:{1}:{2}:{3}:{4}".format(start_year, start_doy, start_hour,
                                           start_minute, start_second)
    passcal_date = datetime.strptime(datestr, "%Y:%j:%H:%M:%S.%f")

    next_passcal_date = passcal_date + timedelta(seconds=length)
    next_passcal_date_str = next_passcal_date.strftime("%Y:%j:%H:%M:%S.%f")

    stop_fepoch = passcal2epoch(next_passcal_date_str, fepoch=True)
    seconds = stop_fepoch - start_fepoch
    return stop_fepoch, seconds
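
A minimal usage sketch (passcal2epoch/epoch2passcal come from this same
module, as in the surrounding examples):

start_fepoch = passcal2epoch("2019:032:17:38:57.000", fepoch=True)
stop_fepoch, seconds = doy_breakup(start_fepoch)
# stop_fepoch is `length` seconds after the start (one day by default)
# and seconds == stop_fepoch - start_fepoch
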
Example #6
def inday_breakup(start_fepoch):
    """
    Given a start time epoch returns the midnight epoch time of that day
    and the difference in seconds between the start and midnight epoch times.
    :param: start_fepoch
    :type: float
    :returns: midnight_fepoch : midnight epoch :type: float
              seconds: difference in seconds between the start and end
              epoch times :type: float
    """
    passcal_start = epoch2passcal(float(start_fepoch))
    start_passcal_list = passcal_start.split(":")
    try:
        midnight = TimeDOY(year=int(start_passcal_list[0]),
                           doy=int(start_passcal_list[1]) + 1,
                           hour=0,
                           minute=0,
                           second=0,
                           microsecond=0)
    except TimeError:
        # doy + 1 ran past the end of the year; roll over to January 1
        midnight = TimeDOY(year=int(start_passcal_list[0]) + 1,
                           doy=1,
                           hour=0,
                           minute=0,
                           second=0,
                           microsecond=0)

    midnight_fepoch = midnight.epoch()
    seconds = midnight_fepoch - start_fepoch
    return midnight_fepoch, seconds
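
Together with doy_breakup, this is the building block for splitting a
long request window at day boundaries (the same pattern appears in the
create_cut example below); a sketch, assuming float epochs start < stop:

midnight, seconds_left = inday_breakup(start)
chunks = [(start, min(midnight, stop))]
while midnight < stop:
    next_midnight, _ = inday_breakup(midnight)
    chunks.append((midnight, min(next_midnight, stop)))
    midnight = next_midnight
# chunks now covers [start, stop] in segments that end at midnights
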
Example #7
def write_data():
    global INDEX_T, DASS
    L = {}
    L['Data'] = []
    fh = sys.stdout

    dass = sorted(DASS.keys())
    for das_group in dass:
        # strip the das group-name prefix to get the serial number
        d = das_group[6:]
        for i in INDEX_T.rows:
            if i['serial_number_s'] == d:
                try:
                    D = {'das': d,
                         'first_sample': timedoy.epoch2passcal(
                             i['start_time/epoch_l'] +
                             (i['start_time/micro_seconds_i'] / 1000000.)),
                         'last_sample': timedoy.epoch2passcal(
                             i['end_time/epoch_l'] +
                             (i['end_time/micro_seconds_i'] / 1000000.)),
                         'first_epoch': i['start_time/epoch_l'],
                         'last_epoch': i['end_time/epoch_l']}
                except timedoy.TimeError as e:
                    sys.stderr.write("{0}".format(str(e)))
                    continue

                L['Data'].append(D)

    fh.write(json.dumps(L, sort_keys=True, indent=4))
Example #8
    def filenamemsimg_gen(self, stream):
        """
        Build a preview-image PNG file name from the first trace's stats:
        <time>.<net>.<sta>.<loc>.<chan>.png
        """
        s = stream.traces[0].stats
        secs = int(s.starttime.timestamp)
        pre = epoch2passcal(secs, sep='_')
        ret = "{0}.{1}.{2}.{3}.{4}.png".format(pre, s.network, s.station,
                                               s.location, s.channel)
        if not self.stream:
            if not os.path.exists(
                    os.path.join(self.out_dir, "preview_images")):
                os.makedirs(os.path.join(self.out_dir, "preview_images"))

            ret = os.path.join(self.out_dir, "preview_images", ret)
        return ret
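
A hypothetical call, assuming an ObsPy-style stream whose first trace
carries the usual stats fields (network, station, location, channel,
starttime):

# with pre == "2019_032_17_38_57", net "XX", sta "STA01", loc "", chan "DPZ":
# filenamemsimg_gen(stream) -> "2019_032_17_38_57.XX.STA01..DPZ.png"
# (placed under <out_dir>/preview_images/ when self.stream is not set)
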
Example #9
    def create_cut(self, seed_network, ph5_station, seed_station,
                   station_cut_times, station_list, deployment, st_num,
                   array_code, experiment_id):
        """
        Yield StationCut objects for one station entry, honoring the
        request filters and splitting long windows at day boundaries.
        """
        deploy = station_list[deployment][st_num]['deploy_time/epoch_l']
        deploy_micro = station_list[deployment][
            st_num]['deploy_time/micro_seconds_i']
        pickup = station_list[deployment][st_num]['pickup_time/epoch_l']
        pickup_micro = station_list[deployment][
            st_num]['pickup_time/micro_seconds_i']
        location = station_list[deployment][
            st_num]['seed_location_code_s']
        das = station_list[deployment][st_num]['das/serial_number_s']
        das_manufacturer = station_list[deployment][st_num][
                                        'das/manufacturer_s']
        das_model = station_list[deployment][st_num][
                                        'das/model_s']
        sensor_type = " ".join([x for x in
                                [station_list[deployment][st_num][
                                                    'sensor/manufacturer_s'],
                                 station_list[deployment][st_num][
                                                    'sensor/model_s']] if x])

        receiver_n_i = station_list[deployment][st_num]['receiver_table_n_i']
        response_n_i = station_list[deployment][st_num]['response_table_n_i']

        # default to None so a missing sample_rate_i can't raise NameError
        sample_rate = None
        if 'sample_rate_i' in station_list[deployment][0]:
            sample_rate = station_list[deployment][st_num]['sample_rate_i']
        sample_rate_multiplier = 1
        if ('sample_rate_multiplier_i' in
                station_list[deployment][st_num]):
            sample_rate_multiplier = station_list[
                deployment][st_num]['sample_rate_multiplier_i']

        if self.sample_rate_list:
            sample_list = self.sample_rate_list
            if not ph5utils.does_pattern_exists(sample_list, sample_rate):
                return

        seed_channel, component = self.get_channel_and_component(
            station_list, deployment, st_num)

        if self.component:
            component_list = self.component
            if not ph5utils.does_pattern_exists(component_list, component):
                return
        if self.channel:
            cha_patterns = self.channel
            if not ph5utils.does_pattern_exists(cha_patterns, seed_channel):
                return
        if self.das_sn and self.das_sn != das:
            return

        if self.reqtype == "FDSN":
            # trim user defined time range if it extends beyond the
            # deploy/pickup times
            if self.start_time:
                if "T" not in self.start_time:
                    check_start_time = passcal2epoch(
                        self.start_time, fepoch=True)
                    if float(check_start_time) > float(deploy):
                        start_fepoch = self.start_time
                        sct = StationCutTime(
                                passcal2epoch(start_fepoch, fepoch=True)
                        )
                        station_cut_times.append(sct)
                    else:
                        sct = StationCutTime(deploy)
                        station_cut_times.append(sct)
                else:
                    check_start_time = ph5utils.datestring_to_epoch(
                        self.start_time)
                    if float(check_start_time) > float(deploy):
                        sct = StationCutTime(
                                ph5utils.datestring_to_epoch(self.start_time))
                        station_cut_times.append(sct)
                    else:
                        sct = StationCutTime(deploy)
                        station_cut_times.append(sct)
                if float(check_start_time) > float(pickup):
                    return
            else:
                sct = StationCutTime(
                    ph5api.fepoch(deploy, deploy_micro)
                )
                station_cut_times.append(sct)

        for sct in station_cut_times:
            start_fepoch = sct.time
            if self.reqtype == "SHOT" or self.reqtype == "RECEIVER":
                if self.offset:
                    # adjust starttime by an offset
                    start_fepoch += int(self.offset)

                if self.length:
                    stop_fepoch = start_fepoch + self.length
                else:
                    raise PH5toMSAPIError(
                        "Error - length is required for request by shot.")
            elif self.reqtype == "FDSN":
                if self.end_time:
                    if "T" not in self.end_time:
                        check_end_time = passcal2epoch(
                            self.end_time, fepoch=True)

                        if float(check_end_time) < float(pickup):

                            stop_fepoch = self.end_time
                            stop_fepoch = passcal2epoch(
                                stop_fepoch, fepoch=True)

                        else:
                            stop_fepoch = pickup

                    else:
                        check_end_time = ph5utils.datestring_to_epoch(
                            self.end_time)
                        if float(check_end_time) < float(pickup):
                            stop_fepoch = ph5utils.datestring_to_epoch(
                                self.end_time)
                        else:
                            stop_fepoch = pickup

                    if float(check_end_time) < float(deploy):
                        continue
                elif self.length:
                    stop_fepoch = start_fepoch + self.length
                else:
                    stop_fepoch = ph5api.fepoch(pickup, pickup_micro)

            if (self.use_deploy_pickup is True and not
                    ((int(start_fepoch) >= deploy and
                      int(stop_fepoch) <= pickup))):
                # das not deployed within deploy/pickup time
                continue
            start_passcal = epoch2passcal(start_fepoch, sep=':')
            start_passcal_list = start_passcal.split(":")
            start_doy = start_passcal_list[1]

            if self.doy_keep:
                if start_doy not in self.doy:
                    continue

            midnight_fepoch, secondLeftInday = \
                ph5utils.inday_breakup(start_fepoch)

            if (stop_fepoch - start_fepoch) > secondLeftInday:
                seconds_covered = 0
                total_seconds = stop_fepoch - start_fepoch
                times_to_cut = []
                if self.cut_len != 86400:
                    stop_time, seconds = ph5utils.doy_breakup(
                        start_fepoch, self.cut_len)
                else:
                    stop_time, seconds = ph5utils.inday_breakup(start_fepoch)
                seconds_covered = seconds_covered + seconds
                times_to_cut.append([start_fepoch, stop_time])
                start_time = stop_time

                while seconds_covered < total_seconds:
                    if self.cut_len != 86400:
                        stop_time, seconds = ph5utils.doy_breakup(
                            start_time, self.cut_len)
                    else:
                        stop_time, seconds = ph5utils.inday_breakup(start_time)

                    seconds_covered += seconds
                    if stop_time > stop_fepoch:
                        times_to_cut.append([start_time, stop_fepoch])
                        break
                    times_to_cut.append([start_time, stop_time])
                    start_time = stop_time
            else:
                times_to_cut = [[start_fepoch, stop_fepoch]]

            latitude = station_list[deployment][
                st_num]['location/Y/value_d']
            longitude = station_list[deployment][
                st_num]['location/X/value_d']
            elev = station_list[deployment][
                st_num]['location/Z/value_d']

            for starttime, endtime in tuple(times_to_cut):
                try:
                    self.ph5.query_das_t(das,
                                         component,
                                         starttime,
                                         endtime,
                                         sample_rate,
                                         sample_rate_multiplier
                                         )
                except experiment.HDF5InteractionError:
                    continue

                station_x = StationCut(
                    seed_network,
                    experiment_id,
                    ph5_station,
                    seed_station,
                    array_code,
                    das,
                    das_manufacturer,
                    das_model,
                    sensor_type,
                    component,
                    seed_channel,
                    starttime,
                    endtime,
                    sample_rate,
                    sample_rate_multiplier,
                    self.notimecorrect,
                    location,
                    latitude,
                    longitude,
                    elev,
                    receiver_n_i,
                    response_n_i,
                    shot_id=sct.shot_id,
                    shot_lat=sct.shot_lat,
                    shot_lng=sct.shot_lng,
                    shot_elevation=sct.shot_elevation)

                station_hash = hash(frozenset([seed_station, das, latitude,
                                               longitude, sample_rate,
                                               sample_rate_multiplier,
                                               starttime, endtime]))
                if station_hash in self.hash_list:
                    continue
                else:
                    self.hash_list.append(station_hash)
                    yield station_x
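
A note on the de-duplication above: hashing a frozenset is
order-insensitive, so any two cuts that share the same *set* of values
collide (equal latitude and longitude, for instance, collapse to one
element). A tuple key in a set makes the intent explicit; a sketch with
a hypothetical `seen` set, not part of the class:

key = (seed_station, das, latitude, longitude, sample_rate,
       sample_rate_multiplier, starttime, endtime)
if key in seen:
    continue
seen.add(key)
yield station_x
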
Example #10
def write_des_report():
    global EXPERIMENT_T, ARRAY_T, EVENT_T

    A = {}
    if ARRAY_T:
        for k in ARRAY_T.keys():
            a = ARRAY_T[k]
            start, stop = array_start_stop(a)
            array_i = int(k[-3:])
            A[array_i] = (start, stop)

    fh = open("data_description.txt", "w+")

    # keep only the last experiment row; its fields fill the report header
    for e in EXPERIMENT_T.rows:
        pass

    fh.write("\t\t\t%s\n\n%s\n\n%s\n\n%s\n\n%s\n\n" %
             (e['nickname_s'],
              e['longname_s'],
              e['PIs_s'],
              e['institutions_s'],
              e['summary_paragraph_s']))

    fh.write(
        "***   Please check the following lines and remove this line "
        "before submission to DMC.   ***\n")
    fh.write("\t\t\tShots\n\n")
    fh.write(
        "shot id\ttime    lat      lon         elev (m) size (kg) depth (m)\n")
    fh.write("-" * 85)
    fh.write('\n')
    if EVENT_T:
        for e in EVENT_T.rows:
            ttuple = time.gmtime(int(e['time/epoch_l']))
            secs = ttuple.tm_sec + (e['time/micro_seconds_i'] / 1000000.)
            pictime = "%4d:%03d:%02d:%02d:%06.3f" % (ttuple.tm_year,
                                                     ttuple.tm_yday,
                                                     ttuple.tm_hour,
                                                     ttuple.tm_min,
                                                     secs)
            fh.write("%-5s\t%s %12.6f %12.6f %9.3f %9.3f %9.3f\n" %
                     (e['id_s'],
                      pictime,
                      e['location/Y/value_d'],
                      e['location/X/value_d'],
                      e['location/Z/value_d'],
                      e['size/value_d'],
                      e['depth/value_d']))

    fh.write("\n\t\t\tArrays\n\n")

    arrays = sorted(ARRAY_T.keys())
    if ARRAY_T:
        for a in arrays:
            start, stop = A[int(a[-3:])]
            fh.write(
                "***   Please check the following lines and remove this "
                "line before submission to DMC.   ***\n")
            sample_rate = get_sample_rate(a, start, stop)

            fh.write("\nArray: %s\n" % a[-3:])
            # Sample rate:
            fh.write("\t\tSample Rate: %d sps\n" % sample_rate)
            # Sensor type
            # Deployment time
            fh.write("\t\tDeployment Time: %s\n" %
                     tdoy.epoch2passcal(start)[:-10])
            # Pickup time
            fh.write("\t\tPickup Time:     %s\n" %
                     tdoy.epoch2passcal(stop)[:-10])
            fh.write("\t\tComponents: 1 => Z, 2 => N, 3 => E\n\n")
            fh.write(
                "station\t"
                "das      lat        lon        elev (m)    component\n")
            fh.write('-' * 65)
            fh.write('\n')
            for e in ARRAY_T[a].rows:
                fh.write("%-5s\t%s %12.6f %12.6f %9.3f\t%d\n" %
                         (e['id_s'],
                          e['das/serial_number_s'],
                          float(e['location/Y/value_d']),
                          float(e['location/X/value_d']),
                          float(e['location/Z/value_d']),
                          e['channel_number_i']))

        # Need to write sorts here!

    fh.close()
Example #11
def get_header():
    header = "# Written by novenkef v{0} at {1}\n".format(
        PROG_VERSION, timedoy.epoch2passcal(time.time()))
    return header
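
Illustrative output (version and timestamp vary):

# Written by novenkef v2020.123 at 2020:103:14:22:05.000
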
Example #12
def gather():
    '''   Create event gather   '''
    if not ARGS.stations_to_gather:
        ARGS.stations_to_gather = P5.Array_t[ARGS.station_array]['order']
    if ARGS.all_events:
        ARGS.evt_list = P5.Event_t[ARGS.shot_line]['order']

    for evt in ARGS.evt_list:
        try:
            if not ARGS.start_time:
                event_t = P5.Event_t[ARGS.shot_line]['byid'][evt]
            else:
                event_t = None

            logging.info("Extracting receivers for event {0:s}.".format(evt))
        except Exception:
            logging.warn("Warning: Event {0} not found.\n".format(evt))
            continue
        #
        fh = None
        #   Initialize
        sf = segyfactory.Ssegy(None, event_t, utm=ARGS.use_utm)
        #   Allow length of traces to be up to 2^16 samples long
        sf.set_break_standard(ARGS.break_standard)
        #   Set external header type
        sf.set_ext_header_type(ARGS.ext_header)
        #   Set event information
        if event_t:
            sf.set_event_t(event_t)
            #   Event time
            event_tdoy = timedoy.TimeDOY(
                microsecond=event_t['time/micro_seconds_i'],
                epoch=event_t['time/epoch_l'])
            Offset_t = P5.read_offsets_shot_order(ARGS.station_array, evt,
                                                  ARGS.shot_line)
            #Offset_t = P5.calc_offsets (ARGS.station_array, evt, ARGS.shot_line)
        else:
            event_tdoy = evt
            Offset_t = None
            logging.warn("Warning: No shot to receiver distances found.")
        if ARGS.seconds_offset_from_shot:
            event_tdoy += ARGS.seconds_offset_from_shot
        end_tdoy = event_tdoy + ARGS.length
        #   Event start time
        start_fepoch = event_tdoy.epoch(fepoch=True)
        #   Trace cut end time
        stop_fepoch = end_tdoy.epoch(fepoch=True)
        #
        Array_t = P5.Array_t[ARGS.station_array]['byid']
        #   All channels (components) available for this array
        chans_available = P5.channels_Array_t(ARGS.station_array)
        #   The trace sequence
        i = 0
        skipped_chans = 0
        for sta in ARGS.stations_to_gather:
            logging.info("-=" * 20)
            logging.info(
                "Attempting to find data for station {0}.".format(sta))
            #   Shot to station information
            if Offset_t and sta in Offset_t:
                offset_t = Offset_t[sta]
                sf.set_offset_t(offset_t)
            #   Array geometry
            if sta not in Array_t:
                logging.info(
                    "Warning: The station {0} is not in array {1}.".format(
                        sta, ARGS.station_array))
                continue
            array_t = Array_t[sta]
            #   Filter out unwanted channels
            chans = []
            for c in ARGS.channels:
                if c in chans_available:
                    chans.append(c)
            #   Create channel name for output file name
            chan_name = ''.join(str(c) for c in chans)
            num_traces = len(chans) * len(ARGS.stations_to_gather)
            #   Loop through channels
            for c in chans:
                if c not in array_t:
                    logging.warn(
                        "Warning: No channel information for {0} in array {1}."
                        .format(c, ARGS.station_array))
                    skipped_chans += 1
                    continue
                try:
                    #   Filter out unwanted seed loc codes
                    if ARGS.seed_location and array_t[c][0][
                            'seed_location_code_s'] != ARGS.seed_location:
                        logging.info(
                            "Location code mismatch: {0}/{1}/{2}".format(
                                array_t[c][0]['seed_location_code_s'],
                                ARGS.seed_location, c))
                        continue
                    #   Filter out unwanted seed channels
                    seed_channel_code_s = ph5api.seed_channel_code(
                        array_t[c][0])
                    if ARGS.seed_channel and seed_channel_code_s != ARGS.seed_channel:
                        logging.info(
                            "Channel code mismatch: {0}/{1}/{2}".format(
                                array_t[c][0]['seed_channel_code_s'],
                                ARGS.seed_channel, c))
                        continue
                except Exception:
                    pass
                #   Loop for each array_t per id_s and channel
                for t in range(len(array_t[c])):
                    #   DAS
                    das = array_t[c][t]['das/serial_number_s']
                    #   Deploy time
                    start_epoch = array_t[c][t]['deploy_time/epoch_l']
                    #   Pickup time
                    stop_epoch = array_t[c][t]['pickup_time/epoch_l']
                    #   Is this shot within the deploy and pickup times
                    if not ph5api.is_in(start_epoch, stop_epoch,
                                        event_tdoy.epoch(), end_tdoy.epoch()):
                        logging.info(
                            "Data logger {0} not deployed between {1} to {2} at {3}."
                            .format(array_t[c][t]['das/serial_number_s'],
                                    event_tdoy, end_tdoy, sta))
                        if ARGS.deploy_pickup:
                            logging.info("Skipping.")
                            continue
                    #   Read Das table, may already be read so don't reread it
                    #   XXX   Debug only
                    try:
                        das_or_fail = P5.read_das_t(das,
                                                    start_epoch=start_fepoch,
                                                    stop_epoch=stop_fepoch,
                                                    reread=False)
                    except Exception:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    if das_or_fail is None:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    #   Sample rate
                    if array_t[c][t]['das/serial_number_s'] in P5.Das_t:
                        sr = float(
                            P5.Das_t[array_t[c][t]['das/serial_number_s']]
                            ['rows'][0]['sample_rate_i']) / float(
                                P5.Das_t[array_t[c][t]['das/serial_number_s']]
                                ['rows'][0]['sample_rate_multiplier_i'])
                    else:
                        sr = 0.  #   Oops! No data for this DAS
                    #   Check v4 sample rate from array_t
                    try:
                        if sr != array_t[c][0]['sample_rate_i'] / float(
                                array_t[c][0]['sample_rate_multiplier_i']):
                            continue
                    except Exception:
                        pass
                    ###   Need to check against command line sample rate here
                    if ARGS.sample_rate and ARGS.sample_rate != sr:
                        logging.warn(
                            "Warning: Sample rate for {0} is {1}, not {2}. "
                            "Skipping.".format(das, sr, ARGS.sample_rate))
                        continue
                    sf.set_length_points(int(
                        (stop_fepoch - start_fepoch) * sr))

                    ###   Need to apply reduction velocity here
                    #   Set cut start and stop times
                    cut_start_fepoch = start_fepoch
                    cut_stop_fepoch = stop_fepoch
                    if ARGS.red_vel > 0.:

                        try:
                            secs, errs = segyfactory.calc_red_vel_secs(
                                offset_t, ARGS.red_vel)
                        except Exception as e:
                            secs = 0.
                            # keep errs a list so the loop below logs whole
                            # messages, not single characters
                            errs = ["Can not calculate reduction velocity: "
                                    "{0}.".format(str(e))]
                        for err in errs:
                            logging.info(err)
                        cut_start_fepoch += secs
                        cut_stop_fepoch += secs
                    #
                    sf.set_cut_start_epoch(cut_start_fepoch)
                    sf.set_array_t(array_t[c][t])
                    #
                    ###   Cut trace
                    #     Need to pad iff multiple traces
                    traces = P5.cut(das,
                                    cut_start_fepoch,
                                    cut_stop_fepoch,
                                    chan=c,
                                    sample_rate=sr,
                                    apply_time_correction=ARGS.do_time_correct)
                    if len(traces[0].data) == 0:
                        logging.warn(
                            "Warning: No data found for {0} for station {1}.".
                            format(das, sta))
                        continue
                    trace = ph5api.pad_traces(traces)
                    if ARGS.do_time_correct:
                        logging.info(
                            "Applied time drift correction by shifting trace by {0} samples."
                            .format(-1 * sr *
                                    (trace.time_correction_ms / 1000.)))
                        logging.info("Correction is {0} ms.".format(
                            trace.time_correction_ms))
                        logging.info(
                            "Clock drift (seconds/second): {0}".format(
                                trace.clock.slope))
                        for tccomment in trace.clock.comment:
                            tccmt = tccomment.split('\n')
                            for tcc in tccmt:
                                logging.info("Clock comment: {0}".format(tcc))
                    if trace.padding != 0:
                        logging.warn(
                            "Warning: There were {0} samples of padding added to fill gap at middle or end of trace."
                            .format(trace.padding))
                    ##   This may be a command line option later
                    #if True :
                    #if trace.response_t :
                    #try :
                    #tmp_data = trace.data * trace.response_t['bit_weight/value_d']
                    #trace.data = tmp_data
                    #except Exception as e :
                    #logging.warn ("Warning: Failed to apply bit weight. {0}".format (e.message))
                    ###   Need to apply decimation here
                    if ARGS.decimation:
                        #   Decimate
                        shift, data = decimate.decimate(
                            DECIMATION_FACTORS[ARGS.decimation], trace.data)
                        #   Set new sample rate
                        wsr = int(sr / int(ARGS.decimation))
                        sf.set_sample_rate(wsr)
                        trace.sample_rate = wsr
                        #   Set length of trace in samples
                        sf.set_length_points(len(data))
                        sf.length_points_all = len(data)
                        trace.nsamples = len(data)
                        trace.data = data
                    #   Did we read any data?
                    if trace.nsamples == 0:
                        #   Failed to read any data
                        logging.warning(
                            "Warning: No data for data logger {2}/{0} starting at {1}."
                            .format(das, trace.start_time, sta))
                        continue
                    #   Read receiver and response tables
                    receiver_t = trace.receiver_t
                    if receiver_t:
                        sf.set_receiver_t(receiver_t)
                    else:
                        logging.warning(
                            "No sensor orientation found in ph5 file. Contact PIC."
                        )
                    #   Read gain and bit weight

                    if ('response_table_n_i' in array_t[c][t] and
                            array_t[c][t]['response_table_n_i'] != -1):
                        response_t = P5.get_response_t_by_n_i(
                            int(array_t[c][t]['response_table_n_i']))
                    else:
                        response_t = trace.response_t

                    if response_t:
                        sf.set_response_t(response_t)
                    else:
                        logging.warning(
                            "No gain or bit weight found in ph5 file. Contact PIC."
                        )
                    #   Increment line sequence
                    i += 1
                    sf.set_line_sequence(i)
                    sf.set_das_t(trace.das_t[0])
                    logging.info("=-" * 20)
                    logging.info("trace: {0}".format(i))
                    logging.info("Extracted: Station ID {0}".format(sta))
                    logging.info("Chan: {2} Start: {0:s}, Stop: {1:s}.".format(
                        event_tdoy, end_tdoy, c))
                    logging.info("Lat: %f Lon: %f Elev: %f %s" %
                                 (array_t[c][t]['location/Y/value_d'],
                                  array_t[c][t]['location/X/value_d'],
                                  array_t[c][t]['location/Z/value_d'],
                                  array_t[c][t]['location/Z/units_s'].strip()))
                    logging.info("{0}".format(array_t[c][t]['description_s']))
                    #
                    ###   Open SEG-Y file
                    #
                    if not fh:
                        if ARGS.write_stdout:
                            try:
                                fh = sys.stdout
                            except Exception as e:
                                logging.error("{0}".format(str(e)))
                                logging.error(
                                    "Failed to open STDOUT. Can not continue.")
                                sys.exit(-1)
                        else:
                            #
                            ###   Set up file naming
                            #
                            try:
                                nickname = P5.Experiment_t['rows'][-1][
                                    'nickname_s']
                            except Exception:
                                nickname = "X"
                            #
                            base = "{0}_{1}_{2}_{3}".format(
                                nickname, ARGS.station_array[-3:], evt,
                                chan_name)
                            outfilename = "{1:s}/{0:s}_0001.SGY".format(
                                base, ARGS.out_dir)
                            #   Make sure that the name is unique
                            j = 1
                            while os.path.exists(outfilename):
                                j += 1
                                tmp = outfilename[:-8]
                                outfilename = "{0}{1:04d}.SGY".format(tmp, j)
                            #   Open SEG-Y file
                            try:
                                fh = open(outfilename, 'w+')
                                logging.info("Opened: {0}".format(outfilename))
                            except Exception as e:
                                logging.error(
                                    "Error: Failed to open {0}.\t{1}".format(
                                        outfilename, str(e)))
                                sys.stderr.write(
                                    "Error: Failed to open {0}.\t{1}".format(
                                        outfilename, str(e)))
                                sys.exit()
                        #   Write reel headers and first trace
                        logs = segyfactory.write_segy_hdr(
                            trace, fh, sf, num_traces)
                        #   Write any messages
                        for log in logs:
                            logging.info(log)
                    else:
                        #   Write trace
                        logs = segyfactory.write_segy(trace, fh, sf)
                        for log in logs:
                            logging.info(log)
            #   chan
        #   Traces found does not match traces expected
        if i != num_traces and fh:
            #   Need to update reel_header
            if (num_traces - skipped_chans) < i:
                logging.warn(
                    "Warning: Wrote {0} of {1} trace/channels listed in {2}.".
                    format(i, num_traces - skipped_chans, ARGS.station_array))
            sf.set_text_header(i)
            fh.seek(0, os.SEEK_SET)
            sf.write_text_header(fh)
            sf.set_reel_header(i)
            fh.seek(3200, os.SEEK_SET)
            sf.write_reel_header(fh)
        ##   Decimation
        #if ARGS.decimation :
        ##   Need to update reel_header
        #sf.set_sample_rate (wsr)
        #sf.set_length_points (trace.nsamples)
        #sf.set_text_header (i)
        #fh.seek (0, os.SEEK_SET)
        #sf.write_text_header (fh)
        #sf.set_reel_header (i)
        #fh.seek (3200, os.SEEK_SET)
        #sf.write_reel_header (fh)
        try:
            fh.close()
        except AttributeError:
            pass