Example #1
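Builds a receiver gather: for each requested station it reads the array geometry, cuts a trace around every shot in the selected shot line, optionally applies clock-drift correction, reduction velocity, and decimation, and writes the traces to SEG-Y through segyfactory. A usage sketch follows the function.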
def gather(args, p5):
    '''   Create receiver gather   '''
    for sta in args.stations_to_gather:
        try:
            # Read the appropriate line from Array_t
            if args.station_array in p5.Array_t:
                array_t = p5.Array_t[args.station_array]['byid'][sta]
            else:
                p5.read_array_t(args.station_array)
                array_t = p5.Array_t[args.station_array]['byid'][sta]
            LOGGER.info("Extracting receiver(s) at station {0:s}.".format(sta))
            LOGGER.info("Found the following components:")
            for c in array_t.keys():
                LOGGER.info("DAS: {0} component: {1}".format(
                    array_t[c][0]['das/serial_number_s'], c))
                LOGGER.info("Lat: {0} Lon: {1} Elev: {2}".format(
                    array_t[c][0]['location/Y/value_d'],
                    array_t[c][0]['location/X/value_d'],
                    array_t[c][0]['location/Z/value_d']))
                LOGGER.info("{0}".format(array_t[c][0]['description_s']))
            # Read the appropriate line from Das_t and get the sample rate
            das = array_t[c][0]['das/serial_number_s']
            p5.read_das_t(das,
                          array_t[c][0]['deploy_time/epoch_l'],
                          array_t[c][0]['pickup_time/epoch_l'])
            das_row = p5.Das_t[das]['rows'][0]
            # True sample rate is sample_rate_i / sample_rate_multiplier_i
            sr = float(das_row['sample_rate_i']) / float(
                das_row['sample_rate_multiplier_i'])
        except KeyError:
            LOGGER.warn(
                "Station {0} not found in the current array.".format(sta))
            continue

        i = 0  # Number of traces found
        fh = None  # SEG-Y file
        # Get a mostly empty instance of segyfactory
        sf = segyfactory.Ssegy(None, None, utm=args.use_utm)
        # Set the type of extended header to write
        sf.set_ext_header_type(args.ext_header)
        # Should we allow traces longer than 2^16 samples (non-standard)?
        sf.set_break_standard(args.break_standard)
        # Filter out unwanted channels here
        chans_available = array_t.keys()
        chans = []
        # Put the desired channels in the desired order
        for c in args.channels:
            if c in chans_available:
                chans.append(c)
        # Channel name for output file name
        chan_name = ''
        for c in chans:
            chan_name += "{0}".format(c)

        # Read Event_t_xxx
        Event_t = p5.Event_t[args.shot_line]['byid']
        order = p5.Event_t[args.shot_line]['order']

        # Take a guess at the number of traces in this SEG-Y file based on
        # number of shots
        num_traces = len(order) * len(chans)
        # Try to read offset distances (keyed on shot id's)
        Offset_t = p5.read_offsets_receiver_order(args.station_array, sta,
                                                  args.shot_line)
        # Loop through each shot by shot id
        for o in order:
            # Check event list (and also shot_range), args.evt_list, here!
            if args.evt_list:
                if o not in args.evt_list:
                    continue

            # Appropriate line from Event_t
            event_t = Event_t[o]
            # Need to handle time offset here, args.seconds_offset_from_shot
            event_tdoy = timedoy.TimeDOY(
                microsecond=event_t['time/micro_seconds_i'],
                epoch=event_t['time/epoch_l'])
            # Adjust start time based on offset entered on command line
            if args.seconds_offset_from_shot:
                event_tdoy += args.seconds_offset_from_shot
            end_tdoy = event_tdoy + args.length

            start_fepoch = event_tdoy.epoch(fepoch=True)
            stop_fepoch = end_tdoy.epoch(fepoch=True)
            # Set start time in segyfactory
            sf.set_cut_start_epoch(start_fepoch)
            # Set event
            sf.set_event_t(event_t)
            # Set shot to receiver distance
            sf.set_offset_t(Offset_t[o])
            # Set number of samples in trace, gets reset if decimated
            sf.set_length_points(int((stop_fepoch - start_fepoch) * sr))
            # Loop through each channel (channel_number_i)
            for c in chans:
                if c not in array_t:
                    continue
                # Filter out unwanted seed loc codes
                if args.seed_location and\
                   array_t[c][0]['seed_location_code_s'] != args.seed_location:
                    LOGGER.info("Location code mismatch: {0}/{1}/{2}".format(
                        array_t[c][0]['seed_location_code_s'],
                        args.seed_location, c))
                    continue
                # Filter out unwanted seed channels
                seed_channel_code_s = ph5api.seed_channel_code(array_t[c][0])
                if args.seed_channel and\
                   seed_channel_code_s != args.seed_channel:
                    LOGGER.info("Channel code mismatch: {0}/{1}/{2}".format(
                        array_t[c][0]['seed_channel_code_s'],
                        args.seed_channel, c))
                    continue
                # DAS
                das = array_t[c][0]['das/serial_number_s']
                for t in range(len(array_t[c])):
                    # Deploy time
                    start_epoch = array_t[c][t]['deploy_time/epoch_l']
                    # Pickup time
                    stop_epoch = array_t[c][t]['pickup_time/epoch_l']
                    # Is this shot within the deploy and pickup times
                    if not ph5api.is_in(start_epoch, stop_epoch,
                                        event_tdoy.epoch(), end_tdoy.epoch()):
                        LOGGER.info("Data logger {0} not deployed between\
                        {1} to {2} at {3}.".format(
                            array_t[c][t]['das/serial_number_s'], event_tdoy,
                            end_tdoy, sta))
                        if args.deploy_pickup:
                            LOGGER.info("Skipping.")
                            continue

                    # Need to apply reduction velocity here
                    if args.red_vel > 0.:
                        try:
                            secs, errs = segyfactory.calc_red_vel_secs(
                                Offset_t[o], args.red_vel)
                        except Exception as e:
                            secs = 0.
                            errs = [
                                "Can not calculate "
                                "reduction velocity: {0}.".format(str(e))
                            ]
                        for err in errs:
                            LOGGER.info(err)
                        start_fepoch += secs
                        stop_fepoch += secs
                    # Set array_t in segyfactory
                    sf.set_array_t(array_t[c][t])
                    # Drop any cached Das table rows so cut() rereads them
                    p5.forget_das_t(das)
                    #
                    # Cut trace
                    #
                    traces = p5.cut(das,
                                    start_fepoch,
                                    stop_fepoch,
                                    chan=c,
                                    sample_rate=sr)
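                    # Merge cut windows into one trace, padding any gaps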
                    trace = ph5api.pad_traces(traces)
                    if args.do_time_correct:
                        LOGGER.info(
                            "Applied time drift correction by shifting "
                            "trace by {0} samples.".format(
                                -1 * sr * (trace.time_correction_ms / 1000.)))
                        LOGGER.info("Correction is {0} ms.".format(
                            trace.time_correction_ms))
                        LOGGER.info("Clock drift (seconds/second): {0}".format(
                            trace.clock.slope))
                        for tccomment in trace.clock.comment:
                            tccmt = tccomment.split('\n')
                            for tcc in tccmt:
                                LOGGER.info("Clock comment: {0}".format(tcc))
                    if trace.nsamples == 0:
                        LOGGER.info("No data found for DAS "
                                    "{0} between {1} and {2}.".format(
                                        das, event_tdoy.getPasscalTime(),
                                        end_tdoy.getPasscalTime()))
                        continue
                    if trace.padding != 0:
                        LOGGER.warn(
                            "There were {0} samples of padding added to "
                            "fill gap(s) in original traces.".format(
                                trace.padding))
                    # Need to apply decimation here
                    if args.decimation:
                        # Decimate
                        shift, data = decimate.decimate(
                            DECIMATION_FACTORS[args.decimation], trace.data)
                        # Set new sample rate
                        wsr = int(sr / int(args.decimation))
                        sf.set_sample_rate(wsr)
                        trace.sample_rate = wsr
                        # Set length of trace in samples
                        sf.set_length_points(len(data))
                        trace.nsamples = len(data)
                        # Apply the decimated samples to the trace
                        trace.data = data

                    if trace.nsamples == 0:
                        # Failed to read any data
                        LOGGER.warning("Warning: No data for data\
                        logger {0} starting at {1}.".format(
                            das, trace.start_time))
                        continue
                    # Read receiver and response tables
                    receiver_t = trace.receiver_t
                    if 'response_table_n_i' in array_t[c][t] and\
                       array_t[c][t]['response_table_n_i'] != -1:
                        response_t = p5.get_response_t_by_n_i(
                            int(array_t[c][t]['response_table_n_i']))
                    else:
                        response_t = p5.Response_t['rows'][
                            trace.das_t[0]['response_table_n_i']]
                    # Set sort_t in segyfactory
                    sf.set_sort_t(
                        p5.get_sort_t(start_fepoch, args.station_array))
                    # Set das_t
                    sf.set_das_t(trace.das_t[0])
                    # Line sequence (trace number)
                    sf.set_line_sequence(i)
                    i += 1
                    if response_t:
                        sf.set_response_t(response_t)
                    else:
                        LOGGER.warning(
                            "No gain or bit weight found in ph5 file.")
                    if receiver_t:
                        sf.set_receiver_t(receiver_t)
                    else:
                        LOGGER.warning(
                            "No sensor orientation found in ph5 file.")
                    # Some informational logging
                    LOGGER.info("trace: {0}".format(i))
                    LOGGER.info("-=" * 20)
                    LOGGER.info("Extracting: Event ID %s" % event_t['id_s'])
                    LOGGER.info("Chan: {2} Start: {0:s}, Stop: {1:s}.".format(
                        event_tdoy, end_tdoy, c))
                    LOGGER.info("Lat: %f Lon: %f Elev:\
                    %f %s" % (event_t['location/Y/value_d'],
                              event_t['location/X/value_d'],
                              event_t['location/Z/value_d'],
                              event_t['location/Z/units_s'].strip()))
                    #
                    # Open SEG-Y file
                    #
                    if not fh:
                        if args.write_stdout:
                            try:
                                fh = sys.stdout
                            except Exception as e:
                                LOGGER.error("{0}".format(e.message))
                                LOGGER.error(
                                    "Failed to open STDOUT. Can not continue.")
                                sys.exit(-1)
                        else:
                            #
                            # Set up file naming
                            #
                            try:
                                nickname = p5.Experiment_t['rows'][-1][
                                    'nickname_s']
                            except BaseException:
                                nickname = "X"
                            #
                            base = "{0}_{1}_{2}_{3}".format(
                                nickname, args.station_array[-3:], sta,
                                chan_name)
                            outfilename = "{1:s}/{0:s}_0001.SGY".format(
                                base, args.out_dir)
                            # Make sure that the name is unique
                            j = 1
                            while os.path.exists(outfilename):
                                j += 1
                                tmp = outfilename[:-8]
                                outfilename = "{0}{1:04d}.SGY".format(tmp, j)
                            # Open SEG-Y file
                            try:
                                fh = open(outfilename, 'w+')
                                LOGGER.info("Opened: {0}".format(outfilename))
                            except Exception as e:
                                LOGGER.error("Failed to open {0}.\t{1}".format(
                                    outfilename, e.message))
                                sys.exit()
                        # Write reel headers and first trace
                        try:
                            logs = segyfactory.write_segy_hdr(
                                trace, fh, sf, num_traces)
                            # Write any messages
                            for l in logs:
                                LOGGER.info(l)
                        except segyfactory.SEGYError as e:
                            LOGGER.error("Header write failure: {0}".format(e))
                            sys.exit()
                    else:
                        # Write trace
                        try:
                            logs = segyfactory.write_segy(trace, fh, sf)
                            for l in logs:
                                LOGGER.info(l)
                            LOGGER.info('=-' * 40)
                        except segyfactory.SEGYError as e:
                            LOGGER.error("Trace write failure: {0}".format(e))
                            sys.exit()
        # Traces found does not match traces expected
        if fh and i != num_traces:
            # Need to update reel_header
            LOGGER.warn("Wrote {0} of {1} traces listed in {2}.".format(
                i, num_traces, args.station_array))
            sf.set_text_header(i)
            fh.seek(0, os.SEEK_SET)
            sf.write_text_header(fh)
            sf.set_reel_header(i)
            fh.seek(3200, os.SEEK_SET)
            sf.write_reel_header(fh)

        if fh:
            fh.close()
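
A minimal sketch of how this gather() might be driven. The attributes on args are exactly the ones the function reads above; the ph5api.PH5 constructor and close() call follow ph5api convention, and every concrete value (paths, station and table names, the 'U' header type) is an illustrative assumption, not a default taken from this example.

import argparse

from ph5.core import ph5api

# Hypothetical setup: open a PH5 experiment (illustrative path/nickname)
p5 = ph5api.PH5(path='/data/experiment', nickname='master.ph5')

# Only the attributes gather() reads are set; all values are guesses
args = argparse.Namespace(
    stations_to_gather=['1001', '1002'],  # station ids to extract
    station_array='Array_t_001',          # array table to read
    shot_line='Event_t_001',              # shot line (event table) to read
    evt_list=None,                        # restrict to these shot ids; None = all
    channels=[1, 2, 3],                   # channels, in desired output order
    length=60,                            # trace length in seconds
    seconds_offset_from_shot=0.,          # shift cut start from shot time
    red_vel=0.,                           # reduction velocity; 0. disables
    decimation=None,                      # key into DECIMATION_FACTORS
    seed_location=None,                   # optional SEED location filter
    seed_channel=None,                    # optional SEED channel filter
    deploy_pickup=False,                  # True: skip shots outside deploy window
    do_time_correct=True,                 # apply clock-drift correction
    use_utm=False,                        # write UTM coordinates
    ext_header='U',                       # extended trace header type
    break_standard=False,                 # allow traces longer than 2^16 samples
    write_stdout=False,                   # write SEG-Y to stdout
    out_dir='.')                          # output directory

gather(args, p5)
p5.close()
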
Example #2
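This variant builds an event (shot) gather: the outer loop runs over shots and the inner loop over the requested stations, and it reads module-level ARGS and P5 globals instead of taking parameters. The cutting, correction, and SEG-Y writing steps mirror Example #1; a driver sketch follows the function.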
def gather():
    '''   Create event gather   '''
    if not ARGS.stations_to_gather:
        ARGS.stations_to_gather = P5.Array_t[ARGS.station_array]['order']
    if ARGS.all_events:
        ARGS.evt_list = P5.Event_t[ARGS.shot_line]['order']

    for evt in ARGS.evt_list:
        try:
            if not ARGS.start_time:
                event_t = P5.Event_t[ARGS.shot_line]['byid'][evt]
            else:
                event_t = None

            logging.info("Extracting receivers for event {0:s}.".format(evt))
        except Exception:
            logging.warn("Event {0} not found.".format(evt))
            continue
        #
        fh = None
        #   Initialize
        sf = segyfactory.Ssegy(None, event_t, utm=ARGS.use_utm)
        #   Allow traces longer than 2^16 samples (non-standard SEG-Y)
        sf.set_break_standard(ARGS.break_standard)
        #   Set external header type
        sf.set_ext_header_type(ARGS.ext_header)
        #   Set event information
        if event_t:
            sf.set_event_t(event_t)
            #   Event time
            event_tdoy = timedoy.TimeDOY(
                microsecond=event_t['time/micro_seconds_i'],
                epoch=event_t['time/epoch_l'])
            Offset_t = P5.read_offsets_shot_order(ARGS.station_array, evt,
                                                  ARGS.shot_line)
            #Offset_t = P5.calc_offsets (ARGS.station_array, evt, ARGS.shot_line)
        else:
            event_tdoy = evt
            Offset_t = None
            logging.warn("Warning: No shot to receiver distances found.")
        if ARGS.seconds_offset_from_shot:
            event_tdoy += ARGS.seconds_offset_from_shot
        end_tdoy = event_tdoy + ARGS.length
        #   Event start time
        start_fepoch = event_tdoy.epoch(fepoch=True)
        #   Trace cut end time
        stop_fepoch = end_tdoy.epoch(fepoch=True)
        Array_t = P5.Array_t[ARGS.station_array]['byid']
        #   All channels (components) available for this array
        chans_available = P5.channels_Array_t(ARGS.station_array)
        #   The trace sequence
        i = 0
        skipped_chans = 0
        for sta in ARGS.stations_to_gather:
            logging.info("-=" * 20)
            logging.info(
                "Attempting to find data for station {0}.".format(sta))
            #   Shot to station information
            if Offset_t and sta in Offset_t:
                offset_t = Offset_t[sta]
                sf.set_offset_t(offset_t)
            #   Array geometry
            if sta not in Array_t:
                logging.info(
                    "Warning: The station {0} is not in array {1}.".format(
                        sta, ARGS.station_array))
                continue
            array_t = Array_t[sta]
            #   Filter out unwanted channels
            chans = []
            for c in ARGS.channels:
                if c in chans_available:
                    chans.append(c)
            #   Create channel name for output file name
            chan_name = ''
            for c in chans:
                chan_name += "{0}".format(c)
            num_traces = len(chans) * len(ARGS.stations_to_gather)
            #   Loop through channels
            for c in chans:
                if c not in array_t:
                    logging.warn(
                        "Warning: No channel information for {0} in array {1}."
                        .format(c, ARGS.station_array))
                    skipped_chans += 1
                    continue
                try:
                    #   Filter out unwanted seed loc codes
                    if ARGS.seed_location and array_t[c][0][
                            'seed_location_code_s'] != ARGS.seed_location:
                        logging.info(
                            "Location code mismatch: {0}/{1}/{2}".format(
                                array_t[c][0]['seed_location_code_s'],
                                ARGS.seed_location, c))
                        continue
                    #   Filter out unwanted seed channels
                    seed_channel_code_s = ph5api.seed_channel_code(
                        array_t[c][0])
                    if ARGS.seed_channel and seed_channel_code_s != ARGS.seed_channel:
                        logging.info(
                            "Channel code mismatch: {0}/{1}/{2}".format(
                                seed_channel_code_s,
                                ARGS.seed_channel, c))
                        continue
                except Exception:
                    pass
                #   Loop for each array_t per id_s and channel
                for t in range(len(array_t[c])):
                    #   DAS
                    das = array_t[c][t]['das/serial_number_s']
                    #   Deploy time
                    start_epoch = array_t[c][t]['deploy_time/epoch_l']
                    #   Pickup time
                    stop_epoch = array_t[c][t]['pickup_time/epoch_l']
                    #   Is this shot within the deploy and pickup times
                    if not ph5api.is_in(start_epoch, stop_epoch,
                                        event_tdoy.epoch(), end_tdoy.epoch()):
                        logging.info(
                            "Data logger {0} not deployed between {1} to {2} at {3}."
                            .format(array_t[c][t]['das/serial_number_s'],
                                    event_tdoy, end_tdoy, sta))
                        if ARGS.deploy_pickup:
                            logging.info("Skipping.")
                            continue
                    #   Read Das table, may already be read so don't reread it
                    #   XXX   Debug only
                    try:
                        das_or_fail = P5.read_das_t(das,
                                                    start_epoch=start_fepoch,
                                                    stop_epoch=stop_fepoch,
                                                    reread=False)
                    except Exception:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    if das_or_fail is None:
                        logging.warn(
                            "Failed to read DAS: {0} between {1} and {2}.".
                            format(das, timedoy.epoch2passcal(start_epoch),
                                   timedoy.epoch2passcal(stop_epoch)))
                        continue

                    #   Sample rate
                    if das in P5.Das_t:
                        das_row = P5.Das_t[das]['rows'][0]
                        sr = float(das_row['sample_rate_i']) / float(
                            das_row['sample_rate_multiplier_i'])
                    else:
                        sr = 0.  # No data for this DAS
                    #   Check v4 sample rate from array_t
                    try:
                        if sr != array_t[c][0]['sample_rate_i'] / float(
                                array_t[c][0]['sample_rate_multiplier_i']):
                            continue
                    except Exception:
                        pass
                    ###   Need to check against command line sample rate here
                    if ARGS.sample_rate and ARGS.sample_rate != sr:
                        logging.warn(
                            "Sample rate for {0} is {1}, not {2}. Skipping."
                            .format(das, sr, ARGS.sample_rate))
                        continue
                    sf.set_length_points(int(
                        (stop_fepoch - start_fepoch) * sr))

                    ###   Need to apply reduction velocity here
                    #   Set cut start and stop times
                    cut_start_fepoch = start_fepoch
                    cut_stop_fepoch = stop_fepoch
                    if ARGS.red_vel > 0.:
                        try:
                            secs, errs = segyfactory.calc_red_vel_secs(
                                offset_t, ARGS.red_vel)
                        except Exception as e:
                            secs = 0.
                            errs = [
                                "Can not calculate reduction velocity: "
                                "{0}.".format(str(e))
                            ]
                        for err in errs:
                            logging.info(err)
                        cut_start_fepoch += secs
                        cut_stop_fepoch += secs
                    #
                    sf.set_cut_start_epoch(cut_start_fepoch)
                    sf.set_array_t(array_t[c][t])
                    #
                    ###   Cut trace
                    #     Need to pad iff multiple traces
                    traces = P5.cut(das,
                                    cut_start_fepoch,
                                    cut_stop_fepoch,
                                    chan=c,
                                    sample_rate=sr,
                                    apply_time_correction=ARGS.do_time_correct)
                    if len(traces[0].data) == 0:
                        logging.warn(
                            "Warning: No data found for {0} for station {1}.".
                            format(das, sta))
                        continue
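                    # Merge cut windows into one trace, padding any gaps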
                    trace = ph5api.pad_traces(traces)
                    if ARGS.do_time_correct:
                        logging.info(
                            "Applied time drift correction by shifting trace by {0} samples."
                            .format(-1 * sr *
                                    (trace.time_correction_ms / 1000.)))
                        logging.info("Correction is {0} ms.".format(
                            trace.time_correction_ms))
                        logging.info(
                            "Clock drift (seconds/second): {0}".format(
                                trace.clock.slope))
                        for tccomment in trace.clock.comment:
                            tccmt = tccomment.split('\n')
                            for tcc in tccmt:
                                logging.info("Clock comment: {0}".format(tcc))
                    if trace.padding != 0:
                        logging.warn(
                            "Warning: There were {0} samples of padding added to fill gap at middle or end of trace."
                            .format(trace.padding))
                    ##   This may be a command line option later
                    #if True :
                    #if trace.response_t :
                    #try :
                    #tmp_data = trace.data * trace.response_t['bit_weight/value_d']
                    #trace.data = tmp_data
                    #except Exception as e :
                    #logging.warn ("Warning: Failed to apply bit weight. {0}".format (e.message))
                    ###   Need to apply decimation here
                    if ARGS.decimation:
                        #   Decimate
                        shift, data = decimate.decimate(
                            DECIMATION_FACTORS[ARGS.decimation], trace.data)
                        #   Set new sample rate
                        wsr = int(sr / int(ARGS.decimation))
                        sf.set_sample_rate(wsr)
                        trace.sample_rate = wsr
                        #   Set length of trace in samples
                        sf.set_length_points(len(data))
                        sf.length_points_all = len(data)
                        trace.nsamples = len(data)
                        trace.data = data
                    #   Did we read any data?
                    if trace.nsamples == 0:
                        #   Failed to read any data
                        logging.warning(
                            "Warning: No data for data logger {2}/{0} starting at {1}."
                            .format(das, trace.start_time, sta))
                        continue
                    #   Read receiver and response tables
                    receiver_t = trace.receiver_t
                    if receiver_t:
                        sf.set_receiver_t(receiver_t)
                    else:
                        logging.warning(
                            "No sensor orientation found in ph5 file. Contact PIC."
                        )
                    #   Read gain and bit weight

                    if 'response_table_n_i' in array_t[c][t] and array_t[
                            c][t]['response_table_n_i'] != -1:
                        response_t = P5.get_response_t_by_n_i(
                            int(array_t[c][t]['response_table_n_i']))
                    else:
                        response_t = trace.response_t

                    if response_t:
                        sf.set_response_t(response_t)
                    else:
                        logging.warning(
                            "No gain or bit weight found in ph5 file. Contact PIC."
                        )
                    #   Increment line sequence
                    i += 1
                    sf.set_line_sequence(i)
                    sf.set_das_t(trace.das_t[0])
                    logging.info("=-" * 20)
                    logging.info("trace: {0}".format(i))
                    logging.info("Extracted: Station ID {0}".format(sta))
                    logging.info("Chan: {2} Start: {0:s}, Stop: {1:s}.".format(
                        event_tdoy, end_tdoy, c))
                    logging.info("Lat: %f Lon: %f Elev: %f %s" %
                                 (array_t[c][t]['location/Y/value_d'],
                                  array_t[c][t]['location/X/value_d'],
                                  array_t[c][t]['location/Z/value_d'],
                                  array_t[c][t]['location/Z/units_s'].strip()))
                    logging.info("{0}".format(array_t[c][t]['description_s']))
                    #
                    ###   Open SEG-Y file
                    #
                    if not fh:
                        if ARGS.write_stdout:
                            try:
                                fh = sys.stdout
                            except Exception as e:
                                logging.error("{0}".format(e.message))
                                logging.error(
                                    "Failed to open STDOUT. Can not continue.")
                                sys.exit(-1)
                        else:
                            #
                            ###   Set up file naming
                            #
                            try:
                                nickname = P5.Experiment_t['rows'][-1][
                                    'nickname_s']
                            except Exception:
                                nickname = "X"
                            #
                            base = "{0}_{1}_{2}_{3}".format(
                                nickname, ARGS.station_array[-3:], evt,
                                chan_name)
                            outfilename = "{1:s}/{0:s}_0001.SGY".format(
                                base, ARGS.out_dir)
                            #   Make sure that the name is unique
                            j = 1
                            while os.path.exists(outfilename):
                                j += 1
                                tmp = outfilename[:-8]
                                outfilename = "{0}{1:04d}.SGY".format(tmp, j)
                            #   Open SEG-Y file
                            try:
                                fh = open(outfilename, 'w+')
                                logging.info("Opened: {0}".format(outfilename))
                            except Exception as e:
                                logging.error(
                                    "Error: Failed to open {0}.\t{1}".format(
                                        outfilename, str(e)))
                                sys.stderr.write(
                                    "Error: Failed to open {0}.\t{1}\n".format(
                                        outfilename, str(e)))
                                sys.exit()
                        #   Write reel headers and first trace
                        logs = segyfactory.write_segy_hdr(
                            trace, fh, sf, num_traces)
                        #   Write any messages
                        for l in logs:
                            logging.info(l)
                    else:
                        #   Write trace
                        logs = segyfactory.write_segy(trace, fh, sf)
                        for l in logs:
                            logging.info(l)
            #   chan
        #   Traces found does not match traces expected
        if i != num_traces and fh:
            #   Need to update reel_header
            if (num_traces - skipped_chans) < i:
                logging.warn(
                    "Warning: Wrote {0} of {1} trace/channels listed in {2}.".
                    format(i, num_traces - skipped_chans, ARGS.station_array))
            sf.set_text_header(i)
            fh.seek(0, os.SEEK_SET)
            sf.write_text_header(fh)
            sf.set_reel_header(i)
            fh.seek(3200, os.SEEK_SET)
            sf.write_reel_header(fh)
        try:
            fh.close()
        except AttributeError:
            pass
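
Because this variant reads module-level globals, a driver assigns ARGS and P5 in the module that defines gather() before calling it. A minimal sketch under the same assumptions as the Example #1 sketch (ph5api convention, illustrative values throughout):

import argparse

from ph5.core import ph5api

# Hypothetical globals for the module that defines gather(); attribute
# names match what the function reads, values are illustrative only
P5 = ph5api.PH5(path='/data/experiment', nickname='master.ph5')
ARGS = argparse.Namespace(
    station_array='Array_t_001',
    shot_line='Event_t_001',
    stations_to_gather=None,      # falsy: gather() uses every station in the array
    all_events=True,              # True: gather() fills evt_list with every shot
    evt_list=None,
    start_time=None,              # unset: shot times come from Event_t
    channels=[1, 2, 3],
    length=60,
    seconds_offset_from_shot=0.,
    sample_rate=None,             # if set, skip DASes recorded at other rates
    red_vel=0.,
    decimation=None,
    seed_location=None,
    seed_channel=None,
    deploy_pickup=False,
    do_time_correct=True,
    use_utm=False,
    ext_header='U',
    break_standard=False,
    write_stdout=False,
    out_dir='.')

gather()
P5.close()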