Example #1
    def setUp(self):
        """
        Set up benchmark data.
        """
        self.segy = readSEGY(get_example_file("ew0210_o30.segy"), unpack_headers=True)
        self.pickdb = PickDatabaseConnection(":memory:")
        for pick in uniq_picks:
            self.pickdb.update_pick(**pick)
Example #2
def segy2db(segy, pickdb, events, branches=None, subbranch=None,
            constant_values=None):
    """
    Create a pick database with picks for each trace in a SEG-Y file.

    :param segy: :class:`rockfish.segy.segy.SEGYFile` instance or filename of 
        a SEG-Y file.
    :param pickdb: Open
        :class:`rockfish.picking.database.PickDatabaseConnection` or
        filename of a database to add picks to.
    :param events: ``list`` of event names to add to the database for each 
        trace.
    :param branches: ``dict`` of branch values for each event.
    :param subbranch: ``dict`` of subbranch values for each event.
    :param constant_values: ``dict`` of field and values to assign to all
        new picks.
    :returns: :class:`rockfish.picking.database.PickDatabaseConnection`
    """
    if isinstance(pickdb, str):
        pickdb = PickDatabaseConnection(pickdb)
    if isinstance(segy, str):
        segy = readSEGY(segy)
    for i, tr in enumerate(segy.traces):
        d = {'ensemble': tr.header.ensemble_number,
             'trace': tr.header.trace_number_within_the_ensemble,
             'trace_in_file': i,
             'time': 1e30,
             'time_reduced': 1e30,
             'error': 0,
             'source_x': tr.header.scaled_source_coordinate_x,
             'source_y': tr.header.scaled_source_coordinate_y,
             'source_z': -tr.header.scaled_source_depth_below_surface,
             'receiver_x': tr.header.scaled_group_coordinate_x,
             'receiver_y': tr.header.scaled_group_coordinate_y,
             'receiver_z': tr.header.scaled_receiver_group_elevation,
             'offset': tr.header.source_receiver_offset_in_m,
             'faz': tr.header.computed_azimuth_in_deg,
             'data_file': segy.file.name,
             'method': 'segy2db()'}
        if constant_values is not None:
            d.update(constant_values)
        for event in events:
            d['event'] = event
            if branches is not None:
                d['branch'] = branches[event]
            else:
                d['branch'] = 0
            if subbranch is not None:
                d['subbranch'] = subbranch[event]
            else:
                d['subbranch'] = 0 
            pickdb.update_pick(**d)
    pickdb.commit()
    return pickdb
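A minimal usage sketch for segy2db; the filenames, event names, branch numbers, and error value below are hypothetical, not taken from the source:

pickdb = segy2db('line1.segy', 'picks.sqlite',
                 events=['Pg', 'PmP'],
                 branches={'Pg': 1, 'PmP': 2},
                 constant_values={'error': 0.05})
# Every trace gets a placeholder pick (time=1e30) for each event,
# which can later be replaced with real pick times.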
Example #3
def main():
    args = get_args()
    is_first_file = True
    for filename in args.segyfiles:
        segy = readSEGY(filename, unpack_headers=not args.no_standard_headers,
                        scale_headers=args.scale_headers,
                        computed_headers=args.computed_headers,
                        unpack_data=False)
        if is_first_file:
            sdb = SEGYHeaderDatabase(database=args.dbfile, segy=segy,
                                     table_name=args.table_name,
                                     force_new=args.force,
                                     include_filename=args.include_filename)
            is_first_file = False
        else:
            sdb.append_from_segy(segy)
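main() relies on a get_args() helper that is not shown; a plausible argparse-based sketch follows, where the option names, help strings, and defaults are assumptions and the real script may differ:

import argparse

def get_args():
    # Hypothetical parser exposing the attributes main() reads; the actual
    # script's flags and defaults are not shown in the source.
    parser = argparse.ArgumentParser(
        description='Load SEG-Y trace headers into a database table.')
    parser.add_argument('dbfile', help='output database file')
    parser.add_argument('segyfiles', nargs='+', help='input SEG-Y file(s)')
    parser.add_argument('--table-name', default='trace_headers')
    parser.add_argument('--no-standard-headers', action='store_true')
    parser.add_argument('--scale-headers', action='store_true')
    parser.add_argument('--computed-headers', action='store_true')
    parser.add_argument('--force', action='store_true')
    parser.add_argument('--include-filename', action='store_true')
    return parser.parse_args()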
Example #4
    def setUp(self):
        """
        Set up benchmark data.
        """
        self.segy = readSEGY(get_example_file('ew0210_o30.segy'),
                             unpack_headers=True)
        self.pickdb = PickDatabaseConnection(':memory:')
        for pick in uniq_picks:
            self.pickdb.update_pick(**pick)
        self.default_params = [
            'ABSCISSA_KEY', 'GAIN', 'CLIP',
            'NORMALIZATION_METHOD', 'OFFSET_GAIN_POWER',
            'WIGGLE_PEN_COLOR', 'WIGGLE_PEN_WIDTH',
            'NEG_FILL_COLOR', 'POS_FILL_COLOR', 'DISTANCE_UNIT',
            'TIME_UNIT', 'SEGY_TIME_UNITS', 'SEGY_DISTANCE_UNITS',
            'SEGY_HEADER_ALIASES']
Example #5
    def insert_odt_picks(self, event, odtfile, line2segyfile, model_line=None,
                         vred=None, error=0.0,
                         rid_field='ensemble_number',
                         sid_field='trace_number_within_the_ensemble',
                         verify=False, load_segy_geom=True, **kwargs):
        """
        Insert picks from OpendTect
        """
        epsg = kwargs.pop('epsg', self.EPSG)
        segyfile0 = None
        f = open(odtfile, 'r')
        for line in f:
            d = line.split()
            odt_line = d[0]
            sx = float(d[1])
            sy = float(d[2])
            trace_in_file = int(d[3])
            time_reduced = float(d[4])

            segyfile = line2segyfile(odt_line)
            data_file = os.path.basename(segyfile)

            if segyfile != segyfile0:
                segy = readSEGY(segyfile, unpack_headers=True)
                sql = 'DELETE FROM PICKS WHERE'
                sql += " data_file = '{:}' AND event='{:}'"\
                        .format(data_file, event)
                self.execute(sql)
                
                if load_segy_geom and\
                   (self._count('dummy_picks',
                               data_file=os.path.basename(segyfile)) == 0):
                    print('Loading geometry from: {:}'.format(segyfile))
                    self.insert_geom_from_SEGY(segy, epsg=epsg,
                        model_line=model_line, rid_field=rid_field,
                        sid_field=sid_field)
            segyfile0 = segyfile

            try:
                tr = segy.traces[trace_in_file - 1]
            except IndexError:
                raise IndexError('Trace number {:} out of range for file {:}.'\
                                 .format(trace_in_file, segyfile))
                

            offset = abs(tr.header.computed_source_receiver_offset_in_m)
            rid = getattr(tr.header, rid_field)
            sid = getattr(tr.header, sid_field)

            if vred is not None:
                time = time_reduced + offset / 1000. / vred
            else:
                time = time_reduced

            if verify:
                self._check_count('receiver_pts', rid=rid)
                self._check_count('source_pts', sid=sid)

            dat = [pad(event), sid, rid, time, error, pad(model_line),
                   trace_in_file, pad(data_file)]

            sql = 'INSERT INTO picks(event, sid, rid, time, error,'
            sql += ' model_line, trace_in_file, data_file)'
            sql += ' VALUES(' + ', '.join([str(v) for v in dat]) + ')'

            print(sql)
            self.execute(sql)

        f.close()
        self.commit()
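A hedged usage sketch for insert_odt_picks, assuming the method is called on an open pick-database object (here named db) and using a hypothetical line-name-to-file mapping; the paths, event name, and numeric values are illustrative only:

# Hypothetical mapping from OpendTect line names to SEG-Y paths;
# any callable that takes the line name works, e.g. dict.get.
line2segy = {'line01': '/data/segy/line01.segy',
             'line02': '/data/segy/line02.segy'}.get

db.insert_odt_picks('Pg', 'picks_from_odt.txt', line2segy,
                    model_line=1, vred=8.0, error=0.05)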