def test_get_picks(self):
    """
    Should return data rows.
    """
    # fresh in-memory database
    pickdb = PickDatabaseConnection(':memory:')
    # add picks, recording their times (and the last event name) as we go
    expected_times = []
    for pick in uniq_picks:
        pickdb.add_pick(**pick)
        expected_times.append(pick['time'])
        last_event = pick['event']
    pickdb.commit()
    # rows returned by get_picks() should be sqlite3.Row instances
    for row in pickdb.get_picks(event=last_event):
        self.assertTrue(isinstance(row, sqlite3.Row))
    # querying each pick individually should return identical data
    actual_times = [pickdb.get_picks(**pick)[0]['time']
                    for pick in uniq_picks]
    self.assertEqual(sorted(expected_times), sorted(actual_times))
    # a query with no constraints should return everything
    self.assertEqual(len(uniq_picks), len(pickdb.get_picks()))
    # a query with no matches should return an empty list
    self.assertEqual(len(pickdb.get_picks(event='golly_gee')), 0)
    # an empty database should also yield an empty list
    pickdb = PickDatabaseConnection(':memory:')
    self.assertEqual(len(pickdb.get_picks()), 0)
def test_counts(self):
    """
    Count functions should return number of distinct rows in tables.
    """
    pickdb = PickDatabaseConnection(':memory:')
    # add a single pick
    event = 'TestCount'
    # BUGFIX: copy the fixture pick instead of aliasing it -- the original
    # code mutated uniq_picks[0] in place ('event' overwrite and
    # 'ensemble' += 1 below), polluting the shared fixture for every
    # other test that iterates uniq_picks.
    pick = dict(uniq_picks[0])
    pick['event'] = event
    pickdb.add_pick(**pick)
    pickdb.commit()
    # should have a single pick for this event in picks table
    n = pickdb.count_picks_by_event(event)
    self.assertEqual(n, 1)
    # should also have a single entry in the event table
    n = pickdb._count_distinct(pickdb.EVENT_TABLE, event=event)
    self.assertEqual(n, 1)
    # now, another pick for the same event
    pick['ensemble'] += 1  # ensure unique (mutates only our local copy)
    pickdb.add_pick(**pick)
    pickdb.commit()
    # should have two picks for this event in the picks table
    n = pickdb.count_picks_by_event(event)
    self.assertEqual(n, 2)
    # should still just have one entry in the event table for this event
    n = pickdb._count_distinct(pickdb.EVENT_TABLE, event=event)
    self.assertEqual(n, 1)
def test_raytrace_branch_id(self):
    """
    Raytracing should honor branch ids
    """
    vmfile = get_example_file('inactive_layers.vm')
    # build a scratch pick database on disk, replacing any stale copy
    dbfile = 'temp.sqlite'
    if os.path.isfile(dbfile):
        os.remove(dbfile)
    pickdb = PickDatabaseConnection(dbfile)
    pickdb.update_pick(event='P1', ensemble=100, trace=1, branch=1,
                       subbranch=0, time=5.0, source_x=10, source_y=0.0,
                       source_z=0.006, receiver_x=40, receiver_y=0.0,
                       receiver_z=4.9)
    pickdb.update_pick(event='P2', ensemble=100, trace=1, branch=2,
                       subbranch=0, time=5.0)
    pickdb.update_pick(event='P3', ensemble=100, trace=1, branch=3,
                       subbranch=0, time=5.0)
    pickdb.commit()
    # raytrace each branch in turn
    vm = readVM(vmfile)
    rayfile = 'temp.ray'
    for branch in [1, 2, 3]:
        if os.path.isfile(rayfile):
            os.remove(rayfile)
        raytrace(vmfile, pickdb, rayfile, branch=branch)
        # should have created a rayfile
        self.assertTrue(os.path.isfile(rayfile))
        # load rayfans
        rays = readRayfanGroup(rayfile)
        # should have traced just one ray
        self.assertEqual(len(rays.rayfans), 1)
        fan = rays.rayfans[0]
        self.assertEqual(len(fan.paths), 1)
        # rays should turn in the specified layer
        deepest = max(p[2] for p in fan.paths[0])
        self.assertGreaterEqual(deepest, vm.rf[branch - 1][0][0])
    # cleanup
    for path in (rayfile, dbfile):
        if os.path.isfile(path):
            os.remove(path)
def test_parallel_raytrace(self):
    """
    Should run raytracing in parallel
    """
    # Create pick database
    pickdbfile = 'temp.sqlite'
    if os.path.isfile(pickdbfile):
        os.remove(pickdbfile)
    pickdb = PickDatabaseConnection(pickdbfile)
    # three events, one per branch, three ensembles each
    for i, event in enumerate(['P1', 'P2', 'P3']):
        branch = i + 1
        for ens in range(3):
            pickdb.update_pick(event=event, ensemble=ens, trace=1,
                               branch=branch, subbranch=0, time=5.0,
                               source_x=10, source_y=0.0, source_z=0.006,
                               receiver_x=40, receiver_y=0.0,
                               receiver_z=4.9)
    pickdb.commit()
    # set velocity model
    vmfile = get_example_file('inactive_layers.vm')
    # raytrace with increasing process counts
    for nproc in [1, 2, 8]:
        input_dir = 'test.input'
        output_dir = 'test.output'
        # BUGFIX: use time.time() (wall clock) instead of time.clock().
        # time.clock() measured CPU time of *this* process on Unix, so it
        # excluded the work done by the child processes spawned for
        # parallel raytracing; it was also removed in Python 3.8.
        t_start = time.time()
        parallel_raytrace(vmfile, pickdb, branches=[1, 2, 3],
                          input_dir=input_dir, output_dir=output_dir,
                          nproc=nproc, ensemble_field='ensemble')
        t_elapsed = time.time() - t_start
        # cleanup scratch directories between runs
        shutil.rmtree(input_dir)
        shutil.rmtree(output_dir)
    os.remove(pickdbfile)
def test_add_remove_picks(self):
    """
    Should add a pick to the picks table.
    """
    pickdb = PickDatabaseConnection(':memory:')

    def count_rows():
        # current number of rows in the picks table
        return len(pickdb.execute('SELECT * FROM picks').fetchall())

    # every pick in the fixture set should be added
    for pick in uniq_picks:
        pickdb.add_pick(**pick)
    pickdb.commit()
    self.assertEqual(count_rows(), len(uniq_picks))
    # adding a pick without the primary fields should raise an error
    with self.assertRaises(sqlite3.IntegrityError):
        pickdb.add_pick(event='Foobar', time=9.834)
    # adding a duplicate pick should raise an error
    for pick in uniq_picks:
        with self.assertRaises(sqlite3.IntegrityError):
            pickdb.add_pick(**pick)
    # directly removing a pick and then re-adding it should work
    for pick in uniq_picks:
        pickdb.remove_pick(**pick)
        pickdb.add_pick(**pick)
    pickdb.commit()
    self.assertEqual(count_rows(), len(uniq_picks))
    # invalid column names should raise OperationalError
    with self.assertRaises(sqlite3.OperationalError):
        pickdb.remove_pick(not_a_field=999)
    with self.assertRaises(sqlite3.OperationalError):
        pickdb.add_pick(not_a_field=999)
    # remove the last pick that we added
    pickdb.remove_pick(**pick)
    # removing a pick that does not exist should do nothing
    pickdb.remove_pick(**pick)
    # update_pick should insert picks that don't exist and update
    # those that do
    for pick in uniq_picks:
        pickdb.update_pick(**pick)
    pickdb.commit()
    self.assertEqual(count_rows(), len(uniq_picks))
    # should not be able to add a pick without required fields
    with self.assertRaises(sqlite3.IntegrityError):
        pickdb.add_pick(event='Pg', ensemble=1, trace=1)
# NOTE(review): this chunk is a fragment of a larger function/script --
# `irec`, `isrc`, `_sx`, `_rx`, `vm`, `vmfile`, `rayfile`, `pickdb` and
# `pickdb_file` are defined upstream and not visible here. The dict build
# and update_pick presumably sit inside receiver/source loops (irec/isrc);
# the enclosing loop structure cannot be confirmed from this chunk.
# Build a synthetic 'Pg' pick for the current source/receiver pair.
d = {'event': 'Pg',
     'vm_branch': 1,                    # fixed model branch for all picks
     'vm_subid': 0,
     'ensemble': irec + 10,             # offsets presumably avoid id clashes -- TODO confirm
     'trace': isrc + 5000,
     'time': 0.0,                       # placeholder time; real times come from raytracing below
     'time_reduced': 0.0,
     'source_x': _sx,
     'source_y': 0.0,
     'source_z': 1.0,
     'receiver_x': _rx,
     'receiver_y': 0.0,
     'receiver_z': 2.0,
     'offset' : np.abs(_rx - _sx)}      # horizontal source-receiver offset
pickdb.update_pick(**d)
pickdb.commit()
# Raytrace with these picks
raytrace(vmfile, pickdb, rayfile)
pickdb.close()
# Transfer traced to a picks to a new pick database and add noise
pickdb = rayfan2db(rayfile, pickdb_file, synthetic=True, noise=0.02)
# Raytrace with the new pick database
raytrace(vmfile, pickdb, rayfile)
# Plot the traced rays and traveltimes
fig = plt.figure()
ax = fig.add_subplot(211)
vm.plot(ax=ax)
def rayfan2db(rayfan_file, raydb_file=':memory:', synthetic=False, noise=None,
              pickdb=None, raypaths=False):
    """
    Read a rayfan file and store its data in a database.

    Data are stored in a modified version of a
    :class:`rockfish.picking.database.PickDatabaseConnection`.

    Parameters
    ----------
    rayfan_file: {str, file}
        An open file-like object or a string which is assumed to be a
        filename of a rayfan binary file.
    raydb_file: str, optional
        The filename of the new database. Default is to create a new
        database in memory.
    synthetic: bool, optional
        Determines whether or not to record traced traveltimes as picked
        traveltimes.
    noise: {float, None}
        Maximum amplitude of random noise to add the travel times. If
        ``None``, no noise is added.
    pickdb: :class:`rockfish.database.PickDatabaseConnection`, optional
        An active connection to the pick database that was used trace rays
        in ``rayfan_file``. Values for extra fields (e.g., 'trace_in_file')
        are copied from this database to the new database along with
        rayfan data. Default is ignore these extra fields.
    raypaths: bool, optional
        If ``True``, raypath coordinates are stored as text in a new table
        'raypaths'.

    Returns
    -------
    raydb : :class:`rockfish.picking.database.PickDatabaseConnection`
        Connection to the newly populated database.
    """
    raydb = PickDatabaseConnection(raydb_file)
    rays = readRayfanGroup(rayfan_file)
    print "Adding {:} traveltimes to {:} ..."\
        .format(rays.nrays, raydb_file)
    # add fields for raypaths
    if raypaths:
        raydb._create_table_if_not_exists(RAYPATH_TABLE, RAYPATH_FIELDS)
    # row count before inserting; compared against the final count below
    # to verify that every ray was actually added
    ndb0 = raydb.execute('SELECT COUNT(rowid) FROM picks').fetchone()[0]
    for rfn in rays.rayfans:
        for i, _t in enumerate(rfn.travel_times):
            # optional uniform random noise in [-noise, +noise]
            if noise is not None:
                _noise = noise * 2 * (np.random.random() - 0.5)
            else:
                _noise = 0.0
            # first/last raypath points are the source and receiver
            # coordinates
            sx, sy, sz = rfn.paths[i][0]
            rx, ry, rz = rfn.paths[i][-1]
            # map the rayfan event id back to a pick event name when a
            # pick database is available
            if pickdb is not None:
                event = pickdb.vmbranch2event[rfn.event_ids[i]]
            else:
                event = rfn.event_ids[i]
            if synthetic:
                # record the traced time (plus noise) as the "picked" time
                time = rfn.travel_times[i] + _noise
                time_reduced = time
                predicted = None
                residual = 0.
            else:
                # keep the original picked time; the traced time becomes
                # the prediction
                time = rfn.pick_times[i]
                time_reduced = time
                predicted = rfn.travel_times[i]
                residual = rfn.residuals[i]
            d = {'event': event,
                 'ensemble': rfn.start_point_id,
                 'trace': rfn.end_point_ids[i],
                 'vm_branch': rfn.event_ids[i],
                 'vm_subid': rfn.event_subids[i],
                 'time' : time,
                 'time_reduced' : time_reduced,
                 'predicted' : predicted,
                 'residual' : residual,
                 'error': rfn.pick_errors[i],
                 'source_x': sx,
                 'source_y': sy,
                 'source_z': sz,
                 'receiver_x': rx,
                 'receiver_y': ry,
                 'receiver_z': rz,
                 'offset': rfn.offsets[i],
                 'faz': rfn.azimuths[i],
                 'method': 'rayfan2db({:})'.format(rayfan_file),
                 'data_file': rays.file.name}
            # Copy data from pickdb
            if pickdb is not None:
                pick = pickdb.get_picks(event=[event],
                                        ensemble=[d['ensemble']],
                                        trace=[d['trace']])
                if len(pick) > 0:
                    # best-effort copy: silently skip extra fields the
                    # pick database row does not carry
                    for f in ['trace_in_file', 'line', 'site', 'data_file']:
                        try:
                            d[f] = pick[0][f]
                        except KeyError:
                            pass
            # add data to standard tables
            raydb.update_pick(**d)
            # add raypath data to new table
            if raypaths:
                # NOTE(review): bottom_points lives on the rayfan *group*
                # but is indexed by the per-rayfan counter i -- looks
                # suspicious with multiple rayfans; confirm against the
                # readRayfanGroup data layout.
                d = {'event': event,
                     'ensemble': rfn.start_point_id,
                     'trace': rfn.end_point_ids[i],
                     'ray_btm_x': rays.bottom_points[i][0],
                     'ray_btm_y': rays.bottom_points[i][1],
                     'ray_btm_z': rays.bottom_points[i][2],
                     'ray_x': str([p[0] for p in rfn.paths[i]]),
                     'ray_y': str([p[1] for p in rfn.paths[i]]),
                     'ray_z': str([p[2] for p in rfn.paths[i]])}
                raydb._insert(RAYPATH_TABLE, **d)
    raydb.commit()
    # sanity check: warn if the insert count does not match the ray count
    ndb = raydb.execute('SELECT COUNT(rowid) FROM picks').fetchone()[0]
    if (ndb - ndb0) != rays.nrays:
        msg = 'Only added {:} of {:} travel times to the database.'\
            .format(ndb - ndb0, rays.nrays)
        warnings.warn(msg)
    return raydb