def test_get_db_connection_twice(self, mock_psycopg2_connect, mock_FHIRBASE):
    """Calling get_db_connection twice returns the same connection."""
    assert mock_psycopg2_connect.call_count == 0
    assert mock_FHIRBASE.call_count == 0

    fb = get_db_connection()
    assert mock_psycopg2_connect.call_count == 1
    assert mock_FHIRBASE.call_count == 1
    assert fb == mock_FHIRBASE.return_value

    fb = get_db_connection()
    assert mock_psycopg2_connect.call_count == 1
    assert mock_FHIRBASE.call_count == 1
    assert fb == mock_FHIRBASE.return_value
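# The two tests for get_db_connection (this one and test_get_db_connection_once below)
# imply that the helper memoizes its result: psycopg2.connect and FHIRBase are each
# called once, and later calls return the cached object. A minimal sketch of such an
# implementation follows; the _DB_CONNECTION global and the connection parameters are
# assumptions for illustration, not the project's actual code.
import fhirbase
import psycopg2

_DB_CONNECTION = None


def get_db_connection():
    """Return a module-level FHIRBase instance, creating it on first use."""
    global _DB_CONNECTION
    if _DB_CONNECTION is None:
        connection = psycopg2.connect(dbname='fhirbase', user='postgres')  # assumed DSN
        _DB_CONNECTION = fhirbase.FHIRBase(connection)
    return _DB_CONNECTION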
def delete(event, context):
    log.info('DELETE to delete')
    db_connection = db.get_db_connection()
    key = event['pathParameters']['id']
    db.delete(db_connection, key)
    return {'statusCode': 200}
def build_matches_indices():
    query = """
        CREATE INDEX jid_index ON matches (jid);
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def delete(id):
    # validate existence
    get_post(id)
    db = get_db_connection()
    db.execute('DELETE FROM post WHERE id = ?', (id,))
    db.commit()
    return redirect(url_for('blog.index'))
def save_song(track_name: str, artist_name: str, year: int, rank: int):
    conn = get_db_connection("billboard")
    cursor = conn.cursor()
    cursor.execute(
        '''
        SELECT * FROM chart
        WHERE song = ? AND artist = ? AND year = ? AND position = ?
        ''', (track_name, artist_name, year, rank))
    found_track = cursor.fetchone()

    if found_track:
        logger.debug(
            f"Track already exists in database: {track_name} by {artist_name} ({year})")
    else:
        try:
            cursor.execute(
                '''
                INSERT INTO chart (song, artist, year, position)
                VALUES (?, ?, ?, ?)
                ''', (track_name, artist_name, year, rank))
            conn.commit()
        except Exception as e:
            logger.error(
                f"Error inserting chart track: {track_name} by {artist_name} ({year})")
            logger.error(e)
def test_getTripsList(self):
    connection, cursor = db.get_db_connection(self.db_config)

    trip1_mock = {
        'name': 'First trip',
        'description': 'This is the first trip.',
        'image': 'Image parsed as string',
        'country_id': 38  # Canada
    }
    trip2_mock = {
        'name': 'Second trip',
        'description': 'This is the second trip.',
        'image': 'Image parsed as string',
        'country_id': 39  # Cape Verde
    }
    db.insert_trip(connection, cursor, trip1_mock)
    db.insert_trip(connection, cursor, trip2_mock)

    query = """SELECT * FROM trips"""
    cursor.execute(query)
    trips_list = cursor.fetchall()

    test_list = db.get_trips_list(cursor)
    trip1Id = test_list[0][0]
    trip2Id = test_list[1][0]

    self.assertEqual(trips_list, test_list)

    db.delete_trip(connection, cursor, trip1Id)
    db.delete_trip(connection, cursor, trip2Id)
def test_updateTrip(self):
    connection, cursor = db.get_db_connection(self.db_config)

    trip_original = {
        'name': 'Original',
        'description': 'Unchanged',
        'image': 'first',
        'country_id': 38  # Canada
    }
    db.insert_trip(connection, cursor, trip_original)

    query = """SELECT * FROM trips"""
    cursor.execute(query)
    result = cursor.fetchall()
    original_id = result[0][0]

    trip_modified = {
        'name': 'Modified',
        'description': 'Changed',
        'image': 'second',
        'country_id': 39  # Cape Verde
    }
    db.update_trip(connection, cursor, trip_modified, original_id)

    cursor.execute(query)
    result = cursor.fetchall()
    grabbed = (result[0][1], result[0][2], result[0][3], result[0][4])

    self.assertEqual(grabbed, ('Modified', 'Changed', 'second', 39))

    db.delete_trip(connection, cursor, original_id)
def build_job_indices():
    query = """
        CREATE INDEX cid_index ON jobs (company_id);
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def index():
    db = get_db_connection()
    posts = db.execute(
        'SELECT p.id, title, body, created, author_id, username '
        'FROM post p JOIN user u ON p.author_id = u.id '
        'ORDER BY created DESC').fetchall()
    return render_template('blog/index.html', posts=posts)
def generate_csv():
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute('''
        SELECT
            track.id,
            track.name track_name,
            group_concat(artist.name, ', ') artist,
            album.name album,
            album.release_date,
            album.release_date_precision,
            json_extract(aa.analysis, '$.track.loudness') loudness
        FROM track
        JOIN track_artist_join ON track_artist_join.track_id = track.id
        JOIN artist ON track_artist_join.artist_id = artist.id
        JOIN track_album_join ON track_album_join.track_id = track.id
        JOIN album ON track_album_join.album_id = album.id
        JOIN audio_analysis aa ON aa.track_id = track.id
        WHERE TRUE
            AND track.name NOT LIKE '%deluxe%'
            AND album.name NOT LIKE '%deluxe%'
        GROUP BY track.id
    ''')
    tracks = cursor.fetchall()
    logger.info(f"Fetched {len(tracks)} tracks")

    # newline='' is the csv module's recommended open() mode; it avoids blank rows on Windows.
    with open('out.csv', 'w', newline='') as f:
        csv = writer(f)
        headers = [
            "track_id", "track_name", "artist_name", "album_name",
            "release_date", "release_date_precision", "loudness"
        ]
        csv.writerow(headers)
        for track in tracks:
            csv.writerow(track)
def test_trip_citiesInsertDelete(self):
    connection, cursor = db.get_db_connection(self.db_config)

    trip_mock = {
        'name': 'First trip',
        'description': 'This is the first trip.',
        'image': 'Image parsed as string',
        'country_id': 38  # Canada
    }
    db.insert_trip(connection, cursor, trip_mock)

    query1 = """SELECT * FROM trips"""
    cursor.execute(query1)
    result = cursor.fetchall()
    tripId = result[0][0]

    arrive = '2020-01-01 10:00:00'
    departure = '2020-01-05 10:00:00'
    city_mock = {
        'name': 'First city',
        'datetime_of_arrival': arrive,
        'datetime_of_departure': departure,
        'trip_id': tripId
    }
    db.insert_trip_cities(connection, cursor, city_mock)

    query2 = """SELECT * FROM trip_cities"""
    cursor.execute(query2)
    result = cursor.fetchall()
    cityId = result[0][0]
    grabbed = (result[0][1],
               result[0][2].strftime("%Y-%m-%d %H:%M:%S"),
               result[0][3].strftime("%Y-%m-%d %H:%M:%S"),
               result[0][4])

    self.assertEqual(grabbed, ('First city', arrive, departure, tripId))

    db.delete_trip_city(connection, cursor, cityId)
    db.delete_trip(connection, cursor, tripId)
def find_billboard_chart_track(track_name: str, artist_name: str, year: int):
    conn = get_db_connection("billboard")
    cursor = conn.cursor()
    cursor.execute(
        "SELECT * FROM chart WHERE song = ? AND artist = ? AND year = ?",
        (track_name, artist_name, year))
    return cursor.fetchone()
def insert_user(uid, name, tg):
    print("ROW: " + str(uid), name, tg, sep="|")
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute(
        "INSERT INTO users(ifuserid, ifusername, tgusername, restricted) VALUES(?, ?, ?, 0)",
        (uid, name, tg))
    conn.commit()
    cur.close()
def process_audio_analysis_for_track(track_id):
    analysis = get_audio_analysis(track_id)
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO audio_analysis (track_id, analysis) VALUES (?, ?)",
        (track_id, json.dumps(analysis)))
    conn.commit()
    logger.info(f"Processed audio analysis for track {track_id}")
def drop_account_and_children():
    db_cxn = get_db_connection(True)
    cursor = db_cxn.cursor()
    # DROP TABLE does not trigger ON DELETE CASCADE in the candidate/employer tables,
    # so dropping accounts alone would fail unless the foreign key constraints in the
    # child tables were dropped first. Instead, drop the child tables before the parent.
    cursor.execute("DROP TABLE IF EXISTS candidates")
    cursor.execute("DROP TABLE IF EXISTS employers")
    cursor.execute("DROP TABLE IF EXISTS accounts")
def create_table():
    query = """
        CREATE TABLE IF NOT EXISTS companies (
            _id INT PRIMARY KEY AUTO_INCREMENT,
            name VARCHAR(225),
            description TEXT
        )
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
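# Several snippets above and below call get_db_connection(True) and run MySQL-flavoured
# DDL (AUTO_INCREMENT), but the connection helper itself is not shown. A minimal sketch,
# assuming mysql-connector-python and a boolean flag that enables autocommit; the host,
# credentials, and database name are placeholders, not the project's real configuration.
import mysql.connector


def get_db_connection(autocommit=False):
    """Open a MySQL connection; with autocommit=True, statements take effect immediately."""
    cxn = mysql.connector.connect(
        host="localhost",     # assumed host
        user="root",          # assumed user
        password="password",  # assumed password
        database="jobs_db",   # assumed database name
    )
    cxn.autocommit = autocommit
    return cxn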
def __init__(self, id=None, resource=None):
    """Initializes a Resource instance.

    The ID must be provided if the resource already exists.
    """
    if not resource and not id:
        raise OperationOutcome('An id or a resource must be provided')

    self.db = get_db_connection()
    self.id = id
    self.resource = resource
    self.resource_type = type(self).__name__
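# Because resource_type is derived from the class name, concrete resources can be exposed
# as thin subclasses of the enclosing class (named Resource here, as the docstring above
# suggests). The Patient class below is a hypothetical example to illustrate the pattern,
# not a class taken from the project.
class Patient(Resource):
    """Hypothetical subclass: instances report resource_type == 'Patient'."""
    pass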
def mark_track_as_found(track_id):
    conn = get_db_connection("billboard")
    cursor = conn.cursor()
    cursor.execute(
        '''
        UPDATE chart
        SET found = ?
        WHERE id = ?
        ''', (1, track_id))
    conn.commit()
    logger.debug(f"Marked track as found: {track_id}")
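# The billboard helpers (save_song, find_billboard_chart_track, mark_track_as_found)
# pass a database name to get_db_connection and use qmark placeholders, which is
# consistent with sqlite3. A minimal sketch of the assumed helper; the file-naming
# convention is a guess, not the project's actual code.
import sqlite3


def get_db_connection(db_name: str):
    """Open (or create) a SQLite database file named after db_name."""
    return sqlite3.connect(f"{db_name}.db")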
def retrieve(event, context):
    log.info('GET to retrieve')
    key = event['pathParameters']['id']
    db_connection = db.get_db_connection()
    value = db.read(db_connection, key)
    if value is not None:
        return {'statusCode': 200, 'body': value}
    else:
        return {'statusCode': 404, 'body': {'error': 'Not found'}}
def create_table():
    query = """
        CREATE TABLE IF NOT EXISTS accounts (
            _id INT PRIMARY KEY AUTO_INCREMENT,
            username VARCHAR(225) UNIQUE,
            password VARCHAR(255)
        )
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def test_get_db_connection_once(self, mock_psycopg2_connect, mock_FHIRBASE):
    """Calls psycopg2.connect and instantiates a FHIRBase object."""
    assert mock_psycopg2_connect.call_count == 0
    assert mock_FHIRBASE.call_count == 0

    fb = get_db_connection()
    assert mock_psycopg2_connect.call_count == 1
    assert mock_FHIRBASE.call_count == 1
    assert fb == mock_FHIRBASE.return_value
def create_table():
    query = """
        CREATE TABLE IF NOT EXISTS jobTags (
            _id INT PRIMARY KEY AUTO_INCREMENT,
            tag_name VARCHAR(1000) UNIQUE,
            /* TODO: create non-clustered index for searching */
            tag_description TEXT
        )
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def synchronize_artists_from_tracks():
    logger.info("Synchronizing artists from tracks table")
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT id, name FROM track")
    for id, name in cursor.fetchall():
        track = get_track(id)
        for artist in track["artists"]:
            add_artist_to_database(artist)
def find_tracks_without_analysis():
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute('''
        SELECT id FROM track
        LEFT JOIN audio_analysis aa ON aa.track_id = track.id
        WHERE aa.track_id IS NULL
    ''')
    return [track[0] for track in cursor.fetchall()]
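# find_tracks_without_analysis pairs naturally with process_audio_analysis_for_track
# defined earlier: one finds the gaps, the other fills them. The small driver below is
# an assumed usage sketch, not code from the source.
def backfill_audio_analysis():
    """Fetch and store audio analysis for every track that does not have one yet."""
    for track_id in find_tracks_without_analysis():
        process_audio_analysis_for_track(track_id)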
def create_track_album_join(track, album):
    conn = get_db_connection()
    cursor = conn.cursor()
    try:
        cursor.execute(
            "INSERT INTO track_album_join (track_id, album_id) VALUES (?, ?)",
            (track["id"], album["id"]))
        conn.commit()
        logger.info(f"Created track_album_join record for {track['name']} - {album['name']}")
    except sqlite3.IntegrityError:
        # this is expected if the backfill script is run multiple times
        logger.debug(f"Failed to create track_album_join records for {track['name']} - {album['name']}")
def create_table():
    query = """
        CREATE TABLE IF NOT EXISTS tagsDescribeJobs (
            tid INT,
            jid VARCHAR(255),
            PRIMARY KEY (tid, jid),
            FOREIGN KEY (tid) REFERENCES jobTags (_id) ON DELETE CASCADE,
            FOREIGN KEY (jid) REFERENCES jobs (_id) ON DELETE CASCADE
        )
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def create_table():
    query = """
        CREATE TABLE IF NOT EXISTS employers (
            _id INT PRIMARY KEY,
            name VARCHAR(225),
            works_at INT,
            FOREIGN KEY (works_at) REFERENCES companies (_id) ON DELETE CASCADE,
            FOREIGN KEY (_id) REFERENCES accounts (_id) ON DELETE CASCADE
        )
    """
    db_cxn = get_db_connection(True)
    db_cxn.cursor().execute(query)
def retrieve_all(event, context):
    log.info('GET to retrieve_all')
    db_connection = db.get_db_connection()
    response = {
        'statusCode': 200,
        'body': {
            'keys': db.read_all(db_connection)
        }
    }
    return response
def backfill_albums_from_tracks():
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT id FROM track")
    # Submit every track lookup up front so the pool can fetch concurrently; calling
    # future.result() immediately after each submit would make the loop sequential.
    with ThreadPoolExecutor() as pool:
        futures = [pool.submit(get_track, id) for (id,) in cursor.fetchall()]
        for future in futures:
            track = future.result()
            add_album_to_database(track["album"])
            create_track_album_join(track, track["album"])
def create_track_artist_joins(track):
    conn = get_db_connection()
    cursor = conn.cursor()
    for artist in track["artists"]:
        try:
            cursor.execute(
                "INSERT INTO track_artist_join (track_id, artist_id) VALUES (?, ?)",
                (track["id"], artist["id"]))
            conn.commit()
            logger.info(f"Created track_artist_join records for {track['name']} - {artist['name']}")
        except sqlite3.IntegrityError:
            # this will happen if we re-run the application from the beginning
            logger.debug(f"Failed to create track_artist_join records for {track['name']} - {artist['name']}")
#!/usr/bin/python
# -*- coding: utf-8 -*-
# http://docs.disqus.com/developers/export/

import db

conn = db.get_db_connection()
cursor = db.get_cursor(conn)

sql = """
    SELECT ua.dst, n.changed, c.comment
    FROM node n
    INNER JOIN url_alias ua ON ua.src = CONCAT('node/', n.nid)
    INNER JOIN comments c ON n.nid = c.nid
    WHERE n.status = 1
    ORDER BY n.changed DESC
"""
cursor.execute(sql)

for row in cursor.fetchall():
    print(row)

conn.close()