def setUp(self):
    if os.path.exists('tmp.db'):
        os.unlink('tmp.db')
    db.connect()
    db.create_tables([User, Note, Category])
    self.dataset = DataSet('sqlite:///tmp.db')
def setUp(self):
    if os.path.exists(db.database):
        os.unlink(db.database)
    db.connect()
    db.create_tables([User, Note, Category])
    self.dataset = DataSet('sqlite:///%s' % db.database)
def data_dump(data_format, directory_name):
    db.close()
    ds = DataSet(db)
    for table_name in ds.tables:
        file_name = os.path.abspath(
            os.path.join(directory_name, f'{table_name}.{data_format}'))
        table = ds[table_name]
        ds.freeze(table.all(), format=data_format, filename=file_name)
def test_with_views(self):
    self.dataset.query('CREATE VIEW notes_public AS '
                       'SELECT content, timestamp FROM note '
                       'WHERE status = 1 ORDER BY timestamp DESC')
    try:
        self.assertTrue('notes_public' in self.dataset.views)
        self.assertFalse('notes_public' in self.dataset.tables)

        users = self.dataset['user']
        with self.dataset.transaction():
            users.insert(username='******')
            users.insert(username='******')
            notes = self.dataset['note']
            for i, (ct, st) in enumerate([('n1', 1), ('n2', 2), ('n3', 1)]):
                notes.insert(content=ct, status=st, user_id='charlie',
                             timestamp=datetime.datetime(2022, 1, 1 + i))

        self.assertFalse('notes_public' in self.dataset)

        # Create a new dataset instance with views enabled.
        dataset = DataSet(self.dataset._database, include_views=True)
        self.assertTrue('notes_public' in dataset)

        public = dataset['notes_public']
        self.assertEqual(public.columns, ['content', 'timestamp'])
        self.assertEqual(list(public), [
            {'content': 'n3', 'timestamp': datetime.datetime(2022, 1, 3)},
            {'content': 'n1', 'timestamp': datetime.datetime(2022, 1, 1)}])
    finally:
        self.dataset.query('DROP VIEW notes_public')
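# A minimal sketch of the include_views behaviour exercised above, outside a
# test. The file name "app.db" and the view name "notes_public" are assumptions
# for illustration; views are hidden by default and only show up when the
# DataSet is opened with include_views=True.
from playhouse.dataset import DataSet

ds = DataSet('sqlite:///app.db', include_views=True)
if 'notes_public' in ds:
    for row in ds['notes_public']:
        print(row['content'], row['timestamp'])
ds.close()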
def test_column_preservation(self):
    ds = DataSet('sqlite:///:memory:')
    books = ds['books']
    books.insert(book_id='BOOK1')
    books.insert(bookId='BOOK2')
    data = [(row['book_id'] or '', row['bookId'] or '') for row in books]
    self.assertEqual(sorted(data), [('', 'BOOK2'), ('BOOK1', '')])
def test_item_apis(self):
    dataset = DataSet('sqlite:///:memory:')
    users = dataset['users']
    users.insert(username='******')
    self.assertEqual(list(users), [{'id': 1, 'username': '******'}])

    users[2] = {'username': '******', 'color': 'white'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None},
        {'id': 2, 'username': '******', 'color': 'white'}])

    users[2] = {'username': '******', 'kind': 'cat'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': None},
        {'id': 2, 'username': '******', 'color': 'white', 'kind': 'cat'}])

    del users[2]
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': None}])

    users[1] = {'kind': 'person'}
    users[2] = {'username': '******'}
    users[2] = {'kind': 'cat'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': 'person'},
        {'id': 2, 'username': '******', 'color': None, 'kind': 'cat'}])
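# A short standalone sketch of the dict-style item API exercised in the test
# above, against an in-memory database (the "users" table and values are
# hypothetical): assigning to table[id] inserts or updates that row, new keys
# become new columns, and "del table[id]" removes the row.
from playhouse.dataset import DataSet

ds = DataSet('sqlite:///:memory:')
users = ds['users']
users.insert(username='alice')                   # row id=1
users[2] = {'username': 'bob', 'color': 'red'}   # insert row id=2, adding a "color" column
users[2] = {'color': 'green'}                    # update row id=2 in place
print(users.find_one(id=2))                      # {'id': 2, 'username': 'bob', 'color': 'green'}
del users[1]                                     # delete row id=1
ds.close()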
def test_pass_database(self):
    db = SqliteDatabase(':memory:')
    dataset = DataSet(db)
    self.assertEqual(dataset._database_path, ':memory:')

    users = dataset['users']
    users.insert(username='******')
    self.assertEqual(list(users), [{'id': 1, 'username': '******'}])
def test_special_char_table(self):
    self.database.execute_sql('CREATE TABLE "hello!!world" ("data" TEXT);')
    self.database.execute_sql('INSERT INTO "hello!!world" VALUES (?)',
                              ('test',))
    ds = DataSet('sqlite:///%s' % self.database.database)
    table = ds['hello!!world']
    model = table.model_class
    self.assertEqual(model._meta.table_name, 'hello!!world')
def data_load(data_format, directory_name):
    db.close()
    ds = DataSet(db)
    for table_name in ['users', 'revokedtoken', 'brewery', 'beer', 'rating']:
        file_name = os.path.abspath(
            os.path.join(directory_name, f'{table_name}.{data_format}'))
        if os.path.isfile(file_name):
            table = ds[table_name]
            table.thaw(format=data_format, filename=file_name, strict=True)
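# A hypothetical round trip over the two helpers above (the format and the
# directory are assumptions, and the directory is expected to exist):
# data_dump() freezes every table to <table>.<format> files, and data_load()
# later thaws whichever of those files it finds back into the named tables.
data_dump('json', '/tmp/backup')
# ... later, typically against a fresh database ...
data_load('json', '/tmp/backup')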
def main():
    # This function exists to act as a console script entry-point.
    parser = get_option_parser()
    options, args = parser.parse_args()
    if not args:
        die('Error: a path to a database file is required.')

    db_file = args[0]
    global dataset
    global migrator
    dataset = DataSet('sqlite:///%s' % db_file)
    migrator = dataset._migrator

    # Bind to PORT if defined, otherwise default to 5000.
    port = int(os.environ.get('PORT', 5000))
    app.run(host=options.host, port=port, debug=options.debug)
def main():
    # This function exists to act as a console script entry-point.
    parser = get_option_parser()
    options, args = parser.parse_args()
    if not args:
        die('Error: missing required path to database file.')

    db_file = args[0]
    global dataset
    global migrator
    dataset = DataSet('sqlite:///%s' % db_file)
    migrator = dataset._migrator

    if options.browser:
        open_browser_tab(options.host, options.port)
    app.run(host=options.host, port=options.port, debug=options.debug)
def importBkp():
    zf = zipfile.ZipFile('App/database/import/bkp.zip', 'r')
    zf.extractall('App/database/import')
    zf.close()

    dbe = DataSet("postgresql://*****:*****@localhost:5432/my_app")
    tablePessoa = dbe['pessoa']
    tableLocal = dbe['local']
    tableTarefa = dbe['tarefa']

    tablePessoa.thaw(
        format='json',
        filename='App/database/import/App/database/exported/pessoa.json')
    tableLocal.thaw(
        format='json',
        filename='App/database/import/App/database/exported/local.json')
    tableTarefa.thaw(
        format='json',
        filename='App/database/import/App/database/exported/tarefa.json')
def db():
    social_network_db = DataSet('sqlite:///social_network.db')
    social_network_db['users'].delete()
    social_network_db['statuses'].delete()

    social_network_db['users'].insert(USER_ID='')
    social_network_db['users'].delete(USER_ID='')
    social_network_db['users'].insert(EMAIL='')
    social_network_db['users'].delete(EMAIL='')
    social_network_db['users'].insert(NAME='')
    social_network_db['users'].delete(NAME='')
    social_network_db['users'].insert(LASTNAME='')
    social_network_db['users'].delete(LASTNAME='')

    social_network_db['statuses'].insert(STATUS_ID='')
    social_network_db['statuses'].delete(STATUS_ID='')
    social_network_db['statuses'].insert(USER_ID='')
    social_network_db['statuses'].delete(USER_ID='')

    return social_network_db
def exportBkp():
    dbe = DataSet("postgresql://*****:*****@localhost:5432/my_app")
    tablePessoa = dbe['pessoa']
    tableLocal = dbe['local']
    tableTarefa = dbe['tarefa']

    dbe.freeze(tablePessoa.all(), format='json',
               filename='App/database/exported/pessoa.json')
    dbe.freeze(tableLocal.all(), format='json',
               filename='App/database/exported/local.json')
    dbe.freeze(tableTarefa.all(), format='json',
               filename='App/database/exported/tarefa.json')

    zf = zipfile.ZipFile('App/database/bkp.zip', 'w')
    for dirname, subdirs, files in os.walk('App/database/exported'):
        zf.write(dirname)
        for filename in files:
            zf.write(os.path.join(dirname, filename))
    zf.close()
def load_status_updates(filename):
    '''
    Requirements:
    - If a status_id already exists, it will ignore it and continue to
      the next.
    - Returns False if there are any errors (such as empty fields in the
      source CSV file).
    - Otherwise, it returns True.
    '''
    db = DataSet('sqlite:///social_network.db')
    db['statuses'].delete()

    with open(filename, newline='') as csvfile:
        reader = csv.DictReader(csvfile, dialect='excel')

        def csv_reader_row(reader_list):
            return (reader_list['STATUS_ID'], reader_list['USER_ID'],
                    reader_list['STATUS_TEXT'])

        reader_rows = map(csv_reader_row, reader)
        no_errors = reduce(lambda x, y: x and y,
                           map(partial(user_status.add_status, db=db),
                               reader_rows))
    return no_errors
def load_users(filename):
    '''
    Requirements:
    - If a user_id already exists, it will ignore it and continue to
      the next.
    - Returns False if there are any errors (such as empty fields in the
      source CSV file).
    - Otherwise, it returns True.
    '''
    db = DataSet('sqlite:///social_network.db')
    # Start with a new db every time we load a file for the sake of the demo.
    db['users'].delete()
    db['statuses'].delete()

    # db['users'].thaw(filename=filename, format='csv')
    # How to load and still check data validity by using thaw()?
    # TO DO: Use multiprocessing with this. Break into chunks; could probably
    # load several times faster.
    with open(filename, newline='') as csvfile:
        reader = csv.DictReader(csvfile, dialect='excel')  # an iterable

        def csv_reader_row(reader_list):
            return (reader_list['USER_ID'], reader_list['EMAIL'],
                    reader_list['NAME'], reader_list['LASTNAME'])

        reader_rows = map(csv_reader_row, reader)
        db_loaded_successfully = reduce(lambda x, y: x and y,
                                        map(partial(users.add_user, db=db),
                                            reader_rows))
    return db_loaded_successfully
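# One possible answer to the thaw() question in the comments above, kept as a
# sketch: thaw() inserts rows wholesale, so per-row validation has to happen
# separately. The column names mirror the CSV layout assumed by load_users();
# the helper name and the direct table.insert() calls are assumptions, not the
# add_user() code path used above.
def load_users_validated(filename, db):
    required = ('USER_ID', 'EMAIL', 'NAME', 'LASTNAME')
    users = db['users']
    with open(filename, newline='') as csvfile:
        with db.transaction() as txn:
            for row in csv.DictReader(csvfile, dialect='excel'):
                if not all(row.get(field) for field in required):
                    txn.rollback()  # discard everything loaded so far
                    return False
                users.insert(**{field: row[field] for field in required})
    return True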
#!/usr/bin/env python3
import peewee as pw
from playhouse.dataset import DataSet
from playhouse.db_url import connect
from random import randint, choice

# Variables
database = 'boutique'
host = 'localhost'
user = '******'
dbURL = 'mysql://{user}@{host}/{database}'.format(
    database=database, host=host, user=user)
db = connect(dbURL)
dbDataset = DataSet(dbURL)


# Models (Table Schema)
class BaseModel(pw.Model):
    class Meta:
        database = db


class rooms(pw.Model):
    roomType = pw.TextField()
    price = pw.FloatField()
    discount = pw.FloatField()
    roomPhoto = pw.TextField()

    class Meta:
        database = db


class customers(pw.Model):
def etl_file_to_table(self):
    """Example sub-command."""
    db = DataSet(self.app.config.get("APP", "database"))
    table = db[self.app.pargs.table]
    table.thaw(filename=self.app.pargs.file, format=self.app.pargs.format)
    self.info("Done! File has been loaded to", self.app.pargs.table)
def get_dataset_ext():
    # http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#dataset
    path = get_path_dataset_ext()
    db = DataSet(path)
    return db
from peewee import *
from playhouse.dataset import DataSet

db = SqliteDatabase('validator.db')
ds = DataSet('sqlite:///validator.db')


class EmailInfo(Model):
    email = CharField(index=True, unique=True)
    syntax = BooleanField()
    mx = BooleanField(null=True)
    deliverable = BooleanField(null=True)
    color = CharField()
    normalized = CharField(null=True)

    class Meta:
        database = db


def pull(email):
    record = EmailInfo.get_or_none(EmailInfo.email == email)
    if not record:
        return None
    result = {
        'email': record.email,
        'syntax': record.syntax,
        'mx': record.mx,
        'deliverable': record.deliverable,
        'color': record.color,
        'normalized': record.normalized
'''
Functions that create a connection to the User and Status tables.
Import user_table and status_table directly.
'''
import os

from playhouse.dataset import DataSet

# Ensure we are starting with an empty database
FILE = 'socialnetwork.db'
if os.path.exists(FILE):
    os.remove(FILE)

# Connect to database
SQL_URL = 'sqlite:///' + FILE
db = DataSet(SQL_URL)

# Setup User
user_table = db['User']
user_table.insert(user_id='blank')
user_table.create_index(['user_id'], unique=True)
user_table.delete(user_id='blank')

# Setup Status
status_table = db['Status']
status_table.insert(status_id='blank', user_id='blank')
status_table.create_index(['status_id'], unique=True)
status_table.delete(status_id='blank')
logger = logging.getLogger('main.sql')
logger.setLevel(logging.DEBUG)

# Ensure we are starting with an empty database
FILE = 'socialnetwork.db'
USER = '******'
STATUS = 'Status'
PICTURE = 'Picture'
if os.path.exists(FILE):
    os.remove(FILE)

# Connect to database
SQL_URL = 'sqlite:///' + FILE
db = DataSet(SQL_URL)

# Setup User
user_table = db[USER]
user_table.insert(user_id='blank')
user_table.create_index(['user_id'], unique=True)
user_table.delete(user_id='blank')

# Setup Status
status_table = db[STATUS]
status_table.insert(status_id='blank', user_id='blank')
status_table.create_index(['status_id'], unique=True)
status_table.delete(status_id='blank')

# Setup Picture
picture_table = db[PICTURE]
picture_table.insert(picture_id='blank', user_id='blank')
picture_table.create_index(['picture_id'], unique=True)
picture_table.delete(picture_id='blank')
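# The insert/create_index/delete dance above is needed because DataSet creates
# tables and columns lazily: a column only exists once a row containing it has
# been inserted, and create_index() requires the column to exist. A small
# helper capturing the same pattern, as a sketch (names are whatever the
# caller passes in):
def bootstrap_table(db, table_name, unique_column, placeholder='blank'):
    table = db[table_name]
    table.insert(**{unique_column: placeholder})    # materialize the column
    table.create_index([unique_column], unique=True)
    table.delete(**{unique_column: placeholder})    # drop the placeholder row
    return table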
def backup_database(cls, db_format, filepath):
    db = DataSet('sqlite:///:memory:')
    db.freeze(cls.model.select(), format=db_format, filename=filepath)
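# A possible restore counterpart, kept as a sketch: thaw() reads the frozen
# file back into a DataSet table. The URL and table name are placeholders. The
# in-memory URL in backup_database() works for freezing because freeze() only
# serializes the query it is given; it does not touch the DataSet's own
# database.
def restore_database(db_format, filepath, table_name,
                     db_url='sqlite:///app.db'):
    db = DataSet(db_url)
    db[table_name].thaw(format=db_format, filename=filepath)
    db.close()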
class TestDataSet(ModelTestCase):
    database = db
    requires = [User, Note, Category]
    names = ['charlie', 'huey', 'plover', 'mickey', 'zaizee']

    def setUp(self):
        if os.path.exists(self.database.database):
            os.unlink(self.database.database)
        super(TestDataSet, self).setUp()
        self.dataset = DataSet('sqlite:///%s' % self.database.database)

    def tearDown(self):
        self.dataset.close()
        super(TestDataSet, self).tearDown()

    def test_pass_database(self):
        db = SqliteDatabase(':memory:')
        dataset = DataSet(db)
        self.assertEqual(dataset._database_path, ':memory:')

        users = dataset['users']
        users.insert(username='******')
        self.assertEqual(list(users), [{'id': 1, 'username': '******'}])

    def test_item_apis(self):
        dataset = DataSet('sqlite:///:memory:')
        users = dataset['users']
        users.insert(username='******')
        self.assertEqual(list(users), [{'id': 1, 'username': '******'}])

        users[2] = {'username': '******', 'color': 'white'}
        self.assertEqual(list(users), [
            {'id': 1, 'username': '******', 'color': None},
            {'id': 2, 'username': '******', 'color': 'white'}])

        users[2] = {'username': '******', 'kind': 'cat'}
        self.assertEqual(list(users), [
            {'id': 1, 'username': '******', 'color': None, 'kind': None},
            {'id': 2, 'username': '******', 'color': 'white', 'kind': 'cat'}])

        del users[2]
        self.assertEqual(list(users), [
            {'id': 1, 'username': '******', 'color': None, 'kind': None}])

        users[1] = {'kind': 'person'}
        users[2] = {'username': '******'}
        users[2] = {'kind': 'cat'}
        self.assertEqual(list(users), [
            {'id': 1, 'username': '******', 'color': None, 'kind': 'person'},
            {'id': 2, 'username': '******', 'color': None, 'kind': 'cat'}])

    def create_users(self, n=2):
        user = self.dataset['user']
        for i in range(min(n, len(self.names))):
            user.insert(username=self.names[i])

    def test_special_char_table(self):
        self.database.execute_sql('CREATE TABLE "hello!!world" ("data" TEXT);')
        self.database.execute_sql('INSERT INTO "hello!!world" VALUES (?)',
                                  ('test',))
        ds = DataSet('sqlite:///%s' % self.database.database)
        table = ds['hello!!world']
        model = table.model_class
        self.assertEqual(model._meta.table_name, 'hello!!world')

    def test_column_preservation(self):
        ds = DataSet('sqlite:///:memory:')
        books = ds['books']
        books.insert(book_id='BOOK1')
        books.insert(bookId='BOOK2')
        data = [(row['book_id'] or '', row['bookId'] or '') for row in books]
        self.assertEqual(sorted(data), [('', 'BOOK2'), ('BOOK1', '')])

    def test_case_insensitive(self):
        db.execute_sql('CREATE TABLE "SomeTable" (data TEXT);')
        tables = sorted(self.dataset.tables)
        self.assertEqual(tables, ['SomeTable', 'category', 'note', 'user'])

        table = self.dataset['HueyMickey']
        self.assertEqual(table.model_class._meta.table_name, 'HueyMickey')
        tables = sorted(self.dataset.tables)
        self.assertEqual(
            tables, ['HueyMickey', 'SomeTable', 'category', 'note', 'user'])

        # Subsequent lookup succeeds.
        self.dataset['HueyMickey']

    def test_introspect(self):
        tables = sorted(self.dataset.tables)
        self.assertEqual(tables, ['category', 'note', 'user'])

        user = self.dataset['user']
        columns = sorted(user.columns)
        self.assertEqual(columns, ['username'])

        note = self.dataset['note']
        columns = sorted(note.columns)
        self.assertEqual(columns, ['content', 'id', 'timestamp', 'user_id'])

        category = self.dataset['category']
        columns = sorted(category.columns)
        self.assertEqual(columns, ['id', 'name', 'parent_id'])

    def test_update_cache(self):
        self.assertEqual(sorted(self.dataset.tables),
                         ['category', 'note', 'user'])

        db.execute_sql('create table "foo" (id INTEGER, data TEXT)')
        Foo = self.dataset['foo']
        self.assertEqual(sorted(Foo.columns), ['data', 'id'])
        self.assertTrue('foo' in self.dataset._models)

        self.dataset._models['foo'].drop_table()
        self.dataset.update_cache()
        self.assertTrue('foo' not in self.database.get_tables())

        # This will create the table again.
        Foo = self.dataset['foo']
        self.assertTrue('foo' in self.database.get_tables())
        self.assertEqual(Foo.columns, ['id'])

    def assertQuery(self, query, expected, sort_key='id'):
        key = operator.itemgetter(sort_key)
        self.assertEqual(sorted(list(query), key=key),
                         sorted(expected, key=key))

    def test_insert(self):
        self.create_users()
        user = self.dataset['user']

        expected = [
            {'username': '******'},
            {'username': '******'}]
        self.assertQuery(user.all(), expected, 'username')

        user.insert(username='******', age=5)
        expected = [
            {'username': '******', 'age': None},
            {'username': '******', 'age': None},
            {'username': '******', 'age': 5}]
        self.assertQuery(user.all(), expected, 'username')

        query = user.find(username='******')
        expected = [{'username': '******', 'age': None}]
        self.assertQuery(query, expected, 'username')

        self.assertEqual(user.find_one(username='******'),
                         {'username': '******', 'age': 5})
        self.assertTrue(user.find_one(username='******') is None)

    def test_update(self):
        self.create_users()
        user = self.dataset['user']
        self.assertEqual(user.update(favorite_color='green'), 2)

        expected = [
            {'username': '******', 'favorite_color': 'green'},
            {'username': '******', 'favorite_color': 'green'}]
        self.assertQuery(user.all(), expected, 'username')

        res = user.update(favorite_color='blue',
                          username='******',
                          columns=['username'])
        self.assertEqual(res, 1)
        expected[1]['favorite_color'] = 'blue'
        self.assertQuery(user.all(), expected, 'username')

    def test_delete(self):
        self.create_users()
        user = self.dataset['user']
        self.assertEqual(user.delete(username='******'), 1)
        self.assertEqual(list(user.all()), [{'username': '******'}])

    def test_find(self):
        self.create_users(5)
        user = self.dataset['user']

        def assertUsernames(query, expected):
            self.assertEqual(sorted(row['username'] for row in query),
                             sorted(expected))

        assertUsernames(user.all(), self.names)
        assertUsernames(user.find(), self.names)
        assertUsernames(user.find(username='******'), ['charlie'])
        assertUsernames(user.find(username='******'), [])

        user.update(favorite_color='green')
        for username in ['zaizee', 'huey']:
            user.update(favorite_color='blue',
                        username=username,
                        columns=['username'])

        assertUsernames(user.find(favorite_color='green'),
                        ['charlie', 'mickey', 'plover'])
        assertUsernames(user.find(favorite_color='blue'), ['zaizee', 'huey'])
        assertUsernames(user.find(favorite_color='green', username='******'),
                        ['plover'])

        self.assertEqual(user.find_one(username='******'),
                         {'username': '******', 'favorite_color': 'green'})

    def test_magic_methods(self):
        self.create_users(5)
        user = self.dataset['user']

        # __len__()
        self.assertEqual(len(user), 5)

        # __iter__()
        users = sorted([u for u in user],
                       key=operator.itemgetter('username'))
        self.assertEqual(users[0], {'username': '******'})
        self.assertEqual(users[-1], {'username': '******'})

        # __contains__()
        self.assertTrue('user' in self.dataset)
        self.assertFalse('missing' in self.dataset)

    def test_foreign_keys(self):
        user = self.dataset['user']
        user.insert(username='******')

        note = self.dataset['note']
        for i in range(1, 4):
            note.insert(content='note %s' % i,
                        timestamp=datetime.date(2014, 1, i),
                        user_id='charlie')

        notes = sorted(note.all(), key=operator.itemgetter('id'))
        self.assertEqual(notes[0], {
            'content': 'note 1',
            'id': 1,
            'timestamp': datetime.datetime(2014, 1, 1),
            'user_id': 'charlie'})
        self.assertEqual(notes[-1], {
            'content': 'note 3',
            'id': 3,
            'timestamp': datetime.datetime(2014, 1, 3),
            'user_id': 'charlie'})

        user.insert(username='******')
        note.update(user_id='mickey', id=3, columns=['id'])

        self.assertEqual(note.find(user_id='charlie').count(), 2)
        self.assertEqual(note.find(user_id='mickey').count(), 1)

        category = self.dataset['category']
        category.insert(name='c1')
        c1 = category.find_one(name='c1')
        self.assertEqual(c1, {'id': 1, 'name': 'c1', 'parent_id': None})

        category.insert(name='c2', parent_id=1)
        c2 = category.find_one(parent_id=1)
        self.assertEqual(c2, {'id': 2, 'name': 'c2', 'parent_id': 1})

        self.assertEqual(category.delete(parent_id=1), 1)
        self.assertEqual(list(category.all()), [c1])

    def test_transactions(self):
        user = self.dataset['user']

        with self.dataset.transaction() as txn:
            user.insert(username='******')
            with self.dataset.transaction() as txn2:
                user.insert(username='******')
                txn2.rollback()

            with self.dataset.transaction() as txn3:
                user.insert(username='******')
                with self.dataset.transaction() as txn4:
                    user.insert(username='******')
                txn3.rollback()

            with self.dataset.transaction() as txn5:
                user.insert(username='******')
                with self.dataset.transaction() as txn6:
                    with self.dataset.transaction() as txn7:
                        user.insert(username='******')
                        txn7.rollback()
                    user.insert(username='******')

            user.insert(username='******')

        self.assertQuery(user.all(), [
            {'username': '******'},
            {'username': '******'},
            {'username': '******'},
            {'username': '******'},
        ], 'username')

    def test_export(self):
        self.create_users()
        user = self.dataset['user']

        buf = StringIO()
        self.dataset.freeze(user.all(), 'json', file_obj=buf)
        self.assertEqual(buf.getvalue(), (
            '[{"username": "******"}, {"username": "******"}]'))

        buf = StringIO()
        self.dataset.freeze(user.all(), 'csv', file_obj=buf)
        self.assertEqual(buf.getvalue().splitlines(),
                         ['username', 'charlie', 'huey'])

    @skip_if(sys.version_info[0] < 3, 'requires python 3.x')
    def test_freeze_thaw_csv_utf8(self):
        self._test_freeze_thaw_utf8('csv')

    def test_freeze_thaw_json_utf8(self):
        self._test_freeze_thaw_utf8('json')

    def _test_freeze_thaw_utf8(self, fmt):
        username_bytes = b'\xd0\x92obby'  # Bobby with cyrillic "B".
        username_str = username_bytes.decode('utf8')
        u = User.create(username=username_str)

        # Freeze the data in the given format.
        user = self.dataset['user']
        filename = tempfile.mktemp()  # Get a filename.
        self.dataset.freeze(user.all(), fmt, filename)

        # Clear out the table and reload.
        User.delete().execute()
        self.assertEqual(list(user.all()), [])

        # Thaw the frozen data.
        n = user.thaw(format=fmt, filename=filename)
        self.assertEqual(n, 1)
        self.assertEqual(list(user.all()), [{'username': username_str}])

    def test_freeze_thaw(self):
        user = self.dataset['user']
        user.insert(username='******')

        note = self.dataset['note']
        note_ts = datetime.datetime(2017, 1, 2, 3, 4, 5)
        note.insert(content='foo', timestamp=note_ts, user_id='charlie')

        buf = StringIO()
        self.dataset.freeze(note.all(), 'json', file_obj=buf)
        self.assertEqual(json.loads(buf.getvalue()), [{
            'id': 1,
            'user_id': 'charlie',
            'content': 'foo',
            'timestamp': '2017-01-02 03:04:05'}])

        note.delete(id=1)
        self.assertEqual(list(note.all()), [])

        buf.seek(0)
        note.thaw(format='json', file_obj=buf)
        self.assertEqual(list(note.all()), [{
            'id': 1,
            'user_id': 'charlie',
            'content': 'foo',
            'timestamp': note_ts}])

    def test_table_column_creation(self):
        table = self.dataset['people']
        table.insert(name='charlie')
        self.assertEqual(table.columns, ['id', 'name'])
        self.assertEqual(list(table.all()), [{'id': 1, 'name': 'charlie'}])

    def test_import_json(self):
        table = self.dataset['people']
        table.insert(name='charlie')

        data = [
            {'name': 'zaizee', 'foo': 1},
            {'name': 'huey'},
            {'name': 'mickey', 'foo': 2},
            {'bar': None}]
        buf = StringIO()
        json.dump(data, buf)
        buf.seek(0)

        # All rows but the last will be inserted.
        count = self.dataset.thaw('people', 'json', file_obj=buf, strict=True)
        self.assertEqual(count, 3)

        names = [row['name'] for row in self.dataset['people'].all()]
        self.assertEqual(set(names),
                         set(['charlie', 'huey', 'mickey', 'zaizee']))

        # The columns have not changed.
        self.assertEqual(table.columns, ['id', 'name'])

        # No rows are inserted because there is no column overlap between
        # `user` and the provided data.
        buf.seek(0)
        count = self.dataset.thaw('user', 'json', file_obj=buf, strict=True)
        self.assertEqual(count, 0)

        # Create a new table and load all data into it.
        table = self.dataset['more_people']

        # All rows and columns will be inserted.
        buf.seek(0)
        count = self.dataset.thaw('more_people', 'json', file_obj=buf)
        self.assertEqual(count, 4)
        self.assertEqual(set(table.columns),
                         set(['id', 'name', 'bar', 'foo']))
        self.assertEqual(sorted(table.all(), key=lambda row: row['id']), [
            {'id': 1, 'name': 'zaizee', 'foo': 1, 'bar': None},
            {'id': 2, 'name': 'huey', 'foo': None, 'bar': None},
            {'id': 3, 'name': 'mickey', 'foo': 2, 'bar': None},
            {'id': 4, 'name': None, 'foo': None, 'bar': None},
        ])

    def test_import_csv(self):
        table = self.dataset['people']
        table.insert(name='charlie')

        data = [
            ('zaizee', 1, None),
            ('huey', 2, 'foo'),
            ('mickey', 3, 'baze')]
        buf = StringIO()
        writer = csv.writer(buf)
        writer.writerow(['name', 'foo', 'bar'])
        writer.writerows(data)
        buf.seek(0)

        count = self.dataset.thaw('people', 'csv', file_obj=buf, strict=True)
        self.assertEqual(count, 3)

        names = [row['name'] for row in self.dataset['people'].all()]
        self.assertEqual(set(names),
                         set(['charlie', 'huey', 'mickey', 'zaizee']))

        # The columns have not changed.
        self.assertEqual(table.columns, ['id', 'name'])

        # No rows are inserted because there is no column overlap between
        # `user` and the provided data.
        buf.seek(0)
        count = self.dataset.thaw('user', 'csv', file_obj=buf, strict=True)
        self.assertEqual(count, 0)

        # Create a new table and load all data into it.
        table = self.dataset['more_people']

        # All rows and columns will be inserted.
        buf.seek(0)
        count = self.dataset.thaw('more_people', 'csv', file_obj=buf)
        self.assertEqual(count, 3)
        self.assertEqual(set(table.columns),
                         set(['id', 'name', 'bar', 'foo']))
        self.assertEqual(sorted(table.all(), key=lambda row: row['id']), [
            {'id': 1, 'name': 'zaizee', 'foo': '1', 'bar': ''},
            {'id': 2, 'name': 'huey', 'foo': '2', 'bar': 'foo'},
            {'id': 3, 'name': 'mickey', 'foo': '3', 'bar': 'baze'},
        ])

    def test_table_thaw(self):
        table = self.dataset['people']
        data = json.dumps([
            {'name': 'charlie'},
            {'name': 'huey', 'color': 'white'}])
        self.assertEqual(table.thaw(file_obj=StringIO(data), format='json'), 2)
        self.assertEqual(list(table.all()), [
            {'id': 1, 'name': 'charlie', 'color': None},
            {'id': 2, 'name': 'huey', 'color': 'white'},
        ])

    def test_creating_tables(self):
        new_table = self.dataset['new_table']
        new_table.insert(data='foo')

        ref2 = self.dataset['new_table']
        self.assertEqual(list(ref2.all()), [{'id': 1, 'data': 'foo'}])
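# A condensed, standalone recap of the API the test case above exercises,
# against a throwaway in-memory database (table and file names are arbitrary):
from playhouse.dataset import DataSet

ds = DataSet('sqlite:///:memory:')
people = ds['people']                       # table is created lazily
people.insert(name='charlie')               # columns are added on demand
people.insert(name='huey', color='white')
people.update(color='gray', name='huey', columns=['name'])
print(people.find_one(name='huey'))         # {'id': 2, 'name': 'huey', 'color': 'gray'}
with ds.transaction():
    people.insert(name='mickey')
ds.freeze(people.all(), format='json', filename='people.json')   # export
people.delete()                             # clear the table
people.thaw(format='json', filename='people.json')               # re-import
ds.close()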
def setUp(self):
    if os.path.exists(self.database.database):
        os.unlink(self.database.database)
    super(TestDataSet, self).setUp()
    self.dataset = DataSet('sqlite:///%s' % self.database.database)
from peewee import *
from playhouse.mysql_ext import MySQLConnectorDatabase
from playhouse.dataset import DataSet

dbInstance = MySQLConnectorDatabase('sample', host='localhost',
                                    user='******', password='******')
dbDataSet = DataSet(dbInstance)


class BaseModel(Model):
    create_timestamp = DateTimeField(
        constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')], null=True)
    update_timestamp = DateTimeField(
        constraints=[
            SQL('DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP')
        ], null=True)

    class Meta:
        database = dbInstance


class ConflictDetectedException(Exception):
    pass


class BaseVersionedModel(BaseModel):
    version = IntegerField(constraints=[SQL('DEFAULT 1')], index=True)

    def __init__(self, *args, **kwargs):
class TestDataSet(ModelTestCase): database = db requires = [User, Note, Category] names = ['charlie', 'huey', 'peewee', 'mickey', 'zaizee'] def setUp(self): if os.path.exists(self.database.database): os.unlink(self.database.database) super(TestDataSet, self).setUp() self.dataset = DataSet('sqlite:///%s' % self.database.database) def tearDown(self): self.dataset.close() super(TestDataSet, self).tearDown() def test_pass_database(self): db = SqliteDatabase(':memory:') dataset = DataSet(db) self.assertEqual(dataset._database_path, ':memory:') users = dataset['users'] users.insert(username='******') self.assertEqual(list(users), [{'id': 1, 'username': '******'}]) def create_users(self, n=2): user = self.dataset['user'] for i in range(min(n, len(self.names))): user.insert(username=self.names[i]) def test_special_char_table(self): self.database.execute_sql('CREATE TABLE "hello!!world" ("data" TEXT);') self.database.execute_sql('INSERT INTO "hello!!world" VALUES (?)', ('test',)) ds = DataSet('sqlite:///%s' % self.database.database) table = ds['hello!!world'] model = table.model_class self.assertEqual(model._meta.table_name, 'hello!!world') def test_column_preservation(self): ds = DataSet('sqlite:///:memory:') books = ds['books'] books.insert(book_id='BOOK1') books.insert(bookId='BOOK2') data = [(row['book_id'] or '', row['bookId'] or '') for row in books] self.assertEqual(sorted(data), [ ('', 'BOOK2'), ('BOOK1', '')]) def test_case_insensitive(self): db.execute_sql('CREATE TABLE "SomeTable" (data TEXT);') tables = sorted(self.dataset.tables) self.assertEqual(tables, ['SomeTable', 'category', 'note', 'user']) table = self.dataset['HueyMickey'] self.assertEqual(table.model_class._meta.table_name, 'HueyMickey') tables = sorted(self.dataset.tables) self.assertEqual( tables, ['HueyMickey', 'SomeTable', 'category', 'note', 'user']) # Subsequent lookup succeeds. self.dataset['HueyMickey'] def test_introspect(self): tables = sorted(self.dataset.tables) self.assertEqual(tables, ['category', 'note', 'user']) user = self.dataset['user'] columns = sorted(user.columns) self.assertEqual(columns, ['username']) note = self.dataset['note'] columns = sorted(note.columns) self.assertEqual(columns, ['content', 'id', 'timestamp', 'user_id']) category = self.dataset['category'] columns = sorted(category.columns) self.assertEqual(columns, ['id', 'name', 'parent_id']) def test_update_cache(self): self.assertEqual(sorted(self.dataset.tables), ['category', 'note', 'user']) db.execute_sql('create table "foo" (id INTEGER, data TEXT)') Foo = self.dataset['foo'] self.assertEqual(sorted(Foo.columns), ['data', 'id']) self.assertTrue('foo' in self.dataset._models) self.dataset._models['foo'].drop_table() self.dataset.update_cache() self.assertTrue('foo' not in self.database.get_tables()) # This will create the table again. 
Foo = self.dataset['foo'] self.assertTrue('foo' in self.database.get_tables()) self.assertEqual(Foo.columns, ['id']) def assertQuery(self, query, expected, sort_key='id'): key = operator.itemgetter(sort_key) self.assertEqual( sorted(list(query), key=key), sorted(expected, key=key)) def test_insert(self): self.create_users() user = self.dataset['user'] expected = [ {'username': '******'}, {'username': '******'}] self.assertQuery(user.all(), expected, 'username') user.insert(username='******', age=5) expected = [ {'username': '******', 'age': None}, {'username': '******', 'age': None}, {'username': '******', 'age': 5}] self.assertQuery(user.all(), expected, 'username') query = user.find(username='******') expected = [{'username': '******', 'age': None}] self.assertQuery(query, expected, 'username') self.assertEqual( user.find_one(username='******'), {'username': '******', 'age': 5}) self.assertTrue(user.find_one(username='******') is None) def test_update(self): self.create_users() user = self.dataset['user'] self.assertEqual(user.update(favorite_color='green'), 2) expected = [ {'username': '******', 'favorite_color': 'green'}, {'username': '******', 'favorite_color': 'green'}] self.assertQuery(user.all(), expected, 'username') res = user.update( favorite_color='blue', username='******', columns=['username']) self.assertEqual(res, 1) expected[1]['favorite_color'] = 'blue' self.assertQuery(user.all(), expected, 'username') def test_delete(self): self.create_users() user = self.dataset['user'] self.assertEqual(user.delete(username='******'), 1) self.assertEqual(list(user.all()), [{'username': '******'}]) def test_find(self): self.create_users(5) user = self.dataset['user'] def assertUsernames(query, expected): self.assertEqual( sorted(row['username'] for row in query), sorted(expected)) assertUsernames(user.all(), self.names) assertUsernames(user.find(), self.names) assertUsernames(user.find(username='******'), ['charlie']) assertUsernames(user.find(username='******'), []) user.update(favorite_color='green') for username in ['zaizee', 'huey']: user.update( favorite_color='blue', username=username, columns=['username']) assertUsernames( user.find(favorite_color='green'), ['charlie', 'mickey', 'peewee']) assertUsernames( user.find(favorite_color='blue'), ['zaizee', 'huey']) assertUsernames( user.find(favorite_color='green', username='******'), ['peewee']) self.assertEqual( user.find_one(username='******'), {'username': '******', 'favorite_color': 'green'}) def test_magic_methods(self): self.create_users(5) user = self.dataset['user'] # __len__() self.assertEqual(len(user), 5) # __iter__() users = sorted([u for u in user], key=operator.itemgetter('username')) self.assertEqual(users[0], {'username': '******'}) self.assertEqual(users[-1], {'username': '******'}) # __contains__() self.assertTrue('user' in self.dataset) self.assertFalse('missing' in self.dataset) def test_foreign_keys(self): user = self.dataset['user'] user.insert(username='******') note = self.dataset['note'] for i in range(1, 4): note.insert( content='note %s' % i, timestamp=datetime.date(2014, 1, i), user_id='charlie') notes = sorted(note.all(), key=operator.itemgetter('id')) self.assertEqual(notes[0], { 'content': 'note 1', 'id': 1, 'timestamp': datetime.datetime(2014, 1, 1), 'user_id': 'charlie'}) self.assertEqual(notes[-1], { 'content': 'note 3', 'id': 3, 'timestamp': datetime.datetime(2014, 1, 3), 'user_id': 'charlie'}) user.insert(username='******') note.update(user_id='mickey', id=3, columns=['id']) 
        self.assertEqual(note.find(user_id='charlie').count(), 2)
        self.assertEqual(note.find(user_id='mickey').count(), 1)

        category = self.dataset['category']
        category.insert(name='c1')
        c1 = category.find_one(name='c1')
        self.assertEqual(c1, {'id': 1, 'name': 'c1', 'parent_id': None})

        category.insert(name='c2', parent_id=1)
        c2 = category.find_one(parent_id=1)
        self.assertEqual(c2, {'id': 2, 'name': 'c2', 'parent_id': 1})

        self.assertEqual(category.delete(parent_id=1), 1)
        self.assertEqual(list(category.all()), [c1])

    def test_transactions(self):
        user = self.dataset['user']

        with self.dataset.transaction() as txn:
            user.insert(username='******')
            with self.dataset.transaction() as txn2:
                user.insert(username='******')
                txn2.rollback()

            with self.dataset.transaction() as txn3:
                user.insert(username='******')
                with self.dataset.transaction() as txn4:
                    user.insert(username='******')
                txn3.rollback()

            with self.dataset.transaction() as txn5:
                user.insert(username='******')
                with self.dataset.transaction() as txn6:
                    with self.dataset.transaction() as txn7:
                        user.insert(username='******')
                        txn7.rollback()
                    user.insert(username='******')

            user.insert(username='******')

        self.assertQuery(user.all(), [
            {'username': '******'},
            {'username': '******'},
            {'username': '******'},
            {'username': '******'},
        ], 'username')

    def test_export(self):
        self.create_users()
        user = self.dataset['user']

        buf = StringIO()
        self.dataset.freeze(user.all(), 'json', file_obj=buf)
        self.assertEqual(buf.getvalue(), (
            '[{"username": "******"}, {"username": "******"}]'))

        buf = StringIO()
        self.dataset.freeze(user.all(), 'csv', file_obj=buf)
        self.assertEqual(buf.getvalue().splitlines(), [
            'username',
            'charlie',
            'huey'])

    @skip_if(sys.version_info[0] < 3, 'requires python 3.x')
    def test_freeze_thaw_csv_utf8(self):
        self._test_freeze_thaw_utf8('csv')

    def test_freeze_thaw_json_utf8(self):
        self._test_freeze_thaw_utf8('json')

    def _test_freeze_thaw_utf8(self, fmt):
        username_bytes = b'\xd0\x92obby'  # Bobby with cyrillic "B".
        username_str = username_bytes.decode('utf8')
        u = User.create(username=username_str)

        # Freeze the data in the given format.
        user = self.dataset['user']
        filename = tempfile.mktemp()  # Get a filename.
        self.dataset.freeze(user.all(), fmt, filename)

        # Clear out the table and reload.
        User.delete().execute()
        self.assertEqual(list(user.all()), [])

        # Thaw the frozen data.
        n = user.thaw(format=fmt, filename=filename)
        self.assertEqual(n, 1)
        self.assertEqual(list(user.all()), [{'username': username_str}])

    def test_freeze_thaw(self):
        user = self.dataset['user']
        user.insert(username='******')

        note = self.dataset['note']
        note_ts = datetime.datetime(2017, 1, 2, 3, 4, 5)
        note.insert(content='foo', timestamp=note_ts, user_id='charlie')

        buf = StringIO()
        self.dataset.freeze(note.all(), 'json', file_obj=buf)
        self.assertEqual(json.loads(buf.getvalue()), [{
            'id': 1,
            'user_id': 'charlie',
            'content': 'foo',
            'timestamp': '2017-01-02 03:04:05'}])

        note.delete(id=1)
        self.assertEqual(list(note.all()), [])

        buf.seek(0)
        note.thaw(format='json', file_obj=buf)
        self.assertEqual(list(note.all()), [{
            'id': 1,
            'user_id': 'charlie',
            'content': 'foo',
            'timestamp': note_ts}])

    def test_table_column_creation(self):
        table = self.dataset['people']
        table.insert(name='charlie')
        self.assertEqual(table.columns, ['id', 'name'])
        self.assertEqual(list(table.all()), [{'id': 1, 'name': 'charlie'}])

    def test_import_json(self):
        table = self.dataset['people']
        table.insert(name='charlie')

        data = [
            {'name': 'zaizee', 'foo': 1},
            {'name': 'huey'},
            {'name': 'mickey', 'foo': 2},
            {'bar': None}]
        buf = StringIO()
        json.dump(data, buf)
        buf.seek(0)

        # All rows but the last will be inserted.
        count = self.dataset.thaw('people', 'json', file_obj=buf, strict=True)
        self.assertEqual(count, 3)

        names = [row['name'] for row in self.dataset['people'].all()]
        self.assertEqual(
            set(names),
            set(['charlie', 'huey', 'mickey', 'zaizee']))

        # The columns have not changed.
        self.assertEqual(table.columns, ['id', 'name'])

        # No rows are inserted because there is no column overlap between
        # `user` and the provided data.
        buf.seek(0)
        count = self.dataset.thaw('user', 'json', file_obj=buf, strict=True)
        self.assertEqual(count, 0)

        # Create a new table and load all data into it.
        table = self.dataset['more_people']

        # All rows and columns will be inserted.
        buf.seek(0)
        count = self.dataset.thaw('more_people', 'json', file_obj=buf)
        self.assertEqual(count, 4)
        self.assertEqual(
            set(table.columns),
            set(['id', 'name', 'bar', 'foo']))
        self.assertEqual(sorted(table.all(), key=lambda row: row['id']), [
            {'id': 1, 'name': 'zaizee', 'foo': 1, 'bar': None},
            {'id': 2, 'name': 'huey', 'foo': None, 'bar': None},
            {'id': 3, 'name': 'mickey', 'foo': 2, 'bar': None},
            {'id': 4, 'name': None, 'foo': None, 'bar': None},
        ])

    def test_import_csv(self):
        table = self.dataset['people']
        table.insert(name='charlie')

        data = [
            ('zaizee', 1, None),
            ('huey', 2, 'foo'),
            ('mickey', 3, 'baze')]
        buf = StringIO()
        writer = csv.writer(buf)
        writer.writerow(['name', 'foo', 'bar'])
        writer.writerows(data)
        buf.seek(0)

        count = self.dataset.thaw('people', 'csv', file_obj=buf, strict=True)
        self.assertEqual(count, 3)

        names = [row['name'] for row in self.dataset['people'].all()]
        self.assertEqual(
            set(names),
            set(['charlie', 'huey', 'mickey', 'zaizee']))

        # The columns have not changed.
        self.assertEqual(table.columns, ['id', 'name'])

        # No rows are inserted because there is no column overlap between
        # `user` and the provided data.
        buf.seek(0)
        count = self.dataset.thaw('user', 'csv', file_obj=buf, strict=True)
        self.assertEqual(count, 0)

        # Create a new table and load all data into it.
        table = self.dataset['more_people']

        # All rows and columns will be inserted.
        buf.seek(0)
        count = self.dataset.thaw('more_people', 'csv', file_obj=buf)
        self.assertEqual(count, 3)
        self.assertEqual(
            set(table.columns),
            set(['id', 'name', 'bar', 'foo']))
        self.assertEqual(sorted(table.all(), key=lambda row: row['id']), [
            {'id': 1, 'name': 'zaizee', 'foo': '1', 'bar': ''},
            {'id': 2, 'name': 'huey', 'foo': '2', 'bar': 'foo'},
            {'id': 3, 'name': 'mickey', 'foo': '3', 'bar': 'baze'},
        ])

    def test_table_thaw(self):
        table = self.dataset['people']
        data = json.dumps([{'name': 'charlie'},
                           {'name': 'huey', 'color': 'white'}])
        self.assertEqual(table.thaw(file_obj=StringIO(data), format='json'), 2)
        self.assertEqual(list(table.all()), [
            {'id': 1, 'name': 'charlie', 'color': None},
            {'id': 2, 'name': 'huey', 'color': 'white'},
        ])

    def test_creating_tables(self):
        new_table = self.dataset['new_table']
        new_table.insert(data='foo')

        ref2 = self.dataset['new_table']
        self.assertEqual(list(ref2.all()), [{'id': 1, 'data': 'foo'}])
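# The test class above exercises the core DataSet API end to end. Below is a
# minimal standalone sketch of the same workflow against a throwaway
# in-memory database; the 'person'/'person_copy' tables and their columns are
# illustrative only and do not come from the tests.
from io import StringIO

from playhouse.dataset import DataSet

ds = DataSet('sqlite:///:memory:')
person = ds['person']                     # Table (and model) created lazily.

with ds.transaction():                    # Wrap writes in a transaction.
    person.insert(name='alpha')
    person.insert(name='beta', age=30)    # New columns are added on demand.

print(sorted(person.columns))             # e.g. ['age', 'id', 'name']
print(person.find_one(name='beta'))       # e.g. {'id': 2, 'name': 'beta', 'age': 30}

# Export the rows as JSON to an in-memory buffer.
buf = StringIO()
ds.freeze(person.all(), 'json', file_obj=buf)

# Re-import rows from JSON into another table (cf. test_table_thaw).
data = '[{"name": "gamma"}, {"name": "delta", "age": 1}]'
other = ds['person_copy']
other.thaw(format='json', file_obj=StringIO(data))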
from peewee import SqliteDatabase
from playhouse.dataset import DataSet

DATA_DIR = '../data'

db = DataSet(SqliteDatabase(DATA_DIR + '/bandwidth.sqlite'))
tresults = db['test-results']
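# Hypothetical usage of the 'test-results' table opened above; the column
# names (host, mbps, ts) are illustrative and not taken from the source.
import datetime

tresults.insert(host='example.com', mbps=94.2,
                ts=datetime.datetime.now())

# Columns are created on first use, so later reads see them all.
for row in tresults.find(host='example.com'):
    print(row['ts'], row['mbps'])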
import logging
import os

# import dataset
from playhouse.dataset import DataSet
# for DB.py
# from db import DB
# db = DB(filename=db_file, dbname="flix", dbtype="sqlite")

logger = logging.getLogger()
# LOG_FILENAME = 'flix.log'
# logging.basicConfig(filename=LOG_FILENAME, level=logging.INFO,)

project_path = '{}/.flix'.format(os.path.expanduser('~'))
db_file = '{}/flix.db'.format(project_path)
# dataset_db = dataset.connect('sqlite:///{}'.format(db_file))
dataset_db = DataSet('sqlite:///{}'.format(db_file))
media_url = 'http://www.omdbapi.com/?'
# media_extenions = [
#     '.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx',
#     '.avchd', '.avi', '.bik', '.bix', '.box', '.cam', '.dat', '.divx',
#     '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli', '.flic', '.flv',
#     '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v',
#     '.m4e', '.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov',
#     '.mov', '.movhd', '.movie', '.movx', '.mp4', '.mpe', '.mpeg',
#     '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm',
#     '.omf', '.ps', '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts',
#     '.vfw', '.vid', '.video', '.viv', '.vivo', '.vob', '.vro', '.wm',
#     '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid'
# ]
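# Hedged sketch of how a lookup might be cached through the handles above.
# The `requests` dependency, the OMDB_API_KEY placeholder, and the 'movies'
# table are assumptions for illustration; they do not appear in the source.
import requests

OMDB_API_KEY = 'your-api-key'  # placeholder


def cache_movie(title):
    # OMDb is queried by title ('t'); the response is a JSON document.
    resp = requests.get(media_url, params={'t': title, 'apikey': OMDB_API_KEY})
    info = resp.json()

    # Store a few fields in a DataSet table; columns are created on demand.
    movies = dataset_db['movies']
    movies.insert(title=info.get('Title'),
                  year=info.get('Year'),
                  imdb_id=info.get('imdbID'))
    return movies.find_one(imdb_id=info.get('imdbID'))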