def setUp(self):
    """Recreate the on-disk test database and wrap it in a DataSet."""
    db_path = db.database
    # Start from a clean slate: remove any file left over from a prior run.
    if os.path.exists(db_path):
        os.unlink(db_path)
    db.connect()
    db.create_tables([User, Note, Category])
    self.dataset = DataSet('sqlite:///%s' % db_path)
def test_column_preservation(self):
    """Columns differing only in naming style ('book_id' vs 'bookId')
    must be stored and read back as distinct columns."""
    ds = DataSet('sqlite:///:memory:')
    books = ds['books']
    books.insert(book_id='BOOK1')
    books.insert(bookId='BOOK2')
    seen = []
    for row in books:
        # Missing values come back as None; coalesce to '' for comparison.
        seen.append((row['book_id'] or '', row['bookId'] or ''))
    self.assertEqual(sorted(seen), [('', 'BOOK2'), ('BOOK1', '')])
def test_with_views(self):
    """Views are excluded by default and exposed via include_views=True."""
    # Create a view over published notes (status = 1), newest first.
    self.dataset.query('CREATE VIEW notes_public AS '
                       'SELECT content, timestamp FROM note '
                       'WHERE status = 1 ORDER BY timestamp DESC')
    try:
        # The view is visible in .views but never in .tables.
        self.assertTrue('notes_public' in self.dataset.views)
        self.assertFalse('notes_public' in self.dataset.tables)
        users = self.dataset['user']
        with self.dataset.transaction():
            users.insert(username='******')
            users.insert(username='******')
            notes = self.dataset['note']
            # Three notes on consecutive days; only status == 1 rows
            # ('n1' and 'n3') should appear through the view.
            for i, (ct, st) in enumerate([('n1', 1), ('n2', 2), ('n3', 1)]):
                notes.insert(content=ct, status=st, user_id='charlie',
                             timestamp=datetime.datetime(2022, 1, 1 + i))
        # By default (include_views=False) the view is not addressable.
        self.assertFalse('notes_public' in self.dataset)
        # Create a new dataset instance with views enabled.
        dataset = DataSet(self.dataset._database, include_views=True)
        self.assertTrue('notes_public' in dataset)
        public = dataset['notes_public']
        self.assertEqual(public.columns, ['content', 'timestamp'])
        # ORDER BY timestamp DESC in the view: n3 (Jan 3) before n1 (Jan 1).
        self.assertEqual(list(public), [
            {'content': 'n3', 'timestamp': datetime.datetime(2022, 1, 3)},
            {'content': 'n1', 'timestamp': datetime.datetime(2022, 1, 1)}])
    finally:
        # Always drop the view so other tests see a clean schema.
        self.dataset.query('DROP VIEW notes_public')
def setUp(self):
    """Remove any stale tmp.db, build the schema, then open a DataSet on it."""
    stale = 'tmp.db'
    if os.path.exists(stale):
        os.unlink(stale)
    db.connect()
    db.create_tables([User, Note, Category])
    self.dataset = DataSet('sqlite:///tmp.db')
def test_item_apis(self):
    """Exercise dict-style row access on a table: assignment by id,
    implicit column creation, deletion, and partial updates."""
    dataset = DataSet('sqlite:///:memory:')
    users = dataset['users']
    users.insert(username='******')
    self.assertEqual(list(users), [{'id': 1, 'username': '******'}])
    # Assigning to a fresh id inserts a row; the new 'color' column is
    # added to the schema and existing rows read back None for it.
    users[2] = {'username': '******', 'color': 'white'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None},
        {'id': 2, 'username': '******', 'color': 'white'}])
    # Assigning to an existing id updates only the given keys; 'color'
    # is preserved while 'kind' is a newly created column.
    users[2] = {'username': '******', 'kind': 'cat'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': None},
        {'id': 2, 'username': '******', 'color': 'white', 'kind': 'cat'}])
    # del removes the row but the columns it introduced remain.
    del users[2]
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': None}])
    # Update row 1, re-create row 2, then patch row 2 again.
    users[1] = {'kind': 'person'}
    users[2] = {'username': '******'}
    users[2] = {'kind': 'cat'}
    self.assertEqual(list(users), [
        {'id': 1, 'username': '******', 'color': None, 'kind': 'person'},
        {'id': 2, 'username': '******', 'color': None, 'kind': 'cat'}])
def test_pass_database(self):
    """DataSet accepts an already-constructed peewee database instance."""
    # Local renamed from `db` to avoid shadowing the module-level database.
    database = SqliteDatabase(':memory:')
    dataset = DataSet(database)
    self.assertEqual(dataset._database_path, ':memory:')
    users = dataset['users']
    users.insert(username='******')
    self.assertEqual(list(users), [{'id': 1, 'username': '******'}])
def data_dump(data_format, directory_name):
    """Serialize every table to <directory_name>/<table>.<data_format>."""
    db.close()
    ds = DataSet(db)
    for table_name in ds.tables:
        file_name = os.path.join(directory_name, f'{table_name}.{data_format}')
        file_name = os.path.abspath(file_name)
        ds.freeze(ds[table_name].all(), format=data_format,
                  filename=file_name)
def test_special_char_table(self):
    """Table names containing punctuation are introspected correctly."""
    self.database.execute_sql('CREATE TABLE "hello!!world" ("data" TEXT);')
    self.database.execute_sql('INSERT INTO "hello!!world" VALUES (?)',
                              ('test', ))
    ds = DataSet('sqlite:///%s' % self.database.database)
    # Introspection should map the quoted name straight through.
    model = ds['hello!!world'].model_class
    self.assertEqual(model._meta.table_name, 'hello!!world')
def data_load(data_format, directory_name):
    """Thaw the known tables from dump files, skipping any that are absent."""
    db.close()
    ds = DataSet(db)
    known_tables = ('users', 'revokedtoken', 'brewery', 'beer', 'rating')
    for table_name in known_tables:
        file_name = os.path.abspath(
            os.path.join(directory_name, f'{table_name}.{data_format}'))
        # Guard clause: nothing to load for this table.
        if not os.path.isfile(file_name):
            continue
        ds[table_name].thaw(format=data_format, filename=file_name,
                            strict=True)
def main():
    # This function exists to act as a console script entry-point.
    parser = get_option_parser()
    options, args = parser.parse_args()
    if not args:
        # Fixed message: previously ended with the stray punctuation "!.".
        die('Error: a path to a database file is required.')
    db_file = args[0]
    # Merge the two separate `global` statements into one declaration.
    global dataset, migrator
    dataset = DataSet('sqlite:///%s' % db_file)
    migrator = dataset._migrator
    # Bind to PORT if defined, otherwise default to 5000.
    port = int(os.environ.get('PORT', 5000))
    app.run(host=options.host, port=port, debug=options.debug)
def main():
    # Console-script entry point: parse args, bind globals, start the app.
    parser = get_option_parser()
    options, args = parser.parse_args()
    if not args:
        die('Error: missing required path to database file.')
    db_file = args[0]
    global dataset, migrator
    dataset = DataSet('sqlite:///%s' % db_file)
    migrator = dataset._migrator
    # Optionally pop open a browser tab pointed at the running server.
    if options.browser:
        open_browser_tab(options.host, options.port)
    app.run(host=options.host, port=options.port, debug=options.debug)
def importBkp():
    """Restore the pessoa/local/tarefa tables from the uploaded backup zip.

    Fixes: use the stdlib ``zipfile`` module explicitly instead of the
    module-level alias ``zip`` (which shadows the builtin), and manage the
    archive with a context manager so it is closed even on error.
    """
    import zipfile
    with zipfile.ZipFile('App/database/import/bkp.zip', 'r') as zf:
        zf.extractall('App/database/import')
    dbe = DataSet("postgresql://*****:*****@localhost:5432/my_app")
    # extractall() reproduces the archived directory layout, hence the
    # doubled 'App/database' prefix in the thawed file paths.
    base = 'App/database/import/App/database/exported'
    for table_name in ('pessoa', 'local', 'tarefa'):
        dbe[table_name].thaw(format='json',
                             filename=f'{base}/{table_name}.json')
def db():
    """Return a DataSet for the social network with all expected columns.

    Each insert/delete pair forces a column into the table schema while
    leaving the table empty afterwards.
    """
    social_network_db = DataSet('sqlite:///social_network.db')
    social_network_db['users'].delete()
    social_network_db['statuses'].delete()
    # Seed the schema column-by-column, preserving the original order.
    schema_columns = {
        'users': ('USER_ID', 'EMAIL', 'NAME', 'LASTNAME'),
        'statuses': ('STATUS_ID', 'USER_ID'),
    }
    for table_name, columns in schema_columns.items():
        table = social_network_db[table_name]
        for column in columns:
            table.insert(**{column: ''})
            table.delete(**{column: ''})
    return social_network_db
def exportBkp():
    """Dump the pessoa/local/tarefa tables to JSON and bundle them in a zip.

    Fixes: use the stdlib ``zipfile`` module explicitly instead of the
    module-level alias ``zip`` (which shadows the builtin), and manage the
    archive with a context manager so it is closed even on error.
    """
    import zipfile
    dbe = DataSet("postgresql://*****:*****@localhost:5432/my_app")
    for table_name in ('pessoa', 'local', 'tarefa'):
        dbe.freeze(dbe[table_name].all(), format='json',
                   filename=f'App/database/exported/{table_name}.json')
    with zipfile.ZipFile('App/database/bkp.zip', 'w') as zf:
        # Archive the directory entries as well as each exported file.
        for dirname, subdirs, files in os.walk('App/database/exported'):
            zf.write(dirname)
            for filename in files:
                zf.write(os.path.join(dirname, filename))
def load_status_updates(filename):
    '''
    Load status rows from a CSV file into the statuses table.

    Requirements:
    - If a status_id already exists, it will ignore it and continue to
      the next.
    - Returns False if there are any errors (such as empty fields in the
      source CSV file)
    - Otherwise, it returns True.
    '''
    db = DataSet('sqlite:///social_network.db')
    db['statuses'].delete()
    with open(filename, newline='') as csvfile:
        reader = csv.DictReader(csvfile, dialect='excel')

        def csv_reader_row(reader_list):
            # Project each CSV dict-row down to the tuple add_status expects.
            return (reader_list['STATUS_ID'], reader_list['USER_ID'],
                    reader_list['STATUS_TEXT'])

        reader_rows = map(csv_reader_row, reader)
        # Seed reduce with True: without an initial value, an empty CSV
        # raised TypeError ("reduce() of empty sequence with no initial
        # value"). An empty file now loads as a vacuous success. The
        # lambda (not all()) keeps processing every row even after a
        # failure, matching the original behavior.
        no_errors = reduce(lambda acc, ok: acc and ok,
                           map(partial(user_status.add_status, db=db),
                               reader_rows),
                           True)
    return no_errors
def load_users(filename):
    '''
    Load user rows from a CSV file into the users table.

    Requirements:
    - If a user_id already exists, it will ignore it and continue to
      the next.
    - Returns False if there are any errors (such as empty fields in the
      source CSV file)
    - Otherwise, it returns True.
    '''
    db = DataSet('sqlite:///social_network.db')
    db['users'].delete()  # Start with a new db every time we load a file for the sake of the demo.
    db['statuses'].delete()
    # db['users'].thaw(filename=filename, format='csv')
    # how to load and still check data validity by using thaw()??
    # TO DO: Use multiprocessing with this. Break into chunks, could probably load several times faster.
    with open(filename, newline='') as csvfile:
        reader = csv.DictReader(csvfile, dialect='excel')  # an iterable

        def csv_reader_row(reader_list):
            # Project each CSV dict-row down to the tuple add_user expects.
            return (reader_list['USER_ID'], reader_list['EMAIL'],
                    reader_list['NAME'], reader_list['LASTNAME'])

        reader_rows = map(csv_reader_row, reader)
        # Seed reduce with True: without an initial value, an empty CSV
        # raised TypeError ("reduce() of empty sequence with no initial
        # value"). An empty file now loads as a vacuous success.
        db_loaded_successfully = reduce(
            lambda acc, ok: acc and ok,
            map(partial(users.add_user, db=db), reader_rows),
            True)
    return db_loaded_successfully
#!/usr/bin/env python3 import peewee as pw from playhouse.dataset import DataSet from playhouse.db_url import connect from random import randint, choice # Variables database='boutique' host='localhost' user='******' dbURL = 'mysql://{user}@{host}/{database}'.format(database=database, host=host, user=user) db = connect(dbURL) dbDataset = DataSet(dbURL) # Models (Table Schema) class BaseModel(pw.Model): class Meta: database = db class rooms(pw.Model): roomType = pw.TextField() price = pw.FloatField() discount = pw.FloatField() roomPhoto = pw.TextField() class Meta: database = db class customers(pw.Model):
# import dataset from playhouse.dataset import DataSet # for DB.py # from db import DB # db = DB(filename=db_file, dbname="flix", dbtype="sqlite") logger = logging.getLogger() # LOG_FILENAME = 'flix.log' # logging.basicConfig(filename=LOG_FILENAME, level=logging.INFO,) project_path = '{}/.flix'.format(os.path.expanduser('~')) db_file = '{}/flix.db'.format(project_path) # dataset_db = dataset.connect('sqlite:///{}'.format(db_file)) dataset_db = DataSet('sqlite:///{}'.format(db_file)) media_url = 'http://www.omdbapi.com/?' # media_extenions = [ # '.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx', # '.avchd', '.avi', '.bik', '.bix', '.box', '.cam', '.dat', '.divx', # '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli', '.flic', '.flv', # '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', # '.m4e', '.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov', # '.mov', '.movhd', '.movie', '.movx', '.mp4', '.mpe', '.mpeg', # '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', # '.omf', '.ps', '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', # '.vfw', '.vid', '.video', '.viv', '.vivo', '.vob', '.vro', '.wm', # '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid' # ]
def etl_file_to_table(self):
    """Example sub-command: load a dump file into the named table."""
    pargs = self.app.pargs
    db = DataSet(self.app.config.get("APP", "database"))
    destination = db[pargs.table]
    destination.thaw(filename=pargs.file, format=pargs.format)
    self.info("Done !, file has been loaded to", pargs.table)
def get_dataset_ext():
    """Open and return the DataSet at the configured extension path."""
    # http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#dataset
    return DataSet(get_path_dataset_ext())
from peewee import SqliteDatabase
from playhouse.dataset import DataSet

# Location of the SQLite file, relative to this module's working directory.
DATA_DIR = '../data'

# High-level DataSet wrapper around the bandwidth-results database.
db = DataSet(SqliteDatabase(DATA_DIR + '/bandwidth.sqlite'))
# Results table; the dash in the name means it is only addressable through
# DataSet's dict-style API, not as a Python identifier.
tresults = db['test-results']
def setUp(self):
    """Delete any existing database file, run the base setUp, then wrap
    the database in a DataSet."""
    stale_path = self.database.database
    if os.path.exists(stale_path):
        os.unlink(stale_path)
    super(TestDataSet, self).setUp()
    self.dataset = DataSet('sqlite:///%s' % self.database.database)
from peewee import * from playhouse.dataset import DataSet db = SqliteDatabase('validator.db') ds = DataSet('sqlite:///validator.db') class EmailInfo(Model): email = CharField(index=True, unique=True) syntax = BooleanField() mx = BooleanField(null=True) deliverable = BooleanField(null=True) color = CharField() normalized = CharField(null=True) class Meta: database = db def pull(email): record = EmailInfo.get_or_none(EmailInfo.email == email) if not record: return None result = { 'email': record.email, 'syntax': record.syntax, 'mx': record.mx, 'deliverable': record.deliverable, 'color': record.color, 'normalized': record.normalized
"""Connects to database and setups table.""" from peewee import (Model, AutoField, CharField, IntegerField, UUIDField, BooleanField, DateTimeField, DecimalField, PostgresqlDatabase) from playhouse.dataset import DataSet from .constants import DATABASE_NAME, PORT db = PostgresqlDatabase(DATABASE_NAME, port=PORT) data_set = DataSet( "postgresql://*****:*****@localhost:5433/higgs2_db") class SimulatedImageClick(Model): """Represent a click on an image.""" pk = AutoField(primary_key=True) click_set_id = CharField() user_id = IntegerField() logged_in = BooleanField() zooniverse_id = CharField() timestamp = DateTimeField() click_x = DecimalField(max_digits=15) click_y = DecimalField(max_digits=15) number_of_tracks = CharField() # 2, 3, 4, 5-10 or 10+ decay_type = CharField() mass = CharField() decay_length = CharField() projection = CharField() true_x1 = DecimalField(max_digits=15) true_y1 = DecimalField(max_digits=15) true_x2 = DecimalField(max_digits=15)
'''
Functions that create a connection to User and Status tables

Import user_table and status_table directly
'''
import os

from playhouse.dataset import DataSet

# Ensure we are starting with an empty database
FILE = 'socialnetwork.db'
if os.path.exists(FILE):
    os.remove(FILE)

# Connect to database
SQL_URL = 'sqlite:///' + FILE
db = DataSet(SQL_URL)

# Setup User
# The insert/delete pair forces the user_id column into the schema so the
# unique index can be created on an otherwise-empty table.
user_table = db['User']
user_table.insert(user_id='blank')
user_table.create_index(['user_id'], unique=True)
user_table.delete(user_id='blank')

# Setup Status
# Same trick: seed both columns, index status_id, then empty the table.
status_table = db['Status']
status_table.insert(status_id='blank', user_id='blank')
status_table.create_index(['status_id'], unique=True)
status_table.delete(status_id='blank')
from peewee import * from playhouse.mysql_ext import MySQLConnectorDatabase from playhouse.dataset import DataSet dbInstance = MySQLConnectorDatabase('sample', host='localhost', user='******', password='******') dbDataSet = DataSet(dbInstance) class BaseModel(Model): create_timestamp = DateTimeField( constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')], null=True) update_timestamp = DateTimeField(constraints=[ SQL('DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP') ], null=True) class Meta: database = dbInstance class ConflictDetectedException(Exception): pass class BaseVersionedModel(BaseModel): version = IntegerField(constraints=[SQL('DEFAULT 1')], index=True) def __init__(self, *args, **kwargs):
def backup_database(cls, db_format, filepath):
    """Freeze every row of cls.model to filepath in the given format."""
    # A throwaway in-memory DataSet is enough: freeze() only needs the
    # query results, not the destination database.
    scratch = DataSet('sqlite:///:memory:')
    rows = cls.model.select()
    scratch.freeze(rows, format=db_format, filename=filepath)