def get_database_class(self, backend=None):
    """Return the peewee Database class registered for *backend*.

    Falls back to ``self.backend`` when no backend is given.  Raises
    KeyError (after printing the available choices) for unknown names.
    """
    available = {
        'mysql': MySQLDatabase,
        'postgres': PostgresqlDatabase,
        'sqlite': SqliteDatabase,
    }

    # Optional playhouse backends are registered only when importable.
    try:
        from playhouse.apsw_ext import APSWDatabase
    except ImportError:
        pass
    else:
        available['apsw'] = APSWDatabase

    try:
        from playhouse.berkeleydb import BerkeleyDatabase
    except ImportError:
        pass
    else:
        available['berkeleydb'] = BerkeleyDatabase

    try:
        from playhouse.sqlcipher_ext import SqlCipherDatabase
    except ImportError:
        pass
    else:
        available['sqlcipher'] = SqlCipherDatabase

    selected = backend or self.backend
    try:
        return available[selected]
    except KeyError:
        print_('Unrecognized database: "%s".' % selected)
        print_('Available choices:\n%s' % '\n'.join(
            sorted(available.keys())))
        raise
def get_foreign_keys(self, table, schema=None):
    """Extract foreign-key mappings for *table* from its CREATE TABLE sql.

    Returns a list of ForeignKeyMapping tuples, or an empty list when the
    table definition cannot be parsed.
    """
    query = """
        SELECT sql
        FROM sqlite_master
        WHERE (tbl_name = ? AND type = ?)"""
    cursor = self.execute(query, table, "table")
    table_definition = cursor.fetchone()[0].strip()
    try:
        # DOTALL/MULTILINE so CREATE TABLE statements spanning several
        # lines (the common case) still match; without them the search
        # fails and no foreign keys are found.  Raw string avoids
        # invalid escape-sequence warnings on modern Pythons.
        columns = re.search(
            r"\((.+)\)",
            table_definition,
            re.MULTILINE | re.DOTALL).groups()[0]
    except AttributeError:
        print_('Unable to read table definition for "%s"' % table)
        return []

    # Collapse newlines and runs of whitespace so the comma-split below
    # sees one clean column definition per element.
    columns = re.sub(r"[\s\n\r]+", " ", columns).strip()

    fks = []
    for column_def in columns.split(","):
        column_def = column_def.strip()
        match = re.search(self.re_foreign_key, column_def, re.I)
        if not match:
            continue
        column, dest_table, dest_column = [
            s.strip('"') for s in match.groups()]
        fks.append(ForeignKeyMapping(
            table=table,
            column=column,
            dest_table=dest_table,
            dest_column=dest_column))
    return fks
def print_models(engine, database, tables, **connect):
    """Introspect *database* via *engine* and print peewee models to stdout.

    *tables*, when non-empty, restricts output to the named tables.
    Connection keyword arguments are forwarded to the introspector.
    """
    schema = connect.get('schema')
    db = get_introspector(engine, database, **connect)

    models, table_to_model, table_fks, col_meta = introspect(db, schema)

    # write generated code to standard out
    print_(TEMPLATE % (db.get_conn_class().__name__, database, repr(connect)))

    pk_classes = (IntegerField, PrimaryKeyField)

    # print the models
    def print_model(model, seen, accum=None):
        # Depth-first: emit foreign-key target models before the model
        # that references them so generated code resolves top-to-bottom.
        accum = accum or []

        for _, rel_table, _ in table_fks[model]:
            if rel_table in accum and model not in accum:
                print_('# POSSIBLE REFERENCE CYCLE: %s' %
                       table_to_model[rel_table])
            if rel_table not in seen and rel_table not in accum:
                seen.add(rel_table)
                if rel_table != model:
                    print_model(rel_table, seen, accum + [model])

        ttm = table_to_model[model]
        print_('class %s(BaseModel):' % ttm)
        cols = models[model]
        for column, column_info in ds(cols):
            # Skip the implicit integer "id" primary key; peewee adds it.
            if column == 'id' and column_info.field_class in pk_classes:
                continue
            field_params = ', '.join([
                '%s=%s' % (k, v)
                for k, v in col_meta[model][column].items()
            ])
            colname = cn(column)
            if colname in RESERVED_WORDS:
                print_('    # FIXME: "%s" is a reserved word, renamed.' %
                       colname)
                colname = colname + '_'
            comments = ''
            if column_info.field_class is UnknownFieldType:
                # Preserve the raw column type as a hint for the user.
                comments = '  # %s' % column_info.raw_column_type
            print_('    %s = %s(%s)%s' % (
                colname,
                column_info.field_class.__name__,
                field_params,
                comments))
        print_('')

        print_('    class Meta:')
        print_('        db_table = \'%s\'' % model)
        print_('')
        seen.add(model)

    seen = set()
    for model, cols in ds(models):
        if model not in seen:
            if not tables or model in tables:
                print_model(model, seen)
def decorator(klass):
    """Replace *klass* with an empty stub when the skip predicate holds."""
    if not expression():
        return klass
    if TEST_VERBOSITY > 0:
        print_('Skipping %s tests.' % klass.__name__)

    class Dummy(object):
        pass
    return Dummy
def inner(self):
    """Run the wrapped test against every configured database backend."""
    for database, db_name in DATABASES:
        if not database:
            # Driver missing for this backend; note it and move on.
            print_('Skipping %s, driver not found' % db_name)
            continue
        self.create_tables(database)
        fn(self, database, db_name)
def print_models(introspector, tables=None):
    """Introspect the database and print generated model classes to stdout."""
    database = introspector.introspect()

    print_(TEMPLATE % (
        introspector.get_database_class().__name__,
        introspector.get_database_name(),
        repr(introspector.get_database_kwargs())))

    def _print_table(table, seen, accum=None):
        # Depth-first emit: print FK destination tables before this one.
        accum = accum or []
        foreign_keys = database.foreign_keys[table]
        for foreign_key in foreign_keys:
            dest = foreign_key.dest_table

            # In the event the destination table has already been pushed
            # for printing, then we have a reference cycle.
            if dest in accum and table not in accum:
                print_('# Possible reference cycle: %s' % dest)

            # If this is not a self-referential foreign key, and we have
            # not already processed the destination table, do so now.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_('class %s(BaseModel):' % database.model_names[table])
        columns = database.columns[table]
        primary_keys = database.primary_keys[table]
        for name, column in sorted(columns.items()):
            # Skip the implicit "id" primary key; peewee adds it.
            if name == 'id' and column.field_class in introspector.pk_classes:
                continue
            if column.primary_key and len(primary_keys) > 1:
                # If we have a CompositeKey, then we do not want to explicitly
                # mark the columns as being primary keys.
                column.primary_key = False

            print_('    %s' % column.get_field())

        print_('')
        print_('    class Meta:')
        print_('        db_table = \'%s\'' % table)
        if introspector.schema:
            print_('        schema = \'%s\'' % introspector.schema)
        if len(primary_keys) > 1:
            pk_field_names = sorted([
                field.name for col, field in columns.items()
                if col in primary_keys])
            pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
            print_('        primary_key = CompositeKey(%s)' % pk_list)
        print_('')

        seen.add(table)

    seen = set()
    for table in sorted(database.model_names.keys()):
        if table not in seen:
            if not tables or table in tables:
                _print_table(table, seen)
def inner(self):
    """Run the wrapped test with an introspector for each live backend."""
    for database, identifier in DATABASES:
        if not database:
            # Driver missing; only mention the skip when verbose.
            if TEST_VERBOSITY > 0:
                print_('Skipping %s, driver not found' % identifier)
            continue
        introspector = Introspector.from_database(database)
        self.create_tables(database)
        fn(self, introspector)
def inner(self):
    """Run the wrapped test with an introspector for each live backend."""
    for database_type, database in DATABASES:
        if not database:
            # Driver missing; only mention the skip when verbose.
            if TEST_VERBOSITY > 0:
                print_('Skipping %s, driver not found' % database_type)
            continue
        introspector = make_introspector(
            database_type, database.database)
        self.create_tables(database)
        fn(self, introspector)
def print_models(engine, database, tables, **connect):
    """Introspect *database* via *engine* and print peewee models to stdout."""
    schema = connect.get('schema')
    db = get_conn(engine, database, **connect)

    models, table_to_model, table_fks, col_meta = introspect(db, schema)

    # write generated code to standard out
    print_(frame % (db.get_conn_class().__name__, database, repr(connect)))

    # print the models
    def print_model(model, seen, accum=None):
        # Depth-first: emit FK target models before the referencing model.
        accum = accum or []

        for _, rel_table, _ in table_fks[model]:
            if rel_table in accum and model not in accum:
                print_('# POSSIBLE REFERENCE CYCLE: %s' %
                       table_to_model[rel_table])
            if rel_table not in seen and rel_table not in accum:
                seen.add(rel_table)
                if rel_table != model:
                    print_model(rel_table, seen, accum + [model])

        ttm = table_to_model[model]
        print_('class %s(BaseModel):' % ttm)
        cols = models[model]
        for column, (field_class, nullable) in ds(cols):
            # Skip the implicit integer "id" primary key; peewee adds it.
            if column == 'id' and field_class in (
                    IntegerField, PrimaryKeyField):
                continue
            field_params = ', '.join([
                '%s=%s' % (k, v)
                for k, v in col_meta[model][column].items()
            ])
            colname = cn(column)
            if colname in reserved_words:
                print_('    # FIXME: "%s" is a reserved word' % colname)
                # Prefixing "#" comments the generated line out so the
                # emitted module still imports cleanly.
                colname = '#' + colname
            print_('    %s = %s(%s)' % (colname,
                                        field_class.__name__,
                                        field_params))
        print_('')

        print_('    class Meta:')
        print_('        db_table = \'%s\'' % model)
        print_('')
        seen.add(model)

    seen = set()
    for model, cols in ds(models):
        if model not in seen:
            if not tables or model in tables:
                print_model(model, seen)
def print_models(self, tables=None):
    """Introspect the connected database and print model classes to stdout."""
    columns, foreign_keys, model_names = self.introspect()
    print_(
        TEMPLATE % (
            self.metadata.database_class.__name__,
            self.metadata.database,
            repr(self.metadata.database_kwargs))
    )

    def _print_table(table, seen, accum=None):
        # Depth-first emit: print FK destination tables before this one.
        accum = accum or []
        for foreign_key in foreign_keys[table]:
            dest = foreign_key.dest_table

            # In the event the destination table has already been pushed
            # for printing, then we have a reference cycle.
            if dest in accum and table not in accum:
                print_("# Possible reference cycle: %s" % foreign_key)

            # If this is not a self-referential foreign key, and we have
            # not already processed the destination table, do so now.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_("class %s(BaseModel):" % model_names[table])
        for name, column in sorted(columns[table].items()):
            # Skip the implicit "id" primary key; peewee adds it.
            if name == "id" and column.field_class in self.pk_classes:
                continue
            print_("    %s" % column.get_field())

        print_("")
        print_("    class Meta:")
        print_("        db_table = '%s'" % table)
        print_("")
        seen.add(table)

    seen = set()
    for table in sorted(model_names.keys()):
        if table not in seen:
            if not tables or table in tables:
                _print_table(table, seen)
def print_models(introspector, tables=None):
    """Introspect the database and print generated model classes to stdout."""
    columns, foreign_keys, model_names = introspector.introspect()
    print_(TEMPLATE % (
        introspector.get_database_class().__name__,
        introspector.get_database_name(),
        repr(introspector.get_database_kwargs())))

    def _print_table(table, seen, accum=None):
        # Depth-first emit: print FK destination tables before this one.
        accum = accum or []
        for foreign_key in foreign_keys[table]:
            dest = foreign_key.dest_table

            # In the event the destination table has already been pushed
            # for printing, then we have a reference cycle.
            if dest in accum and table not in accum:
                print_('# Possible reference cycle: %s' % foreign_key)

            # If this is not a self-referential foreign key, and we have
            # not already processed the destination table, do so now.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_('class %s(BaseModel):' % model_names[table])
        for name, column in sorted(columns[table].items()):
            # Skip the implicit "id" primary key; peewee adds it.
            if name == 'id' and column.field_class in introspector.pk_classes:
                continue
            print_('    %s' % column.get_field())

        print_('')
        print_('    class Meta:')
        print_('        db_table = \'%s\'' % table)
        print_('')
        seen.add(table)

    seen = set()
    for table in sorted(model_names.keys()):
        if table not in seen:
            if not tables or table in tables:
                _print_table(table, seen)
def collect_modules(options):
    """Assemble the list of test modules to run based on *options*.

    Modules whose third entry is None are required: an ImportError from
    them propagates.  The others print the given message and are skipped.
    """
    modules = []
    xtra = lambda op: op or options.extra or options.all
    suites = (
        (options.apsw, 'tests_apsw',
         'Unable to import apsw tests, skipping'),
        (options.gfk, 'tests_gfk', None),
        (options.migrations, 'tests_migrate',
         'Unable to import migration tests, skipping'),
        (options.postgres_ext, 'tests_postgres',
         'Unable to import postgres-ext tests, skipping'),
        (options.proxy, 'tests_proxy', None),
        (options.pwiz, 'tests_pwiz', None),
        (options.signals, 'tests_signals', None),
        (options.sqlite_ext, 'tests_sqlite_ext', None),
        (options.kv, 'tests_kv', None),
        (options.test_utils, 'tests_test_utils', None),
    )
    for selected, name, skip_message in suites:
        if not xtra(selected):
            continue
        try:
            module = __import__('playhouse.%s' % name, fromlist=[name])
        except ImportError:
            if skip_message is None:
                raise
            print_(skip_message)
        else:
            modules.append(module)

    if not modules or options.all:
        import tests
        modules.insert(0, tests)
    return modules
def get_foreign_keys(self, table, schema=None):
    """Extract foreign-key mappings for *table* from its CREATE TABLE sql.

    Returns a list of ForeignKeyMapping tuples, or an empty list when the
    table definition cannot be parsed.
    """
    query = """
        SELECT sql
        FROM sqlite_master
        WHERE (tbl_name = ? AND type = ?)"""
    cursor = self.execute(query, table, 'table')
    table_definition = cursor.fetchone()[0].strip()
    try:
        # Raw strings: '\(' and '[\s...]' are invalid escape sequences in
        # non-raw literals and warn (eventually error) on modern Pythons.
        columns = re.search(
            r'\((.+)\)',
            table_definition,
            re.MULTILINE | re.DOTALL).groups()[0]
    except AttributeError:
        print_('Unable to read table definition for "%s"' % table)
        return []

    # Replace any new-lines or other junk with whitespace.
    columns = re.sub(r'[\s\n\r]+', ' ', columns).strip()

    fks = []
    for column_def in columns.split(','):
        column_def = column_def.strip()
        match = re.search(self.re_foreign_key, column_def, re.I)
        if not match:
            continue
        column, dest_table, dest_column = [
            s.strip('"') for s in match.groups()]
        fks.append(ForeignKeyMapping(
            table=table,
            column=column,
            dest_table=dest_table,
            dest_column=dest_column))
    return fks
# Re-create the indexes. ('CREATE UNIQUE INDEX "indexmodel_data" ' 'ON "indexmodel" ("data")', []), ('CREATE UNIQUE INDEX "indexmodel_first_last_name" ' 'ON "indexmodel" ("first", "last_name")', []) ]) if psycopg2: pg_db = PostgresqlDatabase('peewee_test') class PostgresqlMigrationTestCase(BaseMigrationTestCase, unittest.TestCase): database = pg_db migrator_class = PostgresqlMigrator elif TEST_VERBOSITY > 0: print_('Skipping postgres migrations, driver not found.') if mysql: mysql_db = MySQLDatabase('peewee_test') class MySQLMigrationTestCase(BaseMigrationTestCase, unittest.TestCase): database = mysql_db migrator_class = MySQLMigrator # MySQL does not raise an exception when adding a not null constraint # to a column that contains NULL values. _exception_add_not_null = False elif TEST_VERBOSITY > 0: print_('Skipping mysql migrations, driver not found.')
def print_models(introspector, tables=None, preserve_order=False,
                 include_views=False, ignore_unknown=False, snake_case=True):
    """Introspect the database and print generated peewee models to stdout.

    tables         -- restrict output to these table names (when truthy).
    preserve_order -- keep column order from the database instead of sorting.
    include_views  -- also generate models for views.
    ignore_unknown -- comment out columns whose type could not be mapped.
    snake_case     -- convert names to snake_case during introspection.
    """
    database = introspector.introspect(table_names=tables,
                                       include_views=include_views,
                                       snake_case=snake_case)

    db_kwargs = introspector.get_database_kwargs()
    header = HEADER % (
        introspector.get_additional_imports(),
        introspector.get_database_class().__name__,
        introspector.get_database_name(),
        ', **%s' % repr(db_kwargs) if db_kwargs else '')
    print_(header)

    if not ignore_unknown:
        print_(UNKNOWN_FIELD)

    print_(BASE_MODEL)

    def _print_table(table, seen, accum=None):
        # Depth-first emit: print FK destination tables before this one.
        accum = accum or []
        foreign_keys = database.foreign_keys[table]
        for foreign_key in foreign_keys:
            dest = foreign_key.dest_table

            # In the event the destination table has already been pushed
            # for printing, then we have a reference cycle.
            if dest in accum and table not in accum:
                print_('# Possible reference cycle: %s' % dest)

            # If this is not a self-referential foreign key, and we have
            # not already processed the destination table, do so now.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_('class %s(BaseModel):' % database.model_names[table])
        columns = database.columns[table].items()
        if not preserve_order:
            columns = sorted(columns)
        primary_keys = database.primary_keys[table]
        for name, column in columns:
            # Skip only a lone implicit "id" primary key; peewee adds it.
            skip = all([
                name in primary_keys,
                name == 'id',
                len(primary_keys) == 1,
                column.field_class in introspector.pk_classes])
            if skip:
                continue
            if column.primary_key and len(primary_keys) > 1:
                # If we have a CompositeKey, then we do not want to explicitly
                # mark the columns as being primary keys.
                column.primary_key = False

            is_unknown = column.field_class is UnknownField
            if is_unknown and ignore_unknown:
                disp = '%s - %s' % (column.name, column.raw_column_type or '?')
                print_('    # %s' % disp)
            else:
                print_('    %s' % column.get_field())

        print_('')
        print_('    class Meta:')
        print_('        table_name = \'%s\'' % table)
        multi_column_indexes = database.multi_column_indexes(table)
        if multi_column_indexes:
            print_('        indexes = (')
            for fields, unique in sorted(multi_column_indexes):
                print_('            ((%s), %s),' % (
                    ', '.join("'%s'" % field for field in fields),
                    unique,
                ))
            print_('        )')

        if introspector.schema:
            print_('        schema = \'%s\'' % introspector.schema)
        if len(primary_keys) > 1:
            pk_field_names = sorted([
                field.name for col, field in columns
                if col in primary_keys])
            pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
            print_('        primary_key = CompositeKey(%s)' % pk_list)
        elif not primary_keys:
            # No primary key at all -- tell peewee explicitly.
            print_('        primary_key = False')
        print_('')

        seen.add(table)

    seen = set()
    for table in sorted(database.model_names.keys()):
        if table not in seen:
            if not tables or table in tables:
                _print_table(table, seen)
if __name__ == '__main__':
    # Command-line entry point: parse connection options, then generate
    # and print model definitions for the named database.
    parser = OptionParser(usage='usage: %prog [options] database_name')
    ao = parser.add_option
    ao('-H', '--host', dest='host')
    ao('-p', '--port', dest='port', type='int')
    ao('-u', '--user', dest='user')
    ao('-P', '--password', dest='password')
    ao('-e', '--engine', dest='engine', default='postgresql')
    ao('-s', '--schema', dest='schema')
    ao('-t', '--tables', dest='tables')

    options, args = parser.parse_args()
    ops = ('host', 'port', 'user', 'password', 'schema')
    # Only forward connection options that were actually supplied.
    connect = dict(
        (o, getattr(options, o)) for o in ops if getattr(options, o))

    if len(args) < 1:
        print_('error: missing required parameter "database"')
        parser.print_help()
        sys.exit(1)

    database = args[-1]

    # MySQLdb expects "passwd" rather than "password".
    if options.engine == 'mysql' and 'password' in connect:
        connect['passwd'] = connect.pop('password', None)

    if options.tables:
        tables = [x for x in options.tables.split(',') if x]
    else:
        tables = []

    print_models(options.engine, database, tables, **connect)
def collect_modules(options):
    """Assemble the list of test modules to run based on *options*.

    Modules whose third entry is None are required: an ImportError from
    them propagates.  The others print the given message and are skipped.
    """
    modules = []
    xtra = lambda op: op or options.extra or options.all
    suites = (
        (options.apsw, 'tests_apsw',
         'Unable to import apsw tests, skipping'),
        (options.berkeleydb, 'tests_berkeleydb',
         'Unable to import berkeleydb tests, skipping'),
        (options.csv, 'tests_csv_utils', None),
        (options.dataset, 'tests_dataset', None),
        (options.db_url, 'tests_db_url', None),
        (options.djpeewee, 'tests_djpeewee', None),
        (options.gfk, 'tests_gfk', None),
        (options.kv, 'tests_kv', None),
        (options.migrations, 'tests_migrate',
         'Unable to import migration tests, skipping'),
        (options.pool, 'tests_pool',
         'Unable to import connection pool tests, skipping'),
        (options.postgres_ext, 'tests_postgres',
         'Unable to import postgres-ext tests, skipping'),
        (options.pwiz, 'tests_pwiz', None),
        (options.read_slave, 'tests_read_slave', None),
        (options.reflection, 'tests_reflection', None),
        (options.signals, 'tests_signals', None),
        (options.shortcuts, 'tests_shortcuts', None),
        (options.sqlcipher, 'tests_sqlcipher_ext',
         'Unable to import pysqlcipher tests, skipping'),
        (options.sqlite_ext, 'tests_sqlite_ext', None),
        (options.test_utils, 'tests_test_utils', None),
    )
    for selected, name, skip_message in suites:
        if not xtra(selected):
            continue
        try:
            module = __import__('playhouse.%s' % name, fromlist=[name])
        except ImportError:
            if skip_message is None:
                raise
            print_(skip_message)
        else:
            modules.append(module)

    if not modules or options.all:
        import tests
        modules.insert(0, tests)
    return modules
def print_header(cmd_line, introspector):
    """Print the generated-code banner (command, date, db, peewee version)."""
    now = datetime.datetime.now()
    banner = (
        "# Code generated by:",
        "# python -m pwiz %s" % cmd_line,
        "# Date: %s" % now.strftime("%B %d, %Y %I:%M%p"),
        "# Database: %s" % introspector.get_database_name(),
        "# Peewee version: %s" % peewee_version,
        "",
    )
    for line in banner:
        print_(line)
def _print_table(table, seen, accum=None):
    # Print the model class for *table*, recursively emitting any
    # foreign-key destination tables first.  Reads `foreign_keys`,
    # `model_names`, `columns` and `self` from the enclosing scope;
    # *accum* tracks the current recursion path for cycle detection.
    accum = accum or []
    for foreign_key in foreign_keys[table]:
        dest = foreign_key.dest_table

        # In the event the destination table has already been pushed
        # for printing, then we have a reference cycle.
        if dest in accum and table not in accum:
            print_("# Possible reference cycle: %s" % foreign_key)

        # If this is not a self-referential foreign key, and we have
        # not already processed the destination table, do so now.
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_("class %s(BaseModel):" % model_names[table])
    for name, column in sorted(columns[table].items()):
        # Skip the implicit "id" primary key; peewee adds it.
        if name == "id" and column.field_class in self.pk_classes:
            continue
        print_("    %s" % column.get_field())

    print_("")
    print_("    class Meta:")
    print_("        db_table = '%s'" % table)
    print_("")
    seen.add(table)
def _print_table(table, seen, accum=None):
    # Print the model class for *table*, recursively emitting any
    # foreign-key destination tables first.  Reads `foreign_keys`,
    # `model_names`, `columns` and `introspector` from the enclosing
    # scope; *accum* tracks the recursion path for cycle detection.
    accum = accum or []
    for foreign_key in foreign_keys[table]:
        dest = foreign_key.dest_table

        # In the event the destination table has already been pushed
        # for printing, then we have a reference cycle.
        if dest in accum and table not in accum:
            print_('# Possible reference cycle: %s' % foreign_key)

        # If this is not a self-referential foreign key, and we have
        # not already processed the destination table, do so now.
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_('class %s(BaseModel):' % model_names[table])
    for name, column in sorted(columns[table].items()):
        # Skip the implicit "id" primary key; peewee adds it.
        if name == 'id' and column.field_class in introspector.pk_classes:
            continue
        print_('    %s' % column.get_field())

    print_('')
    print_('    class Meta:')
    print_('        db_table = \'%s\'' % table)
    print_('')
    seen.add(table)
def log_console(s):
    """Emit *s* only when running at high test verbosity."""
    if TEST_VERBOSITY <= 1:
        return
    print_(s)
def _print_table(table, seen, accum=None):
    # Print the model class for *table*, recursively emitting any
    # foreign-key destination tables first.  Reads `database`,
    # `preserve_order` and `introspector` from the enclosing scope;
    # *accum* tracks the recursion path for cycle detection.
    accum = accum or []
    foreign_keys = database.foreign_keys[table]
    for foreign_key in foreign_keys:
        dest = foreign_key.dest_table

        # In the event the destination table has already been pushed
        # for printing, then we have a reference cycle.
        if dest in accum and table not in accum:
            print_('# Possible reference cycle: %s' % dest)

        # If this is not a self-referential foreign key, and we have
        # not already processed the destination table, do so now.
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_('class %s(BaseModel):' % database.model_names[table])
    columns = database.columns[table].items()
    if not preserve_order:
        columns = sorted(columns)
    primary_keys = database.primary_keys[table]
    for name, column in columns:
        # Skip only a lone implicit "id" primary key; peewee adds it.
        skip = all([
            name in primary_keys,
            name == 'id',
            len(primary_keys) == 1,
            column.field_class in introspector.pk_classes
        ])
        if skip:
            continue
        if column.primary_key and len(primary_keys) > 1:
            # If we have a CompositeKey, then we do not want to explicitly
            # mark the columns as being primary keys.
            column.primary_key = False

        print_('    %s' % column.get_field())

    print_('')
    print_('    class Meta:')
    print_('        table_name = \'%s\'' % table)
    multi_column_indexes = database.multi_column_indexes(table)
    if multi_column_indexes:
        print_('        indexes = (')
        for fields, unique in sorted(multi_column_indexes):
            print_('            ((%s), %s),' % (
                ', '.join("'%s'" % field for field in fields),
                unique,
            ))
        print_('        )')

    if introspector.schema:
        print_('        schema = \'%s\'' % introspector.schema)
    if len(primary_keys) > 1:
        pk_field_names = sorted([
            field.name for col, field in columns
            if col in primary_keys])
        pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
        print_('        primary_key = CompositeKey(%s)' % pk_list)
    elif not primary_keys:
        # No primary key at all -- tell peewee explicitly.
        print_('        primary_key = False')
    print_('')

    seen.add(table)
def print_header(cmd_line, introspector):
    """Print the generated-code banner (command, date, db, peewee version)."""
    timestamp = datetime.datetime.now()
    print_('# Code generated by:')
    print_('# python -m pwiz %s' % cmd_line)
    print_('# Date: %s' % timestamp.strftime('%B %d, %Y %I:%M%p'))
    print_('# Database: %s' % introspector.get_database_name())
    print_('# Peewee version: %s' % peewee_version)
    print_('')
    return modules


if __name__ == '__main__':
    # Test-runner entry point: collect the selected suites, run them,
    # then clean up temp database files.
    parser = get_option_parser()
    options, args = parser.parse_args()

    if options.engine:
        os.environ['PEEWEE_TEST_BACKEND'] = options.engine
    os.environ['PEEWEE_TEST_VERBOSITY'] = str(options.verbosity)

    from peewee import print_

    suite = unittest.TestSuite()
    for module in collect_modules(options):
        print_('Adding tests for "%s"' % module.__name__)
        module_suite = unittest.TestLoader().loadTestsFromModule(module)
        suite.addTest(module_suite)

    failures, errors = runtests(suite, options.verbosity)

    # Exit code signals the outcome: 2 for errors, 1 for failures.
    if errors:
        sys.exit(2)
    elif failures:
        sys.exit(1)

    # Best-effort cleanup of databases created by the test runs.
    files_to_delete = ['tmp.db', 'tmp.bdb.db', 'test_sqlcipher.db']
    paths_to_delete = ['tmp.bdb.db-journal']
    for filename in files_to_delete:
        if os.path.exists(filename):
            os.unlink(filename)
    for path in paths_to_delete:
        self.assertEqual(t2_db.person, None)


# Always run the migration tests against sqlite.
class SqliteMigrationTestCase(BaseMigrationTestCase, unittest.TestCase):
    database = sqlite_db
    migrator_class = SqliteMigrator


if psycopg2:
    pg_db = PostgresqlDatabase('peewee_test')

    # Run the shared migration tests against Postgres when the driver
    # is available.
    class PostgresqlMigrationTestCase(BaseMigrationTestCase, unittest.TestCase):
        database = pg_db
        migrator_class = PostgresqlMigrator
elif TEST_VERBOSITY > 0:
    print_('Skipping postgres migrations, driver not found.')

if mysql:
    mysql_db = MySQLDatabase('peewee_test')

    # Run the shared migration tests against MySQL when the driver
    # is available.
    class MySQLMigrationTestCase(BaseMigrationTestCase, unittest.TestCase):
        database = mysql_db
        migrator_class = MySQLMigrator

        # MySQL does not raise an exception when adding a not null constraint
        # to a column that contains NULL values.
        _exception_add_not_null = False
elif TEST_VERBOSITY > 0:
    print_('Skipping mysql migrations, driver not found.')
def print_model(model, seen, accum=None):
    # Print the model class for *model*, recursively emitting any
    # foreign-key target models first.  Reads `table_fks`,
    # `table_to_model`, `models`, `col_meta` and helpers from the
    # enclosing scope; *accum* tracks the recursion path.
    accum = accum or []

    for _, rel_table, _ in table_fks[model]:
        if rel_table in accum and model not in accum:
            print_('# POSSIBLE REFERENCE CYCLE: %s' %
                   table_to_model[rel_table])
        if rel_table not in seen and rel_table not in accum:
            seen.add(rel_table)
            if rel_table != model:
                print_model(rel_table, seen, accum + [model])

    ttm = table_to_model[model]
    print_('class %s(BaseModel):' % ttm)
    cols = models[model]
    for column, (field_class, nullable) in ds(cols):
        # Skip the implicit integer "id" primary key; peewee adds it.
        if column == 'id' and field_class in (
                IntegerField, PrimaryKeyField):
            continue
        field_params = ', '.join(
            ['%s=%s' % (k, v)
             for k, v in col_meta[model][column].items()])
        colname = cn(column)
        if colname in reserved_words:
            print_('    # FIXME: "%s" is a reserved word' % colname)
            # Prefixing "#" comments the generated line out so the
            # emitted module still imports cleanly.
            colname = '#' + colname
        print_('    %s = %s(%s)' % (colname,
                                    field_class.__name__,
                                    field_params))
    print_('')

    print_('    class Meta:')
    print_('        db_table = \'%s\'' % model)
    print_('')
    seen.add(model)
def _print_table(table, seen, accum=None):
    # Print the model class for *table*, recursively emitting any
    # foreign-key destination tables first.  Reads `database` and
    # `introspector` from the enclosing scope; *accum* tracks the
    # recursion path for cycle detection.
    accum = accum or []
    foreign_keys = database.foreign_keys[table]
    for foreign_key in foreign_keys:
        dest = foreign_key.dest_table

        # In the event the destination table has already been pushed
        # for printing, then we have a reference cycle.
        if dest in accum and table not in accum:
            print_('# Possible reference cycle: %s' % foreign_key)

        # If this is not a self-referential foreign key, and we have
        # not already processed the destination table, do so now.
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_('class %s(BaseModel):' % database.model_names[table])
    columns = database.columns[table]
    primary_keys = database.primary_keys[table]
    for name, column in sorted(columns.items()):
        # Skip the implicit "id" primary key; peewee adds it.
        if name == 'id' and column.field_class in introspector.pk_classes:
            continue
        if column.primary_key and len(primary_keys) > 1:
            # If we have a CompositeKey, then we do not want to explicitly
            # mark the columns as being primary keys.
            column.primary_key = False

        print_('    %s' % column.get_field())

    print_('')
    print_('    class Meta:')
    print_('        db_table = \'%s\'' % table)
    if introspector.schema:
        print_('        schema = \'%s\'' % introspector.schema)
    if len(primary_keys) > 1:
        pk_field_names = sorted([
            field.name for col, field in columns.items()
            if col in primary_keys
        ])
        pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
        print_('        primary_key = CompositeKey(%s)' % pk_list)
    print_('')

    seen.add(table)
def print_models(introspector, tables=None, preserve_order=False):
    """Introspect the database and print generated model classes to stdout.

    *tables* restricts output; *preserve_order* keeps database column
    order instead of sorting alphabetically.
    """
    database = introspector.introspect(table_names=tables)
    print_(TEMPLATE % (introspector.get_database_class().__name__,
                       introspector.get_database_name(),
                       repr(introspector.get_database_kwargs())))

    def _print_table(table, seen, accum=None):
        # Depth-first emit: print FK destination tables before this one.
        accum = accum or []
        foreign_keys = database.foreign_keys[table]
        for foreign_key in foreign_keys:
            dest = foreign_key.dest_table

            # In the event the destination table has already been pushed
            # for printing, then we have a reference cycle.
            if dest in accum and table not in accum:
                print_('# Possible reference cycle: %s' % dest)

            # If this is not a self-referential foreign key, and we have
            # not already processed the destination table, do so now.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_('class %s(BaseModel):' % database.model_names[table])
        columns = database.columns[table].items()
        if not preserve_order:
            columns = sorted(columns)
        primary_keys = database.primary_keys[table]
        for name, column in columns:
            # Skip only a lone implicit "id" primary key; peewee adds it.
            skip = all([
                name in primary_keys,
                name == 'id',
                len(primary_keys) == 1,
                column.field_class in introspector.pk_classes
            ])
            if skip:
                continue
            if column.primary_key and len(primary_keys) > 1:
                # If we have a CompositeKey, then we do not want to explicitly
                # mark the columns as being primary keys.
                column.primary_key = False

            print_('    %s' % column.get_field())

        print_('')
        print_('    class Meta:')
        print_('        db_table = \'%s\'' % table)
        if introspector.schema:
            print_('        schema = \'%s\'' % introspector.schema)
        if len(primary_keys) > 1:
            pk_field_names = sorted([
                field.name for col, field in columns
                if col in primary_keys])
            pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
            print_('        primary_key = CompositeKey(%s)' % pk_list)
        print_('')

        seen.add(table)

    seen = set()
    for table in sorted(database.model_names.keys()):
        if table not in seen:
            if not tables or table in tables:
                _print_table(table, seen)
def inner(*args, **kwargs):
    """Skip the wrapped test when the predicate holds; run it otherwise."""
    if not expression():
        return fn(*args, **kwargs)
    if TEST_VERBOSITY > 1:
        print_('Skipping %s test.' % fn.__name__)
        Category.create(name='rants', parent=essays)
        Category.create(name='poetry', parent=books)

        # Walk the closure table from the new root and verify every
        # category appears at the expected depth.
        query = (Category
                 .select(Category.name, Closure.depth)
                 .join(Closure, on=(Category.id == Closure.id))
                 .where(Closure.root == new_root)
                 .order_by(Closure.depth, Category.name)
                 .tuples())
        self.assertEqual(list(query), [
            ('products', 0),
            ('books', 1),
            ('magazines', 1),
            ('essays', 2),
            ('fiction', 2),
            ('non-fiction', 2),
            ('poetry', 2),
            ('biographies', 3),
            ('classics', 3),
            ('rants', 3),
            ('scifi', 3),
            ('westerns', 3),
            ('hard scifi', 4),
        ])

    def tearDown(self):
        # Unload the sqlite closure extension after each test run.
        ext_db.unload_extension(CLOSURE_EXTENSION.rstrip('.so'))

elif TEST_VERBOSITY > 0:
    print_('Skipping transitive closure integration tests.')
def _print_table(table, seen, accum=None):
    # Print the model class for *table*, recursively emitting any
    # foreign-key destination tables first.  Reads `database`,
    # `preserve_order` and `introspector` from the enclosing scope;
    # *accum* tracks the recursion path for cycle detection.
    accum = accum or []
    foreign_keys = database.foreign_keys[table]
    for foreign_key in foreign_keys:
        dest = foreign_key.dest_table

        # In the event the destination table has already been pushed
        # for printing, then we have a reference cycle.
        if dest in accum and table not in accum:
            print_('# Possible reference cycle: %s' % dest)

        # If this is not a self-referential foreign key, and we have
        # not already processed the destination table, do so now.
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_('class %s(BaseModel):' % database.model_names[table])
    columns = database.columns[table].items()
    if not preserve_order:
        columns = sorted(columns)
    primary_keys = database.primary_keys[table]
    for name, column in columns:
        # Skip only a lone implicit "id" primary key; peewee adds it.
        skip = all([
            name in primary_keys,
            name == 'id',
            len(primary_keys) == 1,
            column.field_class in introspector.pk_classes])
        if skip:
            continue
        if column.primary_key and len(primary_keys) > 1:
            # If we have a CompositeKey, then we do not want to explicitly
            # mark the columns as being primary keys.
            column.primary_key = False

        print_('    %s' % column.get_field())

    print_('')
    print_('    class Meta:')
    print_('        db_table = \'%s\'' % table)
    multi_column_indexes = database.multi_column_indexes(table)
    if multi_column_indexes:
        print_('        indexes = (')
        for fields, unique in sorted(multi_column_indexes):
            print_('            ((%s), %s),' % (
                ', '.join("'%s'" % field for field in fields),
                unique,
            ))
        print_('        )')

    if introspector.schema:
        print_('        schema = \'%s\'' % introspector.schema)
    if len(primary_keys) > 1:
        pk_field_names = sorted([
            field.name for col, field in columns
            if col in primary_keys])
        pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
        print_('        primary_key = CompositeKey(%s)' % pk_list)
    print_('')

    seen.add(table)
def print_models(introspector, tables=None, preserve_order=False):
    """Introspect the database and write peewee model definitions for
    its tables to standard output.

    :param introspector: introspector wrapping the database to inspect.
    :param tables: optional collection of table names to restrict
        output to; falsy means all tables.
    :param preserve_order: when true, emit columns in introspection
        order instead of sorted alphabetically.
    """
    database = introspector.introspect(table_names=tables)

    # File header: imports plus database connection boilerplate.
    print_(TEMPLATE % (
        introspector.get_database_class().__name__,
        introspector.get_database_name(),
        repr(introspector.get_database_kwargs())))

    def _print_table(table, seen, accum=None):
        # Emit `table` as a model class, recursing into the tables it
        # references so dependencies are printed first. `seen` tracks
        # tables already emitted; `accum` is the recursion path and is
        # used to detect reference cycles.
        accum = accum or []
        for foreign_key in database.foreign_keys[table]:
            dest = foreign_key.dest_table
            # Destination already queued on the path -> likely cycle.
            if dest in accum and table not in accum:
                print_('# Possible reference cycle: %s' % dest)
            # Recurse into unprinted, non-self-referential targets.
            if dest not in seen and dest not in accum:
                seen.add(dest)
                if dest != table:
                    _print_table(dest, seen, accum + [table])

        print_('class %s(BaseModel):' % database.model_names[table])
        columns = database.columns[table].items()
        if not preserve_order:
            columns = sorted(columns)
        primary_keys = database.primary_keys[table]
        for name, column in columns:
            # Skip the conventional auto-increment "id" primary key,
            # which peewee supplies implicitly.
            if (name in primary_keys and name == 'id'
                    and len(primary_keys) == 1
                    and column.field_class in introspector.pk_classes):
                continue
            if column.primary_key and len(primary_keys) > 1:
                # Composite primary keys are declared via Meta below,
                # so individual columns must not be marked primary.
                column.primary_key = False
            print_('    %s' % column.get_field())

        print_('')
        print_('    class Meta:')
        print_("        db_table = '%s'" % table)
        multi_column_indexes = database.multi_column_indexes(table)
        if multi_column_indexes:
            print_('        indexes = (')
            for fields, unique in sorted(multi_column_indexes):
                quoted = ', '.join("'%s'" % field for field in fields)
                print_('            ((%s), %s),' % (quoted, unique))
            print_('        )')
        if introspector.schema:
            print_("        schema = '%s'" % introspector.schema)
        if len(primary_keys) > 1:
            pk_names = sorted(field.name for col, field in columns
                              if col in primary_keys)
            print_('        primary_key = CompositeKey(%s)'
                   % ', '.join("'%s'" % pk for pk in pk_names))
        print_('')
        seen.add(table)

    seen = set()
    for table in sorted(database.model_names.keys()):
        if table not in seen and (not tables or table in tables):
            _print_table(table, seen)
def collect_modules(options):
    """Build the list of test modules to run, based on parsed command
    line `options`.

    Suites with third-party requirements are imported inside
    try/except so a missing dependency skips that suite (printing a
    notice) instead of aborting the run. Returns the list of modules;
    the core `tests` module is prepended when nothing was selected or
    --all was given.
    """
    modules = []

    # PEP 8 (E731): use a def, not a name-bound lambda.
    def xtra(option_value):
        # An individual suite is enabled by its own flag, or by the
        # blanket --extra / --all flags.
        return option_value or options.extra or options.all

    if xtra(options.apsw):
        try:
            from playhouse.tests import test_apsw
            modules.append(test_apsw)
        except ImportError:
            print_('Unable to import apsw tests, skipping')
    if xtra(options.berkeleydb):
        try:
            from playhouse.tests import test_berkeleydb
            modules.append(test_berkeleydb)
        except ImportError:
            print_('Unable to import berkeleydb tests, skipping')
    if xtra(options.csv):
        from playhouse.tests import test_csv_utils
        modules.append(test_csv_utils)
    if xtra(options.dataset):
        from playhouse.tests import test_dataset
        modules.append(test_dataset)
    if xtra(options.db_url):
        from playhouse.tests import test_db_url
        modules.append(test_db_url)
    if xtra(options.djpeewee):
        from playhouse.tests import test_djpeewee
        modules.append(test_djpeewee)
    if xtra(options.fields):
        from playhouse.tests import test_extra_fields
        from playhouse.tests import test_manytomany
        modules.append(test_extra_fields)
        # The many-to-many suite may also be added by its own flag;
        # guard against double inclusion.
        if test_manytomany not in modules:
            modules.append(test_manytomany)
    if xtra(options.flask):
        try:
            import flask
        except ImportError:
            print_('Unable to import Flask tests, Flask is not installed.')
        else:
            from playhouse.tests import test_flask_utils
            modules.append(test_flask_utils)
    if xtra(options.gfk):
        from playhouse.tests import test_gfk
        modules.append(test_gfk)
    if xtra(options.hybrid):
        from playhouse.tests import test_hybrid
        modules.append(test_hybrid)
    if xtra(options.kv):
        from playhouse.tests import test_kv
        modules.append(test_kv)
    if xtra(options.manytomany):
        from playhouse.tests import test_manytomany
        if test_manytomany not in modules:
            modules.append(test_manytomany)
    if xtra(options.migrations):
        try:
            from playhouse.tests import test_migrate
            modules.append(test_migrate)
        except ImportError:
            print_('Unable to import migration tests, skipping')
    if xtra(options.pool):
        try:
            from playhouse.tests import test_pool
            modules.append(test_pool)
        except ImportError:
            print_('Unable to import connection pool tests, skipping')
    if xtra(options.postgres_ext):
        try:
            from playhouse.tests import test_postgres
            modules.append(test_postgres)
        except ImportError:
            print_('Unable to import postgres-ext tests, skipping')
    if xtra(options.pwiz):
        from playhouse.tests import test_pwiz
        modules.append(test_pwiz)
    if xtra(options.read_slave):
        from playhouse.tests import test_read_slave
        modules.append(test_read_slave)
    if xtra(options.reflection):
        from playhouse.tests import test_reflection
        modules.append(test_reflection)
    if xtra(options.signals):
        from playhouse.tests import test_signals
        modules.append(test_signals)
    if xtra(options.shortcuts):
        from playhouse.tests import test_shortcuts
        modules.append(test_shortcuts)
    if xtra(options.sqlcipher):
        try:
            from playhouse.tests import test_sqlcipher_ext
            modules.append(test_sqlcipher_ext)
        except ImportError:
            print_('Unable to import pysqlcipher tests, skipping')
    if xtra(options.sqlite_ext):
        from playhouse.tests import test_sqlite_ext
        modules.append(test_sqlite_ext)
    if xtra(options.test_utils):
        from playhouse.tests import test_test_utils
        modules.append(test_test_utils)
    if not modules or options.all:
        # Nothing selected (or --all): run the core suite first.
        import tests
        modules.insert(0, tests)
    return modules
def _print_table(table, seen, accum=None):
    # Print the model class for `table`, emitting referenced tables
    # first so foreign key targets are defined before they are used.
    # `seen` holds tables already printed; `accum` is the recursion
    # path, used to detect reference cycles.
    if accum is None:
        accum = []
    for fk in foreign_keys[table]:
        dest = fk.dest_table
        # A destination already on the recursion path is a cycle; this
        # variant reports the foreign key object itself.
        if dest in accum and table not in accum:
            print_('# Possible reference cycle: %s' % fk)
        if dest not in seen and dest not in accum:
            seen.add(dest)
            if dest != table:
                _print_table(dest, seen, accum + [table])

    print_('class %s(BaseModel):' % model_names[table])
    for name, column in sorted(columns[table].items()):
        # The implicit integer "id" primary key is supplied by peewee.
        if name == 'id' and column.field_class in introspector.pk_classes:
            continue
        print_('    %s' % column.get_field())

    print_('')
    print_('    class Meta:')
    print_("        db_table = '%s'" % table)
    table_pks = primary_keys[table]
    if len(table_pks) > 1:
        pk_names = sorted(field.name
                          for col, field in columns[table].items()
                          if col in table_pks)
        print_('        primary_key = CompositeKey(%s)'
               % ', '.join("'%s'" % name for name in pk_names))
    print_('')
    seen.add(table)
def print_model(model, seen, accum=None):
    # Emit the model class for `model`, first recursing into the
    # tables it references so they are defined ahead of this one.
    # `seen` tracks printed tables; `accum` is the recursion path.
    if accum is None:
        accum = []
    for _, rel_table, _ in table_fks[model]:
        if rel_table in accum and model not in accum:
            # Already on the recursion path: report a likely cycle.
            print_('# POSSIBLE REFERENCE CYCLE: %s' % table_to_model[rel_table])
        if rel_table not in seen and rel_table not in accum:
            seen.add(rel_table)
            if rel_table != model:
                print_model(rel_table, seen, accum + [model])

    print_('class %s(BaseModel):' % table_to_model[model])
    for column, column_info in ds(models[model]):
        if column == 'id' and column_info.field_class in pk_classes:
            continue  # implicit primary key; peewee adds it itself
        field_params = ', '.join('%s=%s' % kv
                                 for kv in col_meta[model][column].items())
        colname = cn(column)
        if colname in RESERVED_WORDS:
            print_('    # FIXME: "%s" is a reserved word, renamed.' % colname)
            colname += '_'
        comments = ''
        if column_info.field_class is UnknownFieldType:
            # Preserve the raw column type for unmapped field types.
            comments = ' # %s' % column_info.raw_column_type
        print_('    %s = %s(%s)%s' % (
            colname, column_info.field_class.__name__, field_params, comments))

    print_('')
    print_('    class Meta:')
    print_("        db_table = '%s'" % model)
    print_('')
    seen.add(model)
tj_db = TestingJson.get(tj.pk_expr()) self.assertEqual(tj_db.data, data) def assertItems(self, where, *items): query = TestingJson.select().where(where).order_by(TestingJson.id) self.assertEqual( [item.id for item in query], [item.id for item in items]) def test_lookup(self): t1 = TestingJson.create(data={'k1': 'v1', 'k2': {'k3': 'v3'}}) t2 = TestingJson.create(data={'k1': 'x1', 'k2': {'k3': 'x3'}}) t3 = TestingJson.create(data={'k1': 'v1', 'j2': {'j3': 'v3'}}) self.assertItems((TestingJson.data['k2']['k3'] == 'v3'), t1) self.assertItems((TestingJson.data['k1'] == 'v1'), t1, t3) # Valid key, no matching value. self.assertItems((TestingJson.data['k2'] == 'v1')) # Non-existent key. self.assertItems((TestingJson.data['not-here'] == 'v1')) # Non-existent nested key. self.assertItems((TestingJson.data['not-here']['xxx'] == 'v1')) self.assertItems((TestingJson.data['k2']['xxx'] == 'v1')) elif TEST_VERBOSITY > 0: print_('Skipping postgres "Json" tests, unsupported version.')
Aggregate all the test modules and run from the command-line. For information about running tests, see the README located in the `playhouse/tests` directory. """ import sys import unittest from playhouse.tests.test_apis import * from playhouse.tests.test_compound_queries import * from playhouse.tests.test_database import * from playhouse.tests.test_fields import * from playhouse.tests.test_helpers import * from playhouse.tests.test_introspection import * from playhouse.tests.test_keys import * from playhouse.tests.test_models import * from playhouse.tests.test_queries import * from playhouse.tests.test_query_results import * from playhouse.tests.test_transactions import * from playhouse.tests.test_plus import * if __name__ == '__main__': from peewee import print_ print_("""\033[1;31m ______ ______ ______ __ __ ______ ______ /\ == \ /\ ___\ /\ ___\ /\ \ _ \ \ /\ ___\ /\ ___\\ \ \ _-/ \ \ __\ \ \ __\ \ \ \/ ".\ \ \ \ __\ \ \ __\\ \ \_\ \ \_____\ \ \_____\ \ \__/".~\_\ \ \_____\ \ \_____\\ \/_/ \/_____/ \/_____/ \/_/ \/_/ \/_____/ \/_____/ \033[0m""") unittest.main(argv=sys.argv)
if xtra(options.test_utils): from playhouse import tests_test_utils modules.append(tests_test_utils) if not modules or options.all: import tests modules.insert(0, tests) return modules if __name__ == '__main__': parser = get_option_parser() options, args = parser.parse_args() os.environ['PEEWEE_TEST_BACKEND'] = options.engine os.environ['PEEWEE_TEST_VERBOSITY'] = str(options.verbosity) from peewee import print_ suite = unittest.TestSuite() for module in collect_modules(options): print_('Adding tests for for "%s"' % module.__name__) module_suite = unittest.TestLoader().loadTestsFromModule(module) suite.addTest(module_suite) failures, errors = runtests(suite, options.verbosity) if errors: sys.exit(2) elif failures: sys.exit(1) sys.exit(0)
def print_model(model, seen, accum=None):
    """Write the model class for `model`, recursively emitting any
    related tables first so that foreign keys refer to models that are
    already defined. `seen` collects printed tables; `accum` is the
    recursion path, used to spot reference cycles."""
    accum = [] if accum is None else accum
    for _, rel_table, _ in table_fks[model]:
        if rel_table in accum and model not in accum:
            print_('# POSSIBLE REFERENCE CYCLE: %s' % table_to_model[rel_table])
        if rel_table not in seen and rel_table not in accum:
            seen.add(rel_table)
            if rel_table != model:
                print_model(rel_table, seen, accum + [model])

    model_name = table_to_model[model]
    print_('class %s(BaseModel):' % model_name)
    for column, column_info in ds(models[model]):
        # Implicit "id" primary keys are added automatically by peewee.
        if column == 'id' and column_info.field_class in pk_classes:
            continue
        params = ', '.join(['%s=%s' % (key, value)
                            for key, value in col_meta[model][column].items()])
        colname = cn(column)
        if colname in RESERVED_WORDS:
            print_('    # FIXME: "%s" is a reserved word, renamed.' % colname)
            colname = colname + '_'
        if column_info.field_class is UnknownFieldType:
            # Keep the raw column type visible for manual follow-up.
            trailing = ' # %s' % column_info.raw_column_type
        else:
            trailing = ''
        print_('    %s = %s(%s)%s' % (colname,
                                      column_info.field_class.__name__,
                                      params,
                                      trailing))

    print_('')
    print_('    class Meta:')
    print_("        db_table = '%s'" % model)
    print_('')
    seen.add(model)
except OperationalError: print('Postgresql test database "peewee_test" not found, skipping ' 'the postgres_ext tests.') from .pwiz_integration import * from .reflection import * from .shortcuts import * from .signals import * try: from .sqlcipher_ext import * except ImportError: print('Unable to import SQLCipher extension tests, skipping.') try: from .sqlite import * except ImportError: print('Unable to import sqlite extension tests, skipping.') from .sqliteq import * from .sqlite_udf import * from .test_utils import * if __name__ == '__main__': from peewee import print_ print_("""\033[1;31m ______ ______ ______ __ __ ______ ______ /\ == \ /\ ___\ /\ ___\ /\ \ _ \ \ /\ ___\ /\ ___\\ \ \ _-/ \ \ __\ \ \ __\ \ \ \/ ".\ \ \ \ __\ \ \ __\\ \ \_\ \ \_____\ \ \_____\ \ \__/".~\_\ \ \_____\ \ \_____\\ \/_/ \/_____/ \/_____/ \/_/ \/_/ \/_____/ \/_____/ \033[0m""") unittest.main(argv=sys.argv)
self.assertEqual( sql, ('SELECT t1."id", t1."data" ' 'FROM "testingjson" AS t1 WHERE (t1."data"->>%s = %s)')) self.assertEqual(params, ['foo', 'bar']) def assertItems(self, where, *items): query = TestingJson.select().where(where).order_by(TestingJson.id) self.assertEqual([item.id for item in query], [item.id for item in items]) def test_lookup(self): t1 = TestingJson.create(data={'k1': 'v1', 'k2': {'k3': 'v3'}}) t2 = TestingJson.create(data={'k1': 'x1', 'k2': {'k3': 'x3'}}) t3 = TestingJson.create(data={'k1': 'v1', 'j2': {'j3': 'v3'}}) self.assertItems((TestingJson.data['k2']['k3'] == 'v3'), t1) self.assertItems((TestingJson.data['k1'] == 'v1'), t1, t3) # Valid key, no matching value. self.assertItems((TestingJson.data['k2'] == 'v1')) # Non-existent key. self.assertItems((TestingJson.data['not-here'] == 'v1')) # Non-existent nested key. self.assertItems((TestingJson.data['not-here']['xxx'] == 'v1')) self.assertItems((TestingJson.data['k2']['xxx'] == 'v1')) elif TEST_VERBOSITY > 0: print_('Skipping postgres "Json" tests, unsupported version.')
# Command-line entry point: parse connection options, then generate
# model definitions for the named database.
parser = OptionParser(usage='usage: %prog [options] database_name')
add_option = parser.add_option
add_option('-H', '--host', dest='host')
add_option('-p', '--port', dest='port', type='int')
add_option('-u', '--user', dest='user')
add_option('-P', '--password', dest='password')
add_option('-e', '--engine', dest='engine', default='postgresql')
add_option('-s', '--schema', dest='schema')
add_option('-t', '--tables', dest='tables')
options, args = parser.parse_args()

# Collect only the connection settings the user actually provided.
connect = {}
for key in ('host', 'port', 'user', 'password', 'schema'):
    value = getattr(options, key)
    if value:
        connect[key] = value

if not args:
    print_('error: missing required parameter "database"')
    parser.print_help()
    sys.exit(1)
database = args[-1]

# The MySQL driver expects "passwd" rather than "password".
if options.engine == 'mysql' and 'password' in connect:
    connect['passwd'] = connect.pop('password', None)

if options.tables:
    tables = [t for t in options.tables.split(',') if t]
else:
    tables = []

print_models(options.engine, database, tables, **connect)
trans['Comment']) self.assertFields(trans['Comment'], [ ('id', PrimaryKeyField), ('post', ForeignKeyField), ('commenter', IntegerField), ('comment', TextField)]) def test_backrefs(self): trans = translate(User, backrefs=True) self.assertEqual(sorted(trans.keys()), [ 'Comment', 'Post', 'User']) def test_inheritance(self): trans = translate(Parent) self.assertEqual(list(trans.keys()), ['Parent']) self.assertFields(trans['Parent'], [ ('id', PrimaryKeyField),]) trans = translate(Child) self.assertEqual(sorted(trans.keys()), ['Child', 'Parent']) self.assertFields(trans['Child'], [ ('id', PrimaryKeyField), ('parent_ptr', ForeignKeyField)]) else: print_('Skipping djpeewee tests, Django not found.')