class DBCache(Cache):
    '''A PyDAL database-backed implementation of the c9r.file.cache.FileCache
    interface.

    Entries live in the ``vars`` table with an integer ``expires`` timestamp;
    ``window`` (from config) is the time-to-live in seconds.
    '''
    defaults = {
        'db': 'sqlite://cache.db',  # Database URL
    }
    def_conf = ['~/.etc/cache-conf.json']

    def clear(self, clear_all=True):
        '''Remove cache contents.

        :param clear_all: when True remove everything, otherwise remove
            only entries that have already expired.
        '''
        db = self.db
        # NOTE(review): ``vars`` is defined with primarykey=['varset', 'name'],
        # so ``db.vars.id`` may not exist on keyed tables -- TODO confirm
        # against pydal keyed-table semantics.
        db((db.vars.id >= 0) if clear_all else (db.vars.expires < time())).delete()

    def clear_cache(self, vset, names):
        '''Placeholder -- not implemented.'''

    def get(self, vset, name):
        '''Get a named data from this cache.

        Raises IndexError when *name* is absent or already expired.
        '''
        db = self.db
        # Bug fix: return only entries that have NOT yet expired; the
        # original used ``expires < time()``, which matched only stale rows.
        rows = db((db.vars.name == name) & (db.vars.expires > time())).select()
        return rows[0].value

    def put(self, vset, name, data):
        '''Save given data into the cache with given name.'''
        self.db.vars.insert(name=name, value=data, expires=time() + self.window)

    def __init__(self, conf=[], initconf=None):
        '''Configure the cache and define the backing tables.'''
        Cache.__init__(self, conf, initconf)
        self.db = DAL(self.config('db'))
        self.db.define_table('varset', Field('name'))
        self.db.define_table('vars', Field('name'), Field('value', 'json'),
                             Field('varset'), Field('expires', 'integer'),
                             primarykey=['varset', 'name'])
        self.window = int(self.config('window'))
class DyTables(object):
    """Reflect every table of an existing MySQL schema into a pydal DAL.

    NOTE(review): the ``uri=None`` default would crash on ``uri.split``
    below -- a real connection URI appears to be required.
    """

    def __init__(self, uri=None):
        self._uri = uri
        # The schema name is the last path component of the connection URI.
        self._schema = uri.split("/")[-1]
        self._dal = DAL(self._uri)
        # Mapping from MySQL column types to pydal field types.
        self._datatapy_dict = datatype_mysql()
        self.get_tables()

    def get_tables(self):
        """Define a pydal table for each table found via information_schema."""
        # Re-point the URI at the server's information_schema database.
        _tables = GetAllTables(uri="/".join(self._uri.split("/")[:-1]) +
                               "/information_schema", schema=self._schema)
        for numb, table in enumerate(_tables):
            fields = []
            for field in _tables.get(table):
                # field[0] = column name, field[1] = MySQL data type.
                try:
                    fields.append(Field(field[0], self._datatapy_dict[field[1]]))
                except SyntaxError:
                    # pydal raises SyntaxError for reserved/invalid field
                    # names; expose those under an ``r_`` prefix while
                    # keeping the real column name via ``rname``.
                    fields.append(Field("r_" + field[0],
                                        self._datatapy_dict[field[1]],
                                        rname=field[0]))
            # Mirror only -- no migrations, no synthetic primary key.
            self._dal.define_table(table, *fields, primarykey=[], migrate=False)

    def get_db(self):
        """Return the underlying DAL instance."""
        return self._dal
def testRun(self):
    """Partial index: verify the dialect's SQL, then create/drop it live."""
    db = DAL(DEFAULT_URI, check_reserved=["all"], entity_quoting=True)
    db.define_table("tt", Field("aa"), Field("bb", "boolean"))
    condition = db.tt.bb == False
    generated = db._adapter.dialect.create_index(
        "idx_aa_f", db.tt, [db.tt.aa], where=str(condition)
    )
    expected = 'CREATE INDEX "idx_aa_f" ON "tt" ("aa") WHERE ("tt"."bb" = \'F\');'
    self.assertEqual(generated, expected)
    self.assertTrue(db.tt.create_index("idx_aa_f", db.tt.aa, where=condition))
    self.assertTrue(db.tt.drop_index("idx_aa_f"))
    drop(db.tt)
def testRun(self):
    """Round-trip a non-ASCII (Greek) string through insert and select."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('vv'))
    text = 'ἀγοραζε'
    new_id = db.tt.insert(vv=text)
    fetched = db(db.tt.id == new_id).select().first()
    self.assertEqual(fetched.vv, text)
    db.commit()
    drop(db.tt)
    db.close()
def testRun(self):
    """A notnull reference must reject inserts that omit the reference."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('vv'))
    db.define_table('ttt', Field('vv'),
                    Field('tt_id', 'reference tt', notnull=True))
    with self.assertRaises(Exception):
        db.ttt.insert(vv='pydal')
    # Mandatory for backends such as PG to close the aborted transaction.
    db.commit()
    drop(db.ttt)
    drop(db.tt)
    db.close()
def __new__(cls):
    """Build and return a DAL connected to the ipChecker Mongo database.

    Note: returning the DAL means instantiating this class yields a
    database handle, not an instance of the class itself.
    """
    dalString = 'mongodb://mongodb/ipChecker'
    db = DAL(dalString)
    db.define_table(
        'ips',
        Field('ip'),
        # Bug fix: pass a callable so the default is evaluated per insert.
        # The original called strftime() immediately, freezing the creation
        # date at construction time for every subsequent row.
        Field('created_at',
              default=lambda: datetime.now().strftime("%d/%m/%Y")),
        Field('cloudflare', 'boolean', default=False))
    db.define_table('statistics',
                    Field('ip'),
                    Field('status'),
                    # datetime.now is already a callable default (correct).
                    Field('created_on', 'datetime', default=datetime.now))
    return db
def testRun(self):
    """create_index/drop_index succeed once; a second drop must raise."""
    db = DAL(DEFAULT_URI, check_reserved=["all"])
    db.define_table("tt", Field("aa"))
    self.assertTrue(db.tt.create_index("idx_aa", db.tt.aa))
    self.assertTrue(db.tt.drop_index("idx_aa"))
    with self.assertRaises(Exception):
        db.tt.drop_index("idx_aa")
    db.rollback()
    drop(db.tt)
def testRun(self):
    '''Index lifecycle: create once, drop once, then dropping again fails.'''
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('aa'))
    created = db.tt.create_index('idx_aa', db.tt.aa)
    self.assertTrue(created)
    dropped = db.tt.drop_index('idx_aa')
    self.assertTrue(dropped)
    with self.assertRaises(Exception):
        db.tt.drop_index('idx_aa')
    db.rollback()
    drop(db.tt)
def testRun(self):
    """notnull reference enforcement for both normal and big references."""
    for ref_type, use_bigint in (('reference', False), ('big-reference', True)):
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=use_bigint)
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'),
                        Field('tt_id', '%s tt' % ref_type, notnull=True))
        with self.assertRaises(Exception):
            db.ttt.insert(vv='pydal')
        # Mandatory for backends such as PG to close the aborted transaction.
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def get_sys_table(uri="mysql://*****:*****@192.168.1.110/information_schema"):
    """Connect to MySQL's information_schema and map its COLUMNS table."""
    connection = DAL(uri=uri)
    column_fields = [
        Field("TABLE_SCHEMA"),
        Field("TABLE_NAME"),
        Field("COLUMN_NAME"),
        Field("IS_NULLABLE"),
        Field("DATA_TYPE"),
        Field("COLUMN_TYPE"),
    ]
    # Read-only mirror of a system table: no migrations, no synthetic id.
    connection.define_table('COLUMNS', *column_fields,
                            primarykey=[], migrate=False)
    return connection
def get_course_names(self, prefix=None):
    """Return distinct course names, optionally filtered by *prefix*.

    :param prefix: when given, only names starting with it are returned.
    :returns: list of row tuples as produced by ``executesql``.
    """
    db = DAL('sqlite://courses.db', folder='dbs')
    db.define_table('courses',
                    Field('class_id', type='integer'),
                    Field('class_name'),
                    Field('date_time'),
                    Field('descriptive_link'),
                    Field('enrolled'),
                    Field('instructor'),
                    Field('link_sources'),
                    Field('location'),
                    Field('status'))
    if prefix is not None:
        # Bug/security fix: the original concatenated *prefix* straight into
        # the SQL text (injection risk) and produced an unquoted LIKE
        # pattern, which is invalid SQL.  Use a bound placeholder instead.
        return db.executesql(
            'SELECT DISTINCT class_name FROM courses WHERE class_name LIKE ?',
            placeholders=[prefix + '%'])
    return db.executesql('SELECT DISTINCT class_name FROM courses')
def setup_database(data_dir, sub_dir, db_file, tables):
    """Open (creating if needed) a sqlite store under data_dir/sub_dir and
    define the given tables.

    ``tables`` maps table names to ``{field_name: field_type}`` dicts; each
    table keeps its own migration file.
    """
    # Figure out where our database should live.
    db_folder = Path(data_dir, sub_dir).absolute()
    db_folder.mkdir(exist_ok=True, parents=True)
    db = DAL(f'sqlite://{db_folder}/store.sqlite', folder=db_folder)
    for name, spec in tables.items():
        fields = [Field(field_name, field_type)
                  for field_name, field_type in spec.items()]
        db.define_table(name, *fields, migrate=f'{name}.migrate')
    return db
def testRun(self):
    """notnull reference test over both id widths; skips adapters
    without big-id support."""
    for ref_type, use_bigint in (('reference', False), ('big-reference', True)):
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=use_bigint)
        if use_bigint and 'big-id' not in db._adapter.types:
            continue
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'),
                        Field('tt_id', '%s tt' % ref_type, notnull=True))
        with self.assertRaises(Exception):
            db.ttt.insert(vv='pydal')
        # Mandatory for backends such as PG to close the aborted transaction.
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
class database():
    """Wrap a sqlite DAL over database/banco.db; migrate only on first run."""

    def __init__(self):
        # Run migrations only when the database file does not exist yet.
        banco_path = os.path.abspath('database/banco.db')
        self.migrate = not os.path.exists(banco_path)
        self.DATABASE_TYPE = 'sqlite://'
        self.DATABASE = self.DATABASE_TYPE + banco_path
        self.db = DAL(self.DATABASE, migrate=self.migrate)

    def create(self):
        """Define the ``log`` table and return the DAL handle."""
        self.db.define_table('log',
                             Field('date', 'datetime'),
                             Field('time', 'datetime'),
                             Field('phrase'))
        return self.db
def get_prefixes(self):
    """Return the unique first words (prefixes) of all course names,
    preserving first-seen order.
    """
    db = DAL('sqlite://courses.db', folder='dbs')
    # NOTE(review): this DAL/table definition is otherwise unused here --
    # get_course_names() opens its own connection -- but it is kept because
    # define_table may create/migrate the table as a side effect.
    db.define_table('courses',
                    Field('class_id', type='integer'),
                    Field('class_name'),
                    Field('date_time'),
                    Field('descriptive_link'),
                    Field('enrolled'),
                    Field('instructor'),
                    Field('link_sources'),
                    Field('location'),
                    Field('status'))
    course_names = self.get_course_names()
    prefixes = []
    for name in course_names:
        # Hoisted: the original computed name[0].split()[0] twice per row.
        prefix = name[0].split()[0]
        if prefix not in prefixes:
            prefixes.append(prefix)
    return prefixes
def get_sys_table(
        uri="mysql://*****:*****@192.168.1.110/information_schema"):
    """Open information_schema and expose its COLUMNS table, read-only."""
    db = DAL(uri=uri)
    db.define_table(
        'COLUMNS',
        Field("TABLE_SCHEMA"),
        Field("TABLE_NAME"),
        Field("COLUMN_NAME"),
        Field("IS_NULLABLE"),
        Field("DATA_TYPE"),
        Field("COLUMN_TYPE"),
        # System table mirror: no synthetic id, no migrations.
        primarykey=[],
        migrate=False,
    )
    return db
def testRun(self):
    """notnull reference across both id widths, skipping unsupported ones."""
    for ref, bigint in [("reference", False), ("big-reference", True)]:
        db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint)
        if bigint and "big-id" not in db._adapter.types:
            continue
        db.define_table("tt", Field("vv"))
        db.define_table(
            "ttt", Field("vv"), Field("tt_id", "%s tt" % ref, notnull=True)
        )
        with self.assertRaises(Exception):
            db.ttt.insert(vv="pydal")
        # Mandatory for backends such as PG to close the aborted transaction.
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def testRun(self):
    """Cached selects must return the same row count as uncached ones,
    with and without cacheable=True."""
    cache = SimpleCache()
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('aa'))
    db.tt.insert(aa='1')
    baseline = db().select(db.tt.ALL)
    # Two cold/warm cache reads, then two cacheable reads.
    for cacheable in (False, False, True, True):
        cached = db().select(db.tt.ALL, cache=(cache, 1000),
                             cacheable=cacheable)
        self.assertEqual(len(baseline), len(cached))
    drop(db.tt)
def testRun(self):
    """Partial-index support: verify the dialect's generated SQL, then
    actually create and drop the index on the backend."""
    db = DAL(DEFAULT_URI, check_reserved=['all'], entity_quoting=True)
    db.define_table('tt', Field('aa'), Field('bb', 'boolean'))
    sql = db._adapter.dialect.create_index(
        'idx_aa_f', db.tt, [db.tt.aa], where=str(db.tt.bb == False)
    )
    # Exact SQL expected for a partial index with a boolean predicate.
    self.assertEqual(
        sql,
        'CREATE INDEX "idx_aa_f" ON "tt" ("aa") WHERE ("tt"."bb" = \'F\');'
    )
    rv = db.tt.create_index(
        'idx_aa_f', db.tt.aa, where=(db.tt.bb == False))
    self.assertTrue(rv)
    rv = db.tt.drop_index('idx_aa_f')
    self.assertTrue(rv)
    drop(db.tt)
def test_single_transaction(self):
    """single_transaction() commits on success, rolls back on error."""
    db = DAL(DEFAULT_URI)
    db.define_table('tt', Field('aa'))
    self.assertEqual(db(db.tt).count(), 0)
    db.commit()
    # An exception inside the context must roll the insert back.
    with self.assertRaises(ZeroDivisionError):
        with db.single_transaction():
            db.tt.insert(aa='test')
            1 / 0
    self.assertEqual(db(db.tt).count(), 0)
    # A clean exit must commit the insert.
    with db.single_transaction():
        db.tt.insert(aa='test')
    self.assertEqual(db(db.tt).count(), 1)
class Database:
    """Application database bootstrap: connects with configured parameters
    and defines the schema."""

    def __init__(self):
        # Connection parameters come from the application Configuration.
        self.config = Configuration().get_db_params()
        self.db = DAL(
            self.config.uri,
            pool_size=self.config.pool_size,
            migrate_enabled=self.config.migrate,
            check_reserved=['all'],
            # Fall back to a local 'database' folder when none is configured.
            folder=self.config.folder if self.config.folder else 'database',
            # adapter_args=dict(migrator=InDBMigrator)
        )
        self.define_tables()

    def define_tables(self):
        """Define all application tables (currently just ``notification``)."""
        self.db.define_table('notification',
                             Field('launched_on', 'date'),
                             Field('event_date', 'date'))
class DBCache(Cache):
    '''A PyDAL database-backed implementation of the c9r.file.cache.FileCache
    interface.

    Entries live in the ``vars`` table with an integer ``expires`` timestamp;
    ``window`` (from config) is the time-to-live in seconds.
    '''
    defaults = {
        'db': 'sqlite://cache.db',  # Database URL
    }
    def_conf = ['~/.etc/cache-conf.json']

    def clear(self, clear_all=True):
        '''Remove cache contents.

        :param clear_all: when True remove everything, otherwise remove
            only entries that have already expired.
        '''
        db = self.db
        # NOTE(review): ``vars`` is defined with primarykey=['varset', 'name'],
        # so ``db.vars.id`` may not exist on keyed tables -- TODO confirm
        # against pydal keyed-table semantics.
        db((db.vars.id >= 0) if clear_all else (
            db.vars.expires < time())).delete()

    def clear_cache(self, vset, names):
        '''Placeholder -- not implemented.'''

    def get(self, vset, name):
        '''Get a named data from this cache.

        Raises IndexError when *name* is absent or already expired.
        '''
        db = self.db
        # Bug fix: return only entries that have NOT yet expired; the
        # original used ``expires < time()``, which matched only stale rows.
        rows = db((db.vars.name == name) & (db.vars.expires > time())).select()
        return rows[0].value

    def put(self, vset, name, data):
        '''Save given data into the cache with given name.'''
        self.db.vars.insert(name=name, value=data, expires=time() + self.window)

    def __init__(self, conf=[], initconf=None):
        '''Configure the cache and define the backing tables.'''
        Cache.__init__(self, conf, initconf)
        self.db = DAL(self.config('db'))
        self.db.define_table('varset', Field('name'))
        self.db.define_table('vars', Field('name'), Field('value', 'json'),
                             Field('varset'), Field('expires', 'integer'),
                             primarykey=['varset', 'name'])
        self.window = int(self.config('window'))
def testRun(self):
    """unique=True on a reference: duplicate reference values must fail."""
    for ref_type, use_bigint in (('reference', False), ('big-reference', True)):
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=use_bigint)
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'),
                        Field('tt_id', '%s tt' % ref_type, unique=True))
        referenced = db.tt.insert(vv='pydal')
        # Null tt_id
        db.ttt.insert(vv='pydal')
        # First insert of the reference value is OK...
        db.ttt.insert(tt_id=referenced)
        # ...but a duplicate must violate the unique constraint.
        with self.assertRaises(Exception):
            db.ttt.insert(tt_id=referenced)
        # Mandatory for backends such as PG to close the aborted transaction.
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def testRun(self):
    """Expression (COALESCE) index: compare dialect SQL, then create/drop."""
    db = DAL(DEFAULT_URI, check_reserved=["all"], entity_quoting=True)
    db.define_table("tt", Field("aa"), Field("bb", "datetime"))
    sql = db._adapter.dialect.create_index(
        "idx_aa_and_bb", db.tt, [db.tt.aa, db.tt.bb.coalesce(None)])
    # index_expander() makes str() render the expression exactly as the
    # adapter does inside a CREATE INDEX statement.
    with db._adapter.index_expander():
        coalesce_sql = str(db.tt.bb.coalesce(None))
    expected_sql = "CREATE INDEX %s ON %s (%s,%s);" % (
        db._adapter.dialect.quote("idx_aa_and_bb"),
        db.tt.sqlsafe,
        db.tt.aa.sqlsafe_name,
        coalesce_sql,
    )
    self.assertEqual(sql, expected_sql)
    rv = db.tt.create_index("idx_aa_and_bb", db.tt.aa, db.tt.bb.coalesce(None))
    self.assertTrue(rv)
    rv = db.tt.drop_index("idx_aa_and_bb")
    self.assertTrue(rv)
    drop(db.tt)
def testRun(self):
    """COALESCE-expression index: check dialect SQL, then create/drop it."""
    db = DAL(DEFAULT_URI, check_reserved=['all'], entity_quoting=True)
    db.define_table('tt', Field('aa'), Field('bb', 'datetime'))
    sql = db._adapter.dialect.create_index(
        'idx_aa_and_bb', db.tt, [db.tt.aa, db.tt.bb.coalesce(None)])
    # index_expander() renders the expression the way CREATE INDEX will.
    with db._adapter.index_expander():
        coalesce_sql = str(db.tt.bb.coalesce(None))
    expected_sql = 'CREATE INDEX %s ON %s (%s,%s);' % (
        db._adapter.dialect.quote('idx_aa_and_bb'),
        db.tt.sqlsafe,
        db.tt.aa.sqlsafe_name,
        coalesce_sql)
    self.assertEqual(sql, expected_sql)
    rv = db.tt.create_index('idx_aa_and_bb', db.tt.aa,
                            db.tt.bb.coalesce(None))
    self.assertTrue(rv)
    rv = db.tt.drop_index('idx_aa_and_bb')
    self.assertTrue(rv)
    drop(db.tt)
def model():
    """Open (and seed, if empty) the todo database; return the DAL."""
    db = DAL('sqlite://todo.db', pool_size=1, folder='./')  # ,migrate=False)
    Todos = db.define_table('todos', Field('title'),
                            Field('isCompleted', 'boolean', default=False))
    # Seed 15 sample rows on first run only.
    if not db(db.todos).count():
        for i in range(1, 16):
            db.todos.insert(title='الموعد ' + str(i))
        db.commit()
    return db
def testRun(self):
    """validate_and_insert: valid rows get ids, invalid rows get errors."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('val_and_insert',
                    Field('aa'),
                    Field('bb', 'integer',
                          requires=IS_INT_IN_RANGE(1, 5)))
    rtn = db.val_and_insert.validate_and_insert(aa='test1', bb=2)
    if NOSQL:
        # NOTE(review): ``long`` is Python-2 only -- presumably aliased to
        # int by a compat module; verify the file's imports.
        self.assertEqual(isinstance(rtn.id, long), True)
    else:
        self.assertEqual(rtn.id, 1)
    # errors should be empty
    self.assertEqual(len(rtn.errors.keys()), 0)
    # this insert won't pass validation: bb must be an int in [1, 5)
    rtn = db.val_and_insert.validate_and_insert(bb="a")
    # the returned id should be None
    self.assertEqual(rtn.id, None)
    # an error message should be in rtn.errors.bb
    self.assertNotEqual(rtn.errors.bb, None)
    # cleanup table
    drop(db.val_and_insert)
def index():
    """Fetch every row of list_event; the connection is closed either way."""
    from pydal import DAL, Field
    db = DAL('mysql://*****:*****@localhost/web2py2')
    try:
        db.define_table(
            'list_event',
            Field('event_name', type='text'),
            Field('dia_diem', type='text'),
            Field('thoi_gian', type='date'),
        )
        events = db().select(db.list_event.ALL)
    finally:
        if db:
            db.close()
    return dict(list=events)
def testRun(self):
    """unique + notnull reference enforcement across both id widths."""
    for ref_type, use_bigint in (('reference', False), ('big-reference', True)):
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=use_bigint)
        if use_bigint and 'big-id' not in db._adapter.types:
            continue
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'),
                        Field('tt_id', '%s tt' % ref_type,
                              unique=True, notnull=True))
        # notnull: omitting tt_id must fail.
        with self.assertRaises(Exception):
            db.ttt.insert(vv='pydal')
        db.commit()
        referenced = db.tt.insert(vv='pydal')
        # first insert is OK
        db.ttt.insert(tt_id=referenced)
        # unique: a second row with the same tt_id must fail.
        with self.assertRaises(Exception):
            db.ttt.insert(tt_id=referenced)
        # Mandatory for backends such as PG to close the aborted transaction.
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def testRun(self):
    """COALESCE-expression index using the adapter's short table ref."""
    db = DAL(DEFAULT_URI, check_reserved=["all"], entity_quoting=True)
    db.define_table("tt", Field("aa"), Field("bb", "datetime"))
    sql = db._adapter.dialect.create_index(
        "idx_aa_and_bb", db.tt, [db.tt.aa, db.tt.bb.coalesce(None)])
    # index_expander() renders the expression the way CREATE INDEX will.
    with db._adapter.index_expander():
        coalesce_sql = str(db.tt.bb.coalesce(None))
    expected_sql = "CREATE INDEX %s ON %s (%s,%s);" % (
        db._adapter.dialect.quote("idx_aa_and_bb"),
        db.tt.sql_shortref,
        db.tt.aa.sqlsafe_name,
        coalesce_sql,
    )
    self.assertEqual(sql, expected_sql)
    rv = db.tt.create_index("idx_aa_and_bb", db.tt.aa, db.tt.bb.coalesce(None))
    self.assertTrue(rv)
    rv = db.tt.drop_index("idx_aa_and_bb")
    self.assertTrue(rv)
    drop(db.tt)
def setUp(self):
    """Build an in-memory schema (colors, things, relations) plus a DBAPI."""
    db = DAL('sqlite:memory')
    db.define_table('color',
                    Field('name', requires=IS_NOT_IN_DB(db, 'color.name')))
    for color in ('red', 'green', 'blue'):
        db.color.insert(name=color)
    db.define_table('thing', Field('name'), Field('color', 'reference color'))
    for thing, color in (('Chair', 1), ('Chair', 2), ('Table', 1),
                         ('Table', 3), ('Lamp', 2)):
        db.thing.insert(name=thing, color=color)
    db.define_table('rel', Field('a', 'reference thing'), Field('desc'),
                    Field('b', 'reference thing'))
    for a, b, desc in ((1, 2, 'is like'), (3, 4, 'is like'),
                       (1, 3, 'is under'), (2, 4, 'is under'),
                       (5, 4, 'is above')):
        db.rel.insert(a=a, b=b, desc=desc)
    self.db = db
    self.api = DBAPI(db, ALLOW_ALL_POLICY)
def setUp(self):
    """Populate an in-memory database and attach a RestAPI over it."""
    db = DAL("sqlite:memory")
    db.define_table("color",
                    Field("name", requires=IS_NOT_IN_DB(db, "color.name")))
    for name in ("red", "green", "blue"):
        db.color.insert(name=name)
    db.define_table("thing", Field("name"), Field("color", "reference color"))
    for name, color in (("Chair", 1), ("Chair", 2), ("Table", 1),
                        ("Table", 3), ("Lamp", 2)):
        db.thing.insert(name=name, color=color)
    db.define_table(
        "rel",
        Field("a", "reference thing"),
        Field("desc"),
        Field("b", "reference thing"),
    )
    for a, b, desc in ((1, 2, "is like"), (3, 4, "is like"),
                       (1, 3, "is under"), (2, 4, "is under"),
                       (5, 4, "is above")):
        db.rel.insert(a=a, b=b, desc=desc)
    self.db = db
    self.api = RestAPI(db, ALLOW_ALL_POLICY)
def setUp(self):
    """Create the fixture schema in memory and wire a DBAPI onto it."""
    db = DAL('sqlite:memory')
    db.define_table('color',
                    Field('name', requires=IS_NOT_IN_DB(db, 'color.name')))
    db.color.insert(name='red')
    db.color.insert(name='green')
    db.color.insert(name='blue')
    db.define_table('thing', Field('name'),
                    Field('color', 'reference color'))
    # Five things referencing the colors above by id.
    things = [('Chair', 1), ('Chair', 2), ('Table', 1),
              ('Table', 3), ('Lamp', 2)]
    for name, color_id in things:
        db.thing.insert(name=name, color=color_id)
    db.define_table('rel', Field('a', 'reference thing'), Field('desc'),
                    Field('b', 'reference thing'))
    relations = [(1, 2, 'is like'), (3, 4, 'is like'), (1, 3, 'is under'),
                 (2, 4, 'is under'), (5, 4, 'is above')]
    for a, b, desc in relations:
        db.rel.insert(a=a, b=b, desc=desc)
    self.db = db
    self.api = DBAPI(db, ALLOW_ALL_POLICY)
def preloop(self):
    """Open the grades database and define the whole gradebook schema."""
    db = DAL('sqlite://grades.db')
    db.define_table(
        'students',
        Field('fname', notnull=True),
        Field('lname', notnull=True),
        Field('dob', 'date', notnull=True),
        Field('sex_is_f', 'boolean', notnull=True),
    )
    db.define_table(
        'courses',
        Field('course_id', notnull=True),
        Field('category', notnull=True),
        Field('description', notnull=True),
        Field('professor', notnull=True),
        Field('classroom', notnull=True),
        Field('long_descr', 'text'),
        Field('start_date', 'date', notnull=True),
        Field('end_date', 'date', notnull=True),
    )
    # Many-to-many enrollment link between students and courses.
    db.define_table(
        'students_to_courses',
        Field('student', 'reference students', notnull=True),
        Field('course', 'reference courses', notnull=True),
    )
    db.define_table(
        'assignments',
        Field('course', 'reference courses', notnull=True),
        Field('description', notnull=True),
        Field('max_points', 'integer', notnull=True),
        Field('due_date', 'date', notnull=True),
    )
    # grade is nullable: an ungraded submission has no points yet.
    db.define_table(
        'grades',
        Field('student', 'reference students', notnull=True),
        Field('assignment', 'reference assignments', notnull=True),
        Field('grade', 'integer'),
    )
    self.db = db
def test_tags(self):
    """Tags helper: add tags to rows, read them back, and query by tag."""
    db = DAL("sqlite:memory")
    db.define_table("thing", Field("name"))
    properties = Tags(db.thing)
    id1 = db.thing.insert(name="chair")
    id2 = db.thing.insert(name="table")
    properties.add(id1, "color/red")
    properties.add(id1, "style/modern")
    properties.add(id2, "color/green")
    properties.add(id2, "material/wood")
    # Bug fix: the original used two-argument assertTrue(x, msg), which only
    # checks the truthiness of x -- use real equality assertions instead.
    self.assertEqual(properties.get(id1), ["color/red", "style/modern"])
    self.assertEqual(properties.get(id2), ["color/green", "material/wood"])
    rows = db(properties.find(["style/modern"])).select()
    self.assertEqual(rows.first().id, id1)
    rows = db(properties.find(["material/wood"])).select()
    # Bug fix: "material/wood" belongs to id2, not id1.
    self.assertEqual(rows.first().id, id2)
    # A bare "color" prefix matches both tagged rows.
    rows = db(properties.find(["color"])).select()
    self.assertEqual(len(rows), 2)
def preloop(self):
    """Open the grades database and define the gradebook schema."""
    db = DAL("sqlite://grades.db")
    db.define_table(
        "students",
        Field("fname", notnull=True),
        Field("lname", notnull=True),
        Field("dob", "date", notnull=True),
        Field("sex_is_f", "boolean", notnull=True),
    )
    db.define_table(
        "courses",
        Field("course_id", notnull=True),
        Field("category", notnull=True),
        Field("description", notnull=True),
        Field("professor", notnull=True),
        Field("classroom", notnull=True),
        Field("long_descr", "text"),
        Field("start_date", "date", notnull=True),
        Field("end_date", "date", notnull=True),
    )
    # Many-to-many enrollment link between students and courses.
    db.define_table(
        "students_to_courses",
        Field("student", "reference students", notnull=True),
        Field("course", "reference courses", notnull=True),
    )
    db.define_table(
        "assignments",
        Field("course", "reference courses", notnull=True),
        Field("description", notnull=True),
        Field("max_points", "integer", notnull=True),
        Field("due_date", "date", notnull=True),
    )
    # grade is nullable: an ungraded submission has no points yet.
    db.define_table(
        "grades",
        Field("student", "reference students", notnull=True),
        Field("assignment", "reference assignments", notnull=True),
        Field("grade", "integer"),
    )
    self.db = db
def test_tags(self):
    """Tags helper: tag two rows, read tags back, and query by tag/prefix."""
    db = DAL('sqlite:memory')
    db.define_table('thing', Field('name'))
    properties = Tags(db.thing)
    id1 = db.thing.insert(name='chair')
    id2 = db.thing.insert(name='table')
    properties.add(id1, 'color/red')
    properties.add(id1, 'style/modern')
    properties.add(id2, 'color/green')
    properties.add(id2, 'material/wood')
    # Bug fixes: the original used two-argument assertTrue(x, msg), which
    # only checks the truthiness of x, and contained the typo 'color/gree'.
    self.assertEqual(properties.get(id1), ['color/red', 'style/modern'])
    self.assertEqual(properties.get(id2), ['color/green', 'material/wood'])
    rows = db(properties.find(['style/modern'])).select()
    self.assertEqual(rows.first().id, id1)
    rows = db(properties.find(['material/wood'])).select()
    # Bug fix: 'material/wood' belongs to id2, not id1.
    self.assertEqual(rows.first().id, id2)
    # A bare 'color' prefix matches both tagged rows.
    rows = db(properties.find(['color'])).select()
    self.assertEqual(len(rows), 2)
def testRun(self):
    """Rows.join(): attach child rows to parents and parents to children."""
    db = DAL(DEFAULT_URI, check_reserved=["all"])
    db.define_table("aa", Field("name"))
    db.define_table("bb", Field("aa", "reference aa"), Field("name"))
    # Three parents x, y, z with three children each (xu, xv, xw, ...).
    for k in ("x", "y", "z"):
        i = db.aa.insert(name=k)
        for j in ("u", "v", "w"):
            db.bb.insert(aa=i, name=k + j)
    db.commit()
    # One-to-many: each aa row gains a .bb list, ordered by child name.
    rows = db(db.aa).select()
    rows.join(db.bb.aa, fields=[db.bb.name], orderby=[db.bb.name])
    self.assertEqual(rows[0].bb[0].name, "xu")
    self.assertEqual(rows[0].bb[1].name, "xv")
    self.assertEqual(rows[0].bb[2].name, "xw")
    self.assertEqual(rows[1].bb[0].name, "yu")
    self.assertEqual(rows[1].bb[1].name, "yv")
    self.assertEqual(rows[1].bb[2].name, "yw")
    self.assertEqual(rows[2].bb[0].name, "zu")
    self.assertEqual(rows[2].bb[1].name, "zv")
    self.assertEqual(rows[2].bb[2].name, "zw")
    # Many-to-one: each bb row gains its parent record under .aa.
    rows = db(db.bb).select()
    rows.join(db.aa.id, fields=[db.aa.name])
    self.assertEqual(rows[0].aa.name, "x")
    self.assertEqual(rows[1].aa.name, "x")
    self.assertEqual(rows[2].aa.name, "x")
    self.assertEqual(rows[3].aa.name, "y")
    self.assertEqual(rows[4].aa.name, "y")
    self.assertEqual(rows[5].aa.name, "y")
    self.assertEqual(rows[6].aa.name, "z")
    self.assertEqual(rows[7].aa.name, "z")
    self.assertEqual(rows[8].aa.name, "z")
    # Joined rows must still be serializable to JSON.
    rows_json = rows.as_json()
    drop(db.bb)
    drop(db.aa)
    db.close()
def testRun(self):
    """Rows.join() in both directions over a parent/child pair of tables."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('aa', Field('name'))
    db.define_table('bb', Field('aa', 'reference aa'), Field('name'))
    # Three parents x, y, z with three children each (xu, xv, xw, ...).
    for k in ('x', 'y', 'z'):
        i = db.aa.insert(name=k)
        for j in ('u', 'v', 'w'):
            db.bb.insert(aa=i, name=k + j)
    db.commit()
    # One-to-many: each aa row gains a .bb list, ordered by child name.
    rows = db(db.aa).select()
    rows.join(db.bb.aa, fields=[db.bb.name], orderby=[db.bb.name])
    self.assertEqual(rows[0].bb[0].name, 'xu')
    self.assertEqual(rows[0].bb[1].name, 'xv')
    self.assertEqual(rows[0].bb[2].name, 'xw')
    self.assertEqual(rows[1].bb[0].name, 'yu')
    self.assertEqual(rows[1].bb[1].name, 'yv')
    self.assertEqual(rows[1].bb[2].name, 'yw')
    self.assertEqual(rows[2].bb[0].name, 'zu')
    self.assertEqual(rows[2].bb[1].name, 'zv')
    self.assertEqual(rows[2].bb[2].name, 'zw')
    # Many-to-one: each bb row gains its parent record under .aa.
    rows = db(db.bb).select()
    rows.join(db.aa.id, fields=[db.aa.name])
    self.assertEqual(rows[0].aa.name, 'x')
    self.assertEqual(rows[1].aa.name, 'x')
    self.assertEqual(rows[2].aa.name, 'x')
    self.assertEqual(rows[3].aa.name, 'y')
    self.assertEqual(rows[4].aa.name, 'y')
    self.assertEqual(rows[5].aa.name, 'y')
    self.assertEqual(rows[6].aa.name, 'z')
    self.assertEqual(rows[7].aa.name, 'z')
    self.assertEqual(rows[8].aa.name, 'z')
    # Joined rows must still be serializable to JSON.
    rows_json = rows.as_json()
    drop(db.bb)
    drop(db.aa)
    db.close()
class Current(object):
    """Holds current-events tables (one per sport) over the sqlite store."""

    # Per-sport configuration: table-name prefix plus the external
    # stream-link URL template.
    config = {
        "events": [
            {
                "event_type": "nba",
                "event_external_link": "http://givemenbastreams.com/nba.php?g={}",
            },
            {
                "event_type": "nfl",
                "event_external_link": "https://nflwebcast.com/verses/{}.html",
            },
            {
                "event_type": "mlb",
                "event_external_link": "http://givemenbastreams.com/mlb.php?g={}",
            },
        ]
    }

    def __init__(self):
        # NOTE(review): the URI already embeds ../database/ while folder=
        # also points at the database dir -- confirm pydal does not combine
        # the two into a doubled path.
        self.db = DAL(
            "sqlite://../database/storage.sqlite",
            folder=os.path.join(os.path.dirname(__file__), "..", "database"),
        )
        for each in self.config["events"]:
            # One table per sport: nba_event, nfl_event, mlb_event.
            self.db.define_table(
                "{}_event".format(each["event_type"]),
                Field("event_id", length=32),
                Field("event_title", length=512),
                Field("event_subtitle", length=512),
                Field("event_datetime", type="datetime"),
                Field("event_home_team", length=64),
                Field("event_away_team", length=64),
                Field("event_description", type="text"),
            )
def testRun(self):
    """validate_and_update_or_insert: insert, then update, then reject."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    t1 = db.define_table('t1', Field('int_level',
                                     requires=IS_INT_IN_RANGE(1, 5)))
    # First call matches nothing -> inserts int_level=1.
    i_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=1)
    # Second call matches the row -> updates it to 2.
    u_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=2)
    # 6 fails IS_INT_IN_RANGE(1, 5) -> no id, errors populated.
    e_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=6)
    self.assertTrue(i_response.id != None)
    self.assertTrue(u_response.id != None)
    self.assertTrue(e_response.id == None and
                    len(e_response.errors.keys()) != 0)
    # Exactly one row remains, holding the updated value 2.
    self.assertTrue(db(t1).count() == 1)
    self.assertTrue(db(t1.int_level == 1).count() == 0)
    self.assertTrue(db(t1.int_level == 6).count() == 0)
    self.assertTrue(db(t1.int_level == 2).count() == 1)
    db.t1.drop()
    return
class DbHelper(object):
    """Store spider results (douban topics) in MongoDB via pydal."""

    def __init__(self, arg):
        super(DbHelper, self).__init__()
        self.arg = arg
        self.db = DAL('mongodb://140.143.247.178:27099/spider')
        self.define_table()

    def define_table(self):
        """Define the douban_topic table."""
        print(self.db._dbname)
        self.db.define_table('douban_topic',
                             Field('title'), Field('title_url'),
                             Field('people'), Field('people_url'),
                             Field('replay_num'), Field('post_time'))

    def insert_models(self, table_name='', items=[]):
        """Bulk-insert *items* (mappings) into *table_name* and commit.

        Bug fix: the original ignored ``table_name`` entirely and always
        wrote to douban_topic.  An empty name still defaults to
        douban_topic for backward compatibility.

        NOTE(review): the mutable default ``items=[]`` is kept for
        interface compatibility; it is never mutated here.
        """
        rows = list(map(dict, items))
        self.db[table_name or 'douban_topic'].bulk_insert(rows)
        self.db.commit()
def testRun(self):
    """validate_and_update_or_insert with an explicit integer field type."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    t1 = db.define_table('t1', Field('int_level', 'integer',
                                     requires=IS_INT_IN_RANGE(1, 5)))
    # No match yet -> inserts int_level=1.
    i_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=1)
    # Matches the row -> updates it to 2.
    u_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=2)
    # 6 is outside IS_INT_IN_RANGE(1, 5) -> validation error, id is None.
    e_response = t1.validate_and_update_or_insert((t1.int_level == 1),
                                                  int_level=6)
    self.assertTrue(i_response.id != None)
    self.assertTrue(u_response.id != None)
    self.assertTrue(e_response.id == None and
                    len(e_response.errors.keys()) != 0)
    # Exactly one row remains, holding the updated value 2.
    self.assertEqual(len(db(t1).select()), 1)
    self.assertEqual(db(t1).count(), 1)
    self.assertEqual(db(t1.int_level == 1).count(), 0)
    self.assertEqual(db(t1.int_level == 6).count(), 0)
    self.assertEqual(db(t1.int_level == 2).count(), 1)
    drop(db.t1)
    return
def testRun(self):
    """smart_query: each supported textual operator parses to the
    equivalent DAL query.

    NOTE(review): the original source had lost its indentation; the scope
    of ``if not IS_GAE:`` is reconstructed here as covering only the
    string-operator tests — confirm against upstream history.
    """
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    # Additional field types (date, time, datetime, decimal, reference,
    # list:*) were stubbed out in the original pending further imports.
    db.define_table('a_table',
                    Field('string_field', 'string'),
                    Field('text_field', 'text'),
                    Field('boolean_field', 'boolean'),
                    Field('integer_field', 'integer'),
                    Field('double_field', 'double'))
    fields = [db.a_table.id,
              db.a_table.string_field,
              db.a_table.text_field,
              db.a_table.boolean_field,
              db.a_table.integer_field,
              db.a_table.double_field]

    # Boolean field.
    self.assertEqual(smart_query(fields, 'a_table.boolean_field = True'),
                     db.a_table.boolean_field == True)

    if not IS_GAE:
        # String-field operators. ('like' is known not to work and is
        # intentionally not exercised.)
        string_cases = [
            ('a_table.string_field starts with "pydal"',
             db.a_table.string_field.startswith('pydal')),
            ('a_table.string_field ends with "Rocks!!"',
             db.a_table.string_field.endswith('Rocks!!')),
            ('a_table.string_field contains "Rocks"',
             db.a_table.string_field.contains('Rocks')),
        ]
        for keywords, expected in string_cases:
            self.assertEqual(smart_query(fields, keywords), expected)

    # Integer-field comparison operators, data-driven instead of the
    # original's copy-pasted per-operator stanzas. The '<>', '=<' and '=>'
    # spellings were already disabled upstream as invalid/broken.
    f = db.a_table.integer_field
    integer_cases = [
        ('a_table.integer_field = 1', f == 1),
        ('a_table.integer_field == 1', f == 1),
        ('a_table.integer_field is 1', f == 1),
        ('a_table.integer_field equal 1', f == 1),
        ('a_table.integer_field equals 1', f == 1),
        ('a_table.integer_field equal to 1', f == 1),
        ('a_table.integer_field not equal 1', f != 1),
        ('a_table.integer_field not equal to 1', f != 1),
        ('a_table.integer_field < 1', f < 1),
        ('a_table.integer_field less than 1', f < 1),
        ('a_table.integer_field <= 1', f <= 1),
        ('a_table.integer_field less or equal 1', f <= 1),
        ('a_table.integer_field less or equal than 1', f <= 1),
        ('a_table.integer_field equal or less 1', f <= 1),
        ('a_table.integer_field equal or less than 1', f <= 1),
        ('a_table.integer_field > 1', f > 1),
        ('a_table.integer_field greater than 1', f > 1),
        ('a_table.integer_field >= 1', f >= 1),
        ('a_table.integer_field greater or equal 1', f >= 1),
        ('a_table.integer_field greater or equal than 1', f >= 1),
        ('a_table.integer_field equal or greater 1', f >= 1),
        ('a_table.integer_field equal or greater than 1', f >= 1),
    ]
    for keywords, expected in integer_cases:
        self.assertEqual(smart_query(fields, keywords), expected)

    # NOTE(review): the 'in' / 'not in' (belongs) forms are known-broken in
    # smart_query (https://github.com/web2py/pydal/issues/161) and remain
    # unexercised, as in the original.

    # cleanup table
    drop(db.a_table)
# NOTE(review): this chunk begins mid-expression -- the opening of the
# transition list (presumably ``valid_transitions = [``) and the header of
# the function it appears to belong to (``db_config_init(db)`` is called
# below) are missing from this view, so the leading lines cannot parse on
# their own. Python 2 print statements below; snippet predates Python 3.
    { 'id':1, 'trigger':'begin',     'source':'start',  'dest':'proc',   'after':'increase_processings' },
    { 'id':2, 'trigger':'end',       'source':'proc',   'dest':'finish', 'after':'noop' },
    { 'id':3, 'trigger':'reprocess', 'source':'finish', 'dest':'proc',   'after':'increase_processings' }
]
# Reset both workflow configuration tables, then repopulate them from the
# transition list and the fixed state names.
db.config_workflow.truncate()
db.config_wfstate.truncate()
for i in valid_transitions:
    db.config_workflow.insert(trigger=i['trigger'], source=i['source'],
                              dest=i['dest'], after=i['after'])
db.config_wfstate.insert(name='start')
db.config_wfstate.insert(name='proc')
db.config_wfstate.insert(name='finish')
db.commit()

# Open the SQLite store, declare the two config tables, and seed them.
db = DAL(uri='sqlite://temp.db', folder='db')
db.define_table('config_workflow', Field('trigger'), Field('source'),
                Field('dest'), Field('after'))
db.define_table('config_wfstate', Field('name'))
db_config_init(db)
flow = db(db.config_workflow).select()
#state = db(db.config_wfstate).select()
state=['start','proc','finish']  # hard-coded instead of the commented select above
# Drive the Mincer state machine through a full cycle:
# start -> proc -> finish -> proc -> finish.
m = Mincer(valid_states=state, valid_transitions=flow, initial='start')
print state
print flow
print m.state
m.begin()
m.end()
m.reprocess()
m.end()
def testRun(self):
    """A connection-less DAL (DAL(None)) still exposes Field objects via
    both attribute and item access on a defined table."""
    db = DAL(None)
    db.define_table('no_table', Field('aa'))
    for accessor in (db.no_table.aa, db.no_table['aa']):
        self.assertIsInstance(accessor, Field)
    db.close()
# NOTE(review): standalone Python 2 script (print statement below) that maps
# an astronomy 'trans' MySQL schema with pydal; the chunk is truncated at the
# end, mid-way through the 'devices' table definition.
from pydal import DAL, Field
import os
import sys

db = None
try:
    # Keep pydal's migration metadata in ./tables next to this file.
    table_folder = os.path.join(os.path.dirname(__file__), 'tables')
    # Credentials are masked in this copy of the source.
    db = DAL('mysql://*****:*****@localhost/trans', entity_quoting=True,
             fake_migrate=True, folder=table_folder)
    #mdb.connect('localhost', 'trans', 'transFTW!', 'trans', cursorclass=mdb.cursors.DictCursor)
except Exception as e:
    print 'Error %s' % (e,)
    sys.exit(1)

# Transient sources: name plus sky position (RA/Dec in degrees, presumably --
# TODO confirm units against the data producer).
db.define_table('transients',
                Field('name', 'string', notnull=True),
                Field('type', 'string'),
                Field('ra', 'double', notnull=True),
                Field('dec', 'double', notnull=True)
                )
# db.executesql('ALTER TABLE transients ADD INDEX (name)')
# Per-observation measurements linked back to a transient.
db.define_table('intensities',
                Field('intensity', 'double', notnull=True),
                Field('error', 'double', notnull=True),
                Field('sigma', 'double', notnull=True),
                Field('trans_id', 'reference transients', notnull=True),
                Field('detected_time', 'integer', notnull=True)
                )
db.define_table('devices',
                Field('id', 'string', notnull=True),
                # NOTE(review): remaining fields of 'devices' are cut off here.
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Minimal CRUD front-end for the 'pagos' (payments) keyed table."""
import os

from core.crud import crud
from pydal import DAL, Field

# Alternative MySQL backend, kept for reference:
##db = DAL('mysql://*****:*****@192.168.0.200/fasa', migrate=False)
db = DAL('sqlite://' + os.path.join('modelos', 'pyfactura.db'), migrate=False)

# Keyed table: 'pago' is the primary key, so no auto-id column is created.
db.define_table(
    'pagos',
    Field('pago', 'string', length=2, required=True),
    Field('detalle', 'string', length=40, required=True),
    Field('dia', 'date', length=8),
    Field('des1', 'decimal(12,2)', length=10),
    primarykey=['pago'],
)

# Column-layout metadata consumed by the crud() screen.
formato = {
    'pago': {'width': 30, 'text': 'Pagos', 'id': True},
}


def main():
    """Launch the CRUD screen for the 'pagos' table."""
    crud(tabla=db.pagos, basedatos=db, formato=formato)


if __name__ == '__main__':
    main()
"""Minimal pydal demo: create a table, insert a row, query it back."""
from pydal import DAL, Field

db = DAL('sqlite://storage.db')
db.define_table('thing', Field('name'))
db.thing.insert(name='Chair')

# All names starting with 'C'.
query = db.thing.name.startswith('C')
rows = db(query).select()
# Fix: Python 2 print statement -> print() call; with a single argument this
# behaves identically on Python 2 and parses on Python 3.
print(rows[0].name)
db.commit()
"""Seed an in-memory persons table and export it (fully and filtered) as CSV."""
from pydal import DAL, Field

db = DAL('sqlite:memory:')
db.define_table('persons',
                Field('name'),
                Field('age'))

# Seed data; the returned ids are kept for parity with the original script.
amy = db.persons.insert(name='Amy', age=52)
bob = db.persons.insert(name='Bob', age=48)
cat = db.persons.insert(name='Cat', age=23)
dan = db.persons.insert(name='Dan', age=17)
edd = db.persons.insert(name='Edd', age=77)
fan = db.persons.insert(name='Fan', age=65)
gin = db.persons.insert(name='Gin', age=27)
hil = db.persons.insert(name='Hil', age=30)
iri = db.persons.insert(name='Iri', age=62)
jac = db.persons.insert(name='Jac', age=18)
db.commit()

# Export the whole 'persons' table (Rows render as CSV via str()).
# Fix: the files receive str data, so they must be opened in text mode;
# mode 'wb' raises TypeError on Python 3 when writing str.
with open('persons.csv', 'w') as f:
    f.write(str(db(db.persons.id).select()))

# Export only people aged 30 or under.
with open('young-people.csv', 'w') as f:
    young = db(db.persons.age <= 30).select()
    f.write(str(young))
    # NOTE(review): this chunk starts at method level -- the enclosing class
    # header (presumably ``class Point2D`` with an __init__ setting self.x and
    # self.y) is missing from this view; method indentation is reconstructed.
    def as_dict(self):
        """Return the point as {'x': ..., 'y': ...} -- suitable for
        passing as **kwargs to db.point2d.insert below."""
        return dict(x=self.x, y=self.y)

    def __repr__(self):
        """Debug representation, e.g. <Point2D x:10, y:10>."""
        return '<Point2D x:%s, y:%s>' % (self.x, self.y)

# --- module-level demo script ---
db = DAL('sqlite:memory:')
db.define_table('point2d',
                Field('x', type='double'),
                Field('y', type='double'))

# The four corners of a 10x10 axis-aligned square.
p1 = Point2D(10, 10)
p2 = Point2D(10, 20)
p3 = Point2D(20, 20)
p4 = Point2D(20, 10)

db.point2d.insert(**p1.as_dict())
db.point2d.insert(**p2.as_dict())
db.point2d.insert(**p3.as_dict())
db.point2d.insert(**p4.as_dict())

# Select the two points on the x == 10 edge.
query = db.point2d.x == 10
rows = db(query).select()
def model():
    """Open the local pin database and return (db, pin_table)."""
    database = DAL('sqlite://pin.db', pool_size=1, folder='./', migrate=False)
    pin_table = database.define_table('pin', Field('title'), Field('image'))
    return (database, pin_table)
# NOTE(review): this chunk starts mid-call -- these first two Fields belong
# to a db.define_table(...) whose opening is above this view.
       # NOTE(review): default=datetime.now() is evaluated once at import
       # time, fixing the timestamp; passing the callable datetime.now was
       # probably intended -- confirm before changing.
       Field('created_on', 'datetime', default=datetime.now()),
       Field('updated_on', 'datetime', update=datetime.now()))

db.define_table("property",
                # for debugging
                Field("link", unique=True),
                Field("headtext"),
                Field("pricetext"),
                Field("address"),
                Field("m2m", 'integer'),
                Field("m2f", 'integer'),
                # data for analysis
                Field('forsale', 'boolean'),
                Field('type'),
                Field('bedrooms', 'integer'),
                Field('receptions', 'integer'),
                Field('town'),
                Field('postcode'),
                Field('sector'),
                Field('district'),
                Field('lat', 'double'),
                Field('lng', 'double'),
                Field('price', 'integer'),
                Field('rentpppw', 'integer'),
                Field('m2', 'integer'),
                Field('priceperbed', 'integer'),
                # 'signature' is defined elsewhere in the file -- presumably a
                # shared audit-fields Table; verify against the full source.
                signature)

def from_query(query, **args):
    """ read pydal db to df """
    # NOTE(review): the body of this function is cut off at the end of this
    # chunk.
class DNSAPI(object):
    """Persistence layer so dockerlab-dns knows how to read your records.

    Wraps a pydal PostgreSQL connection and exposes CRUD helpers for DNS
    zones and their records. Use this class to interact with the database
    or just use it as an example.
    """

    def __init__(self, db_user, db_pass, db_host, db_name, migrate=False):
        super(DNSAPI, self).__init__()
        self.db = DAL(
            "postgres://%s:%s@%s/%s" % (db_user, db_pass, db_host, db_name),
            migrate=migrate,
        )
        if self.db:
            # Fix: print statement -> print() call so the module also parses
            # on Python 3 (single argument: identical output on Python 2).
            print('Successfully connected to db "%s" on host "%s"' % (db_name, db_host))
        self.db.define_table(
            "dns_zones",
            # Zone names end in '.' (e.g. example.com.); input should
            # probably have a validator to ensure zones end in a '.'.
            Field("name", "string"),
        )
        self.db.define_table(
            "dns_zone_records",
            Field("zone", "reference dns_zones"),
            Field("record_name", "string"),  # e.g. ns1.example.com.
            # e.g. A, AAAA, CNAME, MX, NS
            Field("record_type", "string", default="A", requires=IS_IN_SET(RECORD_TYPES)),
            # e.g. an IP for A/AAAA, an address for CNAME, address+priority for MX
            Field("record_value", "string"),
            # TTL in seconds before a client should check for a new value;
            # tune lower/higher depending on the volatility of the records.
            Field("record_ttl", "integer", default=60 * 5),
        )

    def get_zones(self):
        """Return every zone as a list of dicts."""
        return self.db(self.db.dns_zones).select().as_list()

    def get_zone(self, zone_name):
        """Return the zone rows whose name equals *zone_name*."""
        return self.db(self.db.dns_zones.name == zone_name).select().as_list()

    def get_records(self, zone_id):
        """Return all records belonging to the zone with id *zone_id*."""
        return self.db(self.db.dns_zone_records.zone == zone_id).select().as_list()

    def get_records_matching(self, zone_id, record_name=None, record_type=None):
        """Return a zone's records, optionally filtered by name and/or type."""
        q = self.db.dns_zone_records.zone == zone_id
        if record_name is not None:
            q = q & (self.db.dns_zone_records.record_name == record_name)
        if record_type is not None:
            q = q & (self.db.dns_zone_records.record_type == record_type)
        return self.db(q).select().as_list()

    def delete_zone(self, zone_name):
        """Delete all zones named *zone_name* and their records.

        Fix: the original called ``self.db.dns_zones(<query>).delete()`` --
        ``Table.__call__`` fetches a single row (or None), so it cannot
        delete a record set and would fail at runtime. Delete the child
        records explicitly, then the zone rows.
        """
        zone_rows = self.db(self.db.dns_zones.name == zone_name).select(self.db.dns_zones.id)
        zone_ids = [row.id for row in zone_rows]
        if zone_ids:
            self.db(self.db.dns_zone_records.zone.belongs(zone_ids)).delete()
        return self.db(self.db.dns_zones.name == zone_name).delete()

    def delete_record(self, record_id):
        """Delete the single record with id *record_id*."""
        return self.db(self.db.dns_zone_records.id == record_id).delete()

    def delete_record_matching(self, zone_id, record_name, record_type):
        """Delete a zone's records matching both name and type."""
        return self.db(
            (self.db.dns_zone_records.zone == zone_id)
            & (self.db.dns_zone_records.record_name == record_name)
            & (self.db.dns_zone_records.record_type == record_type)
        ).delete()

    def create_zone(self, zone_name):
        """Insert a new zone; returns the new zone id."""
        return self.db.dns_zones.insert(name=zone_name)

    def create_record(self, zone_id, record_name, record_type, record_value, record_ttl):
        """Insert a record under *zone_id*; returns the new record id."""
        return self.db.dns_zone_records.insert(
            zone=zone_id,
            record_name=record_name,
            record_type=record_type,
            record_value=record_value,
            record_ttl=record_ttl,
        )
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by Alex on 2016/11/30
"""Read-only pydal mapping of MySQL's information_schema.COLUMNS view."""
from pydal import DAL, Field
from ConfPars import DB_URL

sys_tab = DAL(DB_URL)
sys_tab.define_table('COLUMNS',
                     Field("TABLE_SCHEMA"),
                     Field("TABLE_NAME"),
                     Field("COLUMN_NAME"),
                     Field("IS_NULLABLE"),
                     Field("DATA_TYPE"),
                     Field("COLUMN_TYPE"),
                     primarykey=[],   # keyed table: suppress the auto 'id' column
                     migrate=False)   # system view already exists; never migrate

if __name__ == "__main__":
    # Fix: Python 2 print statement -> print() call; single argument, so the
    # output is identical on Python 2 and the module parses on Python 3.
    print(sys_tab(sys_tab.COLUMNS.TABLE_SCHEMA == "test").select())
# NOTE(review): this chunk begins inside the module docstring -- its opening
# triple quote is above this view -- and is cut off mid-comment at the end.
and info about xtopdf is at:
http://slides.com/vasudevram/xtopdf
or at:
http://slid.es/vasudevram/xtopdf
"""

# imports
from pydal import DAL, Field
from PDFWriter import PDFWriter

SEP = 60  # separator width; not used within this visible chunk

# create the database
db = DAL('sqlite://house_depot.db')

# define the table
db.define_table('furniture', \
    Field('id'),
    Field('name'),
    Field('quantity'),
    Field('unit_price')
    )

# insert rows into table
items = ( \
    (1, 'chair', 40, 50),
    (2, 'table', 10, 300),
    (3, 'cupboard', 20, 200),
    (4, 'bed', 30, 400)
    )
for item in items:
    # Positional tuple layout: (id, name, quantity, unit_price).
    db.furniture.insert(id=item[0], name=item[1],
                        quantity=item[2], unit_price=item[3])

# define the query
query = db.furniture
# the above line shows an interesting property of PyDAL; it seems to
# NOTE(review): the rest of this comment and script continues past the end
# of this chunk.