from time import time

from pydal import DAL, Field

# Cache base class; import path assumed from the docstring below.
from c9r.file.cache import Cache


class DBCache(Cache):
    '''An implementation of the c9r.file.cache.FileCache, backed by a database via pyDAL.'''
    defaults = {
        'db': 'sqlite://cache.db',  # Database URL
        # 'window' (cache lifetime in seconds) is expected from the loaded configuration.
    }
    def_conf = ['~/.etc/cache-conf.json']

    def clear(self, clear_all=True):
        '''Remove the entire cache content, or only the expired entries.'''
        db = self.db
        # 'vars' is a keyed table with no 'id' field, so truncate for the
        # clear-all case and delete by expiry otherwise.
        if clear_all:
            db.vars.truncate()
        else:
            db(db.vars.expires < time()).delete()

    def clear_cache(self, vset, names):
        '''Remove the named entries from the given variable set.'''
        db = self.db
        db((db.vars.varset == vset) & db.vars.name.belongs(names)).delete()

    def get(self, vset, name):
        '''Get a named datum from this cache; None if missing or expired.'''
        db = self.db
        # An entry is valid only while its expiry lies in the future.
        rows = db((db.vars.varset == vset) & (db.vars.name == name) &
                  (db.vars.expires > time())).select()
        return rows[0].value if rows else None

    def put(self, vset, name, data):
        '''Save the given data into the cache under the given set and name.'''
        self.db.vars.insert(varset=vset, name=name, value=data,
                            expires=int(time()) + self.window)

    def __init__(self, conf=[], initconf=None):
        '''Open the database and define the cache tables.'''
        Cache.__init__(self, conf, initconf)
        self.db = DAL(self.config('db'))
        self.db.define_table('varset', Field('name'))
        self.db.define_table('vars',
                             Field('name'),
                             Field('value', 'json'),
                             Field('varset'),
                             Field('expires', 'integer'),
                             primarykey=['varset', 'name'])
        self.window = int(self.config('window'))
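# A minimal standalone sketch of the same expiring-cache pattern with plain
# pyDAL, without the c9r Cache base class. The table and field names below
# are illustrative assumptions, not part of the snippet above.
from time import time

from pydal import DAL, Field

cache_db = DAL('sqlite:memory:')
cache_db.define_table('entries',
                      Field('name'),
                      Field('value', 'json'),
                      Field('expires', 'integer'))


def cache_put(name, value, window=300):
    # Store the value with an absolute expiry timestamp.
    cache_db.entries.insert(name=name, value=value, expires=int(time()) + window)


def cache_get(name):
    # Only entries whose expiry lies in the future are still valid.
    rows = cache_db((cache_db.entries.name == name) &
                    (cache_db.entries.expires > time())).select()
    return rows[0].value if rows else None


cache_put('greeting', {'text': 'hello'})
print(cache_get('greeting'))  # {'text': 'hello'}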
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.223245', None)
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 12)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.223245Z', None)
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 12)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.223245-2:0', None)
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 10)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36+1:0', None)
    self.assertEqual(dt.microsecond, 0)
    self.assertEqual(dt.hour, 13)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.123', None)
    self.assertEqual(dt.microsecond, 123000)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.00123', None)
    self.assertEqual(dt.microsecond, 1230)
    dt = db._adapter.parsemap['datetime']('2015-09-04t12:33:36.1234567890', None)
    self.assertEqual(dt.microsecond, 123456)
    db.close()
from os import path

from pydal import DAL, Field


class dbgetter:
    def __init__(self, dbchoice=None):
        self.db = DAL('sqlite://storage.db', folder=path.join('../database'))
        self.hasdb = dbchoice is not None
        self.mydb = None
        if self.hasdb:
            if dbchoice in ('trenddates', 'trends'):
                self.mydb = self.db.define_table('trenddates',
                                                 Field('trend', 'text'),
                                                 Field('date', 'date'))
            elif dbchoice == 'words':
                self.mydb = self.db.define_table('words',
                                                 Field('word', 'string'),
                                                 Field('freq', 'integer'),
                                                 Field('dates', 'list:string'))
            else:
                raise NameError("Your options from dbgetter are 'trends' or 'words'.")
        else:
            raise NameError(
                "You must provide a database choice from dbgetter ('trends' or 'words').")

    def getInfo(self, data):
        toRet = []
        dbdata = self.db(self.mydb).select().as_list()
        for d in dbdata:
            toRet.append(d.get(data))
        return toRet

    def getTable(self):
        return self.db(self.mydb).select()

    def close(self):
        self.db.close()
def __init__(self):
    self.db = DAL('sqlite://ccaccounts.db')
    self.db.define_table('accounts',
                         Field('name'),
                         Field('account_number', 'integer'),
                         Field('balance', 'integer'),
                         Field('balance_limit', 'integer'))
    self.log = Logger()
def indexx():
    import os.path
    from pydal import DAL

    # Build the output path under this application's models folder.
    models_dir = os.path.join(os.getcwd(), 'applications', request.application, 'models')
    outfile = os.path.join(models_dir, 'database_registry.py.out')

    # Register every database found on the server in the registry.
    db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
    results = db.executesql('select * from sys.databases')
    with open(outfile, 'w') as f:
        for row in results:
            f.write("%s\n" % str(row.name))
            registerx(row.name,
                      'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)
    return DBREG
def __new__(cls):
    dalString = 'sqlite://Agenda.db'
    db = DAL(dalString, migrate=True)
    db.define_table('users',
                    Field('name', 'string', unique=True),
                    Field('age', 'integer'),
                    Field('weight', 'string'),
                    Field('cell', 'string'),
                    Field('id', type='id'))
    return db
class DyTables(object): def __init__(self, uri=None): self._uri = uri self._schema = uri.split("/")[-1] self._dal = DAL(self._uri) self._datatapy_dict = datatype_mysql() self.get_tables() def get_tables(self): _tables = GetAllTables(uri="/".join(self._uri.split("/")[:-1]) + "/information_schema", schema=self._schema) for numb, table in enumerate(_tables): fields = [] for field in _tables.get(table): try: fields.append(Field(field[0], self._datatapy_dict[field[1]])) except SyntaxError: fields.append(Field("r_" + field[0], self._datatapy_dict[field[1]], rname=field[0])) self._dal.define_table(table, *fields, primarykey=[], migrate=False) def get_db(self): return self._dal
from datetime import date
from os import path

from pydal import DAL, Field


class trenddb:
    def __init__(self):
        self.db = DAL('sqlite://storage.db', folder=path.join("../database"))
        try:
            self.db.define_table('trenddates', Field('trend', 'text'), Field('date', 'date'))
        except Exception:
            print("Trenddates exists...")
            if len(self.db(self.db.trenddates).select().as_list()) > 0:
                print("It has entries, somehow.")

    def add(self, trends, trenddate):
        if trends is None:
            return
        d = date(int(trenddate[:4]), int(trenddate[5:7]), int(trenddate[8:]))
        with open('runningTrends.txt', 'a') as f:
            for t in trends:
                self.db.trenddates.insert(trend=t, date=d)
                f.write(str(d) + ": " + t)
                f.write("\n")

    def addParsed(self, trends, trenddate):
        d = date(int(trenddate[:4]), int(trenddate[5:7]), int(trenddate[8:]))
        for t in trends:
            self.db.trenddates.insert(trend=t, date=d)

    def end(self):
        self.db.close()
import logging
from typing import List

from pydal import DAL, Field
from pydal.objects import Table


class CloudSqlModel:
    """
    Class for abstracting the model of MySQL, PostgreSQL and SQLite on CloudSQL.
    """

    session: DAL = None

    def __init__(self, uri: str, migrate: bool = False):
        """Constructor"""
        self.session = DAL(uri, migrate=migrate)

    @staticmethod
    def get_fields(columns: list) -> List[Field]:
        """Get a list of pydal Field objects."""
        # _LOG_PREFIX_ is assumed to be defined at module level elsewhere.
        logging.info("%s Get table fields %s", _LOG_PREFIX_, columns)
        return [Field(i) for i in columns]

    def get_table(self, table_name: str, columns: list) -> Table:
        """Get a pydal table, defining it on first use."""
        logging.info("%s Generate table with %s", _LOG_PREFIX_, table_name)
        columns = self.get_fields(columns)
        if table_name not in self.session.tables:
            self.session.define_table(table_name, *columns)
        return self.session[table_name]
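# Hedged usage sketch for CloudSqlModel above: open an in-memory SQLite
# database, define a table from plain column names, and insert a row. The URI
# and names are illustrative assumptions, and the module is assumed to define
# _LOG_PREFIX_ for the logging calls.
model = CloudSqlModel('sqlite:memory:', migrate=True)
users = model.get_table('users', ['name', 'email'])
users.insert(name='Ada', email='ada@example.com')
model.session.commit()
print(model.session(users).count())  # 1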
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    #: skip for adapters that use drivers for datetime parsing
    if db._adapter.parser.registered.get('datetime') is None:
        return
    parse = lambda v: db._adapter.parser.parse(v, 'datetime', 'datetime')
    dt = parse('2015-09-04t12:33:36.223245')
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 12)
    dt = parse('2015-09-04t12:33:36.223245Z')
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 12)
    dt = parse('2015-09-04t12:33:36.223245-2:0')
    self.assertEqual(dt.microsecond, 223245)
    self.assertEqual(dt.hour, 10)
    dt = parse('2015-09-04t12:33:36+1:0')
    self.assertEqual(dt.microsecond, 0)
    self.assertEqual(dt.hour, 13)
    dt = parse('2015-09-04t12:33:36.123')
    self.assertEqual(dt.microsecond, 123000)
    dt = parse('2015-09-04t12:33:36.00123')
    self.assertEqual(dt.microsecond, 1230)
    dt = parse('2015-09-04t12:33:36.1234567890')
    self.assertEqual(dt.microsecond, 123456)
    db.close()
class DbHelper(object): """docstring for DbHelper""" def __init__(self, arg): super(DbHelper, self).__init__() self.arg = arg self.db = DAL('mongodb://140.143.247.178:27099/spider') self.define_table() ''' self.db.thing.insert(name='Chair') query = self.db.thing.name.startswith('C') rows = self.db(query).select() print(rows[0].name) self.db.commit() ''' def define_table(self): print(self.db._dbname) self.db.define_table('douban_topic', Field('title'), Field('title_url'), Field('people'), Field('people_url'), Field('replay_num'), Field('post_time')) def insert_models(self, table_name='', items=[]): a = list(map(dict, items)) self.db.douban_topic.bulk_insert(a) self.db.commit()
def testRun(self): for ref, bigint in [("reference", False), ("big-reference", True)]: db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint) if bigint and "big-id" not in db._adapter.types: continue db.define_table("tt", Field("vv")) db.define_table( "ttt", Field("vv"), Field("tt_id", "%s tt" % ref, unique=True), Field("tt_uq", "integer", unique=True), ) id_1 = db.tt.insert(vv="pydal") id_2 = db.tt.insert(vv="pydal") # Null tt_id db.ttt.insert(vv="pydal", tt_uq=1) # first insert is OK db.ttt.insert(tt_id=id_1, tt_uq=2) self.assertRaises(Exception, db.ttt.insert, tt_id=id_1, tt_uq=3) self.assertRaises(Exception, db.ttt.insert, tt_id=id_2, tt_uq=2) # The following is mandatory for backends as PG to close the aborted transaction db.commit() drop(db.ttt) drop(db.tt) db.close()
def __new__(cls):
    # dalString = 'mongodb://localhost/leakManager'  # uncomment to use mongodb
    dalString = 'sqlite://leakManager.db'  # uncomment to use sqlite
    db = DAL(dalString, migrate=True)
    db.define_table('leaks',
                    Field('username'),
                    Field('email'),
                    Field('password'),
                    Field('database'))
    return db
def __init__(self, uri_db):
    self.log.info("creating instance of DBClient: {0}".format(uri_db))
    self.db = DAL(uri_db, migrate_enabled=False)
    self.wells = self.db.define_table('wells',
                                      Field('uuid'),
                                      Field('project_uuid'),
                                      Field('well_name'),
                                      Field('uwi'),
                                      Field('created_at'),
                                      Field('modified_at'),
                                      primarykey=['uuid'])
    self.clients = self.db.define_table('clients',
                                        Field('uuid'),
                                        Field('company_name'),
                                        Field('created_at'),
                                        Field('modified_at'),
                                        primarykey=['uuid'])
    self.projects = self.db.define_table('projects',
                                         Field('uuid'),
                                         Field('client_uuid'),
                                         Field('name'),
                                         Field('created_at'),
                                         Field('modified_at'),
                                         Field('basin'),
                                         Field('shapefile'),
                                         primarykey=['uuid'])
def find_course(self, course_name=None, class_id=None, instructor=None):
    db = DAL('sqlite://courses.db', folder='dbs')
    db.define_table('courses',
                    Field('class_id', type='integer'),
                    Field('class_name'),
                    Field('date_time'),
                    Field('descriptive_link'),
                    Field('enrolled'),
                    Field('instructor'),
                    Field('link_sources'),
                    Field('location'),
                    Field('status'))
    if course_name is not None:
        name_query = db.courses.class_name.like('%' + course_name + '%')
        rows = db(name_query).select()
        # Queries must be combined with '&', not the Python 'and' operator,
        # which would silently discard the first condition.
        if len(rows) > 0 and class_id is not None:
            return db(name_query & (db.courses.class_id == class_id)).select().first()
        elif len(rows) > 0 and instructor is not None:
            return db(name_query & (db.courses.instructor == instructor)).select().first()
        else:
            return rows.first()
    elif class_id is not None:
        rows = db(db.courses.class_id == class_id).select()
        return rows.first()
    else:
        return None
def connect(self):
    if not self.db:
        self.db = DAL(self.uri, folder=self.folder, pool_size=5, lazy_tables=False)
        # Alternative flags: migrate_enabled=False, migrate=False, lazy_tables=True
        self.tables()
    else:
        print("Error: db already open")
def __init__(self):
    self.migrate = True
    if os.path.exists(os.path.abspath('database/banco.db')):
        self.migrate = False
    self.DATABASE_TYPE = 'sqlite://'
    self.DATABASE = self.DATABASE_TYPE + os.path.abspath('database/banco.db')
    self.db = DAL(self.DATABASE, migrate=self.migrate)
class Db:
    def __init__(self):
        self.uri = 'sqlite://database.sqlite'
        self.folder = os.path.join(current_dir, "sqlite")
        self.db = None

    def connect(self):
        if not self.db:
            self.db = DAL(self.uri, folder=self.folder, pool_size=5, lazy_tables=False)
            # Alternative flags: migrate_enabled=False, migrate=False, lazy_tables=True
            self.tables()
        else:
            print("Error: db already open")

    def close(self):
        self.db.close()
        self.db = None

    def tables(self):
        self.tableUsers()

    def tableUsers(self):
        try:
            # Note: id is created automatically if omitted.
            self.db.define_table(
                'users',
                Field('username', type='string'),
                Field('email', type='string'),
                Field('password', type='string'),
                Field('uid', type='integer', default=0),
                Field('userRole', type='string', default='user'),  # user, admin
                Field('secureKey', type='string', default='0', writable=False,
                      readable=False),  # secret key token - for server-side use
                Field('dateRegistration', type='datetime', writable=False,
                      readable=False))
        except Exception:
            print('models db.DB() tableUsers Error')

    def tableLogs(self):
        pass

    def insertUser(self, username, email, password, dateRegistration):
        self.connect()
        import secrets  # generate a URL-safe token
        secureKey = secrets.token_urlsafe()
        self.db.users.insert(username=username, email=email, password=password,
                             uid=0, userRole="user", secureKey=secureKey,
                             dateRegistration=dateRegistration)
        self.db.commit()
        self.db.close()

    def loginUser(self, username, password):
        self.connect()
        self.db._adapter.reconnect()  # gives a thread error without this
        user = self.db((self.db.users.username == username) &
                       (self.db.users.password == password)).select().first()
        self.db.close()
        # If there is no matching user, this returns None.
        return user
def __new__(cls):
    dalString = 'sqlite://encryptandshare.db'  # uncomment to use sqlite
    # dalString = 'mysql://*****:*****@127.0.0.1/encryptandshare'  # uncomment to use mysql
    # db = DAL(dalString, fake_migrate_all=True)
    db = DAL(dalString, migrate=True)
    db.define_table('files',
                    Field('fname'),
                    Field('limit'),
                    Field('status', 'boolean'),
                    Field('fid'))
    # db.commit()
    return db
def init_db(path='sqlite://storage.db'):
    '''Connect to DB'''
    global db
    db = DAL(path)
    db = create_tables(db)
    logger.error('*******' + str(db))
    db.commit()
    return db
def __new__(cls):
    dbPath = os.path.abspath(os.path.join('data', 'storage.db'))
    path = 'sqlite://{0}'.format(dbPath)
    migrate = True
    if os.path.exists(dbPath):
        migrate = False
    db = DAL(path, migrate=migrate)
    db.define_table('servers', Field('url'))
    return db
def model():
    db = DAL('sqlite://todo.db', pool_size=1, folder='./')  # , migrate=False
    Todos = db.define_table('todos',
                            Field('title'),
                            Field('isCompleted', 'boolean', default=False))
    if not db(db.todos).count():
        for i in range(1, 16):
            # 'الموعد' is Arabic for 'appointment'.
            db.todos.insert(title='الموعد ' + str(i))
        db.commit()
    return db
def testRun(self): db = DAL(DEFAULT_URI, check_reserved=["all"], entity_quoting=True) db.define_table("tt", Field("aa"), Field("bb", "boolean")) sql = db._adapter.dialect.create_index("idx_aa_f", db.tt, [db.tt.aa], where=str(db.tt.bb == False)) self.assertEqual(sql, 'CREATE INDEX "idx_aa_f" ON "tt" ("aa") WHERE ("tt"."bb" = \'F\');') rv = db.tt.create_index("idx_aa_f", db.tt.aa, where=(db.tt.bb == False)) self.assertTrue(rv) rv = db.tt.drop_index("idx_aa_f") self.assertTrue(rv) drop(db.tt)
def create(self):
    self.migrate = True
    if os.path.exists(os.path.abspath(self.dbPath)):
        self.migrate = False
    self.DATABASE_TYPE = 'sqlite://'
    self.DATABASE = self.DATABASE_TYPE + os.path.abspath(self.dbPath)
    self.db = DAL(self.DATABASE, migrate=self.migrate)
    self.db.define_table('log', Field('date'), Field('time'), Field('phrase'))
    return self.db
def testRun(self): db = DAL(DEFAULT_URI, check_reserved=["all"]) db.define_table("tt", Field("aa")) rv = db.tt.create_index("idx_aa", db.tt.aa) self.assertTrue(rv) rv = db.tt.drop_index("idx_aa") self.assertTrue(rv) with self.assertRaises(Exception): db.tt.drop_index("idx_aa") db.rollback() drop(db.tt)
def __init__(self):
    self.db = DAL('sqlite://caldav.db',
                  folder=BaseDirectory.save_data_path('pyhttpserver'))
    self.db.define_table('collections',
                         Field('url', type='string', required=True, unique=True),
                         Field('displayname', type='string'),
                         Field('subscribed', type='boolean', default=True))
    self.db.define_table('colitems',
                         Field('href', type='string', required=True),
                         Field('etag', type='string'),
                         Field('collection', type='reference collections'),
                         Field('content', type='blob'),
                         Field('local_status', type='integer', default=0))
    self.davStorages = {}
def __init__(self):
    self.config = Configuration().get_db_params()
    self.db = DAL(
        self.config.uri,
        pool_size=self.config.pool_size,
        migrate_enabled=self.config.migrate,
        check_reserved=['all'],
        folder=self.config.folder if self.config.folder else 'database',
        # adapter_args=dict(migrator=InDBMigrator)
    )
    self.define_tables()
def testRun(self):
    for ref, bigint in [('reference', False), ('big-reference', True)]:
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref, notnull=True))
        self.assertRaises(Exception, db.ttt.insert, vv='pydal')
        # The following is mandatory for backends as PG to close the aborted transaction
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def get_sys_table(uri="mysql://*****:*****@192.168.1.110/information_schema"):
    sys_tab = DAL(uri=uri)
    sys_tab.define_table('COLUMNS',
                         Field("TABLE_SCHEMA"),
                         Field("TABLE_NAME"),
                         Field("COLUMN_NAME"),
                         Field("IS_NULLABLE"),
                         Field("DATA_TYPE"),
                         Field("COLUMN_TYPE"),
                         primarykey=[], migrate=False)
    return sys_tab
def setup_database(data_dir, sub_dir, db_file, tables):
    # Figure out where our database should live.
    db_folder = Path(data_dir, sub_dir).absolute()
    db_folder.mkdir(exist_ok=True, parents=True)
    # Build the sqlite URI from db_file; pydal resolves the relative filename
    # inside the folder passed as a plain string.
    db = DAL(f'sqlite://{db_file}', folder=str(db_folder))
    for table_name in tables:
        db.define_table(
            table_name,
            *[Field(f, tables[table_name][f]) for f in tables[table_name]],
            migrate=f'{table_name}.migrate')
    return db
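# Hedged usage example for setup_database above: the directory names and the
# table spec are illustrative assumptions. Each table maps field names to
# pydal field types.
tables = {
    'notes': {'title': 'string', 'body': 'text'},
    'tags': {'label': 'string'},
}
db = setup_database('data', 'cache', 'store.sqlite', tables)
db.notes.insert(title='hello', body='first note')
db.commit()
print(db(db.notes).count())  # 1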
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('vv'))
    db.define_table('ttt', Field('vv'), Field('tt_id', 'reference tt', notnull=True))
    self.assertRaises(Exception, db.ttt.insert, vv='pydal')
    # The following is mandatory for backends as PG to close the aborted transaction
    db.commit()
    drop(db.ttt)
    drop(db.tt)
    db.close()
def setUp(self):
    db = DAL('sqlite:memory')
    db.define_table('color', Field('name', requires=IS_NOT_IN_DB(db, 'color.name')))
    db.color.insert(name='red')
    db.color.insert(name='green')
    db.color.insert(name='blue')
    db.define_table('thing', Field('name'), Field('color', 'reference color'))
    db.thing.insert(name='Chair', color=1)
    db.thing.insert(name='Chair', color=2)
    db.thing.insert(name='Table', color=1)
    db.thing.insert(name='Table', color=3)
    db.thing.insert(name='Lamp', color=2)
    db.define_table('rel',
                    Field('a', 'reference thing'),
                    Field('desc'),
                    Field('b', 'reference thing'))
    db.rel.insert(a=1, b=2, desc='is like')
    db.rel.insert(a=3, b=4, desc='is like')
    db.rel.insert(a=1, b=3, desc='is under')
    db.rel.insert(a=2, b=4, desc='is under')
    db.rel.insert(a=5, b=4, desc='is above')
    api = DBAPI(db, ALLOW_ALL_POLICY)
    self.db = db
    self.api = api
def setUp(self): db = DAL("sqlite:memory") db.define_table("color", Field("name", requires=IS_NOT_IN_DB(db, "color.name"))) db.color.insert(name="red") db.color.insert(name="green") db.color.insert(name="blue") db.define_table("thing", Field("name"), Field("color", "reference color")) db.thing.insert(name="Chair", color=1) db.thing.insert(name="Chair", color=2) db.thing.insert(name="Table", color=1) db.thing.insert(name="Table", color=3) db.thing.insert(name="Lamp", color=2) db.define_table( "rel", Field("a", "reference thing"), Field("desc"), Field("b", "reference thing"), ) db.rel.insert(a=1, b=2, desc="is like") db.rel.insert(a=3, b=4, desc="is like") db.rel.insert(a=1, b=3, desc="is under") db.rel.insert(a=2, b=4, desc="is under") db.rel.insert(a=5, b=4, desc="is above") api = RestAPI(db, ALLOW_ALL_POLICY) self.db = db self.api = api
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    t1 = db.define_table('t1', Field('int_level', requires=IS_INT_IN_RANGE(1, 5)))
    i_response = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=1)
    u_response = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=2)
    e_response = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=6)
    self.assertTrue(i_response.id != None)
    self.assertTrue(u_response.id != None)
    self.assertTrue(e_response.id == None and len(e_response.errors.keys()) != 0)
    self.assertTrue(db(t1).count() == 1)
    self.assertTrue(db(t1.int_level == 1).count() == 0)
    self.assertTrue(db(t1.int_level == 6).count() == 0)
    self.assertTrue(db(t1.int_level == 2).count() == 1)
    db.t1.drop()
    return
def testRun(self):
    cache = SimpleCache()
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('aa'))
    db.tt.insert(aa='1')
    r0 = db().select(db.tt.ALL)
    r1 = db().select(db.tt.ALL, cache=(cache, 1000))
    self.assertEqual(len(r0), len(r1))
    r2 = db().select(db.tt.ALL, cache=(cache, 1000))
    self.assertEqual(len(r0), len(r2))
    r3 = db().select(db.tt.ALL, cache=(cache, 1000), cacheable=True)
    self.assertEqual(len(r0), len(r3))
    r4 = db().select(db.tt.ALL, cache=(cache, 1000), cacheable=True)
    self.assertEqual(len(r0), len(r4))
    drop(db.tt)
def __init__(self, db_user, db_pass, db_host, db_name, migrate=False):
    super(DNSAPI, self).__init__()
    self.db = DAL("postgres://%s:%s@%s/%s" % (db_user, db_pass, db_host, db_name),
                  migrate=migrate)
    if self.db:
        print('Successfully connected to db "%s" on host "%s"' % (db_name, db_host))
    self.db.define_table(
        "dns_zones",
        # Zone names end in '.' (e.g. example.com.); input should probably
        # have a validator to ensure zones end in a '.'.
        Field("name", "string"),
    )
    self.db.define_table(
        "dns_zone_records",
        Field("zone", "reference dns_zones"),
        Field("record_name", "string"),  # e.g. ns1.example.com.
        # e.g. A, AAAA, CNAME, MX, NS
        Field("record_type", "string", default="A", requires=IS_IN_SET(RECORD_TYPES)),
        # e.g. an IP for A or AAAA, an address for CNAME, and an address and
        # priority for MX
        Field("record_value", "string"),
        # A TTL in seconds before a client should check for a new value. Can
        # reasonably be set lower or higher depending on the volatility of
        # the records.
        Field("record_ttl", "integer", default=60 * 5),
    )
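# Hedged usage sketch for the DNSAPI tables above: the connection details are
# placeholders, and RECORD_TYPES is assumed to be defined in the same module.
# This inserts a zone and one NS record into the tables from __init__.
api = DNSAPI('dns', 'secret', 'localhost', 'dnsdb')
zone_id = api.db.dns_zones.insert(name='example.com.')
api.db.dns_zone_records.insert(zone=zone_id,
                               record_name='example.com.',
                               record_type='NS',
                               record_value='ns1.example.com.',
                               record_ttl=3600)
api.db.commit()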
def testRun(self):
    for ref, bigint in [('reference', False), ('big-reference', True)]:
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
        if bigint and 'big-id' not in db._adapter.types:
            continue
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref, notnull=True))
        self.assertRaises(Exception, db.ttt.insert, vv='pydal')
        # The following is mandatory for backends as PG to close the aborted transaction
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def testRun(self): db = DAL(DEFAULT_URI, check_reserved=["all"], entity_quoting=True) db.define_table("tt", Field("aa"), Field("bb", "datetime")) sql = db._adapter.dialect.create_index("idx_aa_and_bb", db.tt, [db.tt.aa, db.tt.bb.coalesce(None)]) with db._adapter.index_expander(): coalesce_sql = str(db.tt.bb.coalesce(None)) expected_sql = "CREATE INDEX %s ON %s (%s,%s);" % ( db._adapter.dialect.quote("idx_aa_and_bb"), db.tt.sqlsafe, db.tt.aa.sqlsafe_name, coalesce_sql, ) self.assertEqual(sql, expected_sql) rv = db.tt.create_index("idx_aa_and_bb", db.tt.aa, db.tt.bb.coalesce(None)) self.assertTrue(rv) rv = db.tt.drop_index("idx_aa_and_bb") self.assertTrue(rv) drop(db.tt)
def testRun(self):
    for ref, bigint in [('reference', False), ('big-reference', True)]:
        db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
        db.define_table('tt', Field('vv'))
        db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref, unique=True))
        id_i = db.tt.insert(vv='pydal')
        # Null tt_id
        db.ttt.insert(vv='pydal')
        # first insert is OK
        db.ttt.insert(tt_id=id_i)
        self.assertRaises(Exception, db.ttt.insert, tt_id=id_i)
        # The following is mandatory for backends as PG to close the aborted transaction
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('val_and_insert',
                    Field('aa'),
                    Field('bb', 'integer', requires=IS_INT_IN_RANGE(1, 5)))
    rtn = db.val_and_insert.validate_and_insert(aa='test1', bb=2)
    if NOSQL:
        # 'long' is pydal._compat.long (aliased to int on Python 3).
        self.assertEqual(isinstance(rtn.id, long), True)
    else:
        self.assertEqual(rtn.id, 1)
    # errors should be empty
    self.assertEqual(len(rtn.errors.keys()), 0)
    # this insert won't pass validation
    rtn = db.val_and_insert.validate_and_insert(bb="a")
    # the returned id should be None
    self.assertEqual(rtn.id, None)
    # an error message should be in rtn.errors.bb
    self.assertNotEqual(rtn.errors.bb, None)
    # clean up the table
    drop(db.val_and_insert)
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('vv'))
    vv = 'ἀγοραζε'
    id_i = db.tt.insert(vv=vv)
    row = db(db.tt.id == id_i).select().first()
    self.assertEqual(row.vv, vv)
    db.commit()
    drop(db.tt)
    db.close()
class DbHelper(object): """docstring for DbHelper""" def __init__(self, arg): super(DbHelper, self).__init__() self.arg = arg self.db = DAL('mongodb://140.143.247.178:27099/spider') self.define_table() ''' self.db.thing.insert(name='Chair') query = self.db.thing.name.startswith('C') rows = self.db(query).select() print(rows[0].name) self.db.commit() ''' def define_table(self): print(self.db._dbname) self.db.define_table('douban_topic',Field('title'),Field('title_url'),Field('people'),Field('people_url') ,Field('replay_num'),Field('post_time')) def insert_models(self,table_name='',items=[]): a = list(map(dict,items)) self.db.douban_topic.bulk_insert(a) self.db.commit()
def testRun(self):
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('aa', Field('name'))
    db.define_table('bb', Field('aa', 'reference aa'), Field('name'))
    for k in ('x', 'y', 'z'):
        i = db.aa.insert(name=k)
        for j in ('u', 'v', 'w'):
            db.bb.insert(aa=i, name=k + j)
    db.commit()
    rows = db(db.aa).select()
    rows.join(db.bb.aa, fields=[db.bb.name], orderby=[db.bb.name])
    self.assertEqual(rows[0].bb[0].name, 'xu')
    self.assertEqual(rows[0].bb[1].name, 'xv')
    self.assertEqual(rows[0].bb[2].name, 'xw')
    self.assertEqual(rows[1].bb[0].name, 'yu')
    self.assertEqual(rows[1].bb[1].name, 'yv')
    self.assertEqual(rows[1].bb[2].name, 'yw')
    self.assertEqual(rows[2].bb[0].name, 'zu')
    self.assertEqual(rows[2].bb[1].name, 'zv')
    self.assertEqual(rows[2].bb[2].name, 'zw')
    rows = db(db.bb).select()
    rows.join(db.aa.id, fields=[db.aa.name])
    self.assertEqual(rows[0].aa.name, 'x')
    self.assertEqual(rows[1].aa.name, 'x')
    self.assertEqual(rows[2].aa.name, 'x')
    self.assertEqual(rows[3].aa.name, 'y')
    self.assertEqual(rows[4].aa.name, 'y')
    self.assertEqual(rows[5].aa.name, 'y')
    self.assertEqual(rows[6].aa.name, 'z')
    self.assertEqual(rows[7].aa.name, 'z')
    self.assertEqual(rows[8].aa.name, 'z')
    rows_json = rows.as_json()
    drop(db.bb)
    drop(db.aa)
    db.close()
from pydal import DAL, Field


class Point2D(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __add__(self, other):
        return Point2D(self.x + other.x,
                       self.y + other.y)

    def as_dict(self):
        return dict(x=self.x, y=self.y)

    def __repr__(self):
        return '<Point2D x:%s, y:%s>' % (self.x, self.y)


db = DAL('sqlite:memory:')
db.define_table('point2d',
                Field('x', type='double'),
                Field('y', type='double'))

p1 = Point2D(10, 10)
p2 = Point2D(10, 20)
p3 = Point2D(20, 20)
p4 = Point2D(20, 10)

db.point2d.insert(**p1.as_dict())
db.point2d.insert(**p2.as_dict())
db.point2d.insert(**p3.as_dict())
db.point2d.insert(**p4.as_dict())
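# A short follow-on query over the point2d table defined above: fetch the rows
# back, rebuild Point2D instances, and sum two of them with __add__.
points = [Point2D(r.x, r.y) for r in db(db.point2d).select()]
print(points[0] + points[2])  # <Point2D x:30.0, y:30.0>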
#!/usr/bin/python
# -*- coding: utf-8 -*-

import os

from core.crud import crud
from pydal import DAL, Field

# db = DAL('mysql://*****:*****@192.168.0.200/fasa', migrate=False)
db = DAL('sqlite://' + os.path.join('modelos', 'pyfactura.db'), migrate=False)

db.define_table('pagos',
                Field('pago', 'string', length=2, required=True),
                Field('detalle', 'string', length=40, required=True),
                Field('dia', 'date', length=8),
                Field('des1', 'decimal(12,2)', length=10),
                primarykey=['pago'])

formato = {'pago': {'width': 30, 'text': 'Pagos', 'id': True}}


def main():
    abm = crud(tabla=db.pagos, basedatos=db, formato=formato)


if __name__ == '__main__':
    main()
from pydal import DAL, Field

db = DAL('sqlite:memory:')

db.define_table('persons',
                Field('name'),
                Field('age'))

amy = db.persons.insert(name='Amy', age=52)
bob = db.persons.insert(name='Bob', age=48)
cat = db.persons.insert(name='Cat', age=23)
dan = db.persons.insert(name='Dan', age=17)
edd = db.persons.insert(name='Edd', age=77)
fan = db.persons.insert(name='Fan', age=65)
gin = db.persons.insert(name='Gin', age=27)
hil = db.persons.insert(name='Hil', age=30)
iri = db.persons.insert(name='Iri', age=62)
jac = db.persons.insert(name='Jac', age=18)
db.commit()

# Export the 'persons' table; str() of a Rows object is CSV, so open in text mode.
with open('persons.csv', 'w') as f:
    f.write(str(db(db.persons.id).select()))

# Export only the young persons.
with open('young-people.csv', 'w') as f:
    people = db(db.persons.age <= 30).select()
    f.write(str(people))
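# Round-trip sketch: pydal can read the exported CSV back in with
# Table.import_from_csv_file. This assumes the 'persons.csv' file written
# above and the same 'persons' table definition.
db.persons.truncate()
with open('persons.csv', 'r') as f:
    db.persons.import_from_csv_file(f)
db.commit()
print(db(db.persons).count())  # 10 rows restored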
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by Alex on 2016/11/30

from pydal import DAL, Field

from ConfPars import DB_URL

sys_tab = DAL(DB_URL)
sys_tab.define_table('COLUMNS',
                     Field("TABLE_SCHEMA"),
                     Field("TABLE_NAME"),
                     Field("COLUMN_NAME"),
                     Field("IS_NULLABLE"),
                     Field("DATA_TYPE"),
                     Field("COLUMN_TYPE"),
                     primarykey=[], migrate=False)

if __name__ == "__main__":
    print(sys_tab(sys_tab.COLUMNS.TABLE_SCHEMA == "test").select())
valid_transitions = [
    {'id': 1, 'trigger': 'begin', 'source': 'start', 'dest': 'proc',
     'after': 'increase_processings'},
    {'id': 2, 'trigger': 'end', 'source': 'proc', 'dest': 'finish',
     'after': 'noop'},
    {'id': 3, 'trigger': 'reprocess', 'source': 'finish', 'dest': 'proc',
     'after': 'increase_processings'},
]


def db_config_init(db):
    # Reload the workflow configuration tables from valid_transitions.
    db.config_workflow.truncate()
    db.config_wfstate.truncate()
    for i in valid_transitions:
        db.config_workflow.insert(trigger=i['trigger'], source=i['source'],
                                  dest=i['dest'], after=i['after'])
    db.config_wfstate.insert(name='start')
    db.config_wfstate.insert(name='proc')
    db.config_wfstate.insert(name='finish')
    db.commit()


db = DAL(uri='sqlite://temp.db', folder='db')
db.define_table('config_workflow', Field('trigger'), Field('source'),
                Field('dest'), Field('after'))
db.define_table('config_wfstate', Field('name'))
db_config_init(db)

flow = db(db.config_workflow).select()
# state = db(db.config_wfstate).select()
state = ['start', 'proc', 'finish']
m = Mincer(valid_states=state, valid_transitions=flow, initial='start')
print(state)
print(flow)
print(m.state)
m.begin()
m.end()
m.reprocess()
def model():
    db = DAL('sqlite://pin.db', pool_size=1, folder='./', migrate=False)
    Pin = db.define_table('pin', Field('title'), Field('image'))
    return (db, Pin)
from pydal import DAL, Field

db = DAL('sqlite://storage.db')
db.define_table('thing', Field('name'))
db.thing.insert(name='Chair')
query = db.thing.name.startswith('C')
rows = db(query).select()
print(rows[0].name)
db.commit()
"""
This program is a demo of how to use the PyDAL and xtopdf Python libraries
together to publish database data to PDF.
PyDAL is at: https://github.com/web2py/pydal/blob/master/README.md
xtopdf is at: https://bitbucket.org/vasudevram/xtopdf
and info about xtopdf is at: http://slides.com/vasudevram/xtopdf
or at: http://slid.es/vasudevram/xtopdf
"""

# imports
from pydal import DAL, Field
from PDFWriter import PDFWriter

SEP = 60

# create the database
db = DAL('sqlite://house_depot.db')

# define the table
db.define_table('furniture',
                Field('id'),
                Field('name'),
                Field('quantity'),
                Field('unit_price'))

# insert rows into the table
items = (
    (1, 'chair', 40, 50),
    (2, 'table', 10, 300),
    (3, 'cupboard', 20, 200),
    (4, 'bed', 30, 400),
)
for item in items:
    db.furniture.insert(id=item[0], name=item[1], quantity=item[2],
                        unit_price=item[3])
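# The rows that would feed the PDF report can be pulled back with an ordered
# select; the actual PDFWriter calls follow the xtopdf examples and are
# omitted here. Uses only the db and SEP defined above.
rows = db(db.furniture).select(orderby=db.furniture.id)
print('-' * SEP)
for row in rows:
    print('%4s %-10s %6s %8s' % (row.id, row.name, row.quantity, row.unit_price))
print('-' * SEP)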
def testRun(self):
    db = DAL(None)
    db.define_table('no_table', Field('aa'))
    self.assertIsInstance(db.no_table.aa, Field)
    self.assertIsInstance(db.no_table['aa'], Field)
    db.close()