def testRun(self):
    """Exercise the legacy ``parsemap`` datetime parser against ISO-8601
    variants: fractional seconds, 'Z' suffix, and numeric UTC offsets."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    parse = db._adapter.parsemap['datetime']
    # (input string, expected microsecond, expected hour); hour None -> skip check
    cases = [
        ('2015-09-04t12:33:36.223245', 223245, 12),
        ('2015-09-04t12:33:36.223245Z', 223245, 12),
        ('2015-09-04t12:33:36.223245-2:0', 223245, 10),    # offset shifts the hour
        ('2015-09-04t12:33:36+1:0', 0, 13),
        ('2015-09-04t12:33:36.123', 123000, None),         # padded to microseconds
        ('2015-09-04t12:33:36.00123', 1230, None),
        ('2015-09-04t12:33:36.1234567890', 123456, None),  # truncated to 6 digits
    ]
    for text, microsecond, hour in cases:
        dt = parse(text, None)
        self.assertEqual(dt.microsecond, microsecond)
        if hour is not None:
            self.assertEqual(dt.hour, hour)
    db.close()
def testRun(self):
    """Check UNIQUE enforcement on reference and integer columns, for both
    normal and big-integer id flavours."""
    for ref_type, use_bigint in [("reference", False), ("big-reference", True)]:
        db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=use_bigint)
        if use_bigint and "big-id" not in db._adapter.types:
            # adapter has no big-id type: nothing to test for this flavour
            continue
        db.define_table("tt", Field("vv"))
        db.define_table(
            "ttt",
            Field("vv"),
            Field("tt_id", "%s tt" % ref_type, unique=True),
            Field("tt_uq", "integer", unique=True),
        )
        first = db.tt.insert(vv="pydal")
        second = db.tt.insert(vv="pydal")
        # a NULL tt_id never violates the unique constraint
        db.ttt.insert(vv="pydal", tt_uq=1)
        # first insert with a real tt_id is fine ...
        db.ttt.insert(tt_id=first, tt_uq=2)
        # ... but repeating either unique value must fail
        self.assertRaises(Exception, db.ttt.insert, tt_id=first, tt_uq=3)
        self.assertRaises(Exception, db.ttt.insert, tt_id=second, tt_uq=2)
        # mandatory for backends such as PG to close the aborted transaction
        db.commit()
        drop(db.ttt)
        drop(db.tt)
        db.close()
def connect(self):
    """Open the pooled DAL connection and define tables, unless a
    connection is already open (in which case an error is printed)."""
    if self.db:
        print("Error: db already open")
        return
    # , migrate_enabled=False, migrate=False, lazy_tables=True
    self.db = DAL(self.uri, folder=self.folder, pool_size=5, lazy_tables=False)
    self.tables()
def _connect(self):
    """Open the DAL connection with every configured option, then invoke
    the optional table-definition callback."""
    option_names = (
        'pool_size', 'folder', 'db_codec', 'check_reserved', 'migrate',
        'fake_migrate', 'migrate_enabled', 'fake_migrate_all',
        'decode_credentials', 'driver_args', 'adapter_args', 'attempts',
        'auto_import', 'bigint_id', 'debug', 'lazy_tables', 'db_uid',
        'do_connect', 'after_connection', 'tables', 'ignore_field_case',
        'entity_quoting', 'table_hash',
    )
    # forward each configured attribute as the DAL keyword of the same name
    options = {name: getattr(self, name) for name in option_names}
    self.db = DAL(self.uri, **options)
    if self.define_tables:
        # tables definitions
        self.define_tables(self.db)
def setUp(self):
    """Build an in-memory schema (colors, things, relations) and wrap it in
    a RestAPI instance with the allow-all policy."""
    db = DAL("sqlite:memory")
    db.define_table("color", Field("name", requires=IS_NOT_IN_DB(db, "color.name")))
    for color in ("red", "green", "blue"):
        db.color.insert(name=color)
    db.define_table("thing", Field("name"), Field("color", "reference color"))
    for name, color in [("Chair", 1), ("Chair", 2), ("Table", 1),
                        ("Table", 3), ("Lamp", 2)]:
        db.thing.insert(name=name, color=color)
    db.define_table(
        "rel",
        Field("a", "reference thing"),
        Field("desc"),
        Field("b", "reference thing"),
    )
    for a, b, desc in [(1, 2, "is like"), (3, 4, "is like"), (1, 3, "is under"),
                       (2, 4, "is under"), (5, 4, "is above")]:
        db.rel.insert(a=a, b=b, desc=desc)
    self.db = db
    self.api = RestAPI(db, ALLOW_ALL_POLICY)
def __new__(self):
    """Open (and migrate) the agenda SQLite database and return the DAL
    handle with the ``users`` table defined."""
    db = DAL('sqlite://Agenda.db', migrate=True)
    db.define_table(
        'users',
        Field('name', 'string', unique=True),
        Field('age', 'integer'),
        Field('weight', 'string'),
        Field('cell', 'string'),
        Field('id', type='id'),  # explicit id column
    )
    return db
def __new__(self):
    """Open (and migrate) the leak-manager SQLite database and define the
    leaks table."""
    # alternative backend: 'mongodb://localhost/leakManager'
    db = DAL('sqlite://leakManager.db', migrate=True)
    db.define_table('leaks',
                    Field('username'),
                    Field('email'),
                    Field('password'),
                    Field('database'))
    return db
def model():
    """Create the pooled SQLite connection under ./database, define the
    application tables via ``table``, and return the handle."""
    db = DAL('sqlite://storage.sqlite', folder="./database", pool_size=1)
    table(db)
    return db
def connect(path):
    """Open the application SQLite database, define its tables, and load
    the ``badass.cfg`` configuration.

    Args:
        path: folder holding both the DB file and ``badass.cfg``.

    Returns:
        tuple: (db, cfg, User, roles) where ``User`` is a ``BaseUser``
        subclass bound to the database.
    """
    # sqlite DB (the literal had a needless f-prefix with no placeholders)
    db = DAL("sqlite://badass.sqlite", folder=path)
    db.define_table("users",
                    Field("email", "string", unique=True),
                    Field("firstname", "string"),
                    Field("lastname", "string"),
                    Field("password", "string"),
                    Field("salt", "string"),
                    Field("group", "string"),
                    Field("roles", "list:string"),
                    Field("studentid", "string"),
                    Field("activated", "boolean"))
    db.define_table("submissions",
                    Field("user", "reference users"),
                    Field("date", "datetime"),
                    Field("course", "string"),
                    Field("exercise", "string"),
                    Field("path", "string"))
    # configuration
    config = configparser.ConfigParser()
    config.read(f"{path}/badass.cfg")
    cfg = cfgtree("CODES", "GROUPS")
    for sec in config:
        for key, val in config[sec].items():
            try:
                cfg[sec][key] = ast.literal_eval(val or "None")
            except Exception:
                # value is not a Python literal: keep the raw string.
                # (was a bare ``except:`` which also swallowed SystemExit /
                # KeyboardInterrupt)
                cfg[sec][key] = val

    class User(BaseUser):
        pass

    User.db = db
    return db, cfg, User, Roles(cfg)
def __init__(self, uri_db):
    """Connect to *uri_db* (migrations disabled) and define the wells,
    clients and projects tables, all keyed on an external ``uuid``."""
    self.log.info("creating instance of DBClient: {0}".format(uri_db))
    self.db = DAL(uri_db, migrate_enabled=False)

    def make_table(name, columns):
        # every table keeps uuid as its (externally supplied) primary key
        return self.db.define_table(name, *[Field(c) for c in columns],
                                    primarykey=['uuid'])

    self.wells = make_table('wells', ['uuid', 'project_uuid', 'well_name',
                                      'uwi', 'created_at', 'modified_at'])
    self.clients = make_table('clients', ['uuid', 'company_name',
                                          'created_at', 'modified_at'])
    self.projects = make_table('projects', ['uuid', 'client_uuid', 'name',
                                            'created_at', 'modified_at',
                                            'basin', 'shapefile'])
def connect(self, *args, **kwargs):
    """Open a DAL connection (defaulting to the test URI with full
    reserved-word checking), remember it for teardown, and return it."""
    if not args:
        kwargs.setdefault('uri', DEFAULT_URI)
        kwargs.setdefault('check_reserved', ['all'])
    db = DAL(*args, **kwargs)
    self._connections.append(db)
    return db
def setUp(self):
    """Populate an in-memory database (colors, things, relations) and wrap
    it in a DBAPI instance using the allow-all policy."""
    db = DAL('sqlite:memory')
    db.define_table('color', Field('name', requires=IS_NOT_IN_DB(db, 'color.name')))
    for color_name in ('red', 'green', 'blue'):
        db.color.insert(name=color_name)
    db.define_table('thing', Field('name'), Field('color', 'reference color'))
    things = (('Chair', 1), ('Chair', 2), ('Table', 1), ('Table', 3), ('Lamp', 2))
    for thing_name, color_id in things:
        db.thing.insert(name=thing_name, color=color_id)
    db.define_table('rel', Field('a', 'reference thing'), Field('desc'),
                    Field('b', 'reference thing'))
    relations = ((1, 2, 'is like'), (3, 4, 'is like'), (1, 3, 'is under'),
                 (2, 4, 'is under'), (5, 4, 'is above'))
    for a_id, b_id, rel_desc in relations:
        db.rel.insert(a=a_id, b=b_id, desc=rel_desc)
    self.db = db
    self.api = DBAPI(db, ALLOW_ALL_POLICY)
def testRun(self):
    """Check pydal's own ISO-8601 datetime parser: fractional seconds,
    'Z' suffix, numeric offsets, and microsecond padding/truncation."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    #: skip for adapters that use drivers for datetime parsing
    if db._adapter.parser.registered.get('datetime') is None:
        return

    def parse(value):
        return db._adapter.parser.parse(value, 'datetime', 'datetime')

    # (input string, expected microsecond, expected hour); hour None -> skip
    expectations = [
        ('2015-09-04t12:33:36.223245', 223245, 12),
        ('2015-09-04t12:33:36.223245Z', 223245, 12),
        ('2015-09-04t12:33:36.223245-2:0', 223245, 10),
        ('2015-09-04t12:33:36+1:0', 0, 13),
        ('2015-09-04t12:33:36.123', 123000, None),
        ('2015-09-04t12:33:36.00123', 1230, None),
        ('2015-09-04t12:33:36.1234567890', 123456, None),
    ]
    for text, microsecond, hour in expectations:
        dt = parse(text)
        self.assertEqual(dt.microsecond, microsecond)
        if hour is not None:
            self.assertEqual(dt.hour, hour)
    db.close()
def __init__(self):
    """Open the credit-card accounts database and set up logging."""
    self.db = DAL('sqlite://ccaccounts.db')
    fields = [Field('name')]
    # the remaining columns are all integers
    fields += [Field(column, 'integer')
               for column in ('account_number', 'balance', 'balance_limit')]
    self.db.define_table('accounts', *fields)
    self.log = Logger()
def retrieve_dal_connection(db_host, db_name, db_user, db_password):
    """Create a pyDAL connection or retrieve it from the connection pool.

    Args:
        db_host (str): Database hostname and port, ex.: "localhost:3306"
        db_name (str): Database schema name
        db_user (str): Database user
        db_password (str): Database password

    Returns:
        DAL: pyDAL connection object
    """
    uri = "mysql://{0}:{1}@{2}/{3}".format(db_user, db_password, db_host, db_name)
    options = dict(
        pool_size=10,
        folder='./',
        migrate=True,
        fake_migrate=True,
        fake_migrate_all=True,
        check_reserved=['all'],
    )
    db = DAL(uri, **options)
    define_tables(db)
    return db
def indexx():
    """Enumerate all databases on the local MSSQL server and register a DAL
    connection for each one in the global DBREG registry."""
    import os.path
    # NOTE(security): credentials are hard-coded in the URIs below; move
    # them to a config file or environment variable.
    # Windows path — doubled backslashes so '\m' / '\d' are not treated as
    # (invalid) escape sequences.
    outfile = (os.getcwd() + '\\applications\\' + request.application
               + '\\models\\database_registry.py.out')
    from pydal import DAL, Field
    db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
    results = db.executesql('select * from sys.databases')
    # the file is created/truncated but nothing is written to it (kept for
    # backward compatibility with the original behavior)
    with open(outfile, 'w') as f:
        for row in results:
            registerx(row.name,
                      'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)
    return DBREG
def find_course(self, course_name=None, class_id=None, instructor=None):
    """Look up a course by name (optionally narrowed by id or instructor)
    or by id alone.

    Args:
        course_name: substring matched against class_name (LIKE).
        class_id: exact class id to narrow/select by.
        instructor: exact instructor to narrow by.

    Returns:
        The first matching row, or None when nothing matches.
    """
    db = DAL('sqlite://courses.db', folder='dbs')
    db.define_table('courses',
                    Field('class_id', type='integer'),
                    Field('class_name'),
                    Field('date_time'),
                    Field('descriptive_link'),
                    Field('enrolled'),
                    Field('instructor'),
                    Field('link_sources'),
                    Field('location'),
                    Field('status'))
    if course_name is not None:
        name_query = db.courses.class_name.like('%' + course_name + '%')
        rows = db(name_query).select()
        # BUG FIX: the original combined pydal Query objects with the Python
        # ``and`` operator, which evaluates to the right-hand query only and
        # silently dropped the name filter; ``&`` ANDs the conditions.
        if len(rows) > 0 and class_id is not None:
            return db(name_query
                      & (db.courses.class_id == class_id)).select().first()
        elif len(rows) > 0 and instructor is not None:
            return db(name_query
                      & (db.courses.instructor == instructor)).select().first()
        else:
            return rows.first()
    elif class_id is not None:
        rows = db(db.courses.class_id == class_id).select()
        return rows.first()
    else:
        return None
def update_database(self):
    """Fetch the full course list from the local API and load it into the
    courses table, closing the connection no matter what."""
    db = DAL('sqlite://courses.db', folder='dbs')
    try:
        response = requests.get(
            'http://localhost:5000/api/v1.0/courses/all/2000')
        response.raise_for_status()
        jsonResponse = response.json()
        columns = ('class_id', 'class_name', 'date_time', 'descriptive_link',
                   'enrolled', 'instructor', 'link_sources', 'location',
                   'status')
        db.define_table('courses',
                        Field('class_id', type='integer'),
                        *[Field(name) for name in columns[1:]])
        for entry in jsonResponse:
            db.courses.insert(**{name: entry[name] for name in columns})
        rows = db().select(db.courses.ALL)
    finally:
        if db:
            db.close()
def __new__(self):
    """Open the leak-manager database and define the leaks and counter
    tables."""
    # Mongo ('mongodb://localhost/leakManager') was dropped: the counter
    # scheme needs predictable entry ids; sqlite copes fine even with more
    # than 1M leaks.
    db = DAL('sqlite://leakManager.db', migrate=True)
    db.define_table('leaks',
                    Field('username'),
                    Field('email'),
                    Field('password'),
                    Field('database'))
    db.define_table('counter', Field('total', 'integer'))
    return db
def __init__(self):
    """Bind to database/banco.db, enabling migrations only on first run
    (i.e. when the database file does not exist yet)."""
    db_file = os.path.abspath('database/banco.db')
    # a pre-existing file means the schema was already migrated
    self.migrate = not os.path.exists(db_file)
    self.DATABASE_TYPE = 'sqlite://'
    self.DATABASE = self.DATABASE_TYPE + db_file
    self.db = DAL(self.DATABASE, migrate=self.migrate)
def __new__(self):
    """Open the HomeProxy database and define the request-log and
    blacklist tables."""
    # alternative backend: 'mongodb://localhost/HomeProxy'
    db = DAL('sqlite://HomeProxy.db', migrate=True)
    db.define_table('logs',
                    Field('date'),
                    Field('url'),
                    Field('client_ip'),
                    Field('useragent'),
                    Field('status'),
                    Field('host'))
    db.define_table('blacklists', Field('url'), Field('host'))
    return db
def __new__(self):
    """Open the encrypt-and-share SQLite database and define the files
    table."""
    # alternative backend: 'mysql://user:pass@127.0.0.1/encryptandshare'
    db = DAL('sqlite://encryptandshare.db', migrate=True)
    db.define_table(
        'files',
        Field('fname'),
        Field('limit'),
        Field('status', 'boolean'),
        Field('fid'),
    )
    return db
def model():
    """Open (or create) todo.db and seed fifteen sample todos on first
    use."""
    db = DAL('sqlite://todo.db', pool_size=1, folder='./')  # ,migrate=False)
    db.define_table('todos',
                    Field('title'),
                    Field('isCompleted', 'boolean', default=False))
    if not db(db.todos).count():
        # seed items titled "الموعد 1" .. "الموعد 15"
        for number in range(1, 16):
            db.todos.insert(title='الموعد ' + str(number))
        db.commit()
    return db
def __new__(self):
    """Open db/storage.db and define the users and leaks tables."""
    uri = 'sqlite://{0}'.format(
        os.path.abspath(os.path.join('db', 'storage.db')))
    db = DAL(uri)
    db.define_table('users',
                    Field('user_service_id', type='integer'),
                    Field('email'))
    db.define_table('leaks',
                    Field('leaksource'),
                    Field('user_id', 'reference users'))
    return db
def __new__(self):
    """Open data/storage.db, migrating only when the file is new, and
    define the servers table."""
    db_file = os.path.abspath(os.path.join('data', 'storage.db'))
    # an existing file means the schema is already in place
    db = DAL('sqlite://{0}'.format(db_file),
             migrate=not os.path.exists(db_file))
    db.define_table('servers', Field('url'))
    return db
def __init__(self):
    """Open the trend-date store and ensure the trenddates table exists."""
    self.db = DAL('sqlite://storage.db', folder=path.join("../database"))
    try:
        self.db.define_table('trenddates',
                             Field('trend', 'text'),
                             Field('date', 'date'))
    except Exception:
        # table presumably already defined; was a bare ``except:`` that
        # also swallowed KeyboardInterrupt/SystemExit
        print("Trenddates exists...")
    if len(self.db(self.db.trenddates).select().as_list()) > 0:
        print("It has entries, somehow.")
def init_db(path='sqlite://storage.db'):
    '''Connect to the database at *path*, build the tables, commit, and
    publish the handle through the module-level ``db``.'''
    global db
    db = create_tables(DAL(path))
    # NOTE(review): logged at ERROR level although it looks informational
    logger.error('*******' + str(db))
    db.commit()
    return db
def registerx(name, uri):
    """Register a pooled, migration-free DAL connection under *name* in
    DBREG; on failure the exception object is stored instead so callers
    can inspect what went wrong. Already-registered names are untouched.
    """
    if name not in DBREG:  # idiom fix: was ``not name in DBREG``
        try:
            DBREG[name] = DAL(uri,
                              pool_size=1,
                              migrate_enabled=False,
                              check_reserved=['all'])
        except Exception as e:
            DBREG[name] = e
def testRun(self):
    """Round-trip a non-ASCII (Greek) string through insert and select."""
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table('tt', Field('vv'))
    text = 'ἀγοραζε'
    row_id = db.tt.insert(vv=text)
    fetched = db(db.tt.id == row_id).select().first()
    self.assertEqual(fetched.vv, text)
    db.commit()
    drop(db.tt)
    db.close()
def model():
    """Obtain a DAL handle for the DBSTRING database, reusing an active
    pooled connection when one exists, and (re)define the tables."""
    try:
        # raises KeyError (uncaught here) when DBSTRING is unset — TODO confirm intended
        dbinfo = os.environ['DBSTRING']
        # NOTE(review): the attribute is not called — if check_active_connection
        # is a method rather than a property this condition is always truthy;
        # verify against the connection module.
        if connection.ConnectionPool().check_active_connection:
            db = connection.ConnectionPool().reconnect()
            # the finally block below still runs (rebinding the local ``db``)
            # before this value is returned; table(db) is skipped on this path
            return db
        else:
            connection.ConnectionPool().close_all_instances(action='commit')
            db = DAL(dbinfo, folder='./database', pool_size=1)
    except FileNotFoundError:
        # DAL folder missing: create it; the finally block retries the connect
        os.mkdir('database')
    except pymysql.err.InternalError:
        # broken MySQL session: flush pooled instances and reconnect with migrate
        connection.ConnectionPool().close_all_instances(action='commit')
        db = DAL(dbinfo, folder='./database', pool_size=1, migrate=True)
    finally:
        # always ends with a fresh non-migrating connection bound to ``db``
        connection.ConnectionPool().close_all_instances(action='commit')
        db = DAL(dbinfo, folder='./database', pool_size=1, migrate=False)
    table(db)
    return db