def test_DALcache(self):
    """RAM- and disk-cached selects of the same query must all yield
    identical CSV output, with and without ``cacheable=True``."""
    s = Storage({'application': 'admin', 'folder': 'applications/admin'})
    cache = Cache(s)
    db = DAL(check_reserved=['all'])
    db.define_table('t_a', Field('f_a'))
    db.t_a.insert(f_a='test')
    db.commit()

    def fetch(**options):
        # Same query every time; only the caching options vary.
        return db(db.t_a.id > 0).select(**options)

    a = fetch(cache=(cache.ram, 60), cacheable=True)
    b = fetch(cache=(cache.ram, 60), cacheable=True)
    self.assertEqual(a.as_csv(), b.as_csv())

    c = fetch(cache=(cache.disk, 60), cacheable=True)
    d = fetch(cache=(cache.disk, 60), cacheable=True)
    self.assertEqual(c.as_csv(), d.as_csv())
    self.assertEqual(a.as_csv(), c.as_csv())
    self.assertEqual(b.as_csv(), d.as_csv())

    e = fetch(cache=(cache.disk, 60))
    f = fetch(cache=(cache.disk, 60))
    self.assertEqual(e.as_csv(), f.as_csv())
    self.assertEqual(a.as_csv(), f.as_csv())

    g = fetch(cache=(cache.ram, 60))
    h = fetch(cache=(cache.ram, 60))
    self.assertEqual(g.as_csv(), h.as_csv())
    self.assertEqual(a.as_csv(), h.as_csv())

    db.t_a.drop()
    db.close()
class BaseTestScheduler(unittest.TestCase):
    """Shared scheduler-test fixture: a throwaway sqlite DAL (dummy2.db)
    plus a minimal fake web2py request/translator environment."""

    def setUp(self):
        """Wipe any leftover files, fake the request environment, open a fresh DAL."""
        self.db = None
        self.cleanfolder()
        from gluon.globals import current
        s = Storage({'application': 'welcome',
                     'folder': 'applications/welcome',
                     'controller': 'default'})
        current.request = s
        T = TranslatorFactory('', 'en')
        current.T = T
        self.db = DAL('sqlite://dummy2.db', check_reserved=['all'])

    def cleanfolder(self):
        """Close the DAL (if open) and remove dummy2.db and scheduler .table files."""
        if self.db:
            self.db.close()
        try:
            os.unlink('dummy2.db')
        except OSError:
            # The database file may simply not exist yet; this is best-effort
            # cleanup, but we no longer swallow unrelated exceptions
            # (KeyboardInterrupt, SystemExit) the way a bare except did.
            pass
        for a in glob.glob('*_scheduler*.table'):
            os.unlink(a)

    def tearDown(self):
        self.cleanfolder()
        try:
            # Subclasses may provide extra teardown work.
            self.inner_teardown()
        except Exception:
            # Best effort: a missing or failing inner_teardown must not
            # fail the test itself (narrowed from a bare except).
            pass
class BaseTestScheduler(unittest.TestCase):
    """Base fixture: disposable sqlite database plus a faked web2py
    request and translator, torn down after every test."""

    def setUp(self):
        self.db = None
        self.cleanfolder()
        from gluon import current
        request_env = Storage({
            'application': 'welcome',
            'folder': 'applications/welcome',
            'controller': 'default',
        })
        current.request = request_env
        T = translator('', 'en')
        current.T = T
        self.db = DAL('sqlite://dummy2.db', check_reserved=['all'])

    def cleanfolder(self):
        # Close the open connection (if any) before deleting its files.
        if self.db:
            self.db.close()
        try:
            os.unlink('dummy2.db')
        except:
            pass
        for leftover in glob.glob('*_scheduler*.table'):
            os.unlink(leftover)

    def tearDown(self):
        self.cleanfolder()
        try:
            # Hook for subclasses that need additional teardown.
            self.inner_teardown()
        except:
            pass
def testRun(self):
    """A fresh DAL must come pre-wired with gluon's serializers and representers."""
    from gluon.serializers import custom_json, xml
    from gluon import sqlhtml
    db = DAL(check_reserved=['all'])
    # (actual, expected) pairs: the hooks gluon installs on every DAL.
    wiring = [
        (db.serializers['json'], custom_json),
        (db.serializers['xml'], xml),
        (db.representers['rows_render'], sqlhtml.represent),
        (db.representers['rows_xml'], sqlhtml.SQLTABLE),
    ]
    for actual, expected in wiring:
        self.assertEqual(actual, expected)
    db.close()
def testRun(self):
    """Check the serializer/representer hooks installed on a new DAL."""
    from gluon.serializers import custom_json, xml
    from gluon import sqlhtml
    db = DAL(check_reserved=['all'])
    serializers = db.serializers
    representers = db.representers
    self.assertEqual(serializers['json'], custom_json)
    self.assertEqual(serializers['xml'], xml)
    self.assertEqual(representers['rows_render'], sqlhtml.represent)
    self.assertEqual(representers['rows_xml'], sqlhtml.SQLTABLE)
    db.close()
def from5(db):
    """One-off migration: copy accounts/addresses for the
    'to phone +7 RUBs' deal from the old ipay5 database into `db`.

    NOTE(review): the bare ``return`` below makes the entire body dead
    code -- the migration is currently disabled.  The body uses Python 2
    print statements, so this file predates Python 3.
    """
    return
    # --- everything below is unreachable while the early return is in place ---
    db_old = DAL(
        "sqlite://storage.sqlite",
        #pool_size=1,
        #check_reserved=['all'],
        # this keyword builds the model on the fly on load
        auto_import=True,
        folder="../../ipay5-m/databases")
    # NOTE(review): db_new is assigned but never used below -- presumably a
    # leftover from an earlier version of the migration; verify before removal.
    db_new = DAL(
        "sqlite://storage.sqlite",
        #pool_size=1,
        #check_reserved=['all'],
        # this keyword builds the model on the fly on load
        auto_import=True,
        folder="../../ipay6-a/databases")
    import json  # NOTE(review): imported but not used in this function
    print '\nimport 5 to 6'
    # Iterate all currency rows; results are discarded (possibly forces the
    # xcurrs table model to load -- TODO confirm).
    for xcurr in db(db.xcurrs).select():
        pass
    deal = db(db.deals.name == 'to phone +7 RUBs').select().first()
    if not deal:
        return 'not deal "to phone +7 RUBs"'
    print "for deal:", deal
    for rec in db_old(db_old.to_phone).select():
        # find the records that have not been imported yet
        acc = db((db.deal_accs.deal_id == deal.id) & (db.deal_accs.acc == rec.phone)).select().first()
        #print acc
        #continue
        if acc:
            acc_id = acc.id
        else:
            # No account yet for this phone: create one under the deal.
            print 'insert deal_acc', rec.phone
            acc_id = db.deal_accs.insert(deal_id=deal.id, acc=rec.phone)
        # Skip addresses that were already imported for this account/currency.
        acc_addr = db(
            (db.deal_acc_addrs.deal_acc_id == acc_id) &
            (db.deal_acc_addrs.addr == rec.wallet) &
            (db.deal_acc_addrs.xcurr_id == rec.xcurr_id)).select().first()
        if acc_addr:
            continue
        print 'insert acc_addr ', rec.xcurr_id, rec.wallet
        db.deal_acc_addrs.insert(deal_acc_id=acc_id, addr=rec.wallet,
                                 xcurr_id=rec.xcurr_id,
                                 incomed=rec.unspent, converted=rec.unspent)
    ####### now the payments
    #for p_in in db_old(db_old.payments).select():
    db_old.close()
def testSerialization(self):
    """A cacheable Rows object must survive a pickle round-trip and
    keep its db binding."""
    from gluon._compat import pickle
    db = DAL(check_reserved=['all'])
    db.define_table('t_a', Field('f_a'))
    db.t_a.insert(f_a='test')
    rows = db(db.t_a.id > 0).select(cacheable=True)
    restored = pickle.loads(pickle.dumps(rows))
    self.assertEqual(rows.db, restored.db)
    db.t_a.drop()
    db.close()
def testSerialization(self):
    """Pickling and unpickling a cacheable select keeps the db reference."""
    import pickle
    db = DAL(check_reserved=['all'])
    db.define_table('t_a', Field('f_a'))
    db.t_a.insert(f_a='test')
    original = db(db.t_a.id > 0).select(cacheable=True)
    payload = pickle.dumps(original)
    roundtripped = pickle.loads(payload)
    self.assertEqual(original.db, roundtripped.db)
    db.t_a.drop()
    db.close()
class PlacesTableImporter(TableImporter.TableImporter):
    """Imports metadata related to places into a web2py DAL database."""

    # Seed rows for populate_table.  Each dict is passed verbatim as the
    # keyword arguments of one insert; omitted id columns fall back to the
    # table defaults (0) declared in create_table.
    _SEED_PLACES = [
        dict(placeID=1, countryID=1, stateID=2, districtID=3, localityID=4, name='Pune'),
        dict(placeID=2, countryID=11, stateID=12, districtID=3, localityID=4, name='Hyderabad'),
        dict(placeID=3, countryID=21, stateID=42, districtID=53, localityID=64, name='Mumbai'),
        dict(placeID=4, countryID=1, stateID=22, districtID=23, localityID=41, name='Junagad'),
        dict(placeID=5, countryID=111, stateID=2, districtID=3, localityID=49, name='Lohagad'),
        dict(placeID=6, countryID=18, stateID=42, districtID=13, localityID=4, name='Raipur'),
        dict(placeID=7, countryID=14, stateID=24, districtID=3, localityID=444, name='Rampur'),
        dict(placeID=8, countryID=1, stateID=2, districtID=3, name='Nagpur'),
        dict(placeID=9, countryID=1, stateID=2, name='Maharashtra'),
        dict(placeID=10, countryID=1, name='India'),
    ]

    def __init__(self, settings):
        """Read db connection settings from the config and open a DAL.

        :param settings: forwarded to TableImporter.TableImporter, which is
            expected to populate ``self.config`` -- TODO confirm.
        """
        super(PlacesTableImporter, self).__init__(settings)
        self.db_type = self.config.get('db', 'db_type')
        self.db_folder = self.config.get('db', 'db_folder')
        self.db_file = self.config.get('db', 'db_file')
        self.web2py_loc = self.config.get('db', 'web2py_loc')
        print(self.db_type)
        print(self.db_folder)
        print(self.db_file)
        print(self.web2py_loc)
        # Make the web2py checkout importable so gluon.dal resolves.
        sys.path.append(self.web2py_loc)
        from gluon.dal import DAL
        # Establish the database connection.
        self.db = DAL(self.db_type + '://' + self.db_file,
                      folder=self.db_folder)

    def create_table(self):
        """Define the 'places' table (id columns default to 0)."""
        from gluon.dal import Field
        self.db.define_table('places',
                             Field('placeID', 'integer'),
                             Field('countryID', 'integer', default=0),
                             Field('stateID', 'integer', default=0),
                             Field('districtID', 'integer', default=0),
                             Field('localityID', 'integer', default=0),
                             Field('name', 'string'))

    def populate_table(self):
        """Insert the seed places, printing each new row id (same output
        as the original hand-unrolled insert calls)."""
        for row in self._SEED_PLACES:
            print(self.db.places.insert(**row))

    def __del__(self):
        # NOTE(review): committing/closing in __del__ depends on the garbage
        # collector running it; an explicit close() method would be more
        # reliable, but callers may rely on this behavior -- kept as-is.
        if self.db:
            self.db.commit()
            self.db.close()
            self.db = None
        gc.collect()