def testRun(self):
    """Exercise validate_and_update_or_insert: insert, update, and a
    rejected write on a field validated by IS_INT_IN_RANGE(1, 5).
    """
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    t1 = db.define_table(
        't1', Field('int_level', 'integer', requires=IS_INT_IN_RANGE(1, 5)))

    # Nothing matches yet, so this acts as an insert.
    inserted = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=1)
    # The row inserted above matches, so this acts as an update (1 -> 2).
    updated = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=2)
    # No row matches any more; the attempted insert of 6 must fail
    # validation (6 is outside the allowed range).
    rejected = t1.validate_and_update_or_insert((t1.int_level == 1), int_level=6)

    self.assertTrue(inserted.id != None)
    self.assertTrue(updated.id != None)
    self.assertTrue(rejected.id == None and len(rejected.errors.keys()) != 0)

    # Exactly one row survives, holding the updated value.
    self.assertEqual(len(db(t1).select()), 1)
    self.assertEqual(db(t1).count(), 1)
    self.assertEqual(db(t1.int_level == 1).count(), 0)
    self.assertEqual(db(t1.int_level == 6).count(), 0)
    self.assertEqual(db(t1.int_level == 2).count(), 1)
    drop(db.t1)
    return
def create_table(self):
    """Connect and define the ``products`` and ``prices`` tables.

    ``products`` stores one row per scraped product; ``prices`` stores
    timestamped price observations referencing a product.

    Bug fixed: ``default=datetime.datetime.now()`` called the function
    once, at table-definition time, so every row would have received the
    same frozen timestamp.  The callable itself is now passed so pydal
    evaluates it per insert.
    """
    self.connect()
    self.db_instance.define_table(
        'products',
        Field('name', required=True, default=""),
        Field('url', required=True, default="", unique=True),
        Field('shop', required=True, default=""))
    self.db_instance.define_table(
        'prices',
        Field('product_id', "reference products", required=True, default="",
              ondelete='CASCADE'),
        Field('price', type="double", required=True, default=""),
        # Callable default: evaluated at insert time, not definition time.
        Field('time', type="datetime", required=True,
              default=datetime.datetime.now))
def define_table(db):
    """Register the ``house_res`` table: title, content, publish time,
    and the publishing user.
    """
    columns = [
        Field('res_title'),
        Field('res_content'),
        Field('pub_time', type='datetime'),
        Field('owner_id', type="reference auth_user"),
    ]
    db.define_table("house_res", *columns)
import os import shutil import youtube_dl from flask import Flask, render_template, redirect, request, escape from pydal import DAL, Field app = Flask(__name__) app.config["SECRET_KEY"] = "PUT_YOUR_OWN_SECURE_KEY_HERE" app.config["SONGS_PATH"] = "static/music/" db = DAL("sqlite://data.db") db.define_table("songs", Field("name"), Field("video_id")) @app.route("/") def index(): """ Renders viewport for main player home page. """ return render_template("index.html", songs_path=app.config["SONGS_PATH"], songs=get_songs()) @app.route("/add", methods=["GET", "POST"]) def add(): """ Renders viewport for download page. """ if request.method == "GET": return render_template("add.html", space=get_free_space()) elif request.method == "POST": results = download(request.form["link"])
"""
Chapter 8 / pydal_example / gen_sql.py
"""
from pydal import DAL, Field

if __name__ == '__main__':
    db = DAL("mysql://*****:*****@127.0.0.1/mydb")
    db.define_table('student', Field('name'), Field('age'))

    # Prefixing the statement method with an underscore makes pydal
    # return the generated SQL text instead of executing it.
    generated = (
        db.student._insert(name="小云", age="20"),
        db(db.student.id == 2)._update(name="小明", age="10"),
        db(db.student.id > 0)._select(),
        db(db.student.id == 1)._delete(),
    )
    for sql in generated:
        print(sql)
def __init__(self, uri_db):
    """Connect to *uri_db* (migrations disabled) and bind the keyed
    ``wells``, ``clients`` and ``projects`` tables, each using ``uuid``
    as its primary key.
    """
    self.log.info("creating instance of DBClient: {0}".format(uri_db))
    self.db = DAL(uri_db, migrate_enabled=False)

    def audit_fields():
        # Fresh Field objects for every table: pydal binds a Field
        # instance to a single table, so they cannot be shared.
        return [Field('created_at'), Field('modified_at')]

    self.wells = self.db.define_table(
        'wells',
        Field('uuid'),
        Field('project_uuid'),
        Field('well_name'),
        Field('uwi'),
        *audit_fields(),
        primarykey=['uuid'])
    self.clients = self.db.define_table(
        'clients',
        Field('uuid'),
        Field('company_name'),
        *audit_fields(),
        primarykey=['uuid'])
    self.projects = self.db.define_table(
        'projects',
        Field('uuid'),
        Field('client_uuid'),
        Field('name'),
        *audit_fields(),
        Field('basin'),
        Field('shapefile'),
        primarykey=['uuid'])
def testRun(self):
    """Check that smart_query translates keyword strings into the
    equivalent DAL query expressions.

    Cases disabled in the original remain untested here: the extra field
    types (decimal/date/time/datetime/reference/list:*), the ``like``
    operator, ``<>``/``=<``/``=>`` over integers, and belongs /
    notbelongs (https://github.com/web2py/pydal/issues/161).
    """
    db = DAL(DEFAULT_URI, check_reserved=['all'])
    db.define_table(
        'a_table',
        Field('string_field', 'string'),
        Field('text_field', 'text'),
        Field('boolean_field', 'boolean'),
        Field('integer_field', 'integer'),
        Field('double_field', 'double'),
    )
    fields = [
        db.a_table.id,
        db.a_table.string_field,
        db.a_table.text_field,
        db.a_table.boolean_field,
        db.a_table.integer_field,
        db.a_table.double_field,
    ]

    # Boolean field.
    self.assertEqual(
        smart_query(fields, 'a_table.boolean_field = True'),
        (db.a_table.boolean_field == True))

    if not IS_GAE:
        s = db.a_table.string_field
        n = db.a_table.integer_field
        # (keyword string, expected query) pairs, in the original order.
        cases = [
            # String operators.
            ('a_table.string_field starts with "pydal"', s.startswith('pydal')),
            ('a_table.string_field ends with "Rocks!!"', s.endswith('Rocks!!')),
            ('a_table.string_field contains "Rocks"', s.contains('Rocks')),
            # Integer comparisons: every textual spelling of each operator.
            ('a_table.integer_field = 1', n == 1),
            ('a_table.integer_field == 1', n == 1),
            ('a_table.integer_field is 1', n == 1),
            ('a_table.integer_field equal 1', n == 1),
            ('a_table.integer_field equals 1', n == 1),
            ('a_table.integer_field equal to 1', n == 1),
            ('a_table.integer_field not equal 1', n != 1),
            ('a_table.integer_field not equal to 1', n != 1),
            ('a_table.integer_field < 1', n < 1),
            ('a_table.integer_field less than 1', n < 1),
            ('a_table.integer_field <= 1', n <= 1),
            ('a_table.integer_field less or equal 1', n <= 1),
            ('a_table.integer_field less or equal than 1', n <= 1),
            ('a_table.integer_field equal or less 1', n <= 1),
            ('a_table.integer_field equal or less than 1', n <= 1),
            ('a_table.integer_field > 1', n > 1),
            ('a_table.integer_field greater than 1', n > 1),
            ('a_table.integer_field >= 1', n >= 1),
            ('a_table.integer_field greater or equal 1', n >= 1),
            ('a_table.integer_field greater or equal than 1', n >= 1),
            ('a_table.integer_field equal or greater 1', n >= 1),
            ('a_table.integer_field equal or greater than 1', n >= 1),
        ]
        for keywords, expected in cases:
            self.assertEqual(smart_query(fields, keywords), expected)

    # Cleanup.
    drop(db.a_table)
def preloop(self):
    """Open (or create) ``grades.db`` and define the gradebook schema.

    Table order matters: reference fields require the referenced table
    to have been defined already.
    """
    db = DAL('sqlite://grades.db')
    schema = [
        ('students', [
            Field('fname', notnull=True),
            Field('lname', notnull=True),
            Field('dob', 'date', notnull=True),
            Field('sex_is_f', 'boolean', notnull=True),
        ]),
        ('courses', [
            Field('course_id', notnull=True),
            Field('category', notnull=True),
            Field('description', notnull=True),
            Field('professor', notnull=True),
            Field('classroom', notnull=True),
            Field('long_descr', 'text'),
            Field('start_date', 'date', notnull=True),
            Field('end_date', 'date', notnull=True),
        ]),
        # Many-to-many link between students and courses.
        ('students_to_courses', [
            Field('student', 'reference students', notnull=True),
            Field('course', 'reference courses', notnull=True),
        ]),
        ('assignments', [
            Field('course', 'reference courses', notnull=True),
            Field('description', notnull=True),
            Field('max_points', 'integer', notnull=True),
            Field('due_date', 'date', notnull=True),
        ]),
        ('grades', [
            Field('student', 'reference students', notnull=True),
            Field('assignment', 'reference assignments', notnull=True),
            Field('grade', 'integer'),
        ]),
    ]
    for table_name, table_fields in schema:
        db.define_table(table_name, *table_fields)
    self.db = db
def table(db):
    """Define the music-app schema: user, genero, musica, preferidas, tocada.

    Bug fixed: ``default=datetime.datetime.now()`` evaluated the
    timestamp once when the table was defined, so every ``tocada`` row
    would have carried that same frozen time.  Passing the callable lets
    pydal evaluate it on each insert.
    """
    db.define_table("user",
                    Field("name", "string"),
                    Field("email", "string"),
                    Field("password", "password"))
    db.define_table("genero", Field("nome", "string"))
    db.define_table(
        "musica",
        Field("nome", "string"),
        Field("cantor", "string"),
        Field("album", "string"),
        Field("arquivo", "string"),
        Field("tempo", "string"),
        Field("genero", "list:reference genero"),
    )
    db.define_table(
        "preferidas",
        Field("musica", "reference musica"),
        Field("user", "reference user"),
    )
    db.define_table(
        "tocada",
        # Callable default: current time at insert, not at definition.
        Field("tocadaem", "datetime", default=datetime.datetime.now),
        Field("musica", "reference musica"),
        Field("user", "reference user"))
# -*- coding: UTF-8 -*-
""":mod:`db.py` manages the database.

Opens (creating if needed) the SQLite store under ``./db`` and defines
the ``investments``, ``portfolios``, ``symbols`` and ``checks`` tables.
"""
from pydal import DAL, Field
import os

# Ensure the DB directory exists; exist_ok makes this a no-op when it
# already does (the original used a conditional-expression mkdir).
os.makedirs('db', exist_ok=True)

# DB file location.  os.path.join is portable; the original hand-built
# the separator and used '\db', an invalid escape sequence on Windows.
folder = os.path.join(os.getcwd(), 'db')
db = DAL('sqlite://storage.sqlite', folder=folder, migrate_enabled=True)

db.define_table('investments',
                Field('date', 'datetime'),
                Field('name', 'string'),
                Field('vol', 'json'),
                Field('cmr', 'json'),
                Field('cmp', 'json'),
                Field('prc', 'json'))
db.define_table('portfolios',
                Field('date', 'datetime'),
                Field('name', 'string'),
                Field('vol', 'json'),
                Field('cmr', 'json'),
                Field('cmp', 'json'),
                Field('prc', 'json'))
db.define_table('symbols',
                Field('ticker', 'string'),
                Field('name', 'string'))
db.define_table('checks',
                Field('name', 'string'),
                Field('vol', 'json'),
                Field('cmr', 'json'),
                Field('cmp', 'json'),
                Field('date', 'string'))
V 2.1.1 ''' #from pydal import DAL and Field from pydal import DAL, Field #Root user db = DAL('mysql://*****:*****@localhost/HOME_storage') #use root#msql://user:password@localhost/database #HARDWHERE_home tables #---------Cloud_IP addres #stores cloud IP db.define_table('IP', Field('NevIP', 'string'), Field('CloudIP', 'string')) #----------alarm #motion sensors = 1/0 #panic button = 1/0 db.define_table('Alarm', Field('AlarmNAME', 'string'), Field('AlarmCND', 'integer'), Field('AlarmOOS', 'integer'), Field('AlarmSR', 'integer')) #-----------Lights #state from payload = 1/0 #feedback from switches = on/off db.define_table('Lights', Field('Name', 'string'), Field('onCode', 'string'), Field('offCode', 'string'), Field('state', 'string')) #--------Irrigation
def get_fields(columns: list) -> List[Field]:
    """Build one pydal ``Field`` per column name in *columns*."""
    # NOTE(review): _LOG_PREFIX_ is being used as the logging format
    # string here — confirm it contains a placeholder, otherwise the
    # message argument is dropped by the formatter.
    logging.info(
        _LOG_PREFIX_, f"Get table fields {columns}"
    )
    return list(map(Field, columns))
from pydal import DAL, Field db = DAL("mysql://*****:*****@localhost/sakila?set_encoding=utf8mb4", fake_migrate_all=True) db.define_table('film', Field('film_id', type='id'), Field('title'), Field('description'), Field('release_year'), Field('language_id'), Field('original_language_id'), Field('rental_duration'), Field('rental_rate'), Field('length'), Field('replacement_cost'), Field('rating'), Field('special_features'), Field('last_update') ) def select_star(): film_rows = db(db.film.film_id > 0).select() return film_rows def specific_select(): film_rows = db(db.film.film_id > 0).select(db.film.title, db.film.description) return film_rows if __name__ == '__main__':
def __init__(self, projectConfig, DALDatabase, i18nTranslator=None,
             logger_api=None):
    """Bind the auth/anti-forgery tables: client, captcha,
    google_captcha and csrf.

    Bug fixed: every ``default=datetime.now()`` called the function at
    table-definition time, freezing a single timestamp for all future
    rows.  The callable itself is now passed so each insert gets the
    current time.  The three token tables (captcha, google_captcha,
    csrf) share one shape and are defined in a loop.
    """
    self.DALDatabase = DALDatabase
    self.logger_api = logger_api
    self.DALDatabase.define_table(
        'client',
        Field('token', 'text'),
        Field('auth_user', 'reference auth_user',
              requires=IS_EMPTY_OR(
                  IS_IN_DB(self.DALDatabase, self.DALDatabase.auth_user))),
        Field('date_created', 'datetime', default=datetime.now,
              requires=IS_EMPTY_OR(IS_DATETIME())),
        Field('last_resign', 'datetime', default=datetime.now),
        Field('remember_me', 'boolean', default=False),
        Field('locked', 'boolean', default=False))
    # Identical structure for all three token tables.
    for token_table in ('captcha', 'google_captcha', 'csrf'):
        self.DALDatabase.define_table(
            token_table,
            Field('token', 'text'),
            Field('form_identify', 'string'),
            Field('user_agent', 'string'),
            Field('ip', 'string'),
            Field('date_created', 'datetime', default=datetime.now,
                  requires=IS_DATETIME()),
            Field('client', 'reference client',
                  requires=IS_EMPTY_OR(
                      IS_IN_DB(self.DALDatabase, self.DALDatabase.client))))
def make_job(db):
    """Define the ``jobs`` table: one row per build job triggered by a
    repository commit.
    """
    job_columns = [
        Field('repository', type='string', length=255),
        Field('ssh_url', type='string', length=512),
        Field('clone_url', type='string', length=512),
        Field('commit_id', type='string', length=255),
        Field('commit_msg', type='text'),
        Field('commit_url', type='string', length=512),
        Field('author', type='string', length=255),
        Field('committer', type='string', length=255),
        Field('datetime', type='datetime'),
        Field('logs', type='text'),
        # New jobs start out pending until a worker picks them up.
        Field('status', type='integer', default=JobStatus.PENDING),
    ]
    db.define_table('jobs', *job_columns)
## 'date','time','datetime','blob','upload', 'reference TABLENAME' ## There is an implicit 'id integer autoincrement' field ## Consult manual for more options, validators, etc. ## ## More API examples for controllers: ## ## >>> db.mytable.insert(myfield='value') ## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL) ## >>> for row in rows: print row.id, row.myfield ######################################################################### ## after defining tables, uncomment below to enable auditing # auth.enable_record_versioning(db) db.define_table('nets',\ Field('name', 'string', unique=True),\ Field('spacegroup', 'string'),\ Field('spacegroup_number', 'integer'),\ Field('cella','double'),\ Field('cellb','double'),\ Field('cellc','double'),\ Field('cellalpha','double'),\ Field('cellbeta','double'),\ Field('cellgamma','double'),\ Field('p', 'integer'),\ Field('q', 'integer'),\ Field('r', 'integer'),\ Field('s', 'integer'),\ Field('natoms', 'integer'),\ Field('thumb', 'string',default='default_thumb'),\ Field('xyzfile', 'upload', uploadfolder=path2files+'/static/nets/files'),\
def define_tables(uri, migrate_enabled=False, fake_migrate_all=False):
    """Connect to *uri* and define the wiki-analysis schema.

    Returns the connected DAL instance.  Bug fixed: the original never
    returned (or stored) ``db``, so the connection and schema it built
    were unreachable by callers.  The unused ``old_date`` local was
    removed.
    """
    db = DAL(uri,
             migrate_enabled=migrate_enabled,
             fake_migrate_all=fake_migrate_all,
             adapter_args=dict(migrator=InDBMigrator),
             pool_size=10)
    db.define_table(
        'page',
        Field('page_id', 'integer'),
        Field('page_title'),
        # We want to analyze this page only from this date onwards.
        # If None, then analyze from the beginning.
        Field('analysis_start_time', 'datetime'),
    )
    db.define_table(
        'revision',
        Field('rev_id', 'integer'),
        # In which block it is.
        Field('block_id', 'reference revision_block'),
        # user id on the wikipedia for this user.
        Field('user_id', 'integer'),
        Field('rev_date', 'datetime'),
        Field('rev_page', 'reference page'),
        # ID of a blob in GCS or S3 where the text can be found.
        Field('rev_text'),
        # ID of a blob in GCS or S3 where the reputation-annotated text
        # can be found.
        Field('annotated_text'),
        # Date at which the annotation has been computed, used to
        # estimate whether it should be recomputed.
        Field('annotation_date', 'datetime'),
    )
    db.define_table(
        'user',
        # The user_id is the same as on the wikipedia.
        Field('user_id', 'integer'),
        Field('user_name'),
        Field('user_real_name'),
        # Value of reputation
        Field('reputation', 'double'),
    )
    db.define_table(
        'revision_block',
        Field('page_id', 'reference page'),
        # These fields are used to know in which order to stitch blocks
        # together, and also, if there are holes between them.
        # Id of the last revision before the block
        Field('prev_revision_id', 'integer'),  # long??
        Field('next_revision_id', 'integer'),
        # To put blocks in chronological order by page.
        Field('initial_timestamp', 'datetime'),
        Field('storage_id'),  # ID for S3 / GCS
    )
    db.define_table(
        'user_reputation',
        Field('user_id', 'reference user'),
        Field('block_id', 'reference revision_block'),
        Field('amount', 'double'),
    )
    return db
from pydal import DAL, Field db = DAL("mysql://*****:*****@localhost/sakila?set_encoding=utf8mb4", fake_migrate_all=True) db.define_table('film', Field('title'), Field('description')) films = [ "BENEATH RUSH", "CADDYSHACK JEDI", "REUNION WITCHES", "VOLCANO TEXAS", "SWARM GOLD" ] def query_in_a_loop(): rows = [] for film in films: film_row = db(db.film.title == film).select( db.film.title, db.film.description).first() rows.append(film_row) return rows def query_belongs(): film_rows = db(db.film.title.belongs(films)).select( db.film.title, db.film.description).first() return film_rows if __name__ == '__main__': import timeit print(
from pydal import DAL, Field
from datetime import datetime

db = DAL('sqlite://download.db')

# One ask-price observation per market per poll.  Note the callable
# default: datetime.now is evaluated at insert time.
market = db.define_table(
    'market',
    Field('name'),
    Field('ask', type='double'),
    Field('timestamp', type='datetime', default=datetime.now))
# Indexes for the common lookups by time and by market name.
db.executesql('CREATE INDEX IF NOT EXISTS tidx ON market (timestamp);')
db.executesql('CREATE INDEX IF NOT EXISTS m_n_idx ON market (name);')

# Open purchases awaiting sale at the recorded selling price.
buy = db.define_table(
    'buy',
    Field('market'),
    Field('purchase_price', type='double'),
    Field('selling_price', type='double'),
    Field('amount', type='double'),
)
db.executesql('CREATE INDEX IF NOT EXISTS sidx ON buy (selling_price);')

# Candidate markets with an observed price move (old -> new, gain).
picks = db.define_table(
    'picks',
    Field('market'),
    Field('old_price', type='double'),
    Field('new_price', type='double'),
    Field('gain', type='double'),
    Field('timestamp', type='datetime', default=datetime.now))
from pydal import DAL, Field from os import path from datetime import date from string import punctuation db = DAL('sqlite://storage.db', folder=path.join('../database')) db.define_table('words', Field('word', 'string'), Field('freq', 'integer'), Field('dates', 'list:string')) try: db.define_table('trenddates', Field('trend', 'text'), Field('date', 'date')) except: print("trenddates already exists.") def splitByWord(trend): if trend[0] == '#': trend = trend[1:] t = [trend.lower()] foundDay = [] daysOfWeek = ['mon', 'tues', 'wednes', 'thurs', 'fri', 'satur', 'sun'] for day in daysOfWeek: if day + 'day' in trend: t = trend.split(day + 'day') foundDay.append(day + 'day') if len(foundDay) > 0: for i in range(len(t)): word = t[i] if len(word) == 0: t[i] = foundDay[0] foundDay.pop(0)
def define_table(db):
    """Register ``auth_membership``, linking users to user groups."""
    membership_columns = (
        Field('group_id', "reference user_group"),
        Field('user_id', "reference user"),
    )
    db.define_table("auth_membership", *membership_columns)
#Reviewed by AJV 01/23/2018 """Define the relational database schema. Todo: * `db`, `market` and `buy` are invalid constant names. However capitalizing them requires a lot of corrections throughout the code. """ # Core from datetime import datetime # 3rd Party from pydal import DAL, Field db = DAL('sqlite://storage.sqlite3') market = db.define_table( 'market', Field('name'), Field('ask', type='double'), Field('timestamp', type='datetime', default=datetime.now)) db.executesql('CREATE INDEX IF NOT EXISTS tidx ON market (timestamp);') db.executesql('CREATE INDEX IF NOT EXISTS m_n_idx ON market (name);') buy = db.define_table( 'buy', Field('order_id'), Field('config_file'), Field('market'), Field('purchase_price', type='double'), Field('selling_price', type='double'), Field('sell_id'), Field('amount', type='double'), Field('timestamp', type='datetime', default=datetime.now)) db.executesql('CREATE INDEX IF NOT EXISTS sidx ON buy (selling_price);')
from pydal import DAL, Field db = DAL("mysql://*****:*****@localhost/sakila?set_encoding=utf8mb4", fake_migrate_all=True) db.define_table('film', Field('title'), Field('description'), Field('rental_rate')) films = [ "BENEATH RUSH", "CADDYSHACK JEDI", "REUNION WITCHES", "VOLCANO TEXAS", "SWARM GOLD" ] def query_leaving_relational_space(): sub_query = db(db.film.rental_rate < 1.0)._select( db.film.rental_rate, groupby=db.film.rental_rate, orderby=db.film.rental_rate, ) film_rows = db(db.film.rental_rate.belongs(sub_query)).select( db.film.title, db.film.description, db.film.rental_rate, orderby=db.film.rental_rate) return film_rows def query_in_relational_space(): sub_query = db(db.film.rental_rate < 1.0)._select(db.film.rental_rate, )
from pydal import DAL, Field dbConnect = DAL('mysql://*****:*****@localhost/sampleConnect') try: #The syntax to create order table and insert data in it dbConnect.define_table('order', Field('orderId', required=True), Field('userId', required=True), Field('ngaymua', type='date'), Field('name', type='text'), Field('event_name', type='text'), Field('age', type='integer'), Field('phone', type='text'), Field('email', type='text'), Field('cach_thanh_toan', type='text'), Field('phuong_thuc_nhan', type='text'), Field('quan_huyen', type='text'), primarykey=['orderId']) # The syntax to create order detail table and insert data in it dbConnect.define_table('order_detail', Field('eventId', required=True), Field('userId', required=True), Field('name_event', type='text'), Field('diadiem_tochuc', type='text'), Field('thoigian_tochuc', type='datetime'), Field('quan_huyen_tochuc', type='text'), primarykey=['eventId', 'userId']) # The syntax to create order detail table and insert data in it
def __init__(self, host, user, passw, database, port, tipo_bd): if tipo_bd == "sqlite": self.db = DAL("sqlite://" + database + ".db") elif tipo_bd == "mysql": self.db = DAL("mysql://" + user + ":" + passw + "@" + host + "/" + database + "") elif tipo_bd == "postgres": self.db = DAL("postgres://" + user + ":" + passw + "@" + host + "/" + database + "") elif tipo_bd == "sqlserver": self.db = DAL("mssql4://" + user + ":" + passw + "@" + host + "/" + database + "") elif tipo_bd == "firebird": self.db = DAL("firebird://" + user + ":" + passw + "@" + host + "/" + database + "") elif tipo_bd == "oracle": self.db = DAL("oracle://" + user + ":" + passw + "@" + host + "/" + database + "") elif tipo_bd == "db2": self.db = DAL("db2://" + user + ":" + passw + "@" + database + "") """ Ingres ingres://usuario:contraseña@localhost/nombrebd Sybase sybase://usuario:contraseña@localhost/nombrebd Informix informix://usuario:contraseña@nombrebd Teradata teradata://DSN=dsn;UID=usuario;PWD=contraseña;DATABASE=nombrebd Cubrid cubrid://usuario:contraseña@localhost/nombrebd SAPDB sapdb://usuario:contraseña@localhost/nombrebd IMAP imap://user:contraseña@server:port MongoDB mongodb://usuario:contraseña@localhost/nombrebd """ # Vincular a una tabla 1 preexistente. self.db.define_table( "bienes", # Indicarle a pyDAL cuál es la clave principal. Field("id_bienes", type="id"), Field("identificacion", type="integer"), Field("tipo"), Field("serie"), Field("marca"), Field("codigo", type="integer"), Field("nombre"), Field("ficha_formacion", type="integer"), # Desactivar migraciones. migrate=False) # Vincular a una tabla 2 preexistente. self.db.define_table( "controles", # Indicarle a pyDAL cuál es la clave principal. Field("id_control", type="id"), Field("identificacion", type="integer"), Field("fechahe", type="datetime"), Field("fechahs", type="datetime"), Field("control"), # Desactivar migraciones. migrate=False) # Vincular a una tabla 3 preexistente. 
self.db.define_table( "personal", # Indicarle a pyDAL cuál es la clave principal. Field("id_personal", type="id"), Field("nombre"), Field("identificacion", type="integer"), Field("telefono", type="integer"), Field("correo"), Field("ficha_formacion", type="integer"), Field("rol"), # Desactivar migraciones. migrate=False) # Vincular a una table preexistente. self.db.define_table( "usuario", # Indicarle a pyDAL cuál es la clave principal. Field("id_usuario", type="id"), Field("user"), Field("clave"), Field("rol"), # Desactivar migraciones. migrate=False) """ Tipos de datos
def __new__(
    cls,
    db,
    action,  # 'define_table' or 'Table'
    prefix=None,
    name=None,
    auto_pk=None,
    common_hook=None,
):
    """Build a pydal table from this class's declared attributes.

    Scans ``cls.__dict__`` and sorts each attribute into: a table hook
    (name in HOOKS), a virtual field (property), a table method
    (classmethod), a plain field (Field), a Field.Method (any other
    callable), or a define_table keyword argument (anything else).
    Returns the resulting pydal table object — not a class instance.
    """
    tname = name or cls.__name__
    fields = list()
    hooks = dict()
    table_methods = dict(
    )  # https://github.com/web2py/pydal/blob/232e841765ee97ac6f7af45be794d46432085c4d/pydal/objects.py#L340
    kwargs = dict()
    need_pk = False
    # NOTE: the loop variable deliberately reuses (shadows) the `name`
    # parameter; its original value was already captured in `tname`.
    for name, attr in cls.__dict__.items():
        if name.startswith('__'):
            continue
        # hooks
        if name in HOOKS:
            hooks[name] = attr
        # Field.Virtual
        elif isinstance(attr, property):
            fields.append(DalField.Virtual(name, attr.fget))
        # table method
        elif isinstance(attr, classmethod):
            table_methods[name] = attr.__func__
        # just Field
        elif isinstance(attr, Field):
            attr.name = name
            # A field literally named 'id' whose declared type is not
            # 'id' means the auto primary key must be set explicitly.
            if not need_pk and auto_pk and name == 'id' and attr.args and attr.args[
                    0] != 'id':
                need_pk = True
            fields.append(DalField(name, *attr.args, **attr.kwargs))
        # Field.Method
        elif callable(attr):
            fields.append(DalField.Method(name, attr))
        else:
            kwargs[name] = attr
    # Inherit fields from base classes (mixin signatures), except the
    # Table base itself.
    for sign in cls.__bases__:
        if sign is not Table:
            fields.append(get_signature(db, sign))
    args = [tname] + fields
    # The 'Table' constructor takes db as its first positional argument;
    # 'define_table' is a bound method of db and does not.
    if action == 'Table':
        args.insert(0, db)
    action = getattr(db, action)
    # auto_pk
    if need_pk and kwargs.get('primarykey') is None:
        kwargs['primarykey'] = ['id']
    # rname prefix
    if prefix:
        kwargs['rname'] = f"{prefix}{tname}"
    # define table (fall back to the registered table when the call
    # returns a falsy value, e.g. on lazy definition)
    tbl = action(*args, **kwargs) or db[tname]
    # set hooks
    for hook_name, hook in hooks.items():
        tbl[f"_{hook_name}"].append(hook)
    if common_hook:
        # Bind hook_name as a default argument to avoid the
        # late-binding-closure trap inside the loop.
        for hook_name in HOOKS:
            tbl[f"_{hook_name}"].append(
                lambda *args, hook_name=hook_name, **kw: common_hook(
                    tbl, hook_name, *args, **kw))
    # set table methods
    for meth_name, meth in table_methods.items():
        tbl.add_method.register(meth_name)(meth)
    return tbl
# Connection settings live in .config/db.json one directory above this file.
FILE_PATH = path.join(
    path.split(path.abspath(path.dirname(__file__)))[0], '.config/db.json')

with open(FILE_PATH) as data_file:
    DATA = json.load(data_file)

# pool_size=0 disables connection pooling; lazy_tables defers table
# instantiation until first access.
DB = DAL("postgres://%s:%s@%s:%s/%s" %
         (DATA['user'], DATA['pass'], DATA['host'], DATA['port'], DATA['db']),
         pool_size=0,
         lazy_tables=True)
# Migrations are off: the tables are expected to exist already.
MIGRATE = False

# Database table models
DB.define_table('acciones',
                Field('id', type='id'),
                Field('nombre', type='string', length=50),
                Field('created', type='datetime'),
                Field('updated', type='datetime'),
                Field('nombre_canonico', type='string', length=15),
                migrate=MIGRATE)
DB.define_table('alumnos',
                Field('id', type='id'),
                Field('facultad', type='string', length=5),
                Field('lu', type='string', length=10),
                Field('nombre', type='string', length=70),
                Field('dni', type='string', length=12),
                Field('materias', type='string', length=3),
                migrate=MIGRATE)
def define_table(self):
    """Define the ``douban_topic`` table used by the scraper."""
    print(self.db._dbname)
    topic_columns = [Field(column) for column in (
        'title',
        'title_url',
        'people',
        'people_url',
        'replay_num',
        'post_time',
    )]
    self.db.define_table('douban_topic', *topic_columns)
and info about xtopdf is at: http://slides.com/vasudevram/xtopdf or at: http://slid.es/vasudevram/xtopdf """ # imports from pydal import DAL, Field from PDFWriter import PDFWriter SEP = 60 # create the database db = DAL('sqlite://house_depot.db') # define the table db.define_table('furniture', \ Field('id'), Field('name'), Field('quantity'), Field('unit_price') ) # insert rows into table items = ( \ (1, 'chair', 40, 50), (2, 'table', 10, 300), (3, 'cupboard', 20, 200), (4, 'bed', 30, 400) ) for item in items: db.furniture.insert(id=item[0], name=item[1], quantity=item[2], unit_price=item[3])
import configparser from pydal import DAL, Field config = configparser.ConfigParser() config.read("config.ini") db = DAL( f'postgres://{config["db"]["user"]}:{config["db"]["password"]}@{config["db"]["host"]}/' f'{config["db"]["database"]}', migrate=False) db.define_table('countries', Field('name', length=50, rname='"name"')) db.define_table('cities', Field('country_id', 'reference countries', ondelete='CASCADE'), Field('name', length=50, rname='"name"')) db.define_table('weather_stations', Field('country_id', 'reference countries', ondelete='CASCADE'), Field('number', 'integer'), Field('city_id', 'reference cities', ondelete='CASCADE'), Field('latitude', 'double'), Field('longitude', 'double'), Field('rp5_link', length=255), Field('last_date', 'date'), Field('data_type', length=50)) db.define_table('wind_directions', Field('name', length=50)) db.define_table('cloudiness', Field('description', length=100), Field('scale', 'integer')) db.define_table('cloudiness_cl', Field('description', length=100), Field('scale', 'integer')) # Fields 'date' and 'weather_station_id' must be unique # For postgresql: CREATE UNIQUE INDEX weather_station_and_datetime_unq ON weather (weather_station_id, "date"); db.define_table( 'weather', Field('weather_station_id',