Example #1
 def get_db(self):
     """
         Return the connected db
     """
     if not os.path.exists(self.get_option('dbmetadata')):
         os.makedirs(self.get_option('dbmetadata'))
     db = DAL(self.get_option('database'), lazy_tables=True, folder=self.get_option("dbmetadata"))
     db.define_table('storages',
                 Field('storagename','string'),
                  Field('creation_ts','datetime',
                        # pass the callable, not its result, so the timestamp is
                        # evaluated on each insert rather than once at
                        # table-definition time
                        default=datetime.datetime.now),
                  Field('modified_ts','datetime',
                        default=datetime.datetime.now,
                        update=datetime.datetime.now),
                 )
     db.define_table('files',
                 Field('storages_id',db.storages),
                 Field('parent_id','reference files'),
                 Field('filename','string'),
                 Field('description','string'),
                 Field('mime','string'),
                 Field('ftype','string'),
                 Field('mode','integer'),
                 Field('inode','integer'),
                 Field('dev','string'),
                 Field('nlink','integer'),
                 Field('uid','integer'),
                 Field('gid','integer'),
                 Field('size','double'),
                 Field('ctime','datetime'),
                 Field('mtime','datetime'),
                 Field('atime','datetime'),
                 )
     return db
Example #2
    def copyDB(self):
        other_db = DAL("%s://%s" % (
            self.targetdbType, self.targetdbName), folder=self.targetFolder)

        print 'creating tables...'

        for table in self.db:
            other_db.define_table(
                table._tablename, *[field for field in table])
            '''
            should there be an option to truncate the target DB?
            if yes, then change args to allow for choice
            and set self.truncate to the right value

            if self.truncate==True:
                other_db[table._tablename].truncate()
            '''

        print 'exporting data...'
        self.db.export_to_csv_file(open('tmp.sql', 'wb'))

        print 'importing data...'
        other_db.import_from_csv_file(open('tmp.sql', 'rb'))
        other_db.commit()
        print 'done!'
        print 'Attention: do not run this program again or you end up with duplicate records'
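The commented block above asks whether the target DB should optionally be truncated before import. A minimal sketch of how that flag could be wired in, assuming a small wrapper class around copyDB (the class name and constructor arguments are illustrative, not from the original project):

class DBCopier(object):
    def __init__(self, db, targetdbType, targetdbName, targetFolder, truncate=False):
        # hypothetical constructor: 'truncate' is supplied by the caller or a CLI flag
        self.db = db
        self.targetdbType = targetdbType
        self.targetdbName = targetdbName
        self.targetFolder = targetFolder
        self.truncate = truncate

    # copyDB would then replace the commented question with:
    #     if self.truncate:
    #         other_db[table._tablename].truncate()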
Example #3
    def copyDB(self):
        other_db = DAL("{0}://{1}".format(self.targetdbType,
                                          self.targetdbName),
                       folder=self.targetFolder)

        print 'creating tables...'

        for table in self.db:
            other_db.define_table(table._tablename,
                                  *[field for field in table])
            '''
            should there be an option to truncate the target DB?
            if yes, then change args to allow for choice
            and set self.truncate to the right value

            if self.truncate==True:
                other_db[table._tablename].truncate()
            '''

        print 'exporting data...'
        self.db.export_to_csv_file(open('tmp.sql', 'wb'))

        print 'importing data...'
        other_db.import_from_csv_file(open('tmp.sql', 'rb'))
        other_db.commit()
        print 'done!'
        print 'Attention: do not run this program again or you end up with duplicate records'
Example #4
	def GetDocMeta( self, params = None ):
		if params is None:
			params = self.params
		searchText = params["searchText"]
		searchLimit = params["searchLimit"]
		searchOffset = params["searchOffset"]
		
		print "web2py"
		start = time.clock()
		
		# get data using web2py application layer
		db = DAL('sqlite://doc-meta.sqlite', migrate_enabled=False)
		db.define_table('DocMeta', Field('Key'), Field('DocContent'), Field('DocID'), primarykey=['Key'])
		
		# build query using params
		q = db.DocMeta.DocContent.like('%' + searchText + '%')
		matches = db(q).count()
		limitb = searchOffset + searchLimit
		
		count = 0
		pyResults = {}
		queryResults = db(q).select(orderby=db.DocMeta.DocID, orderby_on_limitby=False, limitby=(searchOffset, limitb))
		for row in queryResults:
		    count += 1
		    pyResults[row.DocID] = dict(row)
		
		elapsed = (time.clock() -start)
		print "elapsed = ", elapsed
		        
		        
		print "exhaustive"
		start = time.clock()
		# get data using exhaustive search of JSON
		filename = os.path.join( self.request.folder, 'data/corpus', 'doc-meta.json' )
		with open( filename ) as f:
			content = json.load( f, encoding = 'utf-8' )
			results = {}
			matchCount = 0
			discardCount = 0
			keys = sorted(content.keys())
			for index in range(len(keys)):
			    obj = content[keys[index]]
			    docContent = obj["DocContent"]
			    if searchText in docContent:
			        matchCount += 1
			        if len(results.keys()) < searchLimit and discardCount >= searchOffset:
			            results[obj["DocID"]] = obj
			        elif discardCount < searchOffset:
			            discardCount += 1
		elapsed = (time.clock() - start)
		print "elapsed = ", elapsed
		
		return {
			"Documents" : pyResults,
			"docCount" : len(pyResults),
			"docMaxCount" : matches
		}
Example #5
def append():
    db=DAL('sqlite://storage.sqlite', folder='/home/www-data/web2py/applications/tgmonitor/databases')
    db.define_table('tg_load',
        Field('check_date','datetime'),
        Field('tg_number','integer', notnull=True),
        Field('busy', 'integer'),
        Field('installed', 'integer')
        )

    db.tg_load.insert(check_date='',tg_number=2, busy=45, installed=60)
    db.commit()
Example #6
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [
                    table[field] for field in table.fields
                    if field != "the_geom"
                ]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        file = open(filename, "w")
        db.export_to_csv_file(file)
        file.close()
        file = open(filename, "r")
        db_bak.import_from_csv_file(file, unique="uuid2")  # designed to fail
        file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #7
def adding_new_fields(new_unique_field, changed_table):
    """
    This function adds new_unique_field to changed_table, while keeping all the rest of
    the properties of the table unchanged.
    """
    database_string = "sqlite://storage.db"
    old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
    temp_db = DAL(database_string, folder=old_database_folder, migrate_enabled=True, migrate=True)
    new_field = Field(new_unique_field, "integer")
    try:
        changed_table_primary_key = db[changed_table]._primarykey
    except KeyError:
        changed_table_primary_key = None
    temp_db.define_table(changed_table, db[changed_table], new_field, primarykey=changed_table_primary_key)
    return temp_db
Example #8
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        file = open(filename, "w")
        db.export_to_csv_file(file)
        file.close()
        file = open(filename, "r")
        db_bak.import_from_csv_file(file, unique="uuid2") # designed to fail
        file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #9
def adding_renamed_fields(table_name, old_field_name, new_field_name, attributes_to_copy):
    """
    This function adds a field to the named table as part of renaming a field.
    The renamed field is added separately to the table with the same properties
    as the original field.
    """
    database_string = "sqlite://storage.db"
    old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
    temp_db = DAL(database_string, folder=old_database_folder, migrate_enabled=True, migrate=True)
    new_field = Field(new_field_name)
    try:
        table_primary_key = db[table_name]._primarykey
    except KeyError:
        table_primary_key = None
    for attribute in attributes_to_copy:
        # copy each listed attribute from the old field onto the new field
        # (setattr/getattr replaces the original string-building exec)
        setattr(new_field, attribute, getattr(db[table_name][old_field_name], attribute))
    temp_db.define_table(table_name, db[table_name], new_field, primarykey=table_primary_key)
    return temp_db
Example #10
class Config(object):
    """Build and read config on GAE or config.cfg file"""
    def __init__(self, autogetconfig=True):
        from gluon import current
        self.request = current.request
        self.ongae = self.request.env.web2py_runtime_gae
        self.T = current.T
        self.cache = current.cache
        from gluon import DAL
        if self.ongae:
            self._db = DAL("google:datastore")
        else:
            self._db = DAL("sqlite://config_movuca.sqlite")

        self.define_tables()
        if autogetconfig:
            self.get_config()

    def define_tables(self):
        from datamodel.setup import Setup
        setup = Setup()
        self.tables = filter(lambda item: not item.startswith("_"), dir(setup))
        for table in self.tables:
            entity = self._db.define_table(table,
                *setup.__getattribute__(table),
                **dict(migrate="config_%s.table" % table)
            )
            self.__setattr__(table, entity)

    def set_default_config(self):
        now = self.request.now
        for table in self.tables:
            self.__getattribute__(table).insert(setuptime=now)
            self._db.commit()

    def get_config(self, expire=300):
        for table in self.tables:
            result = self._db(self._db[table]).select(cache=(self.cache.ram, expire)).last()
            self.__setattr__(table.split("_")[0],
                             result)

    def get_list(self, table, option):
        options = self.__getattribute__(table)[option]
        assert isinstance(options, list)
        # use a distinct loop variable so the 'option' argument is not shadowed
        if "mailhide" not in option:
            return [(item.split(":")[0], str(self.T(item.split(":")[1]))) for item in options]
        else:
            return [(item.split(":")[0], str(item.split(":")[1])) for item in options]
Example #11
def get_ticket_storage(app):
    private_folder = apath('%s/private' % app, r=request)
    db_string = open(os.path.join(private_folder, 'ticket_storage.txt')).read().replace('\r','').replace('\n','').strip()
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    from gluon import DAL
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        table = ticketsdb.define_table(
                tablename,
                Field('ticket_id', length=100),
                Field('ticket_data', 'text'),
                Field('created_datetime', 'datetime'),
                )
    return ticketsdb, ticketsdb.get(tablename)
Example #13
def get_ticket_storage(app):
    private_folder = apath("%s/private" % app, r=request)
    db_string = (
        open(os.path.join(private_folder, "ticket_storage.txt")).read().replace("\r", "").replace("\n", "").strip()
    )
    tickets_table = "web2py_ticket"
    tablename = tickets_table + "_" + app
    db_path = apath("%s/databases" % app, r=request)
    from gluon import DAL

    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        table = ticketsdb.define_table(
            tablename,
            Field("ticket_id", length=100),
            Field("ticket_data", "text"),
            Field("created_datetime", "datetime"),
        )
    return ticketsdb, ticketsdb.get(tablename)
Example #14
class Application(Tkinter.Frame):
    def __init__(self, master=None):
        Tkinter.Frame.__init__(self, master)
        self.grid()

        self.db = DAL("sqlite://teste.sqlite")
        ne = IS_NOT_EMPTY()
        self.table = self.db.define_table("produto",
            Field("nome", requires=ne),
            Field("marca", requires=ne),
            Field("peso", "double", requires=ne),
            Field("valor", "double", requires=ne),
            Field("quantidade", "integer", requires=ne),
        )

        self.create_form()
        self.show_result()

    def create_form(self):
        self.form = Tkinter.LabelFrame(self, text="Cadastro", padx="30", pady="15")
        self.form.pack(side=Tkinter.TOP)
        self.vars = {}
        # self.nome_label = Tkinter.Label(self.form, text="nome do produto")
        # self.nome_label.pack()
        # self.nome = Tkinter.Entry(self.form)
        # self.nome.pack()
        self.SQLTKFORM(self.table)

    def show_result(self):
        self.result = Tkinter.LabelFrame(self, text="Results", padx="30", pady="15")
        self.result.pack(side=Tkinter.BOTTOM)
        # query db for records
        records = self.db(self.table).select()
        if records:
            self.records = Tkinter.Text(self.result)
            lines = [",".join(self.table.fields), "_" * 80]
            for record in records:
                lines.append(",".join([str(record[field]) for field in self.table.fields]))
            self.records.insert(1.0, "\n".join(lines))
        else:
            self.records = Tkinter.Label(self.result, text="Nothing found")
        self.records.pack()

    def SQLTKFORM(self, table, hidden="id"):
        fields = table.fields
        for field in fields:
            if field not in hidden:
                self.create_input(field, table[field].label)

        self.save_button = Tkinter.Button(self.form,
                                          text="Salvar",
                                          command=self.save_to_db).pack()

    def save_to_db(self):
        try:
            values = [(name, value()) for name, value in self.vars.items()]
            record = self.table.validate_and_insert(**dict(values))
            if record.errors:
                self.db.rollback()
                for error, error_message in record.errors.items():
                    label_text = getattr(self, error + "_label_text")
                    actual_text = label_text.get()
                    if not ":" in actual_text:
                        label_text.set(actual_text + ": " + error_message)
                        getattr(self, error).configure(bg="red")
                return
        except Exception:
            raise Exception("something wrong....")
        else:
            self.db.commit()
            self.reset_form()

    def reset_form(self):
        self.form.destroy()
        self.create_form()
        self.result.destroy()
        self.show_result()

    def create_input(self, name, text):
        # LABEL TEXT IS VARIABLE
        setattr(self, name + "_label_text", Tkinter.StringVar(self.form))
        label_text = getattr(self, name + "_label_text")
        label_text.set(text)
        # LABEL
        setattr(self, name + "_label", Tkinter.Label(self.form, textvariable=label_text))
        # INPUT
        setattr(self, name, Tkinter.Entry(self.form))
        # PACK
        getattr(self, name + "_label").pack()
        getattr(self, name).pack()
        # VARS
        self.vars[name] = getattr(self, name).get
Example #15
# coding: utf8
from gluon import *
from gluon.tools import Auth
from gluon import request, DAL, response, session

#from plugin_lazy_options_widget import lazy_options_widget
from plugin_suggest_widget import suggest_widget
from cascadedrop import CascadingSelect
from gluon.contrib.populate import populate

db = DAL('sqlite://storage.sqlite',pool_size=1,check_reserved=['all'])
auth = Auth(db)

db.define_table('school',
                Field('name', requires=IS_NOT_EMPTY()),
                auth.signature,
                format='%(name)s'
                )


db.define_table('department',
                Field('school', db.school), #, requires=IS_IN_DB(db, db.school.id, '%(name)s')),
                Field('name', requires=IS_NOT_EMPTY()),
                Field('HOD', 'reference auth_user'),
                auth.signature,
                format='%(name)s'
                )
db.define_table('program',
                Field('department', 'reference department'),
                Field('classification', requires=IS_IN_SET(['ND', 'HND', 'PRE-ND', 'PGD','NON-ND'])),
                Field('the_option', label='Option'),
Example #16
import sys, os, telnetlib, time, datetime


def get_circuits(s='',find_par=''):
    pos = s.find(find_par)+35
    return s[pos:pos+6].strip()



sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db=DAL('sqlite://storage.sqlite', folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load',
    Field('check_date','datetime'),
    Field('tg_number',length=17),
    Field('busy', 'integer'),
    Field('installed', 'integer')
    )


host = '10.200.66.70'
port = '6000'
tn = telnetlib.Telnet(host,port)
tn.write('LGI:op="monitor",PWD ="dspoftk",SER="10.100.100.104---O&M System";')
ans = tn.read_until('END')


tn.write('DSP OFTK: LT=TG, TG=44, DT=AT;')
ans = tn.read_until('END')

Example #17
#01. When a .py file contains Chinese text, it must include the coding comment shown above
from gluon import DAL, Field
import pyodbc
#02. Set the default string encoding; without this, str cannot be used
import sys
import json
from datetime import datetime

reload(sys)
sys.setdefaultencoding('utf-8')
str_db = u'mssql4://sa:1@localhost/BIDDING'
#03. The connection must use the utf8 character set so the Chinese results returned can be decoded directly
db = DAL(str_db,migrate_enabled=False)
print db._uri
print db._dbname
db.define_table('BankRecord',Field('Account'),Field('CompanyName'),Field('CreateDate'),Field('Money'),Field('Note'),Field('TradingTime'))
db.define_table('Customer',Field('Address1'),Field('Address2'),Field('CompanyName'),Field('CompanyPhone'),Field('ContactName'),Field('CreationDate'),Field('Email'),Field('EmployeeId'),Field('Fax'),Field('IsDelete'),Field('MobilePhone'),Field('Note'),Field('PassWord'),Field('Position'),Field('Type'),Field('UserName'),Field('ZipCode'))
db.define_table('Employee',Field('Address'),Field('Age'),Field('Code'),Field('CompanyPhone'),Field('CreationDate'),Field('DateOfBirth'),Field('Department'),Field('Email'),Field('EmergencyContactName'),Field('EmergencyContactPhone'),Field('EmergencyContactRelationship'),Field('HomePhone'),Field('IsDelete'),Field('MobilePhone'),Field('Name'),Field('Note'),Field('PassWord'),Field('Position'),Field('SexId'),Field('Type'),Field('UserName'))
db.define_table('Finance',Field('Activity'),Field('CreationDate'),Field('EmployeeId'),Field('Income'),Field('IsDelete'),Field('Note'),Field('ProjectCodeId'),Field('ProtocolCodeId'),Field('Spending'),Field('TargetId'),Field('TitleId'))
db.define_table('Log',Field('Agent'),Field('CreationDate'),Field('Ip'),Field('Kind'),Field('Note'),Field('UserId'))
db.define_table('Management',Field('Code'),Field('CreationDate'),Field('IsDelete'),Field('Name'))
db.define_table('MoneyType',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('ProjectCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('Option1'),Field('Option2'),Field('Option3'),Field('ProjectNumber'),Field('ProjectTypeId'),Field('ProtocolId'))
db.define_table('ProjectResource',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('Project',Field('Assistant'),Field('BuyerId'),Field('ChargeRate'),Field('CreationDate'),Field('EmployeeId'),Field('EntrustMoney'),Field('IsDelete'),Field('MakeOutDate'),Field('ManagementStyleId'),Field('Note'),Field('Package'),Field('ProjectCodeId'),Field('ProjectName'),Field('ProjectSourceId'),Field('ProjectTypeId'),Field('ProtocolCodeId'),Field('SigningDate'),Field('SourcesOfFundingId'),Field('StateId'),Field('WinningCompany'),Field('WinningMoney'))
db.define_table('ProjectStatus',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('ProtocolCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('ProtocolNumber'),Field('TypeId'))
db.define_table('ProtocolCodeType',Field('TypeCode'),Field('TypeId'),Field('TypeName'))
db.define_table('Suggest',Field('Content'),Field('CreationTime'),Field('IsDelete'),Field('UserId'))
db.define_table('Task',Field('CreationDate'),Field('Deadline'),Field('EmployeeId'),Field('IsDelete'),Field('Note'),Field('PlaceId'),Field('ProjectId'),Field('StateId'),Field('TitleId'))
db.define_table('TaskLocation',Field('CreationTime'),Field('IsDelete'),Field('Name'))
Example #18
File: uodb.py Project: BrenCam/uodb
#db = DAL('sqlite://storage.sqlite', folder='../databases')
db = DAL('sqlite://storage.sqlite')

migrate=True

db.define_table('patient',
    #Field('mrn', type='string', length=10, required=True, unique=True, label=T('Medical Rec #')),
    Field('mrn', type='string', length=10, required=True, unique=True),
    Field('date_of_birth', type='datetime'),
    Field('fname', type='string', length=25),
    Field('mname', type='string', length=10),
    Field('lname', type='string', length=25),
    Field('ethnicity', type='string', length=8),
    Field('primary_tx', type='string', length=8),
    Field('marital_status', type='string', length=8),
    Field('review_status', type='string', length=8),
    Field('review_date', type='datetime'),
    Field('review_uid', type='string', length=12),
    Field('memo', type='string', length=255),
    Field('create_date', type='datetime'),
    Field('create_uid', type='string', length=12),
    Field('modify_date', type='datetime'),
    Field('modify_uid', type='string', length=12),
    migrate=migrate)

#
# Family History for patient -  references patient tbl
# define parent reference here
#
db.define_table('fhist',
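The listing is truncated at the fhist definition above. A minimal sketch of the parent reference the comment describes; only the reference back to patient is implied by the source, the remaining fields are assumptions:

db.define_table('fhist',
    Field('patient_id', 'reference patient'),        # parent reference to the patient table
    Field('relation', type='string', length=25),     # hypothetical field
    Field('condition', type='string', length=255),   # hypothetical field
    Field('memo', type='string', length=255),        # hypothetical field
    migrate=migrate)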
Example #19
    msg['To'] = send_email
    me = 'Emilius<*****@*****.**>'
    to = '*****@*****.**'
    mailSrv = smtplib.SMTP("mail.ru", 25)
    mailSrv.ehlo()
    mailSrv.login('*****@*****.**', 'passsswordd')
    mailSrv.sendmail(me, to, msg.as_string())
    mailSrv.close()


sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db = DAL('sqlite://storage.sqlite',
         folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load', Field('check_date', 'datetime'),
                Field('tg_number', length=17), Field('busy', 'integer'),
                Field('installed', 'integer'))

db.define_table(
    'tg_list',
    Field('tg_group', length=12, default='group1'),
    Field('tg_number', length=17, notnull=True),
    Field('tg_descr', length=17),
    Field('tg_type', length=3, default='sip'),
    Field('color', length=16, default='black'),
    Field('alarm', 'integer', default=90),
    Field('alarm_email', length=20),
    Field('alarm_call_number', length=32),
)

h = height - 20
Example #20
old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
temp_db = DAL(database_string, folder=old_database_folder, migrate_enabled=True, migrate=True)

#MIGRATION SCRIPT

list_of_fields = []
list_of_fields.append(Field(new_field,"integer"))

list_of_new_table_fields = []
list_of_new_table_fields.append(Field(new_table_field,"integer"))

try:
    db[changed_table]._primarykey
except KeyError:
    db[changed_table]._primarykey = None

temp_db.define_table(changed_table, db[changed_table], *list_of_fields, primarykey=db[changed_table]._primarykey)
temp_db.define_table(new_table , *list_of_new_table_fields)

temp_db.commit()

#Adding a new field of int type in a class

for old_row in temp_db().select(temp_db[changed_table][old_field]):
    if (len(temp_db(temp_db[new_table][new_table_field] == old_row[old_field]).select()) == 0):
        row = temp_db[new_table].insert(name = old_row[old_field])
        new_id = int(row["id"])
        temp_db(temp_db[changed_table][old_field] == old_row[old_field]).update(type_id = new_id)
    
temp_db.commit()
Example #21
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            for tablename, fieldname in strbools:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #22
import sys 
sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field

db4=DAL('postgres://*****:*****@localhost:5433/vpak',migrate_enabled=False, pool_size=20, lazy_tables=True, fake_migrate_all=False)

db4.define_table('geocode',
                 Field('zip', 'string'),
                 Field('city', 'string'),
                 Field('state','string'),
                 Field('longitude','string'),
                 Field('latitude','string'))

flat = '88.10'
flon = '47.1'
fradius = 50

rows = db4.executesql("select city,longitude,latitude, radians(cast(latitude as float)),radians({0}), radians({1}) from geocode where city like 'Bolingbrook'".format(flat, flon))

print rows[1]
Example #23
# -*- coding: utf-8 -*-

import sys, os, telnetlib, time, datetime


def get_circuits(s='', find_par=''):
    pos = s.find(find_par) + 35
    return s[pos:pos + 6].strip()


sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db = DAL('sqlite://storage.sqlite',
         folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load', Field('check_date', 'datetime'),
                Field('tg_number', length=17), Field('busy', 'integer'),
                Field('installed', 'integer'))

host = '10.200.66.70'
port = '6000'
tn = telnetlib.Telnet(host, port)
tn.write('LGI:op="monitor",PWD ="dspoftk",SER="10.100.100.104---O&M System";')
ans = tn.read_until('END')

tn.write('DSP OFTK: LT=TG, TG=44, DT=AT;')
ans = tn.read_until('END')

_busy = get_circuits(ans, 'Busy')
_ins_num = get_circuits(ans, 'Installation number')

last = db.tg_load.check_date.max()
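The script is cut off after building the max() expression. A short sketch of how the aggregate would typically be read back with the DAL (an assumed continuation, not part of the original):

row = db(db.tg_load.id > 0).select(last).first()
last_check = row[last] if row else None  # most recent check_date, or None if the table is empty
print last_check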
Example #24
CREATE TABLE Category (
    IdCategory INTEGER        PRIMARY KEY AUTOINCREMENT
                              NOT NULL,
    Name       VARCHAR( 60 )  NOT NULL,
    Url        VARCHAR( 60 )  NOT NULL
                              UNIQUE
);

CREATE INDEX idx_CategoryUrl ON Category (
    Url COLLATE NOCASE ASC
);

"""
db.define_table('category',
                Field('idcategory', type='id'),
                Field('name'),
                Field('url'))



"""
User
Fields: IdUser (integer) PRIMARY KEY AUTO INCREMENT, RealName (string), DateOfBirth (date), UserName (string(120)) UNIQUE INDEX
Computed: Age (integer)

IdUser is set to be hidden from the forms (writable=False, readable=False). Setting default=None allows the database to
generate a unique primary key rather than relying on web2py to generate this.

Age is computed as the number of years between request.now and the birth date stored in the record.
"""
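The define_table call this description refers to is not present in the listing. A minimal sketch reconstructed from the description; the field spellings, the age computation, and the availability of request (as in a web2py model) are assumptions:

db.define_table('user',
                Field('iduser', type='id', default=None,
                      writable=False, readable=False),   # hidden; the DB generates the key
                Field('realname', type='string'),
                Field('dateofbirth', type='date'),
                Field('username', type='string', length=120, unique=True),
                # assumed formula: whole years between request.now and the stored birth date
                Field('age', type='integer',
                      compute=lambda row: int((request.now.date() - row['dateofbirth']).days / 365.25)),
                migrate=True)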

# Migration Script
list_of_fields = []
list_of_fields.append(Field(new_field, "integer"))

list_of_new_table_fields = []
list_of_new_table_fields.append(Field(new_table_field, "integer"))

try:
    db[changed_table]._primarykey
except KeyError:
    db[changed_table]._primarykey = None

temp_db.define_table(changed_table,
                     db[changed_table],
                     *list_of_fields,
                     primarykey=db[changed_table]._primarykey)
temp_db.define_table(new_table, *list_of_new_table_fields)

temp_db.commit()

# Add a new field of int type in a class
for old_row in temp_db().select(temp_db[changed_table][old_field]):
    if (len(
            temp_db(temp_db[new_table][new_table_field] ==
                    old_row[old_field]).select()) == 0):
        row = temp_db[new_table].insert(name=old_row[old_field])
        new_id = int(row["id"])
        temp_db(
            temp_db[changed_table][old_field] == old_row[old_field]).update(
                type_id=new_id)
Example #26
    pass  # directory already created
for img in listdir(repo_img_dir):
    if splitext(img)[1] == '.jpg':
        copy(join(repo_img_dir, img), wa_img_dir)

# Connect to database
db_dir = abspath(join(dirname(__file__), pardir, 'databases'))
try:
    makedirs(db_dir)
except os.error:
    pass
db = DAL('sqlite://../databases/storage.db', folder=db_dir)

# Create images table.
db.define_table('images',
                Field('represent', type='string', length=100, required=True),
                Field('file_name', type='string', length=100, required=True))

# Insert image file names and meanings.
img_pattern = re.compile(r'(\D+?)\d*\.')
for img in listdir(wa_img_dir):
    match = img_pattern.match(img)
    if match is not None:
        db.images.insert(represent=match.group(1).capitalize(), file_name=img)

# Create scores table.
db.define_table('scores', Field('mode', type='string', length=6,
                                required=True),
                Field('difficulty', type='string', length=6, required=True),
                Field('name', type='string', length=100, required=True),
                Field('score', type='integer', required=True),
Example #27
    msg['To'] = send_email
    me='Emilius<*****@*****.**>'
    to='*****@*****.**'
    mailSrv=smtplib.SMTP("mail.ru",25)
    mailSrv.ehlo()
    mailSrv.login('*****@*****.**','passsswordd')      
    mailSrv.sendmail(me, to, msg.as_string())      
    mailSrv.close()


sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db=DAL('sqlite://storage.sqlite', folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load',
    Field('check_date','datetime'),
    Field('tg_number',length=17),
    Field('busy', 'integer'),
    Field('installed', 'integer')
    )


db.define_table('tg_list',
    Field('tg_group', length=12, default='group1'),
    Field('tg_number', length=17, notnull=True),
    Field('tg_descr', length=17),
    Field('tg_type', length=3, default='sip'),
    Field('color', length=16, default='black'),
    Field('alarm', 'integer', default=90),
    Field('alarm_email', length=20),
    Field('alarm_call_number', length=32),
    )
Example #28
class Application(Tkinter.Frame):
    def __init__(self, master=None):
        Tkinter.Frame.__init__(self, master)
        self.grid()
        
        self.define_table()
        self.create_form()

    def define_table(self):
        self.db = DAL("sqlite://produtos.sqlite")
        self.table = self.db.define_table("produto",
           Field("nome"),
           Field("marca"),
           Field("peso", "double", label="Peso do produto"),
           Field("valor", "double"),
           Field("quant", "integer"),
        ) 

    def create_form(self):
        self.vars = {}
        self.form = Tkinter.LabelFrame(self, text="Cadastro de produtos", padx="30", pady="15")
        self.form.pack(side=Tkinter.TOP)

        self.SQLTKFORM(self.table, self.form)

    def SQLTKFORM(self, table, master, hidden=['id']):
        fields = table.fields
        for field in fields:
            if field not in hidden:
                self.create_input(field, table[field].label, master)

        master.save_button = Tkinter.Button(master,
                                            text="Salvar",
                                            command=self.save_to_db).pack()
    
    
    def save_to_db(self):
        try:
            values = [(field, value()) for field, value in self.vars.items()]
            self.table.insert(**dict(values))
        except Exception:
            self.db.rollback()
        else:
            self.db.commit()
            self.reset_form()

    def reset_form(self):
        self.form.destroy()
        self.create_form()


    def create_input(self, field, label, master):
        # self.label = Tkinter.Label(master, text=label)
        # label.pack()
        setattr(self, field+"_label", Tkinter.Label(master, text=label))
        getattr(self, field+"_label").pack()

        setattr(self, field, Tkinter.Entry(master))
        getattr(self, field).pack()

        self.vars[field] = getattr(self, field).get
Example #29
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            for tablename, fieldname in strbools:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #30
from gluon import DAL, Field

#db = DAL('sqlite://storage.sqlite')
db = DAL('postgres://*****:*****@db/postgres')
db.define_table('ipligence2', Field('ip_from', 'integer', 10, '0000000000'),
                Field('ip_to', 'integer', 10, '0000000000'),
                Field('country_code', 'string', 10),
                Field('country_name', 'string', 255),
                Field('continent_code', 'string', 10),
                Field('continent_name', 'string', 255),
                Field('time_zone', 'string', 10),
                Field('region_code', 'string', 10),
                Field('region_name', 'string', 255),
                Field('the_owner', 'string', 255),
                Field('city_name', 'string', 255),
                Field('county_name', 'string', 255),
                Field('latitude', 'double'), Field('longitude', 'double'))
db.ipligence2.import_from_csv_file(
    open(
        '/home/www-data/web2py/applications/TrackR/private/ipligence-max.mysqldump.sql',
        'rb'  # the mode belongs to open(), not to import_from_csv_file()
    ))
db.commit()
Example #31
    pass   # directory already created
for img in listdir(repo_img_dir):
    if splitext(img)[1] == '.jpg':
        copy(join(repo_img_dir, img), wa_img_dir)

# Connect to database
db_dir = abspath(join(dirname(__file__), pardir, 'databases'))
try:
    makedirs(db_dir)
except os.error:
    pass
db = DAL('sqlite://../databases/storage.db', folder=db_dir)

# Create images table.
db.define_table('images',
    Field('represent', type='string', length=100, required=True),
    Field('file_name', type='string', length=100, required=True))

# Insert image file names and meanings.
img_pattern = re.compile(r'(\D+?)\d*\.')
for img in listdir(wa_img_dir):
    match = img_pattern.match(img)
    if match is not None:
        db.images.insert(
            represent=match.group(1).capitalize(),
            file_name=img)

# Create scores table.
db.define_table('scores',
    Field('mode', type='string', length=6, required=True),
    Field('difficulty', type='string', length=6, required=True),
Example #32
from gluon.tools import *

# reload(sys)
# sys.setdefaultencoding('utf-8')
str_db = u'mssql4://sa:sohunj123@localhost/BIDDING'
#03. The connection must use the utf8 character set so the Chinese results returned can be decoded directly

db = DAL(str_db,migrate_enabled=False)
dbu = DAL(str_db,migrate_enabled=False)
auth = Auth(dbu)
auth.settings.extra_fields['auth_user']= [Field('chinesename'), Field('code')]
auth.define_tables(username=True)
crud = Crud(dbu)
print db._uri

db.define_table('BankRecord',Field('Account'),Field('CompanyName'),Field('CreateDate'),Field('Money'),Field('Note'),Field('TradingTime'))
db.define_table('BiddingCountType',Field('BiddingCountTypeCode'),Field('BiddingCountTypeId'),Field('BiddingCountTypeName'))
db.define_table('BiddingSiteStatisticType',Field('BiddingSiteStatisticTypeCode'),Field('BiddingSiteStatisticTypeId'),Field('BiddingSiteStatisticTypeName'))
db.define_table('Columns_ProtocolCode',Field('columnlabel'),Field('columnname'),Field('typename'))
db.define_table('Customer',Field('Address1'),Field('Address2'),Field('CompanyName'),Field('CompanyPhone'),Field('ContactName'),Field('CreationDate'),Field('Email'),Field('EmployeeId'),Field('Fax'),Field('IsDelete'),Field('MobilePhone'),Field('Note'),Field('PassWord'),Field('Position'),Field('Type'),Field('UserName'),Field('ZipCode'))
db.define_table('Employee',Field('Address'),Field('Age'),Field('Code'),Field('CompanyPhone'),Field('CreationDate'),Field('DateOfBirth'),Field('Department'),Field('Email'),Field('EmergencyContactName'),Field('EmergencyContactPhone'),Field('EmergencyContactRelationship'),Field('HomePhone'),Field('IsDelete'),Field('MobilePhone'),Field('Name'),Field('Note'),Field('PassWord'),Field('Position'),Field('SexId'),Field('Type'),Field('UserName'))
db.define_table('Finance',Field('Activity'),Field('CreationDate'),Field('EmployeeId'),Field('Income'),Field('IsDelete'),Field('Note'),Field('ProjectCodeId'),Field('ProtocolCodeId'),Field('Spending'),Field('TargetId'),Field('TitleId'))
db.define_table('Log',Field('Agent'),Field('CreationDate'),Field('Ip'),Field('Kind'),Field('Note'),Field('UserId'))
db.define_table('Management',Field('Code'),Field('CreationDate'),Field('IsDelete'),Field('Name'))
db.define_table('ManagementStyle',Field('ManagementStyleCode'),Field('ManagementStyleId'),Field('ManagementStyleName'))
db.define_table('FundingSource',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('OperationType',Field('OperationTypeCode'),Field('OperationTypeId'),Field('OperationTypeName'))
db.define_table('ProjectProperty',Field('ProjectPropertyCode'),Field('ProjectPropertyId'),Field('ProjectPropertyName'))
db.define_table('Project',Field('Assistant'),Field('ContactorNameId'),Field('CustomerId'),Field('CreationDate',type='datetime',default=request.now),Field('EmployeeId',default=auth.user_id),Field('IsDelete'),Field('ManagementStyleId'),Field('PurchaseStyleId'),Field('Note'),Field('ProjectCode'),Field('ProjectName'),Field('ProjectSourceId'),Field('ProjectTypeId'),Field('ProtocolCodeId'),Field('FundingSourceId'),Field('ProjectStatusId'), Field('ProjectPropertyId'))
db.define_table('ProjectCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('Option1'),Field('Option2'),Field('Option3'),Field('ProjectNumber'),Field('ProjectTypeId'),Field('ProtocolId'))
db.define_table('ProjectPackage',Field('NoticeDate'),Field('ChargeRate'),Field('OpenDate'),Field('ReviewDate'),Field('PublicDate'),Field('CreationDate',type='datetime',default=request.now),Field('EntrustMoney'),Field('IsDelete'),Field('MakeOutDate'),Field('Note'),Field('PackageName'),Field('PackageNumber'),Field('ProjectId'),Field('SigningDate'),Field('StateId'),Field('WinningCompany'),Field('WinningMoney'), Field('SecondPublicDate'), Field('comment'), Field('WinningDollar'))
Example #33
import sys
import datetime
sys.path.append("../../")
from gluon import DAL, Field

db = DAL('mysql://*****:*****@localhost/test',migrate_enabled=True)
db.define_table('mytable', Field('temperature','double'),Field('pressure','double'),Field('dates','date'),\
 Field('time','time'),format='%(content)s',migrate=True)
print db._uri
print db._dbname
print db.mytable.fields
#print db.field
print db.tables

# insert data
json_string = "{  \
    'first': 1,\
    'second': 2,\
    'date': '2016-2-6',\
    'temperature': '37.5',\
    'unit': 'km'\
}";

#auth.define_tables(migrate=False)
#db.mytable.insert(temperature = 36.5,pressure = 90)
db.mytable.insert(temperature = 36.5,pressure = 90,dates = datetime.datetime.now().date(), \
time= datetime.datetime.now().time())
#db.mytable.insert()
db.commit()
#select
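The snippet ends at the select comment. A short sketch of reading the inserted row back (an assumed continuation, not part of the original):

rows = db(db.mytable.id > 0).select()
for row in rows:
    print row.temperature, row.pressure, row.dates, row.time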