Example No. 1
class DbHelper(object):
    """docstring for DbHelper"""
    def __init__(self, arg):
        super(DbHelper, self).__init__()
        self.arg = arg
        self.db = DAL('mongodb://140.143.247.178:27099/spider')
        self.define_table()
        '''
        self.db.thing.insert(name='Chair')
        query = self.db.thing.name.startswith('C')
        rows = self.db(query).select()
        print(rows[0].name)
        self.db.commit()
        '''

    def define_table(self):
        print(self.db._dbname)
        self.db.define_table('douban_topic', Field('title'),
                             Field('title_url'), Field('people'),
                             Field('people_url'), Field('replay_num'),
                             Field('post_time'))

    def insert_models(self, table_name='', items=None):
        # avoid a mutable default argument; note table_name is currently unused
        rows = list(map(dict, items or []))
        self.db.douban_topic.bulk_insert(rows)
        self.db.commit()
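A minimal usage sketch (assuming the MongoDB instance above is reachable; the item dicts and the arg value are illustrative):

helper = DbHelper(arg=None)
helper.insert_models(items=[
    {'title': 'A topic', 'title_url': 'http://example.com/t/1',
     'people': 'someone', 'people_url': 'http://example.com/u/1',
     'replay_num': '3', 'post_time': '2020-01-01 12:00'}
])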
Example No. 2
 def testRun(self):
     for ref, bigint in [("reference", False), ("big-reference", True)]:
         db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint)
         if bigint and "big-id" not in db._adapter.types:
             continue
         db.define_table("tt", Field("vv"))
         db.define_table(
             "ttt",
             Field("vv"),
             Field("tt_id", "%s tt" % ref, unique=True),
             Field("tt_uq", "integer", unique=True),
         )
         id_1 = db.tt.insert(vv="pydal")
         id_2 = db.tt.insert(vv="pydal")
         # Null tt_id
         db.ttt.insert(vv="pydal", tt_uq=1)
         # first insert is OK
         db.ttt.insert(tt_id=id_1, tt_uq=2)
         self.assertRaises(Exception, db.ttt.insert, tt_id=id_1, tt_uq=3)
         self.assertRaises(Exception, db.ttt.insert, tt_id=id_2, tt_uq=2)
         # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
         db.commit()
         drop(db.ttt)
         drop(db.tt)
         db.close()
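The same unique-reference behaviour can be checked standalone; a minimal sketch against pyDAL's in-memory SQLite URI:

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('tt', Field('vv'))
db.define_table('ttt', Field('vv'), Field('tt_id', 'reference tt', unique=True))
tt_id = db.tt.insert(vv='pydal')
db.ttt.insert(tt_id=tt_id)       # first insert is OK
try:
    db.ttt.insert(tt_id=tt_id)   # second insert violates the UNIQUE constraint
except Exception:
    db.rollback()
db.close()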
Example No. 3
class Db:

    def __init__(self):
        self.uri = 'sqlite://database.sqlite'
        self.folder = os.path.join(current_dir, "sqlite")

        self.db = None

    def connect(self):
        if not self.db:
            self.db = DAL(self.uri, folder=self.folder, pool_size=5, lazy_tables=False)
            #, migrate_enabled=False, migrate=False, lazy_tables=True
            self.tables()
        else:
            print("Error: db already open")

    def close(self):
        self.db.close()
        self.db = None

    def tables(self):
        self.tableUsers()


    def tableUsers(self):
        try:
            # Note: id is created automatically if omitted
            self.db.define_table('users',
                Field('username', type='string'),
                Field('email', type='string'),
                Field('password', type='string'),
                Field('uid', type='integer', default=0),
                Field('userRole', type='string', default='user'),  # user, admin
                Field('secureKey', type='string', default='0', writable=False, readable=False),  # secret key token - should be used server side
                Field('dateRegistration', type='datetime', writable=False, readable=False)
                )
        except Exception as e:
            print('models db.DB() tableUsers Error:', e)

    def tableLogs(self):
        pass


    def insertUser(self, username, email, password, dateRegistration):
        self.connect()
        import secrets  # generates a URL-safe token
        secureKey = secrets.token_urlsafe()
        self.db.users.insert(username=username, email=email, password=password, uid=0, userRole="user", secureKey=secureKey, dateRegistration=dateRegistration)
        self.db.commit()
        self.db.close()

    def loginUser(self, username, password):
        self.connect()
        self.db._adapter.reconnect()  # avoids a threading error otherwise
        user = self.db( (self.db.users.username == username) & (self.db.users.password == password) ).select().first()
        self.db.close()
        # returns None when no matching user exists
        return user
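Note that the class above stores and compares plaintext passwords. A hedged sketch of salted hashing with the standard library (hash_password is a hypothetical helper, not part of the original class; the stored salt and digest would replace the password column):

import hashlib, os, binascii

def hash_password(password, salt=None):
    # PBKDF2-HMAC-SHA256; keep the salt next to the digest so logins can re-derive it
    salt = salt or os.urandom(16)
    digest = hashlib.pbkdf2_hmac('sha256', password.encode(), salt, 100_000)
    return binascii.hexlify(salt).decode(), binascii.hexlify(digest).decode()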
Example No. 4
def init_db(path='sqlite://storage.db'):
    '''Connect to DB'''

    global db
    db = DAL(path)
    db = create_tables(db)
    logger.info('initialized DB: ' + str(db))
    db.commit()
    return db
Example No. 5
def model():
    db = DAL('sqlite://todo.db', pool_size=1, folder='./')  # ,migrate=False)
    db.define_table('todos', Field('title'), Field('isCompleted', 'boolean', default=False))
    if not db(db.todos).count():
        # seed the table with 15 rows, then commit once
        for i in range(1, 16):
            db.todos.insert(title='الموعد ' + str(i))
        db.commit()

    return db
Example No. 6
 def testRun(self):
     db = DAL(DEFAULT_URI, check_reserved=["all"])
     db.define_table("tt", Field("vv"))
     vv = "ἀγοραζε"
     id_i = db.tt.insert(vv=vv)
     row = db(db.tt.id == id_i).select().first()
     self.assertEqual(row.vv, vv)
     db.commit()
     drop(db.tt)
     db.close()
Example No. 7
 def testRun(self):
     db = DAL(DEFAULT_URI, check_reserved=['all'])
     db.define_table('tt', Field('vv'))
     db.define_table('ttt', Field('vv'), Field('tt_id', 'reference tt', notnull=True))
     self.assertRaises(Exception, db.ttt.insert, vv='pydal')
     # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
     db.commit()
     drop(db.ttt)
     drop(db.tt)
     db.close()
Example No. 8
 def testRun(self):
     for ref, bigint in [('reference', False), ('big-reference', True)]:
         db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
         db.define_table('tt', Field('vv'))
         db.define_table('ttt', Field('vv'),
                         Field('tt_id', '%s tt' % ref, notnull=True))
         self.assertRaises(Exception, db.ttt.insert, vv='pydal')
         # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
         db.commit()
         drop(db.ttt)
         drop(db.tt)
         db.close()
Example No. 9
 def testRun(self):
     for ref, bigint in [('reference', False), ('big-reference', True)]:
         db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
         if bigint and 'big-id' not in db._adapter.types:
             continue
         db.define_table('tt', Field('vv'))
         db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref,
                                                   notnull=True))
         self.assertRaises(Exception, db.ttt.insert, vv='pydal')
         # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
         db.commit()
         drop(db.ttt)
         drop(db.tt)
         db.close()
Example No. 10
    def test_single_transaction(self):

        db = DAL(DEFAULT_URI)
        db.define_table('tt', Field('aa'))
        self.assertEqual(db(db.tt).count(), 0)
        db.commit()
        try:
            with db.single_transaction():
                db.tt.insert(aa='test')
                1 / 0
        except ZeroDivisionError:
            pass
        self.assertEqual(db(db.tt).count(), 0)
        with db.single_transaction():
            db.tt.insert(aa='test')
        self.assertEqual(db(db.tt).count(), 1)
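single_transaction commits when the block exits normally and rolls back when it raises; a rough equivalent without the context manager:

try:
    db.tt.insert(aa='test')
except Exception:
    db.rollback()
    raise
else:
    db.commit()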
Example No. 11
File: base.py Project: boa-py/pydal
 def testRun(self):
     for ref, bigint in [('reference', False), ('big-reference', True)]:
         db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
         db.define_table('tt', Field('vv'))
         db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref,
                                                   unique=True))
         id_i = db.tt.insert(vv='pydal')
         # Null tt_id
         db.ttt.insert(vv='pydal')
         # first insert is OK
         db.ttt.insert(tt_id=id_i)
         self.assertRaises(Exception, db.ttt.insert, tt_id=id_i)
         # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
         db.commit()
         drop(db.ttt)
         drop(db.tt)
         db.close()
Example No. 12
 def testRun(self):
     for ref, bigint in [('reference', False), ('big-reference', True)]:
         db = DAL(DEFAULT_URI, check_reserved=['all'], bigint_id=bigint)
         if bigint and 'big-id' not in db._adapter.types:
             continue
         db.define_table('tt', Field('vv'))
         db.define_table('ttt', Field('vv'), Field('tt_id', '%s tt' % ref,
                                                   unique=True,
                                                   notnull=True))
         self.assertRaises(Exception, db.ttt.insert, vv='pydal')
         db.commit()
         id_i = db.tt.insert(vv='pydal')
         # first insert is OK
         db.ttt.insert(tt_id=id_i)
         self.assertRaises(Exception, db.ttt.insert, tt_id=id_i)
         # The following is mandatory for backends such as PostgreSQL, to close the aborted transaction
         db.commit()
         drop(db.ttt)
         drop(db.tt)
         db.close()
Example No. 13
    def testRun(self):
        db = DAL(DEFAULT_URI, check_reserved=["all"])
        db.define_table("aa", Field("name"))
        db.define_table("bb", Field("aa", "reference aa"), Field("name"))
        for k in ("x", "y", "z"):
            i = db.aa.insert(name=k)
            for j in ("u", "v", "w"):
                db.bb.insert(aa=i, name=k + j)
        db.commit()
        rows = db(db.aa).select()
        rows.join(db.bb.aa, fields=[db.bb.name], orderby=[db.bb.name])
        self.assertEqual(rows[0].bb[0].name, "xu")
        self.assertEqual(rows[0].bb[1].name, "xv")
        self.assertEqual(rows[0].bb[2].name, "xw")
        self.assertEqual(rows[1].bb[0].name, "yu")
        self.assertEqual(rows[1].bb[1].name, "yv")
        self.assertEqual(rows[1].bb[2].name, "yw")
        self.assertEqual(rows[2].bb[0].name, "zu")
        self.assertEqual(rows[2].bb[1].name, "zv")
        self.assertEqual(rows[2].bb[2].name, "zw")

        rows = db(db.bb).select()
        rows.join(db.aa.id, fields=[db.aa.name])

        self.assertEqual(rows[0].aa.name, "x")
        self.assertEqual(rows[1].aa.name, "x")
        self.assertEqual(rows[2].aa.name, "x")
        self.assertEqual(rows[3].aa.name, "y")
        self.assertEqual(rows[4].aa.name, "y")
        self.assertEqual(rows[5].aa.name, "y")
        self.assertEqual(rows[6].aa.name, "z")
        self.assertEqual(rows[7].aa.name, "z")
        self.assertEqual(rows[8].aa.name, "z")

        rows_json = rows.as_json()
        drop(db.bb)
        drop(db.aa)
        db.close()
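The same parent/child data can also be joined on the server instead of post-processing the Rows object; a sketch:

rows = db(db.bb.aa == db.aa.id).select(
    db.aa.name, db.bb.name, orderby=db.aa.name | db.bb.name)
for r in rows:
    print(r.aa.name, r.bb.name)   # x xu, x xv, ..., z zw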
Example No. 14
class DataBase:

    def __init__(self, dbname='postgres', schema='Acervo', pool_size=5):

        load_dotenv()

        self.dbname = dbname
        self.schema = schema
        self.username = os.getenv("DBUSERNAME")
        self.password = os.getenv("DBPASS")
        self.host = os.getenv("DBHOST")
        self.port = os.getenv("DBPORT")
        self.folder = 'Resources' + os.sep + 'database'

        self.dbinfo = \
            'postgres://' + str(self.username) + ':' + str(self.password) + '@' \
            + str(self.host) + ':' + str(self.port) + '/' + str(self.dbname)

        self.db = DAL(
            self.dbinfo,
            folder=self.folder,
            pool_size=pool_size,
            migrate=False,
            attempts=1
        )
        self.connection = None

    def execute_sql(self, sql, as_dict=True):
        retorno = list()
        try:
            retorno = self.db.executesql(query=sql, as_dict=as_dict)
            self.db.commit()
            logging.debug('[DataBase] status=' + str(True))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] retorno=' + str(retorno))
            prc = True, retorno, str(self.db._lastsql)

        except Exception as e:
            self.db.rollback()
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        except:
            e = 'Unhandled exception'
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception2=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        return prc

    def __conectar_banco__(self):
        try:
            self.connection = self.db.__call__()
        except Exception as e:
            logging.debug('[DataBase] ' + str(e))
        return self

    def definir_schema(self, schema):
        self.schema = schema
        self.execute_sql("SET search_path TO " + self.schema, as_dict=False)

    def fechar_conexao(self):
        self.db.close()
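A usage sketch (assuming DBUSERNAME, DBPASS, DBHOST and DBPORT are provided via the environment or a .env file):

dbase = DataBase(dbname='postgres', schema='Acervo')
ok, rows, last_sql = dbase.execute_sql('SELECT 1 AS one')
if ok:
    print(rows)   # [{'one': 1}]
dbase.fechar_conexao()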
Example No. 15
class DbModel:
    """the abstract layer of vh.

    make the caller need not to care about what and how to storage/fetch data

    Attributes:
        db: string the data base name,just to idenity.

    """

    def __init__(self, db_name="vh"):
        self.db_name = db_name
        folder_path = os.getenv("VH_USER_PATH") or "./vol/vh-user"
        self.folder_path = folder_path
        self._init_db()

    def __del__(self):
        if hasattr(self, "db"):
            self.db.close()

    def _init_db(self):
        if not os.path.exists(self.folder_path):
            os.mkdir(self.folder_path)
        sqlite_uri = f'sqlite://{self.db_name}.sqlite'
        self.db = DAL(sqlite_uri, folder=self.folder_path)
        # TODO: pyDAL automatically creates the id field as an auto-increment primary key; is user_id redundant?
        self.db.define_table('word', Field('user_id'), Field(
            'word'), Field('word_type'), Field('time'))
        self.db.define_table('meta', Field('key'), Field('value'))

        # update_or_insert keeps re-initialization from inserting duplicate db_version rows
        self.db.meta.update_or_insert(self.db.meta.key == 'db_version',
                                      key='db_version', value=DB_VERSION)
        self.db.commit()

        self.init_corpus_table(self.db)

    def _db(self):
        return self.db

    def get_all_unknow_word_by_id(self, id):
        """
        Args:
            id: string user id
        Returns:
            [string]
        """
        return [item.word for item in self.db((self.db.word.user_id == id) & (self.db.word.word_type == "unknow")).select(self.db.word.ALL)]

    def get_all_know_word_by_id(self, id):
        """
        Args:
            id: string user id
        Returns:
            [string]
        """
        return [item.word for item in self.db((self.db.word.user_id == id) & (self.db.word.word_type == "know")).select(self.db.word.ALL)]

    def mark_know_word_by_id(self, id, words):
        """
        Args:
            id: string user id
            words: [string]
        """
        for word in words:
            self.db.word.update_or_insert(
                (self.db.word.word == word) & (self.db.word.user_id == id),
                word=word,
                user_id=id,
                word_type="know",
                time=pytime.asctime(pytime.gmtime()))
        self.db.commit()

    def mark_unknow_word_by_id(self, id, words):
        """
        Args:
            id: string user id
            words: [string]
        """
        for word in words:
            self.db.word.update_or_insert(
                (self.db.word.word == word) & (self.db.word.user_id == id),
                word=word,
                user_id=id,
                word_type="unknow",
                time=pytime.asctime(pytime.gmtime()))
        self.db.commit()

    def init_corpus_table(self, db):
        """
初始化与语料相关的表 也许以后应当是横切的数据库
corpus_meta {
    id,                 # 自动生成主键之类的
    md5,            # corpus 应当是纯文字的感觉
    type,            # corpus类型 设计上有 article subtitle book 之类的
    struct,         # 存储来此corpus的结构 之后查询的时候 通过这个结构和倒排索引 拿到初始范围  {start:xx,end:xxx,type:"root",child:[{start:xx,end:xxx etc}]
    time, 
    source,        # 例如url之类的
    name,
}
corpus {
    id,
    content,
}

user_corpus {
    user_id,
    corpus_id
}

word_invert_index {
    word,
    lemma,
    corpus_id
    position, # left-right
}
        """
        db.define_table("corpus_meta",
                        Field("id", type="id"),
                        Field("md5", type="string", length=255),
                        Field("type", type="string", length=255),
                        Field("struct", type="string", length=10240),  # 应该够了
                        Field("time", 'datetime'),
                        Field("source", type="string", length=1024),
                        Field("name", type="string", length=1024),
                        )
        db.define_table("corpus",
                        Field("corpus_id", type='reference corpus_meta'),
                        Field("content", type="text", length=1024*1024),
                        )

        db.define_table("user_corpus",
                        Field("user_id", type="string"),
                        Field("corpus_id", type="reference corpus_meta")
                        )
        db.define_table("word_invert_index",
                        Field("word", type="string"),
                        Field("lemma", type="string"),
                        Field("corpus_id", type="reference corpus_meta"),
                        Field("span", type="string"),
                        )

    def has_article(self, article):
        record = self.db(self.db.corpus_meta.md5 ==
                         article["md5"]).select().first()
        return record is not None

    def find_article_meta(self, id):
        record = self.db.corpus_meta(id)
        if record is None:
            return None
        record = record.as_dict()
        record["struct"] = json.loads(record["struct"])
        return record

    def find_article(self, id, span):
        data = self.db(self.db.corpus.corpus_id == id).select(
            self.db.corpus.content[span[0]:span[1]]).first()
        raw = list(data._extra.as_dict().values())[0]
        return raw

    def save_article(self, article):
        raw_article = article["article"]
        corpus_meta_data = {"name": article["name"], "source": article["source"], "type": article["type"],
                            "time": article["time"], "struct": article["struct"], "md5": article["md5"]}
        corpus_id = self.db.corpus_meta.insert(**corpus_meta_data)
        self.db.corpus.insert(corpus_id=corpus_id, content=raw_article)
        self.db.commit()

        return corpus_id

    def save_word_invert_index(self, corpus_id, words):
        def map_word(corpus_id, word):
            word["corpus_id"] = corpus_id
            word["span"] = json.dumps(word["span"])
            return word
        words = [map_word(corpus_id, w) for w in words]
        self.db.word_invert_index.bulk_insert(words)
        self.db.commit()

    def connect_user_and_corpus(self, user_id, corpus_id):
        self.db.user_corpus.insert(user_id=user_id, corpus_id=corpus_id)
        self.db.commit()

    def find_word_invert_index(self, user_id, lemma):
        words = self.db(
            (self.db.user_corpus.user_id == user_id)
            & (self.db.word_invert_index.corpus_id == self.db.user_corpus.corpus_id)
            & (self.db.word_invert_index.lemma == lemma)
        ).select(self.db.word_invert_index.ALL)
        for w in words:
            w = w.as_dict()
            del w["id"]
            w["span"] = json.loads(w["span"])
            yield w
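A usage sketch (assuming VH_USER_PATH points at a writable directory and DB_VERSION and pytime are defined in the module):

model = DbModel(db_name="vh")
model.mark_know_word_by_id("user-1", ["agora"])
print(model.get_all_know_word_by_id("user-1"))   # ['agora']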
Example No. 16
class KeyValueStore(object):
    def __init__(self):
        self.db = DAL('sqlite://keyvalstore.db', folder=get_config_dir())
        self.db.define_table(
            'store', Field('key', type='string', required=True, unique=True),
            Field('value', type='blob'),
            Field('version', type='integer', default=0))

    def __del__(self):
        self.db.close()

    def set(self, key, value):
        # We execute raw SQL statements because pyDAL does not appear to support blob values directly
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        if len(res) > 0:
            self.db.executesql('UPDATE store SET value=? where key=?',
                               placeholders=[pickle.dumps(value), key])
        else:
            self.db.executesql(
                'INSERT INTO "store"("key","value") VALUES (?, ?)',
                placeholders=[key, pickle.dumps(value)])
        self.db.commit()

    def get(self, key, default=""):
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        return default if len(res) == 0 else pickle.loads(res[0][0])

    def set_versioned(self, key, value, version):
        response = requests.post(
            'https://dibser.vserverli.de/php/keyvaluestore.php?key={}&version={}'
            .format(key, version),
            data=pickle.dumps(value),
            headers={'Content-Type': 'application/octet-stream'})
        response.raise_for_status()
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        if len(res) > 0:
            self.db.executesql(
                'UPDATE store SET value=?, version=? where key=?',
                placeholders=[pickle.dumps(value), version, key])
        else:
            self.db.executesql(
                'INSERT INTO "store"("key","value", "version") VALUES (?, ?, ?)',
                placeholders=[key, pickle.dumps(value), version])
        self.db.commit()

    def get_versioned(self, key, default=""):
        res = self.db.executesql('SELECT value,version from store where key=?',
                                 placeholders=[key])
        value = None
        newest_version = local_version = 1  # defaults when the key is not stored locally
        if len(res) > 0:
            newest_version = local_version = res[0][1]
            if not local_version: local_version = 1
            value = res[0][0]

        # check remote version and update if remote is newer (status code == 200 if a newer version is available, otherwise status code 404 if not available or 304 if older/same version)
        response = requests.get(
            'https://dibser.vserverli.de/php/keyvaluestore.php?key={}&version={}'
            .format(key, local_version))
        if response.status_code == 200:
            newest_version = int(response.headers['X-Keyvalstore-Version'])
            value = response.content
            print(
                "Remote version is newer for key={}. Local version={} remote version={}"
                .format(key, local_version, newest_version))

        return (default, -1) if value is None else (pickle.loads(value),
                                                    newest_version)
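A usage sketch for the local, unversioned API (assuming get_config_dir returns a writable folder):

store = KeyValueStore()
store.set('ui-theme', {'dark': True})
print(store.get('ui-theme'))                 # {'dark': True}
print(store.get('missing', default=None))    # None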
Example No. 17
class CalDav(object):
  def __init__(self):
    self.db = DAL('sqlite://caldav.db', folder=BaseDirectory.save_data_path('pyhttpserver'))
    self.db.define_table('collections', Field('url', type='string', required=True, unique=True),
                                        Field('displayname', type='string'),
                                        Field('subscribed', type='boolean', default=True))
    self.db.define_table('colitems', Field('href', type='string', required=True),
                                  Field('etag', type='string'),
                                  Field('collection', type='reference collections'),
                                  Field('content', type='blob'),
                                  Field('local_status', type='integer', default=0))
    self.davStorages = {}

  def __del__(self):
    self.db.close()


  def sync_collections(self):
    deletedCollections = set([ x.url for x in self.db(self.db.collections.url != None).select() ])
    cfg = common.get_davconfig()
    for serverCfg in cfg.get('servers', []):
      config = dict(username=serverCfg['username'], password=serverCfg['password'], url=serverCfg['url'])
      config['url'] = requests.get(config['url']).url
      storage = CalDAVStorage(**config)

      # Fetch all locally available collections, then fetch the collections from the server side and add them locally if they are not available yet
      # Afterwards delete all collections from the database that were not available on the server side anymore
      for s in storage.discover(**config):
        subStorage = CalDAVStorage(**s)
        displayname = subStorage.get_meta('displayname')
        deletedCollections.discard(s['url'])
        self.db.collections.update_or_insert(self.db.collections.url==s['url'], url=s['url'], displayname=displayname)
        self.davStorages[s['url']] = subStorage
    for deletedCollection in deletedCollections:
      print("Collection with url={} was deleted on the server side".format(deletedCollection))
      self.db(self.db.collections.url == deletedCollection).delete()
    self.db.commit()


  def sync(self, socketio=None):
    #~ if socketio: socketio.sleep(3)
    self.sync_collections()
    # sync only subscribed collections...
    for collectionRow in self.db(self.db.collections.subscribed == True).select():
      davUrl = collectionRow.url
      davStorage = self.davStorages[davUrl]
      id = collectionRow.id

      # Sync local changes to the server
      self.sync_local_changes(davUrl)

      # get changes from server
      updated = set()
      new = set()
      deleted = set([ x['href']  for x in self.db(self.db.colitems.collection == id).select() ])
      for href,etag in davStorage.list():
        if href in deleted:
          deleted.remove(href)
          if self.db((self.db.colitems.collection == id) & (self.db.colitems.href == href)).select()[0].etag != etag:
            updated.add(href)
        else:
          new.add(href)

      if len(new)>0:     print("New objects at server: ", new)
      if len(updated)>0: print("Updated objects at server: ", updated)
      if len(deleted)>0: print("Deleted objects at server: ", deleted)

      # Delete server deleted items also from local database
      for deletedItem in deleted:
        print("Deleted object with href={} on the server side".format(deletedItem))
        self.db(self.db.colitems.href == deletedItem).delete()
      # Fetch server created/modified items and update in local db
      for href, item, etag in davStorage.get_multi(updated|new):
        self.db.colitems.update_or_insert((self.db.colitems.collection==id) & (self.db.colitems.href==href), href=href, etag=etag, collection=id, content=item.raw, local_status=0)

    for collectionRow in self.db(self.db.collections.subscribed == False).select():
      self.db(self.db.colitems.collection==collectionRow.id).delete()

    self.db.commit()

  def get_collection_id(self, url):
    return self.db(self.db.collections.url == url).select()[0].id


  def sync_local_changes(self, davStorageUrl):
    davStorage = self.davStorages[davStorageUrl]
    collectionId = self.get_collection_id(davStorageUrl)
    for deletedItem in self.db((self.db.colitems.local_status == 2) & (self.db.colitems.collection == collectionId)).select():
      print("Deleting locally removed item with etag={} from server".format(deletedItem.etag))
      davStorage.delete(deletedItem.href, deletedItem.etag)
      deletedItem.delete_record()
    for modifiedItem in self.db((self.db.colitems.local_status == 1) & (self.db.colitems.collection == collectionId)).select():
      print("Updating locally modified item with etag={} at server".format(modifiedItem.etag))
      newEtag = davStorage.update(modifiedItem.href, Item(modifiedItem.content), modifiedItem.etag)
      modifiedItem.update_record(etag=newEtag, local_status=0)
    for newItem in self.db((self.db.colitems.local_status == 3) & (self.db.colitems.collection == collectionId)).select():
      print("Adding a new ical to the server")
      href, etag = davStorage.upload(Item(newItem.content))
      newItem.update_record(etag=etag, href=href, local_status=0)
    self.db.commit()

  def get_subscribed_collections(self):
    return [ {'url': x.url, 'id': x.id, 'name': x.displayname} for x in self.db(self.db.collections.subscribed == True).select() ]

  def get(self, id):
    return vcard21_to_vcard30(vobject.readOne(self.db(self.db.colitems.id == id).select()[0].content))

  def get_structured(self, id):
    vcard = self.get(id)
    ret = {}
    vcardAsDict = vcard.contents
    if 'fn' not in vcardAsDict:
      print("vCard with id={} does not have the field FN set".format(id))
    else:
      if len(vcardAsDict['fn']) > 1: print("vCard with id={} has more than one FN field. Ignoring all but the first".format(id))
      ret['fn'] = vcardAsDict['fn'][0].value
    if 'n' not in vcardAsDict:
      print("vCard with id={} does not have the field N set".format(id))
      ret['n'] = dict(given="",family="",prefix="",suffix="",additional="")
    else:
      if len(vcardAsDict['n']) > 1: print("vCard with id={} has more than one N field. Ignoring all but the first".format(id))
      ret['n'] = vcardAsDict['n'][0].value.__dict__
    if len(vcardAsDict.get('bday', [])) > 1: print("vCard with id={} has more than one BDAY field. Ignoring all but the first".format(id))
    ret['bday'] = ""
    if 'bday' in vcardAsDict and vcardAsDict['bday'][0].value:
      bday_datetime = parse(vcardAsDict['bday'][0].value).replace(tzinfo=None)
      ret['bday'] = '%02d. %02d. %04d' % (bday_datetime.day, bday_datetime.month, bday_datetime.year)
    for a in ['adr','tel','email']:
      for i in range(len(vcardAsDict.get(a, []))):
        setattr(vcardAsDict[a][i], 'orig_id', i)
    for attr in ['adr', 'tel', 'email']:
      ret[attr] = [ {'value': (x.value if type(x.value)==str else x.value.__dict__),
                     'type': [t[2:] if t.lower().startswith('x-') else t for t in x.params.get('TYPE', []) if t.lower() != 'pref'],
                     'pref': (vcard_get_pref_value(x) != 101),
                     'orig_id': x.orig_id
                    } for x in sorted(vcardAsDict.get(attr, []), key=vcard_get_pref_value) ]
    ret['itemid'] = id
    ret['colid'] = self.db(self.db.colitems.id == id).select()[0].collection
    return ret

  def get_serialized(self, id):
    vcard = self.get_structured(id)
    vcard_formatted = 'Name: {}\n'.format(vcard['fn'])
    vcard_formatted += 'Full name: {} {} {} {} {}\n\n'.format(vcard['n']['prefix'],vcard['n']['given'],vcard['n']['additional'],vcard['n']['family'],vcard['n']['suffix'])
    if vcard['bday']: vcard_formatted += 'Birthday: {}\n\n'.format(vcard['bday'])
    for email in vcard['email']:
      vcard_formatted += 'Email {}{}:\n\t{}\n'.format(",".join(email['type']), " (pref)" if email['pref'] else "", email['value'])
    vcard_formatted += '\n'
    for adr in vcard['adr']:
      vcard_formatted += 'Address {}{}:\n\t{}\n\t{} {}\n\t{}\n\n'.format(",".join(adr['type']), " (pref)" if adr['pref'] else "", adr['value']['street'], adr['value']['code'], adr['value']['city'], adr['value']['country'])
    for tel in vcard['tel']:
      vcard_formatted += 'Telephone {}{}:\n\t{}\n'.format(",".join(tel['type']), " (pref)" if tel['pref'] else "", tel['value'])
    return vcard_formatted

  def get_all_items(self):
    def mk_item(db_item):
      ret = {"id": db_item.id, "colid": db_item.collection}
      vItem = self.get_structured(db_item.id)
      ret['fn'] = vItem['fn']
      ret['email'] = "<br />".join((x['value'] for x in vItem['email']))
      ret['tel'] = "<br />".join((x['value'] for x in vItem['tel']))
      ret['bday'] = vItem['bday']
      ret['bday_diff'] = 400
      if vItem['bday']:
        bday_datetime = datetime.strptime(vItem['bday'], "%d. %m. %Y")
        ret['bday_diff'] = (rrule(YEARLY, bymonth=bday_datetime.month, bymonthday=bday_datetime.day).after(bday_datetime, True) - datetime.today()).days
      return ret
    return ( mk_item(x) for x in self.db(self.db.colitems.local_status != 2).select() )

  def _merge_dict_to_vcard(self, itemDict, vcard):
    def get_vcard_item(vcard, item, append=False, cls=str):
      entry = vcard.add(item) if append else getattr(vcard, item) if hasattr(vcard, item) else vcard.add(item)
      if append: entry.value = cls()
      assert cls == type(entry.value)
      return entry
    get_vcard_item(vcard, 'n', cls=vobject.vcard.Name).value.__dict__.update(itemDict['n'])
    get_vcard_item(vcard, 'fn').value = "{0} {1}".format(itemDict['n']['given'], itemDict['n']['family'])
    if itemDict['bday']:
      get_vcard_item(vcard, 'bday').value = datetime.strptime(itemDict['bday'], "%d. %m. %Y").strftime("%Y%m%d")
      del vcard.contents['bday'][1:]
    elif 'bday' in vcard.contents: del vcard.contents['bday']
    for (attr, cls) in [('adr',vobject.vcard.Address), ('tel',str), ('email',str)]:
      entriesBefore = frozenset(range(len(vcard.contents.get(attr, []))))
      entriesNow = frozenset([ x['orig_id'] for x in itemDict[attr] if 'orig_id' in x ])
      for item in itemDict[attr]:
        orig_id = item.get('orig_id', None)
        entry = vcard.contents[attr][orig_id] if orig_id is not None else get_vcard_item(vcard, attr, append=True, cls=cls)
        if (cls == str): entry.value = item['value']
        else: entry.value.__dict__.update(item['value'])
        knownTypes = frozenset(item['type']).intersection(defined_types[attr])
        userTypes = [ 'x-{}'.format(x) for x in frozenset(item['type']).difference(defined_types[attr]) ]
        pref = ['pref'] if item['pref'] else []
        entry.params['TYPE'] = list(knownTypes)+userTypes+pref
        if attr=='adr' and 'LABEL' in entry.params: del entry.params['LABEL']
      for deletedIdx in sorted(entriesBefore.difference(entriesNow), reverse=True): del vcard.contents[attr][deletedIdx]
    d = datetime.now()
    get_vcard_item(vcard, 'rev').value = "%04d%02d%02dT%02d%02d%02d"%(d.year,d.month,d.day,d.hour,d.minute,d.second)
    return vcard

  def update_item(self, id, itemDict, socketio=None):
    self.db(self.db.colitems.id == id).update(content=self._merge_dict_to_vcard(itemDict, self.get(id)).serialize(), local_status=1)
    self.db.commit()
    if socketio: socketio.start_background_task(self.sync, socketio)
    else: self.sync()
    return self.get_structured(id)

  def add_item(self, itemDict, collectionId=1, socketio=None):
    id = self.db.colitems.insert(content=self._merge_dict_to_vcard(itemDict, vobject.vCard()).serialize(), local_status=3, collection=collectionId, href="")
    self.db.commit()
    if socketio: socketio.start_background_task(self.sync, socketio)
    else: self.sync()
    return self.get_structured(id)

  def delete_item(self, id, socketio=None):
    item = self.db(self.db.colitems.id == id).select()[0]
    if item.local_status == 3: item.delete_record()
    else: item.update_record(local_status=2)
    self.db.commit()
    if socketio: socketio.start_background_task(self.sync, socketio)
    else: self.sync()


  def get_collections(self, user, pw, url):
    config = dict(username=user, password=pw, url=url)
    if not pw:
      keyringData = keyring.DbusKeyring().FindItem({'dav-url': url, 'dav-user': user})
      config['password'] = keyringData[1]
    config['url'] = requests.get(config['url']).url # do redirection magic
    storage = CalDAVStorage(**config)
    ret = []
    for s in storage.discover(**config):
      subscribed = False
      subStorage = CalDAVStorage(**s)
      displayname = subStorage.get_meta('displayname')
      dbSet = self.db(self.db.collections.url==s['url'])
      if not dbSet.isempty(): subscribed = dbSet.select()[0].subscribed
      ret.append({'url': s['url'], 'subscribed': subscribed, 'name': displayname})
    return ret

  def update_collection_subscriptions(self, incomingJson):
    for server in incomingJson['servers']:
      for caldav in server['caldav_collections']:
        self.db.collections.update_or_insert(self.db.collections.url==caldav['url'], url=caldav['url'], subscribed=caldav["subscribed"])
Example No. 18
class DBClient:

    log = customlogger(globalconfig.logging_level)

    def __init__(self, uri_db):
        self.log.info("creating instance of DBClient: {0}".format(uri_db))
        self.db = DAL(uri_db, migrate_enabled=False)
        self.wells = self.db.define_table('wells',
                                          Field('uuid'),
                                          Field('project_uuid'),
                                          Field('well_name'),
                                          Field('uwi'),
                                          Field('created_at'),
                                          Field('modified_at'),
                                          primarykey=['uuid'])
        self.clients = self.db.define_table('clients',
                                            Field('uuid'),
                                            Field('company_name'),
                                            Field('created_at'),
                                            Field('modified_at'),
                                            primarykey=['uuid'])
        self.projects = self.db.define_table('projects',
                                             Field('uuid'),
                                             Field('client_uuid'),
                                             Field('name'),
                                             Field('created_at'),
                                             Field('modified_at'),
                                             Field('basin'),
                                             Field('shapefile'),
                                             primarykey=['uuid'])

    @add_post_func_delay(0.5)
    def insert_well(self, *args):
        self.log.info("inserting well into DB..")
        self.wells.insert(uuid=uuid.uuid4(),
                          well_name=args[0],
                          uwi=args[1],
                          created_at=datetime.datetime.now(),
                          modified_at=datetime.datetime.now())
        self.db.commit()

    @add_post_func_delay(0.5)
    def insert_client(self, *args):
        self.log.info("inserting client into DB..")
        self.clients.insert(uuid=uuid.uuid4(),
                            company_name=args[0],
                            created_at=datetime.datetime.now(),
                            modified_at=datetime.datetime.now())
        self.db.commit()

    # TODO insert project has been modified on the front end such that there is no field called project type on DB table
    # def insert_project(self, *args):
    #     self.insert_client(args[0])
    #     c_uuid = self.clients(company_name=args[0])['uuid']
    #     # self.db.projects.insert(uuid=uuid.uuid4(), name=args[1], client_uuid=c_uuid, basin='Midland', created_at=datetime.datetime.now(), modified_at=datetime.datetime.now())
    #     self.projects.insert(uuid=uuid.uuid4(), name=args[1], client_uuid=c_uuid, basin='midland', created_at=datetime.datetime.now(), modified_at=datetime.datetime.now())
    #     self.db.commit()

    @add_post_func_delay(0.5)
    def delete_table(self, tablename):
        self.log.info("deleting table {0}".format(tablename))
        table = self.db[tablename]
        table.truncate(mode='CASCADE')
        self.db.commit()

    def execute_sql(self, sql):
        self.log.info("executing sql: '{0}'".format(sql))
        return self.db.executesql(sql)

    def close(self):
        self.log.info("closing DB instance..")
        self.db.close()
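A usage sketch (the URI and values are illustrative; migrations are disabled, so the tables must already exist):

client = DBClient('postgres://user:pass@localhost/wellsdb')
client.insert_client('Acme Oil')
client.insert_well('Well-1', '42-501-20130')   # name, UWI
print(client.execute_sql('select count(*) from wells'))
client.close()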
Example No. 19
class dataSource:
    db = ""  # Conector Base de datos

    # constructor
    def __init__(self, host, user, passw, database, port, tipo_bd):

        if tipo_bd == "sqlite":
            self.db = DAL("sqlite://" + database + ".db")
        elif tipo_bd == "mysql":
            self.db = DAL("mysql://" + user + ":" + passw + "@" + host + "/" +
                          database + "")
        elif tipo_bd == "postgres":
            self.db = DAL("postgres://" + user + ":" + passw + "@" + host +
                          "/" + database + "")
        elif tipo_bd == "sqlserver":
            self.db = DAL("mssql4://" + user + ":" + passw + "@" + host + "/" +
                          database + "")
        elif tipo_bd == "firebird":
            self.db = DAL("firebird://" + user + ":" + passw + "@" + host +
                          "/" + database + "")
        elif tipo_bd == "oracle":
            self.db = DAL("oracle://" + user + ":" + passw + "@" + host + "/" +
                          database + "")
        elif tipo_bd == "db2":
            self.db = DAL("db2://" + user + ":" + passw + "@" + database + "")
        """
        Ingres	ingres://usuario:contraseña@localhost/nombrebd
        Sybase	sybase://usuario:contraseña@localhost/nombrebd
        Informix	informix://usuario:contraseña@nombrebd
        Teradata	teradata://DSN=dsn;UID=usuario;PWD=contraseña;DATABASE=nombrebd
        Cubrid	cubrid://usuario:contraseña@localhost/nombrebd
        SAPDB	sapdb://usuario:contraseña@localhost/nombrebd
        IMAP	imap://user:contraseña@server:port
        MongoDB	mongodb://usuario:contraseña@localhost/nombrebd
        """

        # Bind to pre-existing table 1.
        self.db.define_table(
            "bienes",
            # Tell pyDAL which column is the primary key.
            Field("id_bienes", type="id"),
            Field("identificacion", type="integer"),
            Field("tipo"),
            Field("serie"),
            Field("marca"),
            Field("codigo", type="integer"),
            Field("nombre"),
            Field("ficha_formacion", type="integer"),
            # Disable migrations.
            migrate=False)
        # Bind to pre-existing table 2.
        self.db.define_table(
            "controles",
            # Tell pyDAL which column is the primary key.
            Field("id_control", type="id"),
            Field("identificacion", type="integer"),
            Field("fechahe", type="datetime"),
            Field("fechahs", type="datetime"),
            Field("control"),
            # Disable migrations.
            migrate=False)
        # Bind to pre-existing table 3.
        self.db.define_table(
            "personal",
            # Tell pyDAL which column is the primary key.
            Field("id_personal", type="id"),
            Field("nombre"),
            Field("identificacion", type="integer"),
            Field("telefono", type="integer"),
            Field("correo"),
            Field("ficha_formacion", type="integer"),
            Field("rol"),
            # Disable migrations.
            migrate=False)

        # Bind to a pre-existing table.
        self.db.define_table(
            "usuario",
            # Tell pyDAL which column is the primary key.
            Field("id_usuario", type="id"),
            Field("user"),
            Field("clave"),
            Field("rol"),
            # Disable migrations.
            migrate=False)
        """ Tipos de datos

        string          text        blob            boolean
        integer         double      decimal(n, m)   date
        time            datetime    password        upload
        reference <tabla>           list:string     list:integer
        list:reference <tabla>      json            bigint
        big-id          big-reference
        """

    def query(self, sql):
        try:
            self.db.executesql(sql)
            self.db.commit()
            return True
        except Exception:
            self.db.rollback()
            return False

    def getData(self, sql):
        q = self.db.executesql(sql)
        self.db.commit()
        return q
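A usage sketch (connection values are illustrative; with tipo_bd='sqlite' only the database argument is used):

ds = dataSource(host='localhost', user='admin', passw='secret',
                database='inventario', port=3306, tipo_bd='sqlite')
if ds.query("UPDATE usuario SET rol='admin' WHERE id_usuario=1"):
    print(ds.getData("SELECT user, rol FROM usuario"))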
Example No. 20
class FileListComparator(object):
    def __init__(self):
        self.tempdb = None

    def trace(self, event, msg):
        logging.info('%s, %s' % (event, msg))

    def prepareTempDatabase(self, tdbname):
        self.trace('prepare-temp-db-start', tdbname)
        if tdbname == 'sqlite:memory:':
            self.tempdb = DAL(tdbname)
            for tableName in ['table1', 'table2', 'table1_unique', 'table2_unique']:
                self.tempdb.define_table(tableName,
                                         Field('fname', type='string', length=256),
                                         Field('fsize', type='integer'),
                                         Field('md5_hash', type='string', length=32),
                                         Field('wtime', type='text'),  # TODO(dan.dolbilov): switch to datetime
                                         Field('fo_path', type='string', length=512),
                                         Field('dk_name', type='string', length=256),
                                         Field('sz_md5_name', type='string')
                                         )
        else:
            self.trace('error', 'ERROR: tempdb <> memory is not implemented')
        self.trace('prepare-temp-db-done', tdbname)
        return self.tempdb

    def prepareSnapshotTableFromImage2011(self, tableName, dbname):
        # count rows in dest temp table
        n = self.tempdb(self.tempdb[tableName].id > 0).count()
        dk_name = dbname.replace('\\', '/').split('/')[-1].replace('.sqlite', '')
        self.trace('prepare-table-start',
                   'table=[%s], count1 = %u, src = [%s], dk_name = [%s]' % (tableName, n, dbname, dk_name))

        # connect to source database
        db1 = DAL(dbname, migrate_enabled=False)
        db1.define_table('Files',
                         Field('fileId', type='integer'),
                         Field('foId', type='integer'),
                         Field('fname', type='text'),
                         Field('fsize', type='integer'),
                         Field('ctime', type='text'),
                         Field('wtime', type='text'),
                         primarykey=['fileId']
                         )
        db1.define_table('FilesMD5',
                         Field('fileId', type='integer'),
                         Field('md5', type='text'),
                         Field('calcTime', type='text'),
                         primarykey=['fileId']
                         )
        db1.define_table('Folders',
                         Field('foId', type='integer'),
                         Field('path', type='text'),
                         Field('scanTime', type='text'),
                         primarykey=['foId']
                         )
        files_with_md5 = db1((db1.Files.fileId == db1.FilesMD5.fileId) & (db1.Files.foId == db1.Folders.foId))

        # copy data from source database to dest temp table
        for x in files_with_md5.select():
            self.tempdb[tableName].insert(
                fname=x.Files.fname, fsize=x.Files.fsize, md5_hash=x.FilesMD5.md5,
                wtime=x.Files.wtime,  # TODO(dan.dolbilov): switch to datetime
                fo_path=x.Folders.path, dk_name=dk_name,
                sz_md5_name='%i_%s_%s' % (x.Files.fsize, x.FilesMD5.md5, x.Files.fname)
            )
        self.tempdb.commit()

        # count rows in dest temp table
        n = self.tempdb(self.tempdb[tableName].id > 0).count()
        self.trace('prepare-table-done', 'table=[%s], count2 = %u' % (tableName, n))

    def compare(self, dbnames1, dbnames2, t1unique, t2unique):
        self.trace('compare-start', '')
        self.prepareTempDatabase('sqlite:memory:')

        if 0:  # clear dest temp tables
            for tableName in ['table1', 'table2', 'table1_unique', 'table2_unique']:
                self.tempdb(self.tempdb[tableName].id > 0).delete()

        for dbname1 in dbnames1:
            self.prepareSnapshotTableFromImage2011('table1', 'sqlite://' + dbname1)
        for dbname2 in dbnames2:
            self.prepareSnapshotTableFromImage2011('table2', 'sqlite://' + dbname2)
        tdb = self.tempdb

        if t1unique:
            uniqCount = uniqSize = 0
            q2 = tdb()._select(tdb.table2.sz_md5_name)
            for x in tdb(~tdb.table1.sz_md5_name.belongs(q2)).select():
                self.trace('table1-unique',
                           'fname = [%s], fsize = %i, md5 = %s, wtime = [%s], fo_path = [%s], dk_name = [%s]' % (
                               x.fname, x.fsize, x.md5_hash, x.wtime, x.fo_path, x.dk_name))
                uniqCount += 1
                uniqSize += x.fsize
                self.tempdb.table1_unique.insert(fname=x.fname, fsize=x.fsize, md5_hash=x.md5_hash,
                                                 wtime=x.wtime, fo_path=x.fo_path, dk_name=x.dk_name,
                                                 sz_md5_name=x.sz_md5_name)
            self.trace('table1-uniq-counters', 'files = %i, bytes = %i' % (uniqCount, uniqSize))

        if t2unique:
            uniqCount = uniqSize = 0
            q1 = tdb()._select(tdb.table1.sz_md5_name)
            for x in tdb(~tdb.table2.sz_md5_name.belongs(q1)).select():
                self.trace('table2-unique',
                           'fname = [%s], fsize = %i, md5 = %s, wtime = [%s], fo_path = [%s], dk_name = [%s]' % (
                               x.fname, x.fsize, x.md5_hash, x.wtime, x.fo_path, x.dk_name))
                uniqCount += 1
                uniqSize += x.fsize
                self.tempdb.table2_unique.insert(fname=x.fname, fsize=x.fsize, md5_hash=x.md5_hash,
                                                 wtime=x.wtime, fo_path=x.fo_path, dk_name=x.dk_name,
                                                 sz_md5_name=x.sz_md5_name)
            self.trace('table2-uniq-counters', 'files = %i, bytes = %i' % (uniqCount, uniqSize))

        n1 = self.tempdb(self.tempdb['table1_unique'].id > 0).count()
        n2 = self.tempdb(self.tempdb['table2_unique'].id > 0).count()
        self.trace('compare-done', 'unique1 = %i, unique2 = %i' % (n1, n2))
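A usage sketch (snapshot file names are illustrative; each is an Image2011-style SQLite snapshot with Files/FilesMD5/Folders tables):

cmp = FileListComparator()
cmp.compare(dbnames1=['disk_a.sqlite'], dbnames2=['disk_b.sqlite'],
            t1unique=True, t2unique=True)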
Example No. 21
class DBConnector:
    def __init__(self):
        self.db = DAL('sqlite://ccaccounts.db')
        self.db.define_table('accounts', Field('name'),
                             Field('account_number', 'integer'),
                             Field('balance', 'integer'),
                             Field('balance_limit', 'integer'))
        self.log = Logger()

    def check_accounts(self, name):
        if self.db(self.db.accounts.name == name).count() == 0:
            self.log.log_message('NAE')
            return False
        else:
            return True

    def add_account(self, name, account_number, balance, balance_limit):
        # check that non zero or negative numbers are being passed
        # in the account number or in the balance limit

        # checks if account name and account number already exist
        # in the DB and returns error if the account exists
        if self.db(self.db.accounts.name == name).count() > 0 or self.db(
                self.db.accounts.account_number == account_number).count() > 0:
            self.log.log_message('AAE')
        else:
            # inserts the row into the DB with the balance limit as the initial balance.
            self.db.accounts.insert(name=name,
                                    account_number=account_number,
                                    balance=balance,
                                    balance_limit=balance_limit)
            self.db.commit()

    def charge_account(self, name, amount):
        # find the row that matches the given name to check the balance limit
        row = self.db(self.db.accounts.name == name).select(
            self.db.accounts.balance, self.db.accounts.balance_limit).first()
        if row is not None:
            # get the sum of the balance and the amount charged
            new_balance = int(amount) + int(row.balance)
            # make sure the balance will not exceed the balance limit
            if new_balance > row.balance_limit:
                self.log.log_message('SAL')
            else:
                self.db(self.db.accounts.name == name).update(
                    balance=new_balance)
                self.db.commit()
        else:
            self.log.log_message('NBRCH')

    def credit_account(self, name, amount):
        # find the row that matches the given name
        row = self.db(self.db.accounts.name == name).select(
            self.db.accounts.balance).first()
        if row is not None:
            # get the difference of the balance and the amount credited
            new_balance = int(row.balance) - int(amount)
            self.db(self.db.accounts.name == name).update(balance=new_balance)
            self.db.commit()
        else:
            self.log.log_message('NBRCR')

    def account_balances(self):
        if __name__ == '__main__':
            for row in self.db().select(self.db.accounts.name,
                                        self.db.accounts.account_number,
                                        self.db.accounts.balance,
                                        orderby=self.db.accounts.name):
                name = row.name
                acct_number = row.account_number
                balance = row.balance
                print "{}, {}: {}".format(name, acct_number, balance)
Example No. 22
if __name__ == '__main__':
    # Connect to the database. By default the SQLite engine is used and a
    # dummy.db file is generated; to save to a file named data.db instead,
    # write DAL("sqlite://data.db")
    db = DAL()
    # Define a table named student; pyDAL automatically adds an
    # auto-increment id column
    db.define_table('student', Field('name'), Field('age'))
    # Insert one row into the student table
    db.student.insert(name="小云", age="20")
    # Persist the data
    db.commit()
    # Query all rows from the student table
    all_students = db(db.student.id > 0).select()
    print(all_students)
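Building on the same table, a short sketch of updating and deleting through a query set:

# update rows matching a query
db(db.student.name == "小云").update(age="21")
# delete the updated rows and persist the change
db(db.student.age == "21").delete()
db.commit()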
Example No. 23
class DataBase:
    def __init__(self,
                 username,
                 password,
                 host='localhost',
                 dbname='postgres',
                 port=5432,
                 pool_size=5):
        self.schema = 'soad'
        self.username = username
        self.host = host
        self.port = port
        self.dbname = dbname
        self.folder = 'Resources' + os.sep + 'database'
        os.environ["PGPASSWORD"] = password
        self.password = password
        self.dbinfo = 'postgres://' + username + ':' + password + '@' + host + ':' + str(
            port) + '/' + self.dbname
        self.db = DAL(self.dbinfo,
                      folder=self.folder,
                      pool_size=pool_size,
                      migrate=False,
                      attempts=1)
        self.connection = None
        self.threadpool = QThreadPool()

    def busca_registro(self,
                       nome_tabela,
                       coluna,
                       valor='',
                       operador='=',
                       filtro=None):

        if filtro == '' or filtro is None:
            filtro = '1=1'

        sql = "select * from " + self.schema + ".fnc_buscar_registro(" \
              + "p_tabela=>" + "'" + nome_tabela + "'" \
              + ", p_coluna=>" + "'" + coluna + "'" \
              + ", p_valor=>" + "'" + valor + "'" \
              + ", p_operador=>" + "'" + operador + "'" \
              + ", p_filtro=>" + "'" + filtro + "'" \
              + ");"

        return self.execute_sql(sql)

    def get_registro(self, fnc, campo, valor):

        sql = "select * from " + self.schema + "." + fnc + "(" \
              + "p_" + campo + "=>" + "'" + str(valor) + "'" \
              + ");"

        return self.execute_sql(sql)

    def call_procedure(self, schema='soad', params=None):
        if not params:
            return
        # Remove empty parameters
        vazio = [name for name, value in params["params"].items() if value == '']

        logging.info('[DataBase] Empty parameters: ' + str(vazio))

        for name in vazio:
            del params["params"][name]

        params = json.dumps(params, ensure_ascii=False)
        sql = "select * from " + schema + ".fnc_chamada_de_metodo(" \
              + "p_json_params=>" + "'" + params + "'" \
            + ");"

        return self.execute_sql(sql)

    def execute_sql(self, sql, as_dict=True):
        retorno = list()
        try:
            retorno = self.db.executesql(query=sql, as_dict=as_dict)
            self.db.commit()
            logging.debug('[DataBase] status=' + str(True))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] retorno=' + str(retorno))
            prc = True, retorno, str(self.db._lastsql)

        except Exception as e:
            self.db.rollback()
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        except:
            e = 'Unhandled exception'
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception2=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        return prc

    def __conectar_banco__(self, progress_callback):
        try:
            self.connection = self.db.__call__()
            #progress_callback.emit(100)
        except Exception as e:
            logging.debug('[DataBase] ' + str(e))
            os.environ["PGPASSWORD"] = ''
            #progress_callback.emit(0)
        return self

    def definir_schema(self, schema):
        self.schema = schema
        self.execute_sql("SET search_path TO " + self.schema, as_dict=False)

    def fechar_conexao(self):
        self.db.close()

    def progress_fn(self, n):
        print("%d%% done" % n)

    def retorno_conexao(self, s):
        self.connection = s

    def thread_complete(self):
        logging.debug('[DataBase] Thread Completed')

    def abrir_conexao(self):
        # Pass the function to execute
        worker = Worker(
            self.db.__call__
        )  # Any other args, kwargs are passed to the run function
        worker.signals.result.connect(self.retorno_conexao)
        worker.signals.finished.connect(self.thread_complete)
        #worker.signals.progress.connect(self.progress_fn)

        # Execute
        self.threadpool.start(worker)
Example No. 24
db.define_table('persons',
	Field('name'),
	Field('age')
)

amy = db.persons.insert( name='Amy', age=52 )
bob = db.persons.insert( name='Bob', age=48 )
cat = db.persons.insert( name='Cat', age=23 )
dan = db.persons.insert( name='Dan', age=17 )
edd = db.persons.insert( name='Edd', age=77 )
fan = db.persons.insert( name='Fan', age=65 )
gin = db.persons.insert( name='Gin', age=27 )
hil = db.persons.insert( name='Hil', age=30 )
iri = db.persons.insert( name='Iri', age=62 )
jac = db.persons.insert( name='Jac', age=18 )
db.commit()

# Export the 'persons' table (str() of a Rows object yields CSV)
with open( 'persons.csv', 'w' ) as f:
	f.write( str(db(db.persons.id).select()) )

# Export only the young persons
with open( 'young-people.csv', 'w') as f:
	people = db( db.persons.age <= 30 ).select()
	f.write( str( people ) )
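pyDAL can also dump every table (definitions and data) in one call; a sketch using the built-in export helper:

# Export the whole database to a single CSV stream
with open( 'db-dump.csv', 'w', encoding='utf-8', newline='' ) as f:
	db.export_to_csv_file(f)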


# Let's make another table and export multiple tables
db.define_table('point2d',
	Field('x',type='double'),
	Field('y',type='double')