def perform_ponyorm_benchmark(database, conn_str, args, benchmark_result):
    """Benchmark Pony ORM CRUD operations against the given backend.

    Timings (seconds, measured with timeit) are written into
    benchmark_result['ponyorm'][database][aspect] for each aspect in
    insert/read/update/delete.

    NOTE: Python 2 code (`unicode`, `__builtin__`).
    """
    host, user, password, db = get_metadata_from_conn_str(conn_str)
    # `db` is rebound here from the schema name to the Database instance.
    db = Database(database, host=host, user=user, passwd=password, db=db)

    class Person(db.Entity):
        name = Required(unicode)
        addresses = Set("Address")

    class Address(db.Entity):
        address = Required(unicode)
        person = Required(Person)

    db.generate_mapping(create_tables=True)

    test_data = test_data_from_args(args)
    assert test_data

    if 'ponyorm' not in benchmark_result:
        benchmark_result['ponyorm'] = dict()
    if database not in benchmark_result['ponyorm']:
        benchmark_result['ponyorm'][database] = dict()
    test_aspects = ['insert', 'read', 'update', 'delete']
    # HACK: inject all locals into builtins so the timeit statement strings
    # below can resolve test_data/Person/Address/db by name when eval'd.
    __builtin__.__dict__.update(locals())
    timeit_funcs = [
        '_{0}_{1}_data(test_data, Person, Address, db)'.format(
            'ponyorm', test_aspect
        )
        for test_aspect in test_aspects
    ]
    for index, tf in enumerate(timeit_funcs):
        rst = timeit.timeit(tf, number=args.num_repeats)
        benchmark_result['ponyorm'][database][test_aspects[index]] = rst
Пример #2
0
def _create_db_schemata(database_file, *schemata):
    """Bind a fresh sqlite Database to *database_file* and register one
    entity class per schema.

    :return: (db, models) where models maps each schema to the db.Entity
        subclass generated for it.
    """
    from pony.orm import Database

    db = Database('sqlite', database_file)
    # Creating the entity types has the side effect of registering them
    # with the ORM; keep a handle on each generated class as well.
    models = {
        schema: type(schema.__name__, (db.Entity,),
                     _pony_schema_from_sheets_schema(schema))
        for schema in schemata
    }
    db.generate_mapping(create_tables=True)
    return db, models
Пример #3
0
    def db(self):
        '''
        :return: The pony orm db instance without db provider binding
        '''
        if not self.__db:
            # First access: create the unbound Database object and cache it.
            self.__db = Database()
        return self.__db
"""
@author: Matheus Souza
"""

from pony.orm import Database, PrimaryKey, Optional, db_session, commit ,CommitException

# MySQL connection for the tweet store (credentials redacted in source).
db = Database(
    'mysql',
    host = 'localhost',
    user = '******',
    password = '',
    database = 'redes_sociais_tt'
)

class Tweet(db.Entity):
    # Auto-incrementing surrogate primary key.
    id = PrimaryKey(int, auto = True)
    #text = Required(str)
    text = Optional(str)
    # presumably a hashtag identifier or count -- TODO confirm
    hashtag = Optional(int)

db.generate_mapping()

@db_session
def getAllTweets():
    """Return every stored Tweet, materialized as a list.

    Materializing inside the db_session is the fix: the original returned
    the lazy ``Tweet.select()`` query, which raises
    ``DatabaseSessionIsOver`` in Pony when the caller iterates it after
    this session has closed.
    """
    return list(Tweet.select())

@db_session
def save(t):
    try:
        _ = Tweet(text=t)
        commit()
Пример #5
0
    def __init__(self,
                 provider=None,
                 user=None,
                 password=None,
                 host=None,
                 port=None,
                 database=None,
                 filename=None,
                 begin=None,
                 end=None,
                 order=None,
                 schema=None,
                 validate=None,
                 version="meta",
                 model=None,
                 meta=None):
        """Bind a pony Database (sqlite/mysql/postgres) for this store and
        generate its entity mapping.

        Every argument falls back to the class-level attribute of the same
        name when falsy (those defaults are declared outside this snippet).
        NOTE(review): `validate` and `version` are accepted but never used
        in this body -- presumably consumed elsewhere; verify.
        """
        self.provider = provider or self.provider
        self.filename = filename or self.filename
        self.user = user or self.user
        self.password = password or self.password
        self.host = host or self.host
        self.port = port or self.port
        self.database = database or self.database
        self.schema = schema or self.schema
        self.begin = begin or self.begin
        self.end = end or self.end
        self.order = order or self.order
        self.model = model or self.model
        self.meta = meta or self.meta

        if self.provider == 'sqlite':
            # Make relative sqlite paths absolute w.r.t. the current directory.
            if not self.filename.startswith('/'):
                self.filename = os.getcwd() + '/' + self.filename

            # NOTE: self.database is rebound here (and in the branches below)
            # from the configured database *name* to the pony Database object.
            self.database = Database(provider=self.provider,
                                     filename=self.filename,
                                     create_db=True)
        elif self.provider == 'mysql':
            self.database = Database(provider=self.provider,
                                     user=self.user,
                                     password=self.password,
                                     host=self.host,
                                     port=self.port,
                                     database=self.database,
                                     charset="utf8mb4")
        elif self.provider == 'postgres':
            self.database = Database(
                provider=self.provider,
                user=self.user,
                password=self.password,
                host=self.host,
                port=self.port,
                database=self.database,
            )
        else:
            # NOTE(review): reports the *argument* `provider`, which may be
            # None even when self.provider holds the unsupported value.
            raise StoreException(f'provider {provider} not supported')

        # The entity/table is named after the concrete subclass.
        self.tablename = self.__class__.__name__

        if not self.model:
            # Default key/value schema: audit timestamps plus JSON payloads.
            self.model = dict(
                id=PrimaryKey(int, auto=True),
                create=Required(datetime,
                                sql_default='CURRENT_TIMESTAMP',
                                default=lambda: datetime.utcnow()),
                update=Required(datetime,
                                sql_default='CURRENT_TIMESTAMP',
                                default=lambda: datetime.utcnow()),
                key=Required(str, index=True, unique=True),
                data=Required(Json, volatile=True, default={}),
                meta=Required(Json, volatile=True, default={}))

        # Build the entity class dynamically and map it to the database.
        self.store = type(self.tablename, (self.database.Entity, ), self.model)
        self.database.generate_mapping(create_tables=True, check_tables=True)
Пример #6
0
from pony.orm import Database

# Database bound to a local sqlite file; the mapping is generated elsewhere.
db = Database()
db.bind('sqlite', 'app.db')

Пример #7
0
 def get_database():
     """Return the process-wide Database singleton, creating it on first use.

     NOTE(review): not thread-safe -- two threads may race on the first
     call; verify callers are single-threaded.
     """
     if not DataBase.instance:
         DataBase.instance = Database()
     return DataBase.instance
Пример #8
0
from dotenv import load_dotenv
from pony.orm import Database


BOT_NAME = "olx"

SPIDER_MODULES = ["olx.spiders"]
NEWSPIDER_MODULE = "olx.spiders"

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# NOTE(review): `os` is used below but not imported in this snippet --
# presumably imported earlier in the full file; verify.
if os.path.exists(".env"):
    load_dotenv(".env")

# Unbound pony Database; binding presumably happens elsewhere using DB_CONFIG.
db = Database()

# Connection settings from the environment (KeyError if any is missing).
DB_CONFIG = {
    "host": os.environ["DB_HOST"],
    "port": os.environ["DB_PORT"],
    "user": os.environ["DB_USER"],
    "pwd": os.environ["DB_PWD"],
}

# Replace scrapy's default user-agent middleware with scrapy-useragents.
DOWNLOADER_MIDDLEWARES = {
    "scrapy.downloadermiddlewares.useragent.UserAgentMiddleware": None,
    "scrapy_useragents.downloadermiddlewares.useragents.UserAgentsMiddleware": 500,
}

USER_AGENTS = [
    (
Пример #9
0
class BandwidthDatabase:
    """
    Simple database that stores bandwidth transactions in Tribler as a work graph.
    """
    # Version stamp written into MiscData on first creation.
    CURRENT_DB_VERSION = 9
    MAX_HISTORY_ITEMS = 100  # The maximum number of history items to store.

    def __init__(self,
                 db_path: Union[Path, type(MEMORY_DB)],
                 my_pub_key: bytes,
                 store_all_transactions: bool = False) -> None:
        """
        Sets up the persistence layer ready for use.
        :param db_path: The full path of the database.
        :param my_pub_key: The public key of the user operating the database.
        :param store_all_transactions: Whether we store all pairwise transactions in the database. This is disabled by
        default and used for data collection purposes.
        """
        self.my_pub_key = my_pub_key
        self.store_all_transactions = store_all_transactions

        self.database = Database()
        # This attribute is internally called by Pony on startup, though pylint cannot detect it
        # with the static analysis.
        # pylint: disable=unused-variable

        @self.database.on_connect(provider='sqlite')
        def sqlite_sync_pragmas(_, connection):
            # Tune every new sqlite connection: write-ahead logging,
            # NORMAL synchronous mode, and in-memory temp storage.
            cursor = connection.cursor()
            cursor.execute("PRAGMA journal_mode = WAL")
            cursor.execute("PRAGMA synchronous = 1")
            cursor.execute("PRAGMA temp_store = 2")
            # pylint: enable=unused-variable

        self.MiscData = misc.define_binding(self.database)
        self.BandwidthTransaction = db_transaction.define_binding(self)
        self.BandwidthHistory = history.define_binding(self)

        if db_path is MEMORY_DB:
            create_db = True
            db_path_string = ":memory:"
        else:
            # Only create the tables when the database file does not exist yet.
            create_db = not db_path.is_file()
            db_path_string = str(db_path)

        self.database.bind(provider='sqlite',
                           filename=db_path_string,
                           create_db=create_db,
                           timeout=120.0)
        self.database.generate_mapping(create_tables=create_db)

        if create_db:
            # Record the schema version on first creation.
            with db_session:
                self.MiscData(name="db_version",
                              value=str(self.CURRENT_DB_VERSION))

    def has_transaction(self, transaction: BandwidthTransactionData) -> bool:
        """
        Return whether a transaction is persisted to the database.
        :param transaction: The transaction to check.
        :return: A boolean value, indicating whether we have the transaction in the database or not.
        """
        # NOTE(review): unlike the other query methods, this one is not wrapped
        # in @db_session -- presumably callers provide the session; verify.
        return self.BandwidthTransaction.exists(
            public_key_a=transaction.public_key_a,
            public_key_b=transaction.public_key_b,
            sequence_number=transaction.sequence_number)

    @db_session
    def get_my_latest_transactions(
            self,
            limit: Optional[int] = None) -> List[BandwidthTransactionData]:
        """
        Return all latest transactions involving you.
        :param limit: An optional integer, to limit the number of results returned. Pass None to get all results.
        :return A list containing all latest transactions involving you.
        """
        results = []
        db_txs = select(tx for tx in self.BandwidthTransaction
                        if tx.public_key_a == self.my_pub_key or tx.public_key_b == self.my_pub_key)\
            .limit(limit)
        for db_tx in db_txs:
            # Convert pony entities to plain data objects before the session
            # closes, so callers can use them afterwards.
            results.append(BandwidthTransactionData.from_db(db_tx))
        return results

    @db_session
    def get_latest_transaction(
            self, public_key_a: bytes,
            public_key_b: bytes) -> BandwidthTransactionData:
        """
        Return the latest transaction between two parties, or None if no such transaction exists.
        :param public_key_a: The public key of the party transferring the bandwidth.
        :param public_key_b: The public key of the party receiving the bandwidth.
        :return The latest transaction between the two specified parties, or None if no such transaction exists.
        """
        db_obj = self.BandwidthTransaction.get(public_key_a=public_key_a,
                                               public_key_b=public_key_b)
        return BandwidthTransactionData.from_db(db_obj) if db_obj else None

    @db_session
    def get_latest_transactions(
            self,
            public_key: bytes,
            limit: Optional[int] = 100) -> List[BandwidthTransactionData]:
        """
        Return the latest transactions of a given public key, or an empty list if no transactions exist.
        :param public_key: The public key of the party transferring the bandwidth.
        :param limit: The number of transactions to return. (Default: 100)
        :return The latest transactions of the specified public key, or an empty list if no transactions exist.
        """
        db_txs = select(tx for tx in self.BandwidthTransaction
                        if public_key in (tx.public_key_a, tx.public_key_b))\
            .limit(limit)
        return [BandwidthTransactionData.from_db(db_txn) for db_txn in db_txs]

    @db_session
    def get_total_taken(self, public_key: bytes) -> int:
        """
        Return the total amount of bandwidth taken by a given party.
        :param public_key: The public key of the peer of which we want to determine the total taken.
        :return The total amount of bandwidth taken by the specified peer, in bytes.
        """
        return sum(transaction.amount
                   for transaction in self.BandwidthTransaction
                   if transaction.public_key_a == public_key)

    @db_session
    def get_total_given(self, public_key: bytes) -> int:
        """
        Return the total amount of bandwidth given by a given party.
        :param public_key: The public key of the peer of which we want to determine the total given.
        :return The total amount of bandwidth given by the specified peer, in bytes.
        """
        return sum(transaction.amount
                   for transaction in self.BandwidthTransaction
                   if transaction.public_key_b == public_key)

    @db_session
    def get_balance(self, public_key: bytes) -> int:
        """
        Return the bandwidth balance (total given - total taken) of a specific peer.
        :param public_key: The public key of the peer of which we want to determine the balance.
        :return The bandwidth balance the specified peer, in bytes.
        """
        return self.get_total_given(public_key) - self.get_total_taken(
            public_key)

    def get_my_balance(self) -> int:
        """
        Return your bandwidth balance, which is the total amount given minus the total amount taken.
        :return Your bandwidth balance, in bytes.
        """
        return self.get_balance(self.my_pub_key)

    @db_session
    def get_num_peers_helped(self, public_key: bytes) -> int:
        """
        Return the number of unique peers that a peer with the provided public key has helped.
        :param public_key: The public key of the peer of which we want to determine this number.
        :return The unique number of peers helped by the specified peer.
        """
        result = list(
            select(
                count(g.public_key_b) for g in self.BandwidthTransaction
                if g.public_key_a == public_key))
        return result[0]

    @db_session
    def get_num_peers_helped_by(self, public_key: bytes) -> int:
        """
        Return the number of unique peers that a peer with the provided public key has been helped by.
        :param public_key: The public key of the peer of which we want to determine this number.
        :return The unique number of peers that helped the specified peer.
        """
        result = list(
            select(
                count(g.public_key_a) for g in self.BandwidthTransaction
                if g.public_key_b == public_key))
        return result[0]

    @db_session
    def get_history(self) -> List:
        """
        Get the history of your bandwidth balance as an ordered list.
        :return A list. Each item in this list contains a timestamp and a balance.
        """
        history = []
        # Ordered by timestamp, oldest first.
        for history_item in self.BandwidthHistory.select().order_by(
                self.BandwidthHistory.timestamp):
            history.append({
                "timestamp": history_item.timestamp,
                "balance": history_item.balance
            })

        return history

    def shutdown(self) -> None:
        """
        Shutdown the database.
        """
        self.database.disconnect()
Пример #10
0
class MakeMigrations:
    """Run schema migrations for the application database.

    Migrations are applied to a temporary copy of the database; the
    original file is only replaced once everything succeeds, and a backup
    is kept on the side.
    """

    def __init__(
            self,
            filename: Union[str, Path],  # path to the database file
            actual_version: Union[
                Version, str] = None,  # current version (as declared in the sources)
            migrations: dict = None,  # pool of available migrations
    ):
        self.actual_version = (actual_version if isinstance(
            actual_version, Version) else Version(actual_version))

        self.old_file = Path(filename)  # database to be migrated

        # Create a temporary database on which to run the migrations.
        tmp = tempfile.NamedTemporaryFile(suffix=".sqlite", delete=False)
        tmp.close()
        self.tmp_file = Path(tmp.name)
        shutil.copy(self.old_file, self.tmp_file)  # duplicate the DB

        # Migration tooling.
        self.tmp_db = Database(provider="sqlite", filename=tmp.name)
        self.schema = Schema(file=self.tmp_db)
        if self.schema.version == self.actual_version:
            logger.info(f"version {self.actual_version}: No migration needed")
            # NOTE: self.migrator is deliberately left unset in this case;
            # __call__ uses hasattr(self, "migrator") to detect it.
            return
        self.migrator = Migrator(self.tmp_db, self.actual_version, migrations)
        logger.info(
            f"starting migrations from version {self.schema.version} to {self.actual_version}"
        )

    def make_migrations(self):
        # Apply all pending migrations to the temporary database.
        self.migrator()

    def check_migrations(self, check_cb: Callable) -> bool:
        """
        Check the migrated data with *check_cb*.
        The check runs on yet another throwaway copy of the database.
        :return: True if success
        """
        if not check_cb:
            return True
        logger.info("Checking migrations...")
        f = tempfile.NamedTemporaryFile(delete=False)
        f.close()
        shutil.copy(self.tmp_file, f.name)
        check_db = Database(provider="sqlite", filename=f.name)
        res = check_cb(check_db)
        check_db.disconnect()  # otherwise unlink fails on Windows
        os.unlink(f.name)
        return res

    def generate_new_mapping(self, generate_cb: Callable):
        """
        Generate the new mapping using pony models, and apply some more migrations.
        Errors are expected to surface as exceptions.
        """
        if not generate_cb:
            return
        logger.info("Generating new mapping...")
        generate_cb(self.tmp_db)
        self.tmp_db.generate_mapping(create_tables=True)
        self.tmp_db.disconnect()

    def _backup_name(self):
        # Backup file name encodes the from/to versions plus a timestamp
        # (':' replaced with '_' so the name is valid on Windows).
        old_schema = Schema(file=self.old_file)
        backup_name = (
            f"mycartable_backup-from_{old_schema.version}"
            f"-to_{self.actual_version}-{datetime.now().isoformat().replace(':','_')}"
        )
        return backup_name

    def backup_old(self):
        """Copy the current database next to itself under a backup name."""
        backup_file = self.old_file.parent / self._backup_name()
        shutil.copy(self.old_file, backup_file)
        return backup_file

    def move_tmp_to_old(self):
        """Replace the old database file with the migrated temporary copy."""
        self.tmp_file.replace(self.old_file)

    def restore_backup(self, backup_file: Path):
        """Put the backup back in place, keeping the failed result aside."""
        fail_name = backup_file.name.replace("backup", "failed_migrate")
        if self.old_file.is_file():
            self.old_file.replace(self.old_file.parent / fail_name)
        backup_file.replace(self.old_file)

    def __call__(self,
                 check_cb: Callable = None,
                 generate_cb: Callable = None) -> bool:
        """
        check_cb: see check_migrations
        generate_cb: see generate_new_mapping
        - back up the current database
        - run the migrations on the temporary database
        - check the data against the schema declared in the sources
        - replace the old database with the new one

        :return: True on success, else False

        """
        if not hasattr(self, "migrator"):
            # no migrator == same version; nothing to migrate, report success
            return True

        backup_file = self.backup_old()

        try:
            self.make_migrations()
            self.generate_new_mapping(generate_cb)
            # NOTE(review): the boolean returned by check_migrations() is
            # ignored -- a failing check only aborts if it raises; verify.
            self.check_migrations(check_cb)
            self.move_tmp_to_old()
        except Exception as err:
            logger.exception(err)
            self.restore_backup(backup_file)
            return False
        return True
Пример #11
0
#!/usr/bin/env python
# coding=utf-8
# [email protected]
from datetime import datetime
from settings import SETTINGS
from pony.orm import Database, Required, Optional, PrimaryKey

# sqlite-backed store; file path comes from project settings.
db = Database()
db.bind('sqlite', SETTINGS['sqlite_file'], create_db=True)

class User(db.Entity):
    # UUID strings (36 chars including hyphens) used as keys.
    uuid = PrimaryKey(str, 36)
    wechat_uuid = Optional(str, 36)
    # presumably a boolean flag stored as a small int -- TODO confirm
    is_valid = Required(int, size=8)
    # timestamp assigned by the database itself
    created_at = Required(datetime, sql_default='CURRENT_TIMESTAMP')


db.generate_mapping(create_tables=True)
Пример #12
0
class DatabaseConfig:
    """Holds the application's pony Database, bound eagerly at class
    definition time to a local sqlite file (created if absent)."""

    db = Database(provider='sqlite', filename='new.db', create_db=True)
Пример #13
0
# coding: utf-8
#
# Copyright 2017 Kirill Vercetti
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from decimal import Decimal
from pony.orm import Database, Required, Optional, Set, PrimaryKey, buffer
# Unbound pony Database; entities below attach to it.
db = Database()

class Customer(db.Entity):
    # email doubles as the natural login key
    email = Required(str, unique=True)
    # NOTE(review): looks like a raw password field -- confirm a hash is stored
    password = Required(str)
    name = Required(str)
    country = Optional(str)
    address = Optional(str)
    cart_items = Set('CartItem')
    orders = Set('Order')

class Product(db.Entity):
Пример #14
0
from pony.orm import Database, Required, Optional, Set, PrimaryKey, LongStr
from pony.orm import ObjectNotFound, DatabaseError
from pony.orm import buffer
from pony.orm import min, avg, sum, count, exists
from pony.orm import db_session
from urllib.parse import urlparse, parse_qsl
from uuid import UUID, uuid4

# Date-stamp of the DB schema -- presumably bumped whenever entities change.
SCHEMA_VERSION = "20200607"


def now():
    """Current local time, truncated to whole seconds."""
    current = datetime.now()
    return current.replace(microsecond=0)


# Separate database holding only key/value metadata.
metadb = Database()


class Meta(metadb.Entity):
    _table_ = "meta"
    key = PrimaryKey(str, 32)
    value = Required(str, 256)


# Main application database (bound elsewhere).
db = Database()


@db.on_connect(provider="sqlite")
def sqlite_case_insensitive_like(db, connection):
    # Ensure LIKE stays case-insensitive on every new sqlite connection.
    cursor = connection.cursor()
    cursor.execute("PRAGMA case_sensitive_like = OFF")
Пример #15
0
# coding: utf-8
from datetime import datetime

from pony.orm import Database, Required

# Local sqlite store for arbitrage ("carry brick") trade records.
db = Database()
db.bind('sqlite', 'carry_brick.db', create_db=True)


class TradeRecord(db.Entity):
    # Snapshot of balances/prices around one arbitrage trade. All amounts
    # are stored as strings (<= 20 chars), presumably to avoid float
    # rounding issues -- TODO confirm.

    create_time = Required(datetime)
    before_cny = Required(str, 20)
    before_ltc = Required(str, 20)
    before_btc = Required(str, 20)
    current_cny = Required(str, 20)
    current_ltc = Required(str, 20)
    current_btc = Required(str, 20)
    profit = Required(str, 20)
    except_profit = Required(str, 20)
    sell_price = Required(str, 20)
    sell_count = Required(str, 20)
    sell_coin = Required(str, 20)
    sell_platform = Required(str, 20)
    buy_price = Required(str, 20)
    buy_count = Required(str, 20)
    buy_coin = Required(str, 20)
    buy_platform = Required(str, 20)


class Record(db.Entity):
Пример #16
0
#!/usr/bin/env python
from pony.orm import Database

db = Database()

# Entities must be imported after `db` exists (so Post can subclass
# db.Entity) but before the mapping is generated.
from .post import Post

db.bind('sqlite', '../db.sqlite', create_db=True)
db.generate_mapping(create_tables=True)

__all__ = ('Post',)
Пример #17
0
def setup_db(tmp_path: Path):
    """Create a fresh sqlite-backed pony Database under *tmp_path*.

    :return: the bound Database together with the path of the db file.
    """
    db_file = tmp_path / 'tmp.db'
    database = Database(provider='sqlite', filename=str(db_file), create_db=True)
    init_orm(database)
    return database, db_file
Пример #18
0
import sys
from datetime import (date, datetime)
from pony.orm import (Database, Required, Optional, Set, db_session, sql_debug, show, select, commit)

# Unbound Database; bound to sqlite below once entities are declared.
db = Database()

# Echo generated SQL to stdout (debugging aid).
sql_debug(True)

class Task(db.Entity):
	# NOTE: this class body is tab-indented in the original source.
	task = Required(str)
	date_created = Required(date)
	# empty until the task has been completed
	date_of_completion = Optional(date)

db.bind('sqlite', filename='Jara.sqlite',create_db=True)
db.generate_mapping(create_tables=True)


def main():
    
    actions = {
    '1': action_find_all,
    '2': action_find_by_pk,
    '3': action_find_by_created,
    '4': action_find_by_date_of_completion,
    '5': action_add_task,
    '6': action_update_task,
    '7': action_add_date_of_completion,
    '8': action_add_date_of_completion,
    'm': action_show_menu,
    'q': action_exit
Пример #19
0
from pony.orm import Database, Required, Json
from bot_example_16.settings import DB_CONFIG

db = Database()
db.bind(**DB_CONFIG)


class UserState(db.Entity):
    """User's state inside a scenario."""
    user_id = Required(str, unique=True)
    scenario_name = Required(str)
    step_name = Required(str)
    # arbitrary per-user scenario data
    context = Required(Json)


class Registration(db.Entity):
    """Registration request."""
    name = Required(str)
    email = Required(str)


db.generate_mapping(create_tables=True)

# NOTE(review): this rebinds the DB_CONFIG name imported above, *after*
# db.bind(**DB_CONFIG) has already run -- presumably sample config left in
# by mistake; verify.
DB_CONFIG = dict(provider='postgres',
                 user='******',
                 password='******',
                 host='localhost',
                 database='vk_chat_bot')
Пример #20
0
    def __init__(self,
                 user=None,
                 password=None,
                 host=None,
                 database=None,
                 port=5432,
                 workpath='/tmp'):
        """
        load all schemas from db with compatible version

        :param user: if None then used from config
        :param password: if None then used from config
        :param host: if None then used from config
        :param database: if None then used from config
        :param port: if None then used from config
        """
        # Only import the config module when at least one parameter is
        # missing and has to be looked up there.
        if user is None or password is None or host is None or database is None or port is None or workpath is None:
            try:
                from config import DB_PASS, DB_HOST, DB_USER, DB_NAME, DB_PORT, WORKPATH
            except ImportError:
                raise ImportError('install config.py correctly')

        if user is None:
            user = DB_USER
        if password is None:
            password = DB_PASS
        if host is None:
            host = DB_HOST
        if database is None:
            database = DB_NAME
        if port is None:
            port = DB_PORT
        if workpath is None:
            workpath = WORKPATH

        # First connect to the configuration schema to discover which
        # CGRdb schemas with a compatible major version exist.
        db_config = Database()
        LazyEntityMeta.attach(db_config, database='CGRdb_config')
        db_config.bind('postgres',
                       user=user,
                       password=password,
                       host=host,
                       database=database,
                       port=port)
        db_config.generate_mapping()

        # mapping: schema name -> bound pony Database
        self.__schemas = {}

        with db_session:
            config = db_config.Config.select(
                lambda x: x.version == major_version)[:]

        for c in config:
            db = Database()
            LazyEntityMeta.attach(db, c.name, 'CGRdb')

            # Propagate per-schema settings onto the entity classes.
            db.Molecule._fragmentor_workpath = db.Reaction._fragmentor_workpath = workpath
            for k, v in c.config.get('molecule', {}).items():
                setattr(db.Molecule, f'_{k}', v)
            for k, v in c.config.get('reaction', {}).items():
                setattr(db.Reaction, f'_{k}', v)

            db.bind('postgres',
                    user=user,
                    password=password,
                    host=host,
                    database=database,
                    port=port)
            db.generate_mapping()
            self.__schemas[c.name] = db
Пример #21
0
 def __init__(self, db_path):
     """Remember *db_path* and initialise the PonyDatabase base class."""
     # parent constructor
     PonyDatabase.__init__(self)
     self.db_path = db_path
Пример #22
0
#!/usr/bin/env python
from datetime import datetime

from redis import Redis

from pony.orm import (Database, db_session, Required, Optional, PrimaryKey,
                      composite_key, Set, select)

db = Database()
# db.bind(provider='sqlite', filename=':memory:')
db.bind(provider='sqlite', filename='test.sqlite3', create_db=True)

# Redis DB 5; decode_responses so values come back as str rather than bytes.
r = Redis(charset="utf-8", decode_responses=True, db=5)
# r = Redis(charset="utf-8", decode_responses=True ,db=redisDbNumber, unix_socket_path=socketPath)


class User(db.Entity):
    # username is the natural primary key
    name = PrimaryKey(str)
    password_hash = Required(str)
    albums = Set('Album')


class Album(db.Entity):
    album_id = PrimaryKey(str)
    band_name = Required(str)
    album_name = Required(str)
    limit = Required(int)
    # album_text = Required(str) # -> text-file
    user = Required('User')
    codes = Set('Code')
Пример #23
0
# -*- coding: utf-8 -*-
import CGRdb
from LazyPony import LazyEntityMeta
from os.path import abspath
from pony.orm import Database
from sys import path

# Make the package importable from the docs build directory.
parent = abspath('..')
if parent not in path:
    path.insert(0, parent)
# Attach CGRdb's lazy entities to a throwaway Database -- presumably so
# autodoc can import the models without a live connection; verify.
LazyEntityMeta.attach(Database(), database='CGRdb')

author = 'Dr. Ramil Nugmanov'
copyright = '2017-2020, Dr. Ramil Nugmanov <*****@*****.**>'
version = '4.0'
project = 'CGRdb'

needs_sphinx = '1.8'
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'm2r', 'nbsphinx'
]

nbsphinx_kernel_name = 'python3'

exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

language = None
pygments_style = 'flasky'
Пример #24
0
from pony.orm import Database, Required, Optional, db_session
from datetime import datetime

# NOTE(review): Flask, ConfigObj, Pony, u, OsmBot, TBot, OsmApi and
# pynominatim are used below but not imported in this snippet --
# presumably imported earlier in the full file; verify.
application = Flask(__name__)
config = ConfigObj('bot.conf')
application.config.update(
    dict(DEBUG=False,
         PONY={
             "provider": "postgres",
             "host": config.get("host", ""),
             "user": config.get("user", ""),
             "password": config.get("password", ""),
             "dbname": config.get("database", "")
         }))

db = Database()
db.bind(**application.config['PONY'])

# models must be imported before the mapping is generated
from bot.models import Stats
db.generate_mapping(create_tables=True)

Pony(application)

if config:
    user = u.User(config.get('host', ''), config.get('database', ''),
                  config.get('user', ''), config.get('password', ''))
    osmbot = OsmBot(config)
    telegram_api = TBot(config['token'])

api = OsmApi()
nom = pynominatim.Nominatim()
Пример #25
0
import logging
import os

from pony.orm import Database

# create database object and establish connection
# NOTE(review): os.getenv returns None for unset variables, so a missing
# env var only fails at bind time; verify that is acceptable.
database: Database = Database()
database.bind(
    provider="mysql",
    host=os.getenv("PAPERBOT.DATABASE.HOST"),
    user=os.getenv("PAPERBOT.DATABASE.USER"),
    passwd=os.getenv("PAPERBOT.DATABASE.PASSWD"),
    db=os.getenv("PAPERBOT.DATABASE.DB"),
    charset="utf8mb4",
)

# create logs folder
if not os.path.exists("logs/"):
    os.mkdir("logs/")

# discord log config: route discord.py's log output to its own file
handler = logging.FileHandler(filename="logs/discord.log",
                              encoding="utf-8",
                              mode="w")
handler.setFormatter(
    logging.Formatter("%(asctime)s:%(levelname)s:%(name)s: %(message)s"))

logger = logging.getLogger("discord")
logger.addHandler(handler)

# paperbot log config
Пример #26
0
from sqlitedict import SqliteDict
from elasticsearch import Elasticsearch
from redis import StrictRedis
import functools
from pony.orm import Database

import oss2
from . import config

journal_mode = 'WAL'  # Write-Ahead Logging
# SqliteDict factory with autocommit and WAL pre-applied.
sdict = functools.partial(SqliteDict,
                          autocommit=True,
                          journal_mode=journal_mode)

arxiv_db = sdict(config.ARIXV_DB_PATH)

es = Elasticsearch(hosts=config.ES_HOSTS)

pdf_db = sdict(config.PDF_DB_PATH)
pdf_thumbnail_db = sdict(config.PDF_THUMBNAIL_DB_PATH)
pdf_text_db = sdict(config.PDF_TEXT_DB_PATH)

# Unbound pony Database -- presumably bound elsewhere; verify.
aio_db = Database()

oss_arixv_auth = oss2.Auth(config.OSS2_ACCESS_KEY_ID,
                           config.OSS2_ACCESS_KEY_SECRET)
oss_arxiv_bucket = oss2.Bucket(oss_arixv_auth, config.OSS2_ARXIV_ENDPOINT,
                               config.OSS2_ARXIV_BUCKET_NAME)

redis = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
Пример #27
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from pony.orm import Database
from openapi.config.config import NirvanaConfig

# Connection parameters come from Nirvana's postgres configuration.
provider, host, user, password, database = NirvanaConfig().postgres_config()
db = Database(provider=provider,
              user=user,
              password=password,
              host=host,
              database=database)
Пример #28
0
from decimal import Decimal
from pony.orm import Database, Required, Set, PrimaryKey, sql_debug
from public_transport_analyser.database.db_details import dbhost, dbusername, dbpassword, dbname

# What is up with the scoping of this..
# Previously used backends, kept for reference:
#db = Database("sqlite", "database.sqlite", create_db=False)
#db = Database('postgres', user=dbusername, password=dbpassword, host='localhost', database=dbname)
# Module-wide Pony ORM database bound to the MySQL server from db_details.
db = Database('mysql', host=dbhost, user=dbusername, password=dbpassword, database=dbname)


def init():
    """Silence SQL echo and build the ORM mapping.

    create_tables adds any missing tables; check_tables verifies that
    existing tables match the declared entities.
    """
    sql_debug(False)
    db.generate_mapping(check_tables=True, create_tables=True)


def create():
    """Build the ORM mapping, creating tables that do not yet exist."""
    db.generate_mapping(create_tables=True)


class Origin(db.Entity):
    """A trip origin, keyed by its location string."""
    # NOTE(review): the exact location format (e.g. "lat,lng") is not
    # visible in this module -- confirm against the code that creates rows.
    location = PrimaryKey(str)
    destinations = Set("Destination")  # one-to-many link to Destination


class Destination(db.Entity):
    """
    This class maps back only one origin - yet, you could feasibly have
    more than one origin pointing to the same destination (implying a
    many to many relationship). However, at the moment, I can't see a
    reason why it would be better do than the current setup, and one
    imagines you would only care about where you start, rather than all
Пример #29
0
from flask import Flask, render_template
from pony.orm import Database
from flask_wtf.csrf import CSRFProtect

app = Flask(__name__)
# Loads DB_TYPE / DB_CONFIG (and any other Flask settings) from JSON.
app.config.from_json('../config.json')

# Enable CSRF protection for all modifying requests.
CSRFProtect(app)

# Unbound Pony ORM database; entity classes attach to it on import below.
db = Database()

# Import order matters: entities must subclass db.Entity before db.bind /
# generate_mapping run, and views need `app` to exist.
from . import entities, views

db.bind(app.config['DB_TYPE'], **app.config['DB_CONFIG'])
db.generate_mapping(create_tables=True)
Пример #30
0
 def __init__(self):
     """Create the shared Database on first instantiation (singleton)."""
     # Only the first call populates the class-level slot; later
     # instantiations reuse it.
     if not DataBase.instance:
         DataBase.instance = Database()
Пример #31
0
import os
import re
from datetime import datetime

from pony.orm import Database, Required, Optional, PrimaryKey, db_session, select, count

# Parse a Heroku-style DATABASE_URL and bind the module-wide Pony ORM
# database to the referenced Postgres server.
databaseUrl = os.environ.get("DATABASE_URL")
if not databaseUrl:
    # Fail fast with a clear message instead of the opaque TypeError that
    # re.search(pattern, None) would raise.
    raise RuntimeError("DATABASE_URL environment variable is not set")
matcher = re.search(
    'postgres://(?P<user>.+):(?P<password>.+)@(?P<host>.+):(?P<port>.+)/(?P<database>.+)',
    databaseUrl)
if matcher is None:
    # Previously this fell through to an AttributeError on matcher.group().
    raise RuntimeError("DATABASE_URL is not a recognizable postgres:// URL")

db = Database()
db.bind(provider='postgres',
        user=matcher.group('user'),
        password=matcher.group('password'),
        host=matcher.group('host'),
        port=matcher.group('port'),
        database=matcher.group('database'))


class MessageEntity(db.Entity):
    """A stored chat message, uniquely identified by (chat_id, message_id)."""
    _table_ = 'message_entity'
    chat_id = Required(int)     # chat the message belongs to
    message_id = Required(int)  # message id within that chat
    user_id = Required(int)     # author of the message
    date = Optional(datetime)   # send time; may be absent
    text = Optional(str)        # message body; may be absent
    # word_usages = Set(lambda: WordUsage, reverse='message')
    PrimaryKey(chat_id, message_id)

Пример #32
0
class Store(object):
    """Dict-like persistent store backed by a single Pony ORM table.

    Attribute and item access on an instance is routed through the
    database: only names listed in ``_safe_attrs`` (or names starting
    with an underscore) behave as ordinary Python attributes; every
    other name is treated as a row key or filter expression.
    """
    # Names that bypass the DB-backed attribute protocol implemented by
    # __setattr__/__getattribute__ below.
    _safe_attrs = [
        'store', 'database', 'tablename', 'begin', 'end', 'order', 'add',
        'register_attr', 'slice', 'adjust_slice', 'provider', 'query_key',
        'count', 'desc', 'asc', 'query_meta', 'update_meta', 'delete_meta',
        'provider', 'user', 'password', 'host', 'port', 'database', 'filename',
        'schema', 'validate', 'model', 'meta', 'search', 'delete', 'create',
        'update', 'search_multi', 'search_return'
    ]

    # Connection defaults; constructor arguments override them per instance.
    provider = 'sqlite'
    user = '******'
    password = '******'
    host = 'localhost'
    port = 5432
    database = 'test'
    filename = 'database.sqlite'
    order = 'desc'  # default result ordering direction
    schema = None   # mapping of schema_version -> validation schema
    begin = None    # default slice lower bound for query results
    end = None      # default slice upper bound for query results
    model = None    # optional custom Pony attribute dict for the entity
    meta = {}       # default metadata attached to new rows

    def __init__(self,
                 provider=None,
                 user=None,
                 password=None,
                 host=None,
                 port=None,
                 database=None,
                 filename=None,
                 begin=None,
                 end=None,
                 order=None,
                 schema=None,
                 validate=None,
                 version="meta",
                 model=None,
                 meta=None):
        """Connect to the database and create/verify the backing table.

        Any argument left as None (or falsy) falls back to the class-level
        default. Raises StoreException for an unsupported provider.

        NOTE(review): ``validate`` and ``version`` are accepted but never
        used in this constructor -- confirm whether they are dead
        parameters.
        """
        self.provider = provider or self.provider
        self.filename = filename or self.filename
        self.user = user or self.user
        self.password = password or self.password
        self.host = host or self.host
        self.port = port or self.port
        self.database = database or self.database
        self.schema = schema or self.schema
        self.begin = begin or self.begin
        self.end = end or self.end
        self.order = order or self.order
        self.model = model or self.model
        self.meta = meta or self.meta

        if self.provider == 'sqlite':
            # Pony expects an absolute path for sqlite database files.
            if not self.filename.startswith('/'):
                self.filename = os.getcwd() + '/' + self.filename

            self.database = Database(provider=self.provider,
                                     filename=self.filename,
                                     create_db=True)
        elif self.provider == 'mysql':
            # NOTE: self.database is rebound here from the database *name*
            # string to the pony Database object itself.
            self.database = Database(provider=self.provider,
                                     user=self.user,
                                     password=self.password,
                                     host=self.host,
                                     port=self.port,
                                     database=self.database,
                                     charset="utf8mb4")
        elif self.provider == 'postgres':
            self.database = Database(
                provider=self.provider,
                user=self.user,
                password=self.password,
                host=self.host,
                port=self.port,
                database=self.database,
            )
        else:
            raise StoreException(f'provider {provider} not supported')

        # One table per subclass: the entity is named after the class.
        self.tablename = self.__class__.__name__

        if not self.model:
            # Default row model: auto id, create/update timestamps, a
            # unique string key, and JSON data/meta payloads.
            self.model = dict(
                id=PrimaryKey(int, auto=True),
                create=Required(datetime,
                                sql_default='CURRENT_TIMESTAMP',
                                default=lambda: datetime.utcnow()),
                update=Required(datetime,
                                sql_default='CURRENT_TIMESTAMP',
                                default=lambda: datetime.utcnow()),
                key=Required(str, index=True, unique=True),
                data=Required(Json, volatile=True, default={}),
                meta=Required(Json, volatile=True, default={}))

        # Dynamically build the entity class and create/verify its table.
        self.store = type(self.tablename, (self.database.Entity, ), self.model)
        self.database.generate_mapping(create_tables=True, check_tables=True)

    def slice(self, begin, end):
        """Remember default slice bounds later applied by adjust_slice."""
        self.begin = begin
        self.end = end

    def desc(self):
        """Use descending (newest-first) ordering for subsequent queries."""
        self.order = 'desc'

    def asc(self):
        """Use ascending (oldest-first) ordering for subsequent queries."""
        self.order = 'asc'

    @staticmethod
    def register_attr(name):
        """Whitelist *name* as a plain attribute, bypassing the DB protocol."""
        if not isinstance(name, str):
            return
        if name not in Store._safe_attrs:
            Store._safe_attrs.append(name)

    @db_session(retry=3)
    def __setattr__(self, key, data):
        """Persist *data* under *key*; safe names remain plain attributes.

        Retries the transaction up to 3 times on conflict (db_session).
        """
        # Safe names and underscore-prefixed names behave normally.
        if key in Store._safe_attrs or key.startswith('_'):
            return super().__setattr__(key, data)

        self.validate(data, meta=self.meta)
        # Upsert: update the existing row or insert a new one.
        item = select(e for e in self.store if e.key == key).first()
        if item is None:
            self.store(key=key, data=data, meta=self.meta)
        else:
            item.data = data
            item.update = datetime.utcnow()

    @db_session
    def __getattribute__(self, key):
        """Fetch *key* from the DB unless it is a plain attribute name.

        Returns a StoreMeta wrapper for the matching row, or None when no
        row has that key.
        """
        if key in Store._safe_attrs or key.startswith('_'):
            return object.__getattribute__(self, key)

        # Every other attribute access turns into an exact-key lookup.
        elem = select(e for e in self.store if e.key == key).first()
        if elem:
            self.validate(elem.data, meta=elem.meta)
            return StoreMeta(elem, store=self)
        return None

    @db_session
    def count(self, key):
        """Return the number of rows matching the filter expression *key*.

        Raises StoreException for slice keys (not implemented).
        """
        if isinstance(key, slice):
            raise StoreException('not implemented!')
        elif isinstance(key, tuple):
            # Tuple keys address nested fields: ('a', 'b') -> 'a.b'.
            key = '.'.join(key)

        # string key
        # parse() builds a pony filter; a falsy result means "match all".
        filters = parse(key)
        elems = select(e for e in self.store)
        if filters:
            elems = elems.filter(filters)
        return elems.count()

    @db_session
    def __getitem__(self, key, for_update=False):
        """Return StoreMetas for all rows matching *key*.

        NOTE(review): ``for_update`` cannot be supplied through ``[]``
        syntax; it is only usable when calling __getitem__ directly.
        """
        if isinstance(key, slice):
            raise StoreException('not implemented!')
        elif isinstance(key, tuple):
            # Tuple keys address nested fields: ('a', 'b') -> 'a.b'.
            key = '.'.join(key)

        # string key
        filters = parse(key)
        elems = select(e for e in self.store)
        if filters:
            elems = elems.filter(filters)
        # Order by update time then id, direction controlled by self.order.
        if self.order == 'desc':
            elems = elems.order_by(lambda o: (desc(o.update), desc(o.id)))
        else:
            elems = elems.order_by(lambda o: (o.update, o.id))
        elems = self.adjust_slice(elems, for_update=for_update)
        for elem in elems:
            self.validate(elem.data, meta=elem.meta, extra=elem.key)
        return StoreMetas(elems, store=self)

    @db_session(retry=3)
    def __setitem__(self, key, data):
        """Set *data* on rows matching *key*.

        A plain-identifier key upserts a single row by exact key (safe
        names fall back to ordinary attribute assignment). Any other key
        is parsed as a filter expression and every matching row (after
        begin/end slicing) is updated; when nothing matches, the key is
        treated as an exact key and upserted.

        Raises StoreException for slice keys (not implemented).
        """
        if isinstance(key, slice):
            raise StoreException('not implemented!')
        elif isinstance(key, tuple):
            # Tuple keys address nested fields: ('a', 'b') -> 'a.b'.
            key = '.'.join(key)

        self.validate(data, meta=self.meta)

        if key.isidentifier():
            if key in Store._safe_attrs or key.startswith('_'):
                return super().__setattr__(key, data)

            item = select(e for e in self.store if e.key == key).first()
            if item is None:
                self.store(key=key, data=data, meta=self.meta)
            else:
                item.data = data
                item.update = datetime.utcnow()
            return

        filters = parse(key)
        elems = select(e for e in self.store)
        if filters:
            elems = elems.filter(filters)
        # BUG FIX: this previously read ``self.order_by`` -- an attribute
        # that does not exist, so the overridden __getattribute__ issued a
        # spurious DB lookup for key 'order_by' and the 'desc' branch was
        # effectively never taken. The real setting is ``self.order``.
        if self.order == 'desc':
            elems = elems.order_by(lambda o: (desc(o.update), desc(o.id)))
        else:
            elems = elems.order_by(lambda o: (o.update, o.id))
        elems = self.adjust_slice(elems, for_update=True)
        if elems:
            now = datetime.utcnow()
            for elem in elems:
                elem.data = data
                elem.update = now
        else:
            # for key like 'Pod_xxx-xxx-xxx
            item = select(e for e in self.store if e.key == key).first()
            if item is None:
                self.store(key=key, data=data, meta=self.meta)
            else:
                item.data = data
                item.update = datetime.utcnow()

    @db_session
    def __delitem__(self, key):
        """Delete every row matching *key* (string, tuple, or filter expr).

        Raises StoreException for slice keys (not implemented).
        """
        if isinstance(key, slice):
            raise StoreException('not implemented!')
        elif isinstance(key, tuple):
            key = '.'.join(key)
        filters = parse(key)
        elems = select(e for e in self.store)
        if filters:
            elems = elems.filter(filters)
        # BUG FIX: previously read the nonexistent ``self.order_by``, which
        # triggered a spurious DB lookup through __getattribute__; the real
        # setting is ``self.order``.
        if self.order == 'desc':
            elems = elems.order_by(lambda o: (desc(o.update), desc(o.id)))
        else:
            elems = elems.order_by(lambda o: (o.update, o.id))
        if elems:
            for elem in elems:
                # self.validate(elem.data, meta=elem.meta)
                elem.delete()
        return

    @db_session
    def __delattr__(self, key):
        """Delete the row whose key equals *key* (no-op when absent)."""
        delete(e for e in self.store if e.key == key)

    @db_session
    def query_key(self, key, for_update=False):
        """Return the StoreMeta for the exact *key*, or None if absent.

        With for_update=True the row is locked (SELECT ... FOR UPDATE).
        """
        query = select(e for e in self.store if e.key == key)
        if for_update:
            query = query.for_update()
        elem = query.first()
        if not elem:
            return None
        self.validate(elem.data, meta=elem.meta)
        return StoreMeta(elem, store=self)

    @db_session
    def query_meta(self, key, for_update=False):
        """Return StoreMetas for rows whose *meta* matches the filter *key*.

        Same shape as __getitem__, but parse() is told to filter on the
        ``meta`` column instead of ``data``.
        """
        if isinstance(key, slice):
            raise StoreException('not implemented!')
        elif isinstance(key, tuple):
            # Tuple keys address nested fields: ('a', 'b') -> 'a.b'.
            key = '.'.join(key)

        # string key
        filters = parse(key, "meta")
        elems = select(e for e in self.store)
        if filters:
            elems = elems.filter(filters)
        # Order by update time then id, direction controlled by self.order.
        if self.order == 'desc':
            elems = elems.order_by(lambda o: (desc(o.update), desc(o.id)))
        else:
            elems = elems.order_by(lambda o: (o.update, o.id))
        elems = self.adjust_slice(elems, for_update=for_update)
        for elem in elems:
            self.validate(elem.data, extra=elem.key, meta=elem.meta)
        return StoreMetas(elems, store=self)

    def adjust_slice(self, elems, for_update=False, begin=None, end=None):
        """Apply begin/end slicing (and optional FOR UPDATE) to a query.

        Falsy arguments fall back to self.begin/self.end; negative bounds
        count from the end of the result set. Returns a materialized list.

        NOTE(review): a bound of 0 is indistinguishable from "unset"
        because of the ``or``/truthiness tests below -- confirm intended.
        """
        if for_update:
            elems = elems.for_update()
        begin, end = begin or self.begin, end or self.end

        # len(self) issues a COUNT query; used to resolve negative bounds.
        length = len(self)
        if begin and end:
            # pony doesn't support step here
            if begin < 0:
                begin = length + begin
            if end < 0:
                end = length + end
            if begin > end:
                begin, end = end, begin
            elems = elems[begin:end]
        elif begin:
            if begin < 0:
                begin = length + begin
            elems = elems[begin:]
        elif end:
            if end < 0:
                end = length + end
            elems = elems[:end]
        else:
            # No bounds: materialize the full result set.
            elems = elems[:]
        return elems

    @db_session
    def __len__(self):
        """Return the total number of rows in the backing table."""
        return count(e for e in self.store)

    #### explicit crud
    def validate(self, data, meta=None, extra=None):
        """Validate *data* against the schema referenced by *meta*.

        meta['schema_version'] selects an entry in self.schema and
        meta['schema_type'] picks the validator ('cerberus' or
        'jsonschema'). Raises StoreException on any failure; no-op when
        SCHEMA_CHECK is disabled or *meta* is falsy. *extra* (usually the
        row key) is appended to error messages for context.
        """
        if SCHEMA_CHECK and meta:
            schema_version = meta.get("schema_version")
            schema_type = meta.get("schema_type")
            schema = self.schema.get(schema_version)

            # Unwrap pony tracked containers before deepcopy/validation.
            if isinstance(data, TrackedValue):
                data = data.get_untracked()
            if isinstance(schema, TrackedValue):
                schema = schema.get_untracked()

            if schema_type == 'cerberus':
                validator = Validator()
                r = validator.validate(deepcopy(data), schema)
                if not r:
                    # BUG FIX: messages previously interpolated schema_type
                    # twice ('{schema_type}:{schema_type}'); report the
                    # version:type pair instead.
                    if extra:
                        raise StoreException(
                            f'{schema_version}:{schema_type} {validator.errors}, extra: {extra}'
                        )
                    raise StoreException(
                        f'{schema_version}:{schema_type} {validator.errors}')
            elif schema_type == 'jsonschema':
                validator = jsonschema
                try:
                    validator.validate(data, schema)
                except jsonschema.exceptions.ValidationError as e:
                    if extra:
                        raise StoreException(
                            f'{schema_version}:{schema_type} {e}, extra: {extra}')
                    raise StoreException(f'{schema_version}:{schema_type} {e}')
            else:
                raise StoreException(f'schema type invalid: {schema_type}')

    @db_session
    def add(self, data, meta=None, key=None):
        """Insert *data* under *key*, or under a generated STORE_<uuid> key.

        On a key collision a fresh uuid key is tried once; a second
        collision raises StoreException. Returns the key used.
        """
        meta = meta or self.meta
        self.validate(data, meta=meta)

        if not isinstance(key, str):
            key = f"STORE_{uuid.uuid1().hex}"
        for attempt in range(2):
            if select(e for e in self.store if e.key == key).first() is None:
                break
            if attempt == 1:
                raise StoreException('add failed')
            # Collision: regenerate and retry once.
            key = f"STORE_{uuid.uuid1().hex}"
        self.store(key=key, data=data, meta=meta)
        return key

    @db_session
    def create(self, key, data, meta=None, update=True):
        """Upsert a row under the exact *key*.

        Inserts when absent; when present, overwrites data/meta if
        *update* is true, otherwise raises StoreException. Returns the key.
        """
        if not meta:
            meta = self.meta

        self.validate(data, meta=meta)

        elem = select(e for e in self.store
                      if e.key == key).for_update().first()
        if elem is None:
            # BUG FIX: the insert previously stored ``self.meta`` even when
            # a caller-supplied *meta* had been resolved above, unlike the
            # update branch. Use the resolved value consistently.
            self.store(key=key, data=data, meta=meta)
        else:
            if update:
                elem.data = data
                elem.meta = meta
                elem.update = datetime.utcnow()
            else:
                detail = f'elem existed, key: {key}'
                raise StoreException(detail)
        return key

    @db_session
    def update(self,
               condition,
               data=None,
               meta=None,
               on='data',
               fuzzy=True,
               patch='jsonpath',
               force=False,
               begin=None,
               end=None,
               debug=False):
        """Patch data and/or meta of every row matching *condition*.

        patch='jsonpath' treats keys of *data*/*meta* as dotted paths and
        applies callable values to the current value at that path;
        patch='nest' deep-merges nested dicts; any other value replaces
        the payload wholesale. force=True skips schema validation.

        Returns (keys, count) for the rows touched.

        NOTE: begin/end previously defaulted to the class attributes
        (both None) evaluated at class-definition time; plain None
        defaults are equivalent and clearer.
        """
        elems, _ = self.search(condition,
                               mode='raw',
                               on=on,
                               fuzzy=fuzzy,
                               debug=debug)
        elems = self.adjust_slice(elems, for_update=True, begin=begin, end=end)
        keys = []
        count = 0
        for elem in elems:
            if data:
                if patch == 'jsonpath':
                    copied = deepcopy(elem.data)
                    for key, value in data.items():
                        # BUG FIX: previously only plain functions
                        # (types.FunctionType) were treated as transforms
                        # here while the meta branch accepted any callable;
                        # unify on callable().
                        if callable(value):
                            current_value = get_json_value(copied, key)
                            value = value(current_value)
                        set_json_value(copied, key, value)
                    if not force:
                        self.validate(copied, meta=elem.meta, extra=elem.key)
                    elem.data = copied
                    elem.update = datetime.utcnow()
                elif patch == 'nest':
                    copied = deepcopy(elem.data)
                    update_nest(copied, data)
                    if not force:
                        self.validate(copied, meta=elem.meta, extra=elem.key)
                    elem.data = copied
                    elem.update = datetime.utcnow()
                else:
                    # Wholesale replacement.
                    if not force:
                        self.validate(data, meta=elem.meta, extra=elem.key)
                    elem.data = data
                    elem.update = datetime.utcnow()
            if meta:
                if patch == 'jsonpath':
                    copied = deepcopy(elem.meta)
                    for key, value in meta.items():
                        if callable(value):
                            current_value = get_json_value(copied, key)
                            value = value(current_value)
                        set_json_value(copied, key, value)
                    if not force:
                        self.validate(elem.data, meta=copied, extra=elem.key)
                    elem.meta = copied
                    elem.update = datetime.utcnow()
                elif patch == 'nest':
                    copied = deepcopy(elem.meta)
                    update_nest(copied, meta)

                    if not force:
                        self.validate(elem.data, meta=copied, extra=elem.key)
                    elem.meta = copied
                    elem.update = datetime.utcnow()
                else:
                    if not force:
                        self.validate(elem.data, meta=meta, extra=elem.key)
                    elem.meta = meta
                    elem.update = datetime.utcnow()
            count += 1
            keys.append(elem.key)
        return keys, count

    @db_session
    def delete(self,
               condition,
               on='key',
               fuzzy=True,
               begin=None,
               end=None,
               debug=False):
        """Delete every row matching *condition*; return (keys, count)."""
        # BUG FIX: the search call previously hard-coded fuzzy=True,
        # silently ignoring the ``fuzzy`` parameter.
        elems, _ = self.search(condition,
                               on=on,
                               mode='raw',
                               fuzzy=fuzzy,
                               debug=debug)
        elems = self.adjust_slice(elems, for_update=True, begin=begin, end=end)
        keys = []
        count = 0
        for elem in elems:
            elem.delete()
            count += 1
            keys.append(elem.key)
        return keys, count

    @db_session
    def search_return(self, elems, mode, order_by, order, for_update, begin,
                      end, force, debug):
        """Finalize a search query according to *mode*.

        mode='raw'   -> (unexecuted query, -1)
        mode='count' -> ([], row count)
        otherwise    -> (StoreMetas, total) with ordering and begin/end
                        slicing applied; force=True skips validation.

        debug=True prints the generated SQL and its bound arguments.
        """
        if mode == 'raw':
            if debug:
                print('\n\n----sql----')
                sql, args, _, _ = elems._construct_sql_and_arguments()
                print(sql)
                print('......')
                print(args)
                print('-----------\n\n')
            return elems, -1
        elif mode == 'count':
            if debug:
                print('\n\n----sql----')
                sql, args, _, _ = elems._construct_sql_and_arguments()
                print(sql)
                print('......')
                print(args)
                print('-----------\n\n')
            return [], elems.count()
        else:
            # NOTE(review): ``count`` here is not a local or a parameter --
            # it resolves to a module-level name (presumably pony's count
            # function), which is always truthy, so total is always
            # computed. Confirm the intended condition (a 'count' flag?).
            total = elems.count() if count else -1
            if order_by:
                elems = elems.order_by(order_by)
            else:
                if not order:
                    order = self.order
                if order == 'desc':
                    elems = elems.order_by(lambda o:
                                           (desc(o.update), desc(o.id)))
                else:
                    elems = elems.order_by(lambda o: (o.update, o.id))
            if debug:
                print('\n\n----sql----')
                sql, args, _, _ = elems._construct_sql_and_arguments()
                print(sql)
                print('......')
                print(args)
                print('-----------\n\n')
            elems = self.adjust_slice(elems,
                                      for_update=for_update,
                                      begin=begin,
                                      end=end)
            if not force:
                for elem in elems:
                    self.validate(elem.data, meta=elem.meta, extra=elem.key)
            return StoreMetas(elems, store=self), total

    @db_session
    def search(self,
               condition,
               on='data',
               for_update=False,
               fuzzy=True,
               debug=False,
               mode='normal',
               order='desc',
               order_by=None,
               begin=None,
               end=None,
               force=False):
        if on == 'key':
            if isinstance(condition, str):
                elems = select(e for e in self.store if e.key == condition)
                return self.search_return(elems,
                                          mode=mode,
                                          order_by=order_by,
                                          order=order,
                                          for_update=for_update,
                                          begin=begin,
                                          end=end,
                                          force=force,
                                          debug=debug)
            elif isinstance(condition, list):
                elems = select(e for e in self.store if e.key in condition)
                return self.search_return(elems,
                                          mode=mode,
                                          order_by=order_by,
                                          order=order,
                                          for_update=for_update,
                                          begin=begin,
                                          end=end,
                                          force=force,
                                          debug=debug)
            raise StoreException('on key invalid')
        if on not in ['data', 'meta']:
            raise StoreException('on invalid')
        elems = select(e for e in self.store)
        if condition:
            for key, value in condition.items():
                if isinstance(value, list):
                    if '.' in key:
                        keys = key.split('.')
                    else:
                        keys = [key]

                    if self.provider == 'mysql':
                        for i, k in enumerate(keys):
                            if i == 0:
                                sql = f'e.{on}["{k}"]'
                            else:
                                sql += f'["{k}"]'
                        sql += f' in {value}'

                        elems = elems.filter(sql)
                    else:
                        sql = f'e.data'
                        for i, k in enumerate(keys):
                            if i == len(keys) - 1:
                                sql += '->>'
                            else:
                                sql += '->'
                            sql += f"'{k}'"
                        v = []
                        cast = None
                        for e in value:
                            if isinstance(e, bool):
                                cast = 'boolean'
                                if e == True:
                                    v.append("true")
                                elif e == False:
                                    v.append("false")
                            elif isinstance(e, float):
                                ee = f'{e}'
                                v.append(ee)
                                cast = 'float'
                            elif isinstance(e, int):
                                ee = f'{e}'
                                v.append(ee)
                                cast = 'integer'
                            elif isinstance(e, str):
                                ee = f"'{e}'"
                                v.append(ee)
                            else:
                                raise StoreException('k invalid')
                        value_str = ', '.join(v)
                        if cast:
                            sql = f'cast({sql} as {cast}) in ({value_str})'
                        else:
                            sql += f' in ({value_str})'

                        elems = elems.filter(lambda e: raw_sql(sql))
                elif isinstance(value, dict):
                    op = value.get('operator') or value.get('op')
                    val = value.get('value') or value.get('val')
                    if op is None or val is None:
                        raise StoreException('operator and value not found')
                    if op == 'in' or op == 'any_in':
                        if isinstance(val, list):
                            if self.provider == 'mysql':
                                sqls = []
                                for v in val:
                                    # sql = f'(json_contains(`e`.`data`, \'["{v}"]\', \'$$.{key}\') or json_contains_path(`e`.`data`, \'one\', \'$$.{key}.{v}\'))'
                                    sql = f'json_contains(`e`.`{on}`, \'["{v}"]\', \'$$.{key}\')'
                                    sqls.append(sql)
                                sql = ' OR '.join(sqls)
                                elems = elems.filter(lambda e: raw_sql(sql))
                            else:
                                if '.' in key:
                                    key = key.replace('.', ',')
                                    # raise StoreException('jsonpath not support for in operator')
                                sqls = []
                                for v in val:
                                    sql = f'("e"."{on}" #> \'{{ {key} }}\' ? \'{v}\')'
                                    sqls.append(sql)
                                sql = ' OR '.join(sqls)
                                elems = elems.filter(lambda e: raw_sql(sql))

                        else:
                            if self.provider == 'mysql':
                                # sql = f'(json_contains(`e`.`data`, \'["{val}"]\', \'$$.{key}\') or json_contains_path(`e`.`data`, \'one\', \'$$.{key}.{val}\'))'
                                sql = f'json_contains(`e`.`{on}`, \'["{val}"]\', \'$$.{key}\')'  # or json_contains_path(`e`.`data`, \'one\', \'$$.{key}.{val}\'))'
                                elems = elems.filter(lambda e: raw_sql(sql))
                            else:
                                if '.' in key:
                                    key = key.replace('.', ',')
                                sql = f'("e"."{on}" #> \'{{ {key} }}\' ? \'{val}\')'
                                elems = elems.filter(lambda e: raw_sql(sql))
                    elif op == 'ain' or op == 'all_in':
                        if isinstance(val, list):
                            if self.provider == 'mysql':
                                sql = f'json_contains(`e`.`{on}`, \'{json.dumps(val)}\', \'$$.{key}\')'
                                elems = elems.filter(lambda e: raw_sql(sql))
                            else:
                                if '.' in key:
                                    key = key.replace('.', ',')
                                    # raise StoreException('jsonpath not support for in operator')
                                for v in val:
                                    sql = f'("e"."{on}" #> \'{{ {key} }}\' ? \'{v}\')'
                                    elems = elems.filter(
                                        lambda e: raw_sql(sql))
                        else:
                            if self.provider == 'mysql':
                                # sql = f'(json_contains(`e`.`data`, \'["{val}"]\', \'$$.{key}\') or json_contains_path(`e`.`data`, \'one\', \'$$.{key}.{val}\'))'
                                sql = f'json_contains(`e`.`{on}`, \'["{val}"]\', \'$$.{key}\')'  # or json_contains_path(`e`.`data`, \'one\', \'$$.{key}.{val}\'))'
                                elems = elems.filter(lambda e: raw_sql(sql))
                            else:
                                if '.' in key:
                                    key = key.replace('.', ',')
                                    # raise StoreException('jsonpath not support for in operator')
                                sql = f'("e"."{on}" #> \'{{ {key} }}\' ? \'{val}\')'
                                elems = elems.filter(lambda e: raw_sql(sql))
                    else:
                        if self.provider == 'mysql':
                            if op == '==':
                                op = '='
                            sql = None
                            if isinstance(val, bool):
                                if val == True:
                                    sql = f'json_extract(`e`.`{on}`, "$$.{key}") {op} true'
                                else:
                                    sql = f'json_extract(`e`.`{on}`, "$$.{key}") {op} false'
                            elif isinstance(val, int) or isinstance(
                                    val, float):
                                sql = f'json_extract(`e`.`{on}`, "$$.{key}") {op} {val}'
                            elif isinstance(val, str):
                                sql = f'json_extract(`e`.`{on}`, "$$.{key}") {op} "{val}"'
                            else:
                                detail = f'val {val} type {type(val)} invalid'
                                raise StoreException(detail)
                            if sql:
                                elems = elems.filter(lambda e: raw_sql(sql))
                        else:
                            if op == '=':
                                op = '=='
                            sql = None
                            if isinstance(val, bool):
                                if val == True:
                                    sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == true)\')'
                                else:
                                    sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == false)\')'
                            elif isinstance(val, int) or isinstance(
                                    val, float):
                                sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ {op} {val})\')'
                            elif isinstance(val, str):
                                sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ {op} "{val}")\')'
                            else:
                                detail = f'val {val} type {type(val)} invalid'
                                raise StoreException(detail)
                            if sql:
                                elems = elems.filter(lambda e: raw_sql(sql))
                elif isinstance(value, bool):
                    if self.provider == 'mysql':
                        if value == True:
                            sql = f'json_extract(`e`.`{on}`, "$$.{key}") = true'
                        else:
                            sql = f'json_extract(`e`.`{on}`, "$$.{key}") = false'
                        elems = elems.filter(lambda e: raw_sql(sql))
                    else:
                        if value == True:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == true)\')'
                        else:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == false)\')'
                        elems = elems.filter(lambda e: raw_sql(sql))
                elif isinstance(value, int) or isinstance(value, float):
                    if self.provider == 'mysql':
                        sql = f'json_extract(`e`.`{on}`, "$$.{key}") = {value}'
                        elems = elems.filter(lambda e: raw_sql(sql))
                    else:
                        sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == {value})\')'
                        elems = elems.filter(lambda e: raw_sql(sql))
                elif isinstance(value, str):
                    if fuzzy:
                        if self.provider == 'mysql':
                            sql = f'json_search(`e`.`{on}`, "all", "%%{value}%%", NULL, "$$.{key}")'
                            elems = elems.filter(lambda e: raw_sql(sql))
                        else:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ like_regex "{value}" flag "i")\')'
                            elems = elems.filter(lambda e: raw_sql(sql))
                    else:
                        if self.provider == 'mysql':
                            sql = f'json_extract(`e`.`{on}`, "$$.{key}") = "{value}"'
                            elems = elems.filter(lambda e: raw_sql(sql))
                        else:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == "{value}")\')'
                            elems = elems.filter(lambda e: raw_sql(sql))
                else:
                    raise StoreException('value type not support')
        return self.search_return(elems,
                                  mode=mode,
                                  order_by=order_by,
                                  order=order,
                                  for_update=for_update,
                                  begin=begin,
                                  end=end,
                                  force=force,
                                  debug=debug)

    @db_session
    def search_multi(self,
                     conditions,
                     on='data',
                     for_update=False,
                     fuzzy=True,
                     debug=False,
                     mode='normal',
                     order='desc',
                     order_by=None,
                     begin=None,
                     end=None,
                     force=False):
        """Search entities matching any of several AND-groups of conditions.

        :param conditions: list of dicts; the key/value pairs inside one
            dict are ANDed together and the per-dict groups are ORed,
            i.e. ``(k1 AND k2) OR (k3 AND k4) ...``.
        :param on: JSON column to match against, ``'data'`` or ``'meta'``.
        :param fuzzy: for string values use substring (MySQL) / regex
            (jsonb) matching instead of exact equality.
        :raises StoreException: if ``on`` is invalid or a value has an
            unsupported type.

        Remaining parameters are forwarded unchanged to ``search_return``.

        NOTE(review): keys and values are interpolated directly into raw
        SQL -- callers must ensure they come from trusted input.
        """
        if on not in ['data', 'meta']:
            raise StoreException('on invalid')
        elems = select(e for e in self.store)
        or_sqls = []
        for condition in conditions:
            and_sqls = []
            for key, value in condition.items():
                # bool must be tested before int: bool is an int subclass.
                if isinstance(value, bool):
                    literal = 'true' if value else 'false'
                    if self.provider == 'mysql':
                        sql = f'json_extract(`e`.`{on}`, "$$.{key}") = {literal}'
                    else:
                        sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == {literal})\')'
                elif isinstance(value, (int, float)):
                    if self.provider == 'mysql':
                        sql = f'json_extract(`e`.`{on}`, "$$.{key}") = {value}'
                    else:
                        sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == {value})\')'
                elif isinstance(value, str):
                    if fuzzy:
                        if self.provider == 'mysql':
                            sql = f'json_search(`e`.`{on}`, "all", "%%{value}%%", NULL, "$$.{key}")'
                        else:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ like_regex "{value}" flag "i")\')'
                    else:
                        if self.provider == 'mysql':
                            sql = f'json_extract(`e`.`{on}`, "$$.{key}") = "{value}"'
                        else:
                            sql = f'jsonb_path_exists("e"."{on}", \'$$.{key} ? (@ == "{value}")\')'
                else:
                    raise StoreException('value type not support')
                and_sqls.append(sql)

            or_sqls.append(' AND '.join(and_sqls))

        or_sql = ' OR '.join(or_sqls)
        # Guard against empty conditions: raw_sql('') would produce an
        # invalid WHERE clause; with no filter, all rows remain eligible.
        if or_sql:
            elems = elems.filter(lambda e: raw_sql(or_sql))
        return self.search_return(elems,
                                  mode=mode,
                                  order_by=order_by,
                                  order=order,
                                  for_update=for_update,
                                  begin=begin,
                                  end=end,
                                  force=force,
                                  debug=debug)
Пример #33
0
from pony.orm import Database, PrimaryKey, Required, Set, db_session, Optional
from uuid import uuid4 as gid, UUID
import datetime as dt

# Module-level Pony ORM database, bound to a local SQLite file.
db = Database()

# create_db=True creates baza.sqlite on first use if it does not exist yet.
db.bind(provider='sqlite', filename='baza.sqlite', create_db=True)


class Korisnik(db.Entity):
    # User entity ("Korisnik" = user); primary key is a caller-supplied string id.
    id = PrimaryKey(str)
    Ime = Required(str)  # first name
    Prezime = Required(str)  # last name
    korisnicko_ime = Required(str)  # username
    lozinka = Required(str)  # password -- NOTE(review): appears to be stored as plain text
    liste = Set("Lista_Zelja")  # wish lists owned by this user (entity not visible in this chunk)


class Zanr(db.Entity):
    # Genre entity ("Zanr" = genre).
    id = PrimaryKey(str)
    naziv = Required(str)  # genre name
    knjige = Set("Knjiga")  # books in this genre (reverse side of Knjiga.zanr)


class Knjiga(db.Entity):
    # Book entity ("Knjiga" = book).
    id = PrimaryKey(str)
    naziv = Required(str)  # title
    autor = Required(str)  # author
    opis = Required(str)  # description
    zanr = Required(Zanr)  # many-to-one link to the genre
    lista = Set("Lista_Zelja")  # wish lists containing this book (entity not visible here)
Пример #34
0
from pony.orm import Database

# from src.conf.config import mysql_conf

db = Database()

# Default configuration for sqlite; create_db=True creates the file on demand.
db.bind('sqlite', 'database.sqlite', create_db=True)

# Configuration for external databases (e.g. MySQL): uncomment and supply
# mysql_conf (see the commented import above) to switch providers.
# db.bind(provider=mysql_conf['provider'],
# host=mysql_conf['host'],
# user=mysql_conf['user'],
# passwd=mysql_conf['passwd'],
# db=mysql_conf['db'])
Пример #35
0
from flask import Flask, jsonify
from pony.orm import Database
from config.environment import db_uri

# Flask app serving static assets from ./public.
app = Flask(__name__, static_folder='public')

db = Database()
db.bind('postgres', db_uri)

# pylint: disable=W0611,C0413
# Imported for its side effects: registering routes (and the entities they
# use) on `app`/`db` *before* the mapping is generated below.
from config import routes

db.generate_mapping(create_tables=True)


@app.errorhandler(404)
def not_found(_error):
    """Return a JSON 404 response for any unknown route."""
    body = jsonify({'message': 'Not found'})
    return body, 404
Пример #36
0
# -*- coding: utf-8 -*-
from pony.orm import Database, PrimaryKey, Required, Set, Json, buffer, left_join, sql_debug, select, commit
from CGRtools.FEAR import FEAR
from networkx.readwrite.json_graph import node_link_data, node_link_graph
from MODtools.descriptors.fragmentor import Fragmentor
from CGRtools.CGRcore import CGRcore
from functools import reduce
from CGRtools.files.SDFrw import MoleculeContainer
from CGRtools.files.RDFrw import RDFread, ReactionContainer
import networkx as nx
from .files.Zulfia import get_bitstring

db = Database()
# Module-level singletons shared by the entity classes below.
fear = FEAR()
cgr_core=CGRcore()
fragmentor_mol = Fragmentor()  # descriptor generator for molecules
fragmentor_rct = Fragmentor()  # descriptor generator for reactions

class Molecules(db.Entity):
    # A molecule stored as a node-link JSON graph plus a canonical signature.
    id = PrimaryKey(int, auto=True)
    data = Required(Json)  # node_link_data() serialization of the molecule graph
    fear = Required(str, unique=True)  # canonical signature -- presumably from self.get_fear; verify
    fingerprint = Required(bytes)
    reactions = Set('ReactionsMolecules')

    def __init__(self, molecule, fingerprint=None):
        # NOTE(review): this constructor looks truncated in this snippet --
        # the values computed below are never handed to db.Entity.__init__ here.
        fear_str = self.get_fear(molecule)
        data = node_link_data(molecule)

        if fingerprint is None:
            fingerprint = self.get_fingerprints([molecule])[0]
Пример #37
0
#!/usr/bin/env python3

from pathlib import Path
from datetime import datetime
from pony.orm import Database, PrimaryKey, Required, Optional, Json, composite_index

# Unbound Pony database; bound to sqlite in the __main__ block below.
db = Database()


@db.on_connect(provider="sqlite")
def _home_sqliterc(_, conn):
    """On each new SQLite connection, replay the SQL from ``~/.sqliterc``.

    Lines starting with "." are skipped: those are sqlite3 shell
    dot-commands, which the driver cannot execute.
    """
    rc = Path.home() / ".sqliterc"
    if rc.exists():
        with rc.open() as f:
            # startswith() is safe on an empty line, unlike indexing i[0].
            script = "".join(line for line in f if not line.startswith("."))
        conn.executescript(script)


class Fsm(db.Entity):
    # One persisted state record of a finite-state machine.
    id = PrimaryKey(int, auto=True)
    state = Required(str, 80)  # state name, max 80 chars
    ts = Required(datetime, default=datetime.now)  # record creation time (naive local time)
    data = Optional(Json)  # optional arbitrary payload
    composite_index(ts, state)  # composite index on (ts, state)


if __name__ == '__main__':
    # When run directly: bind to an in-memory database and create the schema.
    db.bind('sqlite', filename=':memory:')
    db.generate_mapping(create_tables=True)
Пример #38
0
#!/usr/bin/env python
#-*- coding: utf-8 -*-

from pony.orm import Database, Required


# In-memory SQLite database (contents are lost when the process exits).
db = Database('sqlite', ':memory:')


class User(db.Entity):
    # NOTE(review): `unicode` implies this snippet targets Python 2.
    email = Required(unicode)
    password = Required(unicode)  # NOTE(review): appears to be stored as plain text

db.generate_mapping(create_tables=True)
Пример #39
0
# A very simple Flask Hello World app for you to get started with...

from flask import Flask, render_template, request, redirect, url_for
from pony.orm import Database, Required, Optional, PrimaryKey, select, db_session
app = Flask(__name__)  # WSGI application
db = Database()  # bound to sqlite after the entity is declared below


class User(db.Entity):
    # Quiz participant and their accumulated score.
    id = PrimaryKey(int, auto=True)
    name = Required(str)
    surname = Required(str)
    score = Required(int)  # incremented per correct answer in main()
    age = Optional(int)


# Bound after the entity declaration; create_db=True creates the file on demand.
db.bind(provider='sqlite', filename='userdata',
        create_db=True)  # a server database would take user/password/host/database instead
db.generate_mapping(create_tables=True)


@app.route('/', methods=["POST", "GET"])
@db_session
def main():
    if request.method == "POST":
        name = request.form.get('name')
        surname = request.form.get('surname')
        age = request.form.get('age')
        score = 0
        if request.form.get('group1') == "b":
            score += 1
Пример #40
0
from pony.orm import Database, PrimaryKey, Required, Optional, select, commit, db_session
from pony.orm.ormtypes import datetime

db = Database()
# No create_db=True here: the sqlite file is expected to already exist.
db.bind(provider='sqlite', filename='database_store.db')

# Uncomment to reset the table (drops all rows!):
# db.drop_table("Product", if_exists=True, with_all_data=True)


class Product(db.Entity):
    # Inventory item: unique name plus a stock quantity.
    product_id = PrimaryKey(int, auto=True)
    name = Required(str, unique=True)
    quantity = Required(int)

    def __str__(self):
        # Human-readable form used when printing/logging a product.
        return f"Product id[{self.product_id}] name: {self.name}"


db.generate_mapping(create_tables=True)
Пример #41
0
class LoaderDaemon(Daemon):
    """Daemon that consumes metric messages from Kafka and loads them into Postgres."""

    _consumer = None
    # Class-level Pony Database instance; bound to Postgres in _init_db().
    _db = Database()

    def _handle_sigterm(self, signum, frame) -> None:
        # Shut down cleanly on SIGTERM/SIGINT: close the Kafka consumer
        # first, then always release the database connection.
        try:
            if self._consumer:
                self._consumer.close()
        finally:
            self._db.disconnect()

    def _init_logging(self) -> None:
        # Route DEBUG+ records of the 'ml' logger both to a file and to the console.
        logger = logging.getLogger('ml')
        formatter = logging.Formatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

        logger.setLevel(logging.DEBUG)

        fh = logging.FileHandler(settings.DAEMON_LOGS_FILE)
        fh.setFormatter(formatter)
        logger.addHandler(fh)

        sh = logging.StreamHandler()
        sh.setFormatter(formatter)
        logger.addHandler(sh)

    def _init_consumer(self) -> None:
        # SSL-authenticated Kafka consumer; 'earliest' makes a fresh consumer
        # group start from the beginning of the retained log.
        self._consumer = KafkaConsumer(
            settings.KAFKA_TOPIC_NAME,
            auto_offset_reset="earliest",
            client_id="metrics-client-1",
            group_id="metrics-group",
            bootstrap_servers=settings.KAFKA_SERVER,
            security_protocol='SSL',
            ssl_cafile=settings.KAFKA_SSL_CA_FILE,
            ssl_certfile=settings.KAFKA_SSL_CERT_FILE,
            ssl_keyfile=settings.KAFKA_SSL_KEY_FILE)

    def _init_db(self) -> None:
        # Bind the shared Database to Postgres, register the entities and
        # create any missing tables.
        self._db.bind(provider='postgres',
                      user=settings.POSTGRES_USER,
                      password=settings.POSTGRES_PASSWORD,
                      host=settings.POSTGRES_HOST,
                      port=settings.POSTGRES_PORT,
                      database=settings.POSTGRES_DB)
        define_entities(self._db)
        self._db.generate_mapping(create_tables=True)

    def run(self) -> None:
        """Daemon entry point: wire up signals, logging, db and consumer, then loop."""
        signal.signal(signal.SIGTERM, self._handle_sigterm)
        signal.signal(signal.SIGINT, self._handle_sigterm)

        self._init_logging()
        self._init_db()

        self._init_consumer()

        logger = logging.getLogger('ml.main')

        logger.info('Starting metrics loader service...')

        loader = MetricsLoader(self._db, self._consumer)
        loader.run()
Пример #42
0
class Config(object):
    '''
    Main configuration here.

    If a property is considered *readonly*, we will use the ``property``
    decorator for it.

    This class is responsible to setup user config dir.
    On Unix, we will have ``$HOME/.iottalk``;
    on windows, it is ``$USERPROFILE/_iottalk``.
    '''

    __gateway_port = 17000
    __beacon_port = 1900
    __http_port = 9992
    __ip = '127.0.0.1'
    __uuid = ''
    debug = False
    __db_conf = {
        'type': 'sqlite',
        'url': 'iottalk.db',
        'host': 'localhost',
        'port': -1,
        'user': '',
        'passwd': '',
    }
    __db = None
    __userdir = ''
    __mqtt = {
        'scheme': 'mqtt',
        'host': 'localhost',
        'port': 1883,
    }

    def __init__(self):
        self.setup_userdir()

    @property
    def userdir(self):
        '''
        Per-user configuration directory, created lazily by
        ``setup_userdir`` and cached after the first lookup.
        '''
        if self.__userdir:
            return self.__userdir

        if utils.is_posix():
            self.__userdir = os.path.join(os.environ['HOME'],
                                          '.iottalk')
        elif utils.is_win():
            self.__userdir = os.path.join(os.environ['USERPROFILE'],
                                          '_iottalk')
        else:
            raise OSError('Unsupport os type "{}"'.format(os.name))

        return self.__userdir

    def setup_userdir(self):
        '''
        Create the user config dir if missing; raise if the path exists
        but is not a directory.
        '''
        path = self.userdir

        if os.path.exists(path) and not os.path.isdir(path):
            raise OSError('Path "{}" is not a dir'.format(path))
        elif os.path.exists(path) and os.path.isdir(path):
            return

        os.mkdir(path)

    @property
    def gateway_port(self):
        return self.__gateway_port

    @property
    def beacon_port(self):
        return self.__beacon_port

    @property
    def beacon_url(self):
        return 'udp://{}:{}'.format(self.ip, self.beacon_port)

    @property
    def ip(self):
        return self.__ip

    @property
    def uuid(self):
        '''
        Lazily generated instance UUID.

        :TODO: load the uuid from config file.
        '''
        if not self.__uuid:
            self.__uuid = uuid4()
        return self.__uuid

    @property
    def http_port(self):
        return self.__http_port

    @property
    def db(self):
        '''
        :return: The pony orm db instance without db provider binding
        '''
        if self.__db:
            return self.__db

        self.__db = Database()
        return self.__db

    @property
    def db_conf(self):
        '''
        The db connection configuration.
        Here is the schema::

            {
                'type': str,
                'url': str,
                'host': str,
                'port': int,
                'user': str,
                'passwd': str,
            }

        >>> config.db_conf = {'type': 'answer', 'port': 42}
        >>> assert config.db_conf['type'] == 'answer'
        >>> config.db_conf['port']
        42
        '''
        return self.__db_conf.copy()

    @db_conf.setter
    def db_conf(self, value):
        '''
        :param dict value: the update dictionary

        We accept a subset of value with following schema::

            {
                'type': str,
                'url': str,
                'host': str,
                'port': int,
                'user': str,
                'passwd': str,
            }

        :raise ValueError: if we get any invalid key.
        :raise TypeError: if we get wrong type of content.
        '''
        key_set = ('type', 'url', 'host', 'port', 'user', 'passwd')

        for key, val in value.items():
            if key not in key_set:
                raise ValueError('Invalid key: {!r}'.format(key))
            if key != 'port' and not isinstance(val, string_types):
                raise TypeError('{!r} must be a string'.format(key))
            elif key == 'port' and not isinstance(val, int):
                raise TypeError("'port' must be an int")

        self.__db_conf.update(value)

    @property
    def available_protos(self):
        '''
        .. todo::
            should auto-detect the working server
        '''
        return ('mqtt', 'zmq', 'websocket')

    @property
    def feature_cates(self):
        '''
        The list of feature categories

        .. deprecated::
        '''
        return ('sight', 'hearing', 'feeling', 'motion', 'other')

    def __del__(self):
        # Check the private attribute directly: going through the ``db``
        # property here would *create* a Database just to tear it down,
        # and ``self.db.provider and self.db.provider is not None`` was a
        # redundant double test.
        if self.__db is not None and self.__db.provider is not None:
            self.__db.disconnect()

    @property
    def mqtt_conf(self):
        return self.__mqtt.copy()
Пример #43
0
from pony.orm import Database, db_session 
from pandas.io.sql import write_frame
import statsmodels.api as sm

# NOTE(review): Python 2 syntax (`print` statement) below, and
# pandas.io.sql.write_frame was removed in later pandas versions --
# presumably this targets a legacy stack; verify before reuse.
db = Database('sqlite', ':memory:')

with db_session:
    data_loader = sm.datasets.sunspots.load_pandas()
    df = data_loader.data
    # Dump the sunspots DataFrame into an in-memory table, then count rows.
    write_frame(df, "sunspots", db.get_connection()) 
    print db.select("count(*) FROM sunspots")