Example #1
    def __init__(
        self,
        db_filename: Union[Path, type(MEMORY_DB)],
        channels_dir,
        my_key,
        disable_sync=False,
        notifier=None,
        check_tables=True,
        db_version: int = CURRENT_DB_VERSION,
    ):
        self.notifier = notifier  # Reference to app-level notification service
        self.db_path = db_filename
        self.channels_dir = channels_dir
        self.my_key = my_key
        self.my_public_key_bin = self.my_key.pub().key_to_bin()[10:]
        self._logger = logging.getLogger(self.__class__.__name__)

        self._shutting_down = False
        self.batch_size = 10  # reasonable number, a little bit more than typically fits in a single UDP packet
        self.reference_timedelta = timedelta(milliseconds=100)
        self.sleep_on_external_thread = 0.05  # sleep this amount of seconds between batches executed on external thread

        # We have to dynamically define/init ORM-managed entities here to be able to support
        # multiple sessions in Tribler. ORM-managed classes are bound to the database instance
        # at definition.
        self._db = orm.Database()

        # This attribute is internally called by Pony on startup, though pylint cannot detect it
        # with the static analysis.
        # pylint: disable=unused-variable
        @self._db.on_connect(provider='sqlite')
        def sqlite_disable_sync(_, connection):
            cursor = connection.cursor()
            cursor.execute("PRAGMA journal_mode = WAL")
            cursor.execute("PRAGMA synchronous = NORMAL")
            cursor.execute("PRAGMA temp_store = MEMORY")
            cursor.execute("PRAGMA foreign_keys = ON")

            # Disable disk sync for special cases
            if disable_sync:
                # !!! ACHTUNG !!! This should be used only for special cases (e.g. DB upgrades), because
                # losing power during a write will corrupt the database.
                cursor.execute("PRAGMA journal_mode = 0")
                cursor.execute("PRAGMA synchronous = 0")
            # pylint: enable=unused-variable

        self.MiscData = misc.define_binding(self._db)

        self.TrackerState = tracker_state.define_binding(self._db)
        self.TorrentState = torrent_state.define_binding(self._db)

        self.ChannelNode = channel_node.define_binding(self._db, logger=self._logger, key=my_key)

        self.MetadataNode = metadata_node.define_binding(self._db)
        self.CollectionNode = collection_node.define_binding(self._db)
        self.TorrentMetadata = torrent_metadata.define_binding(self._db)
        self.ChannelMetadata = channel_metadata.define_binding(self._db)

        self.JsonNode = json_node.define_binding(self._db, db_version)
        self.ChannelDescription = channel_description.define_binding(self._db)

        self.BinaryNode = binary_node.define_binding(self._db, db_version)
        self.ChannelThumbnail = channel_thumbnail.define_binding(self._db)

        self.ChannelVote = channel_vote.define_binding(self._db)
        self.ChannelPeer = channel_peer.define_binding(self._db)
        self.Vsids = vsids.define_binding(self._db)

        self.ChannelMetadata._channels_dir = channels_dir  # pylint: disable=protected-access

        if db_filename is MEMORY_DB:
            create_db = True
            db_path_string = ":memory:"
        else:
            create_db = not db_filename.is_file()
            db_path_string = str(db_filename)

        self._db.bind(provider='sqlite', filename=db_path_string, create_db=create_db, timeout=120.0)
        self._db.generate_mapping(
            create_tables=create_db, check_tables=check_tables
        )  # Must be run out of session scope
        if create_db:
            with db_session(ddl=True):
                self._db.execute(sql_create_fts_table)
                self.create_fts_triggers()
                self.create_torrentstate_triggers()
                self.create_partial_indexes()

        if create_db:
            with db_session:
                self.MiscData(name="db_version", value=str(db_version))

        with db_session:
            default_vsids = self.Vsids.get(rowid=0)
            if not default_vsids:
                default_vsids = self.Vsids.create_default_vsids()
            self.ChannelMetadata.votes_scaling = default_vsids.max_val
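The `define_binding(...)` calls above follow a common Pony idiom: every entity must subclass `db.Entity` of one concrete `Database` instance, so the classes are declared inside a function that receives that instance. A minimal, hypothetical sketch of the pattern (the real Tribler bindings live in separate modules and are more involved):

from pony import orm


def define_binding(db):
    # Declared inside a function rather than at module level so that each
    # Database instance gets its own entity class (multiple sessions possible).
    class MiscData(db.Entity):
        name = orm.PrimaryKey(str)
        value = orm.Optional(str)

    return MiscData


db = orm.Database()
MiscData = define_binding(db)
db.bind(provider='sqlite', filename=':memory:')
db.generate_mapping(create_tables=True)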
Example #2
def do_all_migrations(bind_database_function, folder_path, python_import):
    """
    This will load and execute all needed migrations.
    Also it will return the latest database definition when done.
     
    :param bind_database_function: The function to bind to a database. Needs to include `db.bind(...)` and `db.generate_mapping(...)`  
    :param folder_path: the path of the folder where the versions are stored in.
                        Caution: If specified relative, it is relative to this file (pony_up.do_update).
                        Example: `"/path/to/migrations"`
                        
    :param python_import: the python import path. This corespondes to the way you would import it normally.
                          Example: "somewhere.migrations" (like in `from somewhere.migrations import v0`)
    :return: 
    """
    if bind_database_function is None:
        raise ValueError(
            "Please provide a function accepting the database `pony.orm.Database` "
            "which will run `db.bind(...)` and `db.generate_mapping(...)`."
        )
    # end if

    db = orm.Database()
    register_version_table(db)
    bind_database_function(db)

    current_version_db = get_current_version(db)
    current_version = current_version_db.version
    start_version = current_version

    # get the versions modules
    file_names_found = enumerate_migrations(folder_path)
    logger.debug("found the following migration files: {!r}".format(file_names_found))
    migrations = {}
    max_version = 0
    # iterate through the folder with versions
    for name, file_name in file_names_found.items():
        logger.debug("name {!r}, file_name {!r}".format(name, file_name))
        if not name.startswith("v"):
            logger.debug("skipping module, format wrong.\nExpected format 'v{{number}}', got {module_name!r}".format(module_name=name))
            continue
        # end if
        try:
            version = int(name[1:])
            if version > max_version:
                max_version = version
            # end if
        except ValueError:
            logger.debug("skipping module, version int malformatted.\nExpected format 'v{{number}}', got {module_name!r}".format(module_name=name))
            continue
        # end try
        if version < current_version:
            logger.debug("skipping module, version {load!r} smaller than current {db!r}.".format(load=version, db=current_version))
            continue
        # end if
        module = importlib.import_module(python_import + "." + name)
        logger.debug("found module {m!r} (name: {n!r}, file_name: {f!r}, version parsed: {v!r}), ".format(v=version, n=name, f=file_name, m=module.__name__))
        migrations[version] = module
    # end for

    db = None
    # iterate though the versions in ascending version order, and run them.
    for v, module in sorted(migrations.items(), key=lambda x: x[0]):
        logger.debug("preparing update from version {v!r}".format(v=v))
        if current_version > v:
            logger.warn("skipping migration (needs database version {v}). We already have version {curr_v}.".format(
                v=v, curr_v=current_version
            ))
            continue
        # end if
        if current_version != v:
            raise MigrationVersionWrong(  # database version < migration start version
                "Next migration starts with database version {loaded_v}, "
                "but the database is still at version {curr_v}.\n"
                "This means a migration must be missing.".format(
                    loaded_v=v, curr_v=current_version
                )
            )
        # end if
        db, version_and_meta = do_version(module, bind_database_function, current_version, old_db=db)
        if not version_and_meta:  # is None if no manual execution was run (only the schema loaded)
            logger.info("loaded only the schema schema {v!r}".format(v=current_version))
            if current_version < max_version:
                logger.debug("storing as version {new_v!r}, there are more versions to load (curr: {v}, max: {max})".format(
                    new_v=current_version + 1, max=max_version, v=current_version
                ))
                version_and_meta = (current_version + 1, {"message": "automated update (only schema provided)"})
            else:
                logger.debug("version {v!r} this is the newest we have, just loading schema.".format(v=current_version))
                break
            # end if
        new_version, meta = version_and_meta
        new_version_db = store_new_version(db, new_version, meta)
        # Save version for next loop.
        current_version_db = new_version_db
        current_version = new_version
        meta_message = (
            (": " + repr(meta["message"])) if "message" in meta else " - Metadata: " + repr(meta)
        ).strip()
        logger.success(
            "upgraded from version {old_v!r} to v{new_v!r}{meta_message}".format(
                old_v=v, new_v=new_version, meta_message=meta_message
            )
        )
        if new_version != v + 1:
            logger.warn(
                "migrated from version {old_v!r} to v{new_v!r} "
                "(instead of v{should_v!r}, it skipped {diff!r} versions)".format(
                    old_v=v, new_v=new_version, should_v=v + 1, diff=new_version - (v + 1)
                ))
        # end if
    # end for
    logger.success("migration from version {v_old!r} to version {v_new!r} done.".format(
        v_old=start_version, v_new=current_version
    ))
    return db
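The `bind_database_function` described in the docstring above only needs to bind the passed `pony.orm.Database` and generate its mapping. A hedged sketch of such a function and of calling `do_all_migrations` with it, reusing the hypothetical paths from the docstring:

def bind_sqlite(db):
    # Hypothetical binder: the entities themselves are registered on `db`
    # by the version modules before this function is called.
    db.bind(provider='sqlite', filename='app.db', create_db=True)
    db.generate_mapping(create_tables=True)


db = do_all_migrations(bind_sqlite, "/path/to/migrations", "somewhere.migrations")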
Example #3
from datetime import datetime
from os import makedirs
from os.path import join
from textwrap import dedent

from pony import orm
from slugify import slugify

from .helpers import get_config

db = orm.Database()
config = get_config()
db.bind(provider=config['DATABASE']['provider'],
        database=config['DATABASE']['database'])

WEBSITE_VERSE_TEMPLATE = '''
[{n}](topic_url) {result_text}
'''

JEKYLL_FRONT_MATTER = '''---
layout: page
title: {title}
---

# {title}

'''


class SlugifiedMixin:
    @property
Example #4
def do_version(version_module, bind_database_function, old_version, old_db=None):  # todo rename "run_version"
    """
    Creates a new db, registers vNEW model, and runs `migrate.do_update(old_db, vNEW_db)`
    
    First loads the new schema (`.model`) if existent, else uses the provided `old_db`.
    If there are migrations (`.migrate`), they are run too.
    
    Returns a tuple with the most recent schema in the first slot,
    and the result of the migration function or `None` if not migrated as the second slot.
     
    
    :param version_module: the module, with a `.model` and optionally a `.migrate`
    
    :param bind_database_function: The function to bind to a database. Needs to include `db.bind(...)` and `db.generate_mapping(...)`
    
    :param old_version: the version before loading a new schema (`.model`)
    :type  old_version: int
    
    :param old_db: the database before the migration, so you can copy from one to another.
                   This will be None for the first migration (e.g. v0).
    :type  old_db: None | orm.core.Database
    
    :return: Tuple (db, do_update_result).
             `db` being the new version (mapping) of the database,
             `do_update_result` is `None` if no migration was run. If a migration did run, it is the result of calling `version_module.migrate.do_update(migrator)`, which should be a tuple of its own: `(new_version:int, metadata:dict)`.
    :rtype: tuple( orm.core.Database, None | tuple(int, dict) )
    """
    if bind_database_function is None:
        raise ValueError(
            "Please provide a function accepting the database `pony.orm.Database` "
            "which will run `db.bind(...)` and `db.generate_mapping(...)`."
        )
    # end if
    model = None
    if hasattr(version_module, "model"):
        logger.debug("found .model as attribute")
        model = version_module.model
    else:
        try:
            model = importlib.import_module(version_module.__name__ + ".model")
            logger.debug("found .model as import")
        except ImportError:
            pass
        # end try
    # end if
    migrate = None
    if hasattr(version_module, "migrate"):
        logger.debug("found .migrate as attribute")
        migrate = version_module.migrate
    else:
        try:
            migrate = importlib.import_module(version_module.__name__ + ".migrate")
            logger.debug("found .migrate as import")
        except ImportError:
            pass
        # end try
    # end if
    if model:
        logger.info("loading model version {v!r}.".format(v=old_version))
        new_db = orm.Database()
        setattr(new_db, "pony_up__version", old_version)
        version_module.model.register_database(new_db)
        logger.debug("adding version table to model version {v!r}.".format(v=old_version))
        register_version_table(new_db)

        logger.debug("binding model version {v!r} to database.".format(v=old_version))
        bind_database_function(new_db)

        if migrate:
            # A: model + migrate (both | See "v0" or "v1" in Fig.1)
            logger.info("migrating from version {v!r}".format(v=old_version))
            migrator = Migrator(old_db, new_db, bind_database_function, old_version, has_new_schema=True)
            with orm.db_session:
                return new_db, version_module.migrate.do_update(migrator)
            # end with
        else:
            logger.debug("no migration for version {v!r}".format(v=old_version))
            # B: model + _______ (model only | See "v3" or "v4" in Fig.1)
            return new_db, None
        # end if
    else:
        logger.info("using old model version {v!r}".format(v=old_version))
        if migrate:
            # C: _____ + migrate (only migrate | See "v2" in Fig.1)
            logger.info("migrating from version {v!r}".format(v=old_version))
            migrator = Migrator(old_db, None, bind_database_function, old_version=old_version, has_new_schema=False)
            with orm.db_session:
                return old_db, version_module.migrate.do_update(migrator)
            # end with
        else:
            # D: _____ + _____ (nothing)
            logger.debug("no migration for version {v!r}".format(v=old_version))
            raise ValueError(
                "The given `version_module` does neither has a `.model` nor a `.migrate` attribute.\n"
                "Maybe you need a `from . import model, migrate` in the `__init__.py` file?"
            )
Example #5
from pony import orm

db = orm.Database('mysql', db='testdb', host='localhost', passwd='ponytest', user='******')


class Activity(db.Entity):
    descr = orm.Required(str)
Example #6
from pony import orm

cleandb = orm.Database()


class Student(cleandb.Entity):
    rollno = orm.Required(int)
    name = orm.Required(str)
    father = orm.Required(str)
    result = orm.Required(str)
    institute = orm.Required(str)
    remarks = orm.Optional(str)
    subjects = orm.Set('Result')


class Result(cleandb.Entity):
    student = orm.Required(Student)
    subject = orm.Required(str)
    part = orm.Optional(str)
    theory = orm.Required(int)
    practical = orm.Optional(int, nullable=True)
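For completeness, a small hypothetical usage of the `cleandb` schema above, assuming an in-memory SQLite binding (entity values are made up for illustration):

cleandb.bind(provider='sqlite', filename=':memory:')
cleandb.generate_mapping(create_tables=True)

with orm.db_session:
    ali = Student(rollno=1, name="Ali", father="Ahmed", result="PASS",
                  institute="Example College")
    Result(student=ali, subject="Physics", theory=72, practical=18)

with orm.db_session:
    # All results with a theory score of at least 33 (illustrative query).
    passed = orm.select(r for r in Result if r.theory >= 33)[:]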
Example #7
#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:tom_tao626 
@license: Apache Licence 
@file: db_conf.py 
@time: 2020/12/11
@contact: [email protected]
@site: xxxx.suizhu.net
@software: PyCharm 
"""

from pony import orm

# Create the database object
pony_db = orm.Database()
# Establish the database connection
pony_db.bind(provider='mysql',
             host='localhost',
             user='******',
             passwd='password',
             db='pony_db')

# Official documentation
# https://docs.ponyorm.org/
Example #8
# Hypothetical imports for the snippet below; `config` is assumed to be a parsed
# configuration (e.g. a configparser.ConfigParser) with a [general] section holding
# the MySQL connection settings.
import datetime as dt

from pony import orm as pny


# Job statuses
class JobStatus:
    Submitted, Running, Done, Error, Expired = range(1, 6)


# Job steps
class JobStep:
    GetData, FindingNodesEdges, ClusteringNodes, SquarifiedTreemap, FruchtermanReingoldLayout, WriteJSON = range(
        0, 6)


#db = pny.Database("sqlite", "correlation_network_jobs.db", create_db=True)
db = pny.Database('mysql',
                  host=config['general']['host'].strip("'"),
                  user=config['general']['user'].strip("'"),
                  passwd=config['general']['pass'].strip("'"),
                  db=config['general']['db'].strip("'"))


class CorrelationNetworkJob(db.Entity):
    # Basic job data
    dataset = pny.Required(str)
    candidates = pny.Optional(str)
    columns = pny.Optional(str)
    verbose = pny.Required(bool, default=False)
    threshold = pny.Required(float)
    minimum_cluster_size = pny.Required(int)
    submit_time = pny.Required(dt.datetime, sql_default='CURRENT_TIMESTAMP')
    start_time = pny.Optional(dt.datetime)
    end_time = pny.Optional(dt.datetime)