Пример #1
0
    def test_migration_check_success(self, tmpfile, caplogger):
        """End-to-end success path: migrate a v0.5.0 sqlite file to v1.3.2.

        Seeds a one-table database at version 0.5.0, runs MakeMigrations
        with a check callback that pins the exact post-migration DDL, and
        re-opens the file to verify the stored schema text and version.
        """
        ddb = Database(provider="sqlite", filename=str(tmpfile))
        init_onetable(ddb)
        s1 = Schema(ddb)
        s1.version = "0.5.0"
        v1 = s1.schema  # pre-migration schema text (unused; kept for debugging)

        def check_cb(check_db):
            # Post-migration hook: sqlite_master column 4 holds the CREATE
            # statement; it must match the migrated table layout exactly.
            with db_session:
                assert (
                    check_db.execute(
                        "select * from sqlite_master").fetchone()[4] ==
                    'CREATE TABLE "bla" ("key" TEXT NOT NULL PRIMARY KEY, "texta" TEXT, "textb" TEXT, "textc" TEXT)'
                )

        m = MakeMigrations(tmpfile, Version("1.3.2"), migrations)
        assert m(check_cb, lambda x: True)  # migration run reports success
        assert "Error" not in caplogger.read()  # nothing was logged as an error

        # Re-open the migrated file from scratch and confirm the persisted state.
        ddb2 = Database(provider="sqlite", filename=str(tmpfile))
        s2 = Schema(ddb2)
        assert (
            s2.schema ==
            """CREATE TABLE "bla" ("key" TEXT NOT NULL PRIMARY KEY, "texta" TEXT, "textb" TEXT, "textc" TEXT)\n"""
        )
        assert s2.version == Version("1.3.2")
Пример #2
0
def clean_core(args):
    """Truncate the molecule/reaction search-cache tables of a CGRdb schema.

    Loads the schema's stored config (validated against the installed CGRdb
    major version), verifies the configured packages are importable, then
    truncates both search-cache tables and restarts their identities.

    :param args: parsed CLI args with ``name`` (schema name) and
        ``connection`` (postgres connection kwargs).
    :raises KeyError: schema unknown or version incompatible.
    :raises ImportError: a configured package is missing or mismatched.
    """
    major_version = '.'.join(get_distribution('CGRdb').version.split('.')[:-1])
    schema = args.name

    db_config = Database()
    LazyEntityMeta.attach(db_config, database='CGRdb_config')
    db_config.bind('postgres', **args.connection)
    db_config.generate_mapping()

    # Fix: the None-check and the .config attribute read must happen inside
    # the session — pony raises DatabaseSessionIsOver when entity attributes
    # are accessed after the db_session has ended.
    with db_session:
        config = db_config.Config.get(name=schema, version=major_version)
        if not config:
            raise KeyError('schema not exists or version incompatible')
        config = config.config

    for p in config['packages']:
        try:
            p = get_distribution(p)
            import_module(p.project_name)
        except (DistributionNotFound, VersionConflict):
            raise ImportError(
                f'packages not installed or has invalid versions: {p}')

    db = Database()
    LazyEntityMeta.attach(db, schema, 'CGRdb')
    db.bind('postgres', **args.connection)
    db.generate_mapping()

    with db_session:
        db.execute(f'TRUNCATE TABLE "{schema}"."MoleculeSearchCache", '
                   f'"{schema}"."ReactionSearchCache" RESTART IDENTITY')
Пример #3
0
def update_core(args):
    """Re-install the CGRdb stored procedures/triggers for an existing schema.

    Validates the schema's stored config against the installed CGRdb major
    version, verifies its required packages, then re-executes every SQL
    routine template with ``{schema}`` substituted.

    :param args: parsed CLI args (name, user, password, host, base, port).
    :raises KeyError: schema unknown or version incompatible.
    :raises ImportError: a configured package is missing or mismatched.
    """
    major_version = '.'.join(get_distribution('CGRdb').version.split('.')[:-1])
    schema = args.name

    db_config = Database()
    LazyEntityMeta.attach(db_config, database='CGRdb_config')
    db_config.bind('postgres',
                   user=args.user,
                   password=args.password,
                   host=args.host,
                   database=args.base,
                   port=args.port)
    db_config.generate_mapping()

    # Fix: read the entity's attributes inside the session — accessing them
    # after the db_session ends raises DatabaseSessionIsOver in pony.
    with db_session:
        config = db_config.Config.get(name=schema, version=major_version)
        if not config:
            raise KeyError('schema not exists or version incompatible')
        config = config.config

    for p in config['packages']:
        try:
            p = get_distribution(p)
            import_module(p.project_name)
        except (DistributionNotFound, VersionConflict):
            raise ImportError(
                f'packages not installed or has invalid versions: {p}')

    db = Database()
    LazyEntityMeta.attach(db, schema, 'CGRdb')
    db.bind('postgres',
            user=args.user,
            password=args.password,
            host=args.host,
            database=args.base,
            port=args.port)
    db.generate_mapping()

    with db_session:
        db.execute(init_session.replace('{schema}', schema))
        db.execute(merge_molecules.replace('{schema}', schema))

        db.execute(insert_molecule.replace('{schema}', schema))
        db.execute(after_insert_molecule.replace('{schema}', schema))
        db.execute(delete_molecule.replace('{schema}', schema))

        db.execute(insert_reaction.replace('{schema}', schema))

        db.execute(search_similar_molecules.replace('{schema}', schema))
        db.execute(search_substructure_molecule.replace('{schema}', schema))
        db.execute(search_similar_reactions.replace('{schema}', schema))
        db.execute(search_substructure_reaction.replace('{schema}', schema))
        db.execute(
            search_substructure_fingerprint_molecule.replace(
                '{schema}', schema))
        db.execute(
            search_similar_fingerprint_molecule.replace('{schema}', schema))
        db.execute(search_reactions_by_molecule.replace('{schema}', schema))
        db.execute(search_mappingless_reaction.replace('{schema}', schema))
Пример #4
0
def index_core(args):
    """Create the GiST/GIN fingerprint indexes for a CGRdb schema.

    Validates the schema's stored config against the installed CGRdb major
    version, verifies its required packages, then creates similarity and
    substructure indexes on MoleculeStructure and ReactionIndex.

    :param args: parsed CLI args (name, user, password, host, base, port).
    :raises KeyError: schema unknown or version incompatible.
    :raises ImportError: a configured package is missing or mismatched.
    """
    major_version = '.'.join(get_distribution('CGRdb').version.split('.')[:-1])
    schema = args.name

    db_config = Database()
    LazyEntityMeta.attach(db_config, database='CGRdb_config')
    db_config.bind('postgres',
                   user=args.user,
                   password=args.password,
                   host=args.host,
                   database=args.base,
                   port=args.port)
    db_config.generate_mapping()

    # Fix: the None-check and .config read belong inside the session —
    # pony raises DatabaseSessionIsOver on attribute access afterwards.
    with db_session:
        config = db_config.Config.get(name=schema, version=major_version)
        if not config:
            raise KeyError('schema not exists or version incompatible')
        config = config.config

    for p in config['packages']:
        try:
            p = get_distribution(p)
            import_module(p.project_name)
        except (DistributionNotFound, VersionConflict):
            raise ImportError(
                f'packages not installed or has invalid versions: {p}')

    db = Database()
    LazyEntityMeta.attach(db, schema, 'CGRdb')
    db.bind('postgres',
            user=args.user,
            password=args.password,
            host=args.host,
            database=args.base,
            port=args.port)
    db.generate_mapping()

    with db_session:
        db.execute(
            f'CREATE INDEX idx_moleculestructure__smlar ON "{schema}"."MoleculeStructure" USING '
            'GIST (fingerprint _int4_sml_ops)')
        db.execute(
            f'CREATE INDEX idx_moleculestructure__subst ON "{schema}"."MoleculeStructure" USING '
            'GIN (fingerprint gin__int_ops)')
        db.execute(
            f'CREATE INDEX idx_reactionindex__smlar ON "{schema}"."ReactionIndex" USING '
            'GIST (fingerprint _int4_sml_ops)')
        db.execute(
            f'CREATE INDEX idx_reactionindex__subst ON "{schema}"."ReactionIndex" USING '
            'GIN (fingerprint gin__int_ops)')
Пример #5
0
def define_model():
    """Create a pony Database with the camera-log entity model attached.

    Defines three linked entities: Entry (a timestamped log record),
    CamLogEntry (per-camera measurements for an entry) and FitResult
    (fit parameters attached to a camera entry).

    Returns the (unbound) Database; callers must still bind it and call
    generate_mapping before use.
    """
    db = Database()

    class Entry(db.Entity):
        # One entry aggregates readings from several cameras.
        date_added = Required(datetime)
        cam_entries = Set('CamLogEntry')

    class CamLogEntry(db.Entity):
        entry = Required(Entry)
        cam = Required(str)  # camera identifier
        loc_x = Optional(float)
        loc_y = Optional(float)
        mean = Required(float)
        max = Required(float)
        # lambda defers resolution: FitResult is defined below this class.
        fit_result = Optional(lambda: FitResult)

    class FitResult(db.Entity):
        # Fields look like 2-D Gaussian fit parameters (center, widths,
        # rotation, amplitude, offset) — presumably; confirm with the fitter.
        cam_entry = Required(CamLogEntry)
        x0 = Required(float)
        y0 = Required(float)
        sigma_x = Required(float)
        sigma_y = Required(float)
        theta = Required(float)
        A = Required(float)
        off = Required(float)

    return db  #, Entry, CamLogEntry, FitResult
Пример #6
0
    def all_stock_candles(self):
        """Write a shell script containing one candle-fetch command per stock code."""
        pargs = self.app.pargs
        start_date = pargs.start_date
        end_date = pargs.end_date
        loop_count = pargs.loop_count
        file_name = pargs.file_name
        is_short = pargs.is_short
        self.app.log.info('start_date: {}, end_date: {}, loop: {}, file_name: {}, is_short: {}'.format(start_date, end_date, loop_count, file_name, is_short))

        # Open the stock database and collect every known code.
        data_conf = self.app.config.get('moontrader', 'data')
        stock_db = Database()
        db_adapter.define_korea_stock(stock_db)
        db_adapter.bind(stock_db, data_conf['dir'], DB_FILE_NAME)
        db_adapter.init(stock_db)
        codes = db_adapter.get_stock_codes()

        if is_short:
            # Short mode emits literal template placeholders to be filled later.
            param_start = ' --start {yesterday}'
            param_end = ' --end {today}'
        else:
            param_start = ' --start {}'.format(start_date) if start_date else ''
            param_end = ' --end {}'.format(end_date) if end_date else ''
        param_loop = ' --loop {}'.format(loop_count) if loop_count else ''

        with open(file_name, 'w') as script:
            for code in codes:
                cmd = 'moontrader stocks -s candle {}{}{}{}'.format(code.cd, param_start, param_end, param_loop)
                print(cmd)
                script.write(cmd + '\n')
Пример #7
0
    def __init__(self, data):
        """Build a key/value store backed by a dynamically named pony entity.

        :param data: configuration dict; recognised keys (with defaults):
            ``type`` ('postgres'), ``user`` ('dameng'), ``password`` ('hello'),
            ``host`` ('localhost'), ``name`` ('store'), ``table`` ('Store').
        """
        db_type = data.get('type', 'postgres')
        db_user = data.get('user', 'dameng')
        db_password = data.get('password', 'hello')
        db_host = data.get('host', 'localhost')
        db_name = data.get('name', 'store')

        self.db = Database(db_type,
                           user=db_user,
                           password=db_password,
                           host=db_host,
                           database=db_name)
        # Attribute dict for the entity class constructed below via type().
        body = dict(__doc__='docstring',
                    create_at=Required(datetime,
                                       sql_default='CURRENT_TIMESTAMP',
                                       default=lambda: datetime.utcnow()),
                    update_at=Required(datetime,
                                       sql_default='CURRENT_TIMESTAMP',
                                       default=lambda: datetime.utcnow()),
                    key=Required(str, index=True, unique=True),
                    value=Required(Json, volatile=True))

        table = data.get("table", "Store")
        # Capitalize the first letter only — presumably to satisfy an
        # entity/class naming convention; confirm against pony's rules.
        if table[0].islower():
            table = table[0].upper() + table[1:]

        # Create the entity class dynamically so the table name is configurable.
        self.Store = type(table, (self.db.Entity, ), body)
        self.db.generate_mapping(create_tables=True, check_tables=True)
        self.ids = set()  # id cache; its use is not visible in this block
Пример #8
0
def define_db(*args, **kwargs):
    """Create, bind and map a pony Database; all arguments go to ``bind``."""
    set_sql_debug(app.debug, True)
    database = Database()
    database.bind(*args, **kwargs)
    define_entities(database)
    database.generate_mapping(create_tables=True)
    return database
Пример #9
0
    def __init__(
            self,
            filename: Union[str, Path],  # path to the database file
            actual_version: Union[
                Version, str] = None,  # current version (from the sources)
            migrations: dict = None,  # pool of migrations
    ):
        """Prepare a migration run on a temporary copy of the database.

        When the stored schema version already equals ``actual_version``
        no migrator is created and "No migration needed" is logged.
        """
        self.actual_version = (actual_version if isinstance(
            actual_version, Version) else Version(actual_version))

        self.old_file = Path(filename)  # the database to migrate

        # Work on a temporary copy so the original file stays untouched
        # until the migration is known to succeed.
        tmp = tempfile.NamedTemporaryFile(suffix=".sqlite", delete=False)
        tmp.close()
        self.tmp_file = Path(tmp.name)
        shutil.copy(self.old_file, self.tmp_file)  # duplicate the DB

        # migration tooling
        self.tmp_db = Database(provider="sqlite", filename=tmp.name)
        self.schema = Schema(file=self.tmp_db)
        if self.schema.version == self.actual_version:
            # Early exit: self.migrator is deliberately left unset.
            logger.info(f"version {self.actual_version}: No migration needed")
            return
        self.migrator = Migrator(self.tmp_db, self.actual_version, migrations)
        logger.info(
            f"starting migrations from version {self.schema.version} to {self.actual_version}"
        )
Пример #10
0
def install(app, db=None):
    """Install pony database support on *app*.

    Registers the database CLI, binds the db from ``settings.db.url`` when
    the app becomes ready, and tears the provider down on shutdown.

    :param app: application object exposing ``cliarguments``, ``when``
        and ``settings``.
    :param db: optional pre-built Database; a fresh one is created if None.
    :return: the Database that was installed (also set as ``app.db``).
    """
    app.cliarguments.append(DatabaseCLI)
    if db is None:
        db = Database()

    app.db = db

    @app.when
    def ready(app):
        settings = app.settings

        if 'db' not in settings:
            raise ValueError(
                'Please provide db.url configuration entry, for example: ' \
                'postgres://:@/dbname'
            )

        url = uri.parse(settings.db.url)
        db.bind(**url)
        db.generate_mapping(create_tables=True)

    @app.when
    def shutdown(app):
        # Disconnect and clear provider/schema state — presumably so the
        # Database object can be bound again later; confirm with pony docs.
        app.db.disconnect()
        if app.db.provider is not None:
            app.db.provider.disconnect()
            app.db.provider = None

        app.db.schema = None

    return db
Пример #11
0
def pony_setup(request, app, tmpdir, realdburl):
    """Pytest fixture: build a PonyUserDatastore with User/Role entities.

    Binds to a real database when *realdburl* is given (and registers a
    finalizer that tears it down), otherwise to in-memory sqlite.
    """
    pytest.importorskip("pony")
    from pony.orm import Database, Optional, Required, Set
    from pony.orm.core import SetInstance

    # The datastore expects list-like .append on relationship collections;
    # pony Sets only provide .add, so alias it (global monkey-patch).
    SetInstance.append = SetInstance.add
    db = Database()

    class Role(db.Entity):
        name = Required(str, unique=True)
        description = Optional(str, nullable=True)
        users = Set(lambda: User)

    class User(db.Entity):
        email = Required(str)
        fs_uniquifier = Required(str, nullable=False)
        username = Optional(str)
        security_number = Optional(int)
        password = Optional(str, nullable=True)
        last_login_at = Optional(datetime)
        current_login_at = Optional(datetime)
        tf_primary_method = Optional(str, nullable=True)
        tf_totp_secret = Optional(str, nullable=True)
        tf_phone_number = Optional(str, nullable=True)
        us_totp_secrets = Optional(str, nullable=True)
        us_phone_number = Optional(str, nullable=True)
        last_login_ip = Optional(str)
        current_login_ip = Optional(str)
        login_count = Optional(int)
        active = Required(bool, default=True)
        confirmed_at = Optional(datetime)
        roles = Set(lambda: Role)

        def has_role(self, name):
            # .copy() materializes the pony Set before building the name set.
            return name in {r.name for r in self.roles.copy()}

    if realdburl:
        # Split the URL into the pieces pony's bind() expects.
        db_url, db_info = _setup_realdb(realdburl)
        pieces = urlsplit(db_url)
        db.bind(
            provider=pieces.scheme.split("+")[0],
            user=pieces.username,
            password=pieces.password,
            host=pieces.hostname,
            database=pieces.path[1:],
        )
    else:
        app.config["DATABASE"] = {"name": ":memory:", "engine": "pony.SqliteDatabase"}
        db.bind("sqlite", ":memory:", create_db=True)

    db.generate_mapping(create_tables=True)

    def tear_down():
        # Only the real database needs teardown; :memory: vanishes on its own.
        if realdburl:
            _teardown_realdb(db_info)

    request.addfinalizer(tear_down)

    return PonyUserDatastore(db, User, Role)
Пример #12
0
    def __init__(self, token):
        """Create the database, configure root logging and build the handler."""
        logging.basicConfig(
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=logging.INFO)
        self.db = Database()
        self.handler = Handler(token)
Пример #13
0
    def sector_map(self):
        """Page through a sector's stock prices; optionally rebuild the code map."""
        sector = self.app.pargs.sector
        self.connect_ebest()

        save = self.app.pargs.save
        if save:
            # Replace the stored mapping for this sector before re-filling it.
            data_conf = self.app.config.get('moontrader', 'data')
            sector_db = Database()
            db_adapter.define_korea_stock(sector_db)
            db_adapter.bind(sector_db, data_conf['dir'], DB_FILE_NAME)
            db_adapter.init(sector_db)
            db_adapter.delete_stock_sector_code_map(sector)

        start_code = ''
        while True:
            response = self.ebest.stock_prices_by_sector(sector, start_code)
            rows = response.content
            self.app.log.info(rows)

            if save:
                db_adapter.insert_stock_sector_code_map(sector, rows)

            # Fewer than 40 rows means the final page was reached.
            if len(rows) < 40:
                break
            start_code = rows[-1]['shcode']
            time.sleep(0.2)
Пример #14
0
def perform_ponyorm_benchmark(database, conn_str, args, benchmark_result):
    """Time CRUD operations through pony ORM and record them in benchmark_result.

    NOTE: Python 2 code — uses ``unicode`` and the ``__builtin__`` module.

    :param database: backend name, also used as a result-dict key.
    :param conn_str: connection string parsed into host/user/password/db.
    :param args: provides the test data and ``num_repeats``.
    :param benchmark_result: nested dict updated in place with timings.
    """
    host, user, password, db = get_metadata_from_conn_str(conn_str)
    # ``db`` is rebound from the database name to the Database instance.
    db = Database(database, host=host, user=user, passwd=password, db=db)

    class Person(db.Entity):
        name = Required(unicode)
        addresses = Set("Address")

    class Address(db.Entity):
        address = Required(unicode)
        person = Required(Person)

    db.generate_mapping(create_tables=True)

    test_data = test_data_from_args(args)
    assert test_data

    if 'ponyorm' not in benchmark_result:
        benchmark_result['ponyorm'] = dict()
    if database not in benchmark_result['ponyorm']:
        benchmark_result['ponyorm'][database] = dict()
    test_aspects = ['insert', 'read', 'update', 'delete']
    # HACK: dump locals into builtins so the timeit statement strings below
    # can resolve test_data/Person/Address/db by name.
    __builtin__.__dict__.update(locals())
    timeit_funcs = [
        '_{0}_{1}_data(test_data, Person, Address, db)'.format(
            'ponyorm', test_aspect) for test_aspect in test_aspects
    ]
    for index, tf in enumerate(timeit_funcs):
        rst = timeit.timeit(tf, number=args.num_repeats)
        benchmark_result['ponyorm'][database][test_aspects[index]] = rst
Пример #15
0
 def save_candle_data(self, rows, code, need_init=False):
     """Persist candle rows for *code*, binding the candle DB on first use."""
     if need_init:
         # One-time setup: define, bind and initialise the candle store.
         conf = self.app.config.get('moontrader', 'data')
         candle_db = Database()
         db_adapter.define_korea_stock(candle_db)
         db_adapter.bind(candle_db, conf['dir'], DB_FILE_NAME)
         db_adapter.init(candle_db)
     db_adapter.save_stock_candles(rows, code)
Пример #16
0
 def save_candle_data(self, rows, need_init=False):
     """Persist DART disclosure rows, binding the DB on first use."""
     if need_init:
         # One-time setup: define, bind and initialise the DART store.
         conf = self.app.config.get('moontrader', 'data')
         dart_db = Database()
         db_adapter.define_dart(dart_db)
         db_adapter.bind(dart_db, conf['dir'], DB_FILE_NAME)
         db_adapter.init(dart_db)
     db_adapter.save_dart_disclosures(rows)
Пример #17
0
def test_version_set():
    """Setting Schema.version must round-trip through sqlite's user_version pragma."""
    database = Database(provider="sqlite", filename=":memory:")
    schema = Schema(file=database)
    assert schema.version == Version("0")  # a fresh db starts at version 0
    schema.version = Version("12.34.56")
    with db_session:
        # 12.34.56 is packed into the integer pragma as 123456.
        assert schema.db.execute("PRAGMA user_version").fetchone()[0] == 123456
    assert schema.version == Version("12.34.56")
Пример #18
0
def _bind(db_file: Path, create_tables=False) -> Database:
    """Open *db_file* as a sqlite-backed pony Database with entities mapped."""
    database = Database()
    _define_entities(database)
    database.bind(provider="sqlite", filename=str(db_file))
    database.generate_mapping(create_tables=create_tables)
    return database
Пример #19
0
def model():
    """Build an in-memory sqlite-backed entity class mixing in NapMixin."""
    database = Database()

    class TestModel(database.Entity, NapMixin):
        pass

    database.bind('sqlite', ':memory:')
    database.generate_mapping(create_tables=True)
    return TestModel
Пример #20
0
def init_core(args):
    """Create the CGRdb config database tables and required postgres extensions."""
    config_db = Database()
    LazyEntityMeta.attach(config_db, database='CGRdb_config')
    config_db.bind('postgres', **args.connection)
    config_db.generate_mapping(create_tables=True)

    with db_session:
        # Extensions required by CGRdb's stored routines.
        for extension in ('intarray', 'plpython3u'):
            config_db.execute(f'CREATE EXTENSION IF NOT EXISTS {extension}')
Пример #21
0
 def __init__(self):
     """Wire up the core managers and register this object as the singleton."""
     self.connection_manager = ConnectionManager(self)
     self.plugin_manager = PluginManager(self)
     self.database_identifier = DatabaseIdentifier(self)
     self.connection = None  # filled in once a connection is established
     self.export = None
     self.output = None
     self.db = Database()
     # Expose this instance globally as the singleton access point.
     Core.instance = self
Пример #22
0
 def test_migrations_same_version_is_cancelled(self, tmpfile, caplogger):
     """No migrator is built when stored and target versions already match.

     Fix: the first parameter was misspelled ``selfn``; it only worked
     because the instance is passed positionally.
     """
     ddb = Database(provider="sqlite", filename=str(tmpfile))
     init_onetable(ddb)
     s1 = Schema(ddb)
     s1.version = "1.3.4"
     m = MakeMigrations(tmpfile, Version("1.3.4"), migrations)
     assert not hasattr(m, "migrator")  # __init__ returned early
     m(lambda: True, lambda: True)
     assert "No migration needed" in caplogger.read()
Пример #23
0
 def test_restore_backup(self, tmpfile):
     """A failing migration must leave the original file byte-identical."""
     source_db = Database(provider="sqlite", filename=str(tmpfile))
     init_onetable(source_db)
     source_db.disconnect()
     original_bytes = tmpfile.read_bytes()
     # "AZEZRT ERTERT" is not valid SQL, so the migration is expected to fail.
     make = MakeMigrations(tmpfile, Version("1.3.2"),
                           {"1.3.2": "AZEZRT ERTERT"})
     assert not make(lambda x: True, lambda x: True)
     assert tmpfile.read_bytes() == original_bytes
Пример #24
0
def memory_db():
    """Create an initialised in-memory database with logging suppressed during setup."""
    from mycartable.database import init_database

    logger.disable("")  # silence everything while the schema is built
    database = init_database(Database())
    add_database_to_types(database)
    logger.enable("")
    return database
Пример #25
0
def main():
    """Demo entry point: get-or-create a Person in an in-memory sqlite db."""
    database = Database(provider='sqlite', filename=':memory:')
    appdb.bind_to(database)
    database.generate_mapping(create_tables=True)

    with db_session():
        # get_or_create returns the entity plus a created flag.
        john, created = get_or_create(Person, {'name': 'John Swag'},
                                      {'type': PersonType.Student})
        print('Hello, {} ({}, created: {})'.format(john.name, john.type,
                                                   created))
Пример #26
0
def init_core(args):
    """Create the CGRdb_config database mapping on the given postgres server."""
    config_db = Database()
    LazyEntityMeta.attach(config_db, database='CGRdb_config')
    connection = dict(user=args.user,
                      password=args.password,
                      host=args.host,
                      database=args.base,
                      port=args.port)
    config_db.bind('postgres', **connection)
    config_db.generate_mapping(create_tables=True)
Пример #27
0
def main_init_database(filename=None, prod=False):
    """Initialise (and if needed migrate) the application database.

    In production the db path comes from QSettings; in dev mode a throw-away
    sqlite file in the temp dir is used and seeded with fake data.

    :param filename: overwritten in both branches; kept for signature
        compatibility.
    :param prod: production-mode switch.
    :return: the initialised database (``mycartable.database.db``).
    :raises SystemError: when a required migration fails.
    """
    # initialise settings first
    settings = QSettings()
    logger.info(f"ficher settings : {settings.fileName()}")
    newdb = Database()
    create_db = False
    import mycartable.database

    if prod:
        from mycartable.defaults.files_path import ROOT_DATA

        filename = settings.value("General/ddb_path",
                                  ROOT_DATA / "mycartable.ddb")
        create_db = True
    else:
        # Dev mode: throw-away sqlite file under the system temp directory.
        QStandardPaths.setTestModeEnabled(True)
        filename = Path(tempfile.gettempdir()) / "devddbmdk.sqlite"
        create_db = True

    from mycartable.migrations.migrations import make_migrations

    # Only an existing on-disk file can need migrating.
    if filename != ":memory:" and Path(filename).is_file():
        migrate_res = make_migrations(filename)
        if not migrate_res:
            from mycartable.defaults.files_path import LOGFILE

            raise SystemError(f"voir dans {LOGFILE}")

    mycartable.database.db = newdb

    db = mycartable.database.init_database(newdb,
                                           filename=filename,
                                           create_db=create_db)

    if not prod:
        from tests.factory import Faker

        with db_session:
            db.Configuration.add("annee", 2019)

        # Best-effort seeding of fake content for development.
        try:
            f = Faker(db)
            m = f.f_matiere(groupe=2019)
            ac = f.f_activite(matiere=m)
            p = f.f_page(activite=ac)
            f.f_textSection(page=p)
        except Exception:
            # Fix: the bare except also trapped SystemExit/KeyboardInterrupt;
            # seeding failures are still deliberately ignored.
            pass

    return mycartable.database.db
Пример #28
0
def cli(ctx: click.Context, log_level, config_path: Path):
    """Root command: configure logging and attach the database to the context."""
    set_root_logger_level(log_level)

    if ctx.invoked_subcommand == 'csv':
        return  # the csv subcommand does not need the database

    with config_path.open('r') as fp:
        config = json.load(fp)
        ctx.meta.update(config)

    db = Database(**config['db'])
    init_orm(db)
    ctx.obj = db
Пример #29
0
def _create_db_schemata(database_file, *schemata):
    """Create one pony entity per sheets schema and map them onto sqlite.

    Returns the Database plus a mapping of schema -> generated entity class.
    """
    from pony.orm import Database
    db = Database('sqlite', database_file)
    # type() is called for its side effect of registering each generated
    # entity class with the ORM before generate_mapping runs.
    models = {
        schema: type(schema.__name__, (db.Entity, ),
                     _pony_schema_from_sheets_schema(schema))
        for schema in schemata
    }
    db.generate_mapping(create_tables=True)
    return db, models
Пример #30
0
class DataFactory:
    """Collects hitokoto quotes into a postgres table, deduplicated by hash id.

    NOTE: binding, mapping and the initial row count all execute at class
    definition time — importing this module connects to postgres.
    """
    # Shift xxh64's unsigned digest into signed 64-bit range — presumably
    # to fit a postgres int8 primary key; confirm.
    _INT64 = 2 ** 63 - 1
    _config = FrozenConfig()
    _db = Database()
    _db.bind(provider='postgres', host=_config.postgres.host,
             port=_config.postgres.port,
             user=_config.postgres.user,
             passwd=_config.postgres.password,
             database='api')

    class Hitokoto(_db.Entity):
        """
        Table structure in the ORM; the attribute names here become the
        column names stored in the database.
        Do not override str or repr for this class, because pony calls
        them when persisting.
        """
        id = PrimaryKey(int, size=64)
        length = Required(int)
        info = Required(Json)
        origin = Required(str)

    _db.generate_mapping(create_tables=True)
    with db_session:
        # Number of rows already stored; used only in progress messages.
        _amount = Hitokoto.select(lambda p: p.id).count()

    @classmethod
    @db_session
    def _insert(cls, id_: int, hitokoto: str, source: str, origin: str):
        """Insert one quote; a duplicate hash id is reported, not raised."""
        try:
            cls.Hitokoto(id=id_, info=dict(hitokoto=hitokoto, source=source),
                         length=len(hitokoto), origin=origin)
            commit()
        except TransactionIntegrityError:
            # Primary-key collision: this quote hashes to an existing id.
            has_duplicated = f'已重复({cls._amount}):{id_, hitokoto}'
            print(reprlib.repr(has_duplicated))
        except Exception as e:
            import traceback
            traceback.print_exc()
        else:
            has_inserted = f'已插入({cls._amount}):{id_, hitokoto}'
            cls._amount += 1
            print(reprlib.repr(has_inserted))

    @classmethod
    def fmt_data(cls, c: Dict[str, str], url: str) -> None:
        """Normalise a raw record and insert it when both source and origin exist."""
        hitokoto_tmp = c.get('hitokoto') or c.get('text') or c.get('HITO')
        source = c.get('source') or c.get('from') or c.get('SOURCE')
        hitokoto = sub(r'[\xa0-\xad]', '', hitokoto_tmp)
        origin = url.split('.')[1]
        # Strip punctuation and other noise characters so they do not
        # influence the hash computation.
        fmt_hitokoto = sub(
            r'[,,。.“” …!、!?:’;\\‘?「/」—-♬《》⋯『』()]', '', hitokoto)
        id_ = xxh64(fmt_hitokoto).intdigest() - cls._INT64
        source and origin and cls._insert(id_, hitokoto, source, origin)