Exemplo n.º 1
0
def insert_new_rows(data, homolog):
	"""Bulk-insert homolog gene rows into the ML table.

	Args:
		data: iterable of rows where row[0] is the GI identifier, row[1] the
			thousand-best-BNS-selected-features value and row[2] the
			1000-feature confidence value.
		homolog: homolog identifier stored alongside every row.
	"""
	# NOTE(review): the original parameter name was the censored token
	# 'h**o' — a Python syntax error. Renamed to 'homolog' to match the
	# 'homolog' dictionary key used below.
	genes = [{'homolog': homolog,
	          'gi': row[0],
	          'thous_best_bns_selected_feats': row[1],
	          'confidence_1000_feats': row[2]}
	         for row in data]
	iq = InsertQuery(ML, rows=genes)
	iq.execute()
Exemplo n.º 2
0
def write_currencies(reader):
    '''
    Import Currencies from a csv DictReader
    '''
    rows = []
    for entry in reader:
        name = entry['Currency']
        # Symbol is the trimmed currency name, truncated to 4 characters.
        rows.append({'name': name, 'symbol': name.strip()[:4]})
    InsertQuery(models.Currency, rows=rows).execute()
Exemplo n.º 3
0
def verify_database_schema(db):
    """Ensure the Versions table exists and the schema matches the code.

    Creates the Versions table on first run (migrating legacy databases from
    version 0), migrates forward when the stored version is behind, and
    aborts when the database is newer than the code supports.
    """
    if Versions.table_exists():
        db_ver = Versions.get(Versions.key == 'schema_version').val

        if db_ver < db_schema_version:
            database_migrate(db, db_ver)
        elif db_ver > db_schema_version:
            log.error(
                "Your database version (%i) appears to be newer than the code supports (%i).",
                db_ver, db_schema_version)
            log.error(
                "Please upgrade your code base or drop all tables in your database."
            )
            sys.exit(1)
        return

    db.create_tables([Versions])

    if ScannedLocation.table_exists():
        # Versions table didn't exist, but there were tables. This must mean
        # the user is coming from a database that existed before we started
        # tracking the schema version. Perform a full upgrade from 0.
        InsertQuery(Versions, {
            Versions.key: 'schema_version',
            Versions.val: 0
        }).execute()
        database_migrate(db, 0)
    else:
        # Fresh database: record the current schema version.
        InsertQuery(Versions, {
            Versions.key: 'schema_version',
            Versions.val: db_schema_version
        }).execute()
Exemplo n.º 4
0
def write_publishers(reader):
    '''
    Import Publisher from a csv DictReader
    '''
    rows = []
    for entry in reader:
        rows.append({'name': entry['Publisher'].strip()})
    InsertQuery(models.Publisher, rows=rows).execute()
Exemplo n.º 5
0
def write_usergroups(reader):
    '''
    Import User Groups from a csv DictReader serving a file of format:
    ['GroupID', 'GroupName']
    '''
    rows = []
    for entry in reader:
        # GroupID doubles as the group's display position.
        rows.append({'name': entry['GroupName'], 'position': entry['GroupID']})
    InsertQuery(models.UserGroup, rows=rows).execute()
Exemplo n.º 6
0
 def process_item(self, item, spider):
     """Persist a scraped item's internal values into the Patents table.

     Errors are printed rather than raised so the pipeline keeps running.
     Always returns the item so downstream pipelines receive it.
     """
     values = item.__dict__.get('_values')
     print(values)
     try:
         InsertQuery(Patents, values).execute()
     except Exception as err:
         print(err)
     return item
Exemplo n.º 7
0
def write_publishplaces(reader):
    '''
    Import PublishPlaces from a csv DictReader
    '''
    places = []
    for entry in reader:
        places.append({'name': entry['Place of Publication'].strip()})
    InsertQuery(models.PublishPlace, rows=places).execute()
Exemplo n.º 8
0
 def test_update(self):
     """UPDATE queries emit the expected SQL and each touch exactly one row."""
     InsertQuery(Blog, title='a').execute()

     query = UpdateQuery(Blog, title='A').where(id=1)
     self.assertEqual(query.sql(), ('UPDATE blog SET title=? WHERE id = ?', ['A', 1]))
     self.assertEqual(query.execute(), 1)

     InsertQuery(Blog, title='b').execute()

     query = UpdateQuery(Blog, title='B').where(id=2)
     self.assertEqual(query.sql(), ('UPDATE blog SET title=? WHERE id = ?', ['B', 2]))
     self.assertEqual(query.execute(), 1)

     titles = [blog.title for blog in SelectQuery(Blog).order_by('id')]
     self.assertEqual(titles, ['A', 'B'])
Exemplo n.º 9
0
 def test_insert(self):
     """INSERT queries emit the expected SQL and return sequential row ids."""
     for row_id, value in ((1, 'a'), (2, 'b')):
         query = InsertQuery(Blog, title=value)
         self.assertEqual(query.sql(),
                          ('INSERT INTO blog (title) VALUES (?)', [value]))
         self.assertEqual(query.execute(), row_id)
Exemplo n.º 10
0
def write_locations(reader):
    '''
    Import CampusLocations from a csv DictReader
    '''
    locations = []
    for row in reader:
        # PreventBorrow is expected to be "0"/"1"; anything unparseable
        # falls back to allowing borrowing.
        try:
            prevent = bool(int(row['PreventBorrow'].strip()))
        except ValueError:
            prevent = False
        locations.append({'name': row['Location'].strip(),
                          'prevent_borrowing': prevent})
    InsertQuery(models.CampusLocation, rows=locations).execute()
Exemplo n.º 11
0
def init_database(args, app):
    """Connect to MySQL, register it with Flask, and bring the schema current.

    Returns the connected database handle.
    """
    log.info('Connecting to MySQL database on %s:%i...',
             args.db_host, args.db_port)
    db = MyRetryDB(
        args.db_name,
        user=args.db_user,
        password=args.db_pass,
        host=args.db_host,
        port=args.db_port,
        max_connections=args.db_max_connections,
        stale_timeout=300,
        charset='utf8mb4')
    app.config['DATABASE'] = db
    flaskDb.init_app(app)
    db.connect()

    if not ProxyPool.table_exists():
        # Brand-new database: create everything and stamp the current version.
        create_tables(db)
        InsertQuery(Version, {Version.key: 'schema_version',
                              Version.val: db_schema_version}).execute()
        old_schema_version = db_schema_version
    elif not Version.table_exists():
        # Tables exist but the version table doesn't: pre-versioning install.
        old_schema_version = 1
    else:
        old_schema_version = Version.get(Version.key == 'schema_version').val

    if old_schema_version < db_schema_version:
        migrate_database(db, old_schema_version)

    # Last, fix database encoding
    verify_table_encoding(args, db)

    log.info('MySQL database ready.')

    return db
Exemplo n.º 12
0
def add_egg_tracking(id, monster, distance):
    """Register a raid-egg tracking row for a human, then close the connection."""
    row = {
        raid.human_id: id,
        raid.pokemon_id: monster,
        raid.egg: 1,  # marks this tracking entry as an egg, not a hatched raid
        raid.distance: distance,
    }
    InsertQuery(raid, row).execute()
    db.close()
Exemplo n.º 13
0
def migrate_database(db, old_ver):
    """Apply incremental schema migrations from old_ver up to db_schema_version."""
    log.info('Detected database version {}, updating to {}...'.format(old_ver, db_schema_version))
    migrator = MySQLMigrator(db)

    if old_ver < 2:
        # Widen varchar columns before structural changes.
        migrate_varchar_columns(db, Account.username, Account.password,
                                Account.email, Account.system_id,
                                Account.tutorial_state)
        migrate_varchar_columns(db, Event.entity_id, Event.description)

        db.create_table(Version)
        InsertQuery(Version, {Version.key: 'schema_version',
                              Version.val: 1}).execute()

        migrate(
            migrator.add_column('account', 'lures',
                                SmallIntegerField(null=True)),
            migrator.rename_column('event', 'type', 'entity_type'))

    if old_ver < 3:
        migrate(
            migrator.add_column('account', 'assigned_at',
                                DateTimeField(index=True, null=True)))

    if old_ver < 4:
        migrate(
            migrator.add_column('account', 'reach_lvl30_datetime',
                                DateTimeField(index=True, null=True)))

    # Stamp the database with the now-current schema version.
    Version.update(val=db_schema_version).where(
        Version.key == 'schema_version').execute()
    log.info("Done migrating database.")
Exemplo n.º 14
0
def init_database(app):
    """Connect to MySQL via config, register with Flask, and migrate the schema.

    Returns the connected database handle.
    """
    log.info('Connecting to MySQL database on %s:%i...',
             cfg_get('db_host'), cfg_get('db_port'))
    db = MyRetryDB(
        cfg_get('db_name'),
        user=cfg_get('db_user'),
        password=cfg_get('db_pass'),
        host=cfg_get('db_host'),
        port=cfg_get('db_port'),
        max_connections=cfg_get('db_max_connections'),
        stale_timeout=300,
        charset='utf8mb4')
    app.config['DATABASE'] = db
    flaskDb.init_app(app)
    db.connect()

    if not Account.table_exists():
        # Fresh database: build tables and stamp the current schema version.
        create_tables(db)
        InsertQuery(Version, {Version.key: 'schema_version',
                              Version.val: db_schema_version}).execute()
        old_schema_version = db_schema_version
    elif not Version.table_exists():
        # Tables exist without version tracking: assume the first version.
        old_schema_version = 1
    else:
        old_schema_version = Version.get(Version.key == 'schema_version').val

    if old_schema_version < db_schema_version:
        migrate_database(db, old_schema_version)

    # Last, fix database encoding
    verify_table_encoding(db)

    return db
Exemplo n.º 15
0
def cache_insert(id, time, col):
    """Insert a timestamp into cache column `col` for `id`; on a duplicate
    key, update the existing row in place instead."""
    try:
        if args.debug:
            log.debug('updating {} cache'.format(col))
        row = {cache.id: id, col: time}
        InsertQuery(cache, row).execute()
    except IntegrityError:
        # Row already cached — update it.
        # NOTE(review): db.close() runs only on this duplicate path; confirm
        # whether it was meant to run unconditionally.
        cache.update(**{col: time}).where(cache.id == id).execute()
        db.close()
Exemplo n.º 16
0
def add_tracking(id, monster, distance, iv):
    """Register a monster tracking row for a human, then close the connection."""
    row = {
        monsters.human_id: id,
        monsters.pokemon_id: monster,
        monsters.distance: distance,
        monsters.min_iv: iv,
    }
    InsertQuery(monsters, row).execute()
    db.close()
Exemplo n.º 17
0
def bulk_upsert(cls, data):
    """Upsert the values of `data` into model `cls` in batches of 100 rows.

    Args:
        cls: peewee model class to insert into.
        data: mapping whose values are row dicts for `cls`.
    """
    # Materialize the values first: on Python 3, dict_values does not
    # support slicing, so the original `data.values()[i:j]` raised TypeError.
    rows = list(data.values())
    num_rows = len(rows)
    step = 100

    for start in range(0, num_rows, step):
        end = min(start + step, num_rows)
        log.debug("Inserting items {} to {}".format(start, end))
        InsertQuery(cls, rows=rows[start:end]).upsert().execute()
Exemplo n.º 18
0
def spawn_geocoding(id, addr, lat, lon):
    """Store a reverse-geocoded address for a spawn point, then close the DB."""
    row = {
        geocoded.id: id,
        geocoded.type: 'spawn',
        geocoded.address: addr,
        geocoded.latitude: lat,
        geocoded.longitude: lon,
    }
    InsertQuery(geocoded, row).execute()
    db.close()
Exemplo n.º 19
0
    def test_delete(self):
        """DELETE with and without a WHERE clause removes the expected rows."""
        for value in ('a', 'b', 'c'):
            InsertQuery(Blog, title=value).execute()

        # Targeted delete removes exactly the matching row.
        query = DeleteQuery(Blog).where(title='b')
        self.assertEqual(query.sql(), ('DELETE FROM blog WHERE title = ?', ['b']))
        self.assertEqual(query.execute(), 1)

        remaining = [blog.title for blog in SelectQuery(Blog).order_by('id')]
        self.assertEqual(remaining, ['a', 'c'])

        # Unqualified delete clears the table.
        query = DeleteQuery(Blog)
        self.assertEqual(query.sql(), ('DELETE FROM blog', []))
        self.assertEqual(query.execute(), 2)

        remaining = [blog.title for blog in SelectQuery(Blog).order_by('id')]
        self.assertEqual(remaining, [])
Exemplo n.º 20
0
def write_users(reader):
    '''
    Import Users from a csv DictReader
    '''
    users = []
    for row in reader:
        group_name = row['GroupName'].strip()
        # Look up the user's group, creating it on first sight.
        try:
            group = models.UserGroup.get(name=group_name)
        except models.db.DoesNotExist:
            group = models.UserGroup.objects.create(name=group_name)

        name = row['UserName'].strip()
        users.append({
            'email': row['Email'].strip() if row['Email'] else None,
            'name': name,
            # Username is "<name>.<group>" lowercased, capped at 32 chars.
            'username': ''.join([name, '.', group.name]).lower()[:32],
            'group': group,
        })
    InsertQuery(models.User, rows=users).execute()
Exemplo n.º 21
0
def add_special(id, specialty, distance):
    """Register a 'special' tracking row for a human, then close the DB.

    Uses the sentinel pokemon_id 9000 and a fixed min IV of 100.
    """
    row = {
        monsters.human_id: id,
        monsters.pokemon_id: 9000,
        monsters.distance: distance,
        monsters.min_iv: 100,
        monsters.special: specialty,
    }
    InsertQuery(monsters, row).execute()
    db.close()
Exemplo n.º 22
0
def update_weather_path(id, path):
    """Attach a weather area path to a geocoded row and ensure the weather
    row for that area exists (duplicates are tolerated)."""
    geocoded.update(weather_path=path).where(geocoded.id == id).execute()
    try:
        row = {
            weather.area: path,
            weather.updated: 0,  # 0 marks the area as never refreshed
        }
        InsertQuery(weather, row).execute()
    except IntegrityError:
        # Weather row already present — nothing to do.
        if args.debug:
            log.debug('tried to update weather where it already exists')
    db.close()
Exemplo n.º 23
0
def verify_database_schema(db):
    """Create the Versions table on first run; otherwise migrate from the
    stored schema version."""
    if Versions.table_exists():
        db_ver = Versions.get(Versions.key == 'schema_version').val

        database_migrate(db, db_ver)
    else:
        db.create_tables([Versions])
        InsertQuery(Versions, {
            Versions.key: 'schema_version',
            Versions.val: db_schema_version
        }).execute()
Exemplo n.º 24
0
def save_geocoding(id, team, address, gym_name, description, url, lat, lon):
    """Store full raid-gym geocoding details, then close the connection."""
    row = {
        geocoded.id: id,
        geocoded.type: 'raid',
        geocoded.team: team,
        geocoded.address: address,
        geocoded.gym_name: gym_name,
        geocoded.description: description,
        geocoded.url: url,
        geocoded.latitude: lat,
        geocoded.longitude: lon,
    }
    InsertQuery(geocoded, row).execute()
    db.close()
Exemplo n.º 25
0
 def test_insert(self):
     """Each INSERT produces the expected SQL and the next sequential id."""
     expectations = [('a', 1), ('b', 2)]
     for title, expected_id in expectations:
         query = InsertQuery(Blog, title=title)
         self.assertEqual(query.sql(),
                          ('INSERT INTO blog (title) VALUES (?)', [title]))
         self.assertEqual(query.execute(), expected_id)
Exemplo n.º 26
0
    def test_insert(self):
        """Two inserts yield the expected SQL and ids 1 then 2."""
        for expected_id, title in enumerate(("a", "b"), start=1):
            query = InsertQuery(Blog, title=title)
            self.assertEqual(
                query.sql(),
                ("INSERT INTO blog (title) VALUES (?)", [title]))
            self.assertEqual(query.execute(), expected_id)
Exemplo n.º 27
0
def bulk_upsert(cls, data):
    """Upsert `data` values into model `cls` in batches of 120, retrying a
    failed batch.

    Args:
        cls: peewee model class to insert into.
        data: mapping whose values are row dicts for `cls`.
    """
    # Materialize the values: on Python 3, dict_values cannot be sliced,
    # so the original `data.values()[i:j]` raised TypeError.
    rows = list(data.values())
    num_rows = len(rows)
    i = 0
    step = 120

    while i < num_rows:
        log.debug('Inserting items %d to %d', i, min(i + step, num_rows))
        try:
            InsertQuery(cls, rows=rows[i:min(i + step, num_rows)]).upsert().execute()
        except Exception as e:
            # NOTE(review): retries are unbounded — a permanently failing
            # batch loops forever; consider adding a retry cap.
            log.warning('%s... Retrying', e)
            continue

        i += step
Exemplo n.º 28
0
    async def _insert_with_loop(self):
        """Insert each pending row one at a time, awaiting every query.

        Returns:
            The list of generated ids when id collection was requested,
            otherwise the id of the last inserted row (None if no rows).
        """
        collected = []
        last_id = None
        want_ids = self._return_id_list

        for row in self._rows:
            query = (InsertQuery(self.model_class, row)
                     .upsert(self._upsert))
            last_id = await query.execute()
            if want_ids:
                collected.append(last_id)

        return collected if want_ids else last_id
Exemplo n.º 29
0
def bulk_upsert_old(cls, data, db):
    """Batch-upsert the values of `data` into model `cls` inside one
    transaction, 250 rows at a time.

    A failing batch is retried up to `max_fails` times with a 1s pause,
    unless the error message looks unrecoverable, in which case the data is
    dumped to the log and the batch is skipped.

    NOTE(review): this appears to be Python 2 code — `data.values()[...]`
    slicing fails on Python 3 (dict_values is not sliceable), and
    `filter(...)` truthiness changes meaning on Python 3 (a filter object is
    always truthy, so every error would be treated as unrecoverable).
    Confirm the target interpreter before reuse.
    """
    num_rows = len(data.values())
    i = 0
    step = 250
    max_fails = 3
    fails = 0

    with db.atomic():
        while i < num_rows:
            log.debug('Inserting items %d to %d.', i, min(i + step, num_rows))
            try:
                # Turn off FOREIGN_KEY_CHECKS on MySQL, because apparently it's
                # unable to recognize strings to update unicode keys for
                # foreign key fields, thus giving lots of foreign key
                # constraint errors.
                db.execute_sql('SET FOREIGN_KEY_CHECKS=0;')
                # Use peewee's own implementation of the insert_many() method.
                InsertQuery(cls, rows=data.values()
                            [i:min(i + step, num_rows)]).upsert().execute()
                db.execute_sql('SET FOREIGN_KEY_CHECKS=1;')

            except Exception as e:
                # If there is a DB table constraint error, dump the data and
                # don't retry.
                #
                # Unrecoverable error strings:
                unrecoverable = [
                    'constraint', 'has no attribute',
                    'peewee.IntegerField object at'
                ]
                has_unrecoverable = filter(lambda x: x in str(e),
                                           unrecoverable)
                if has_unrecoverable:
                    log.warning('%s. Data is:', repr(e))
                    log.warning(data.items())
                else:
                    log.warning('%s... Retrying...', repr(e))
                    log.warning('%s', pprint.pformat(data))
                    time.sleep(1)
                    fails += 1
                    if fails > max_fails:
                        return
                    continue

            i += step
Exemplo n.º 30
0
def verify_database_schema(db):
    """Create the Versions table on first run, migrate when behind, and abort
    when the database is newer than the code supports."""
    if Versions.table_exists():
        db_ver = Versions.get(Versions.key == 'schema_version').val

        if db_ver < db_schema_version:
            database_migrate(db, db_ver)
        elif db_ver > db_schema_version:
            log.error(
                'Your database version (%i) appears to be newer than '
                'the code supports (%i).', db_ver, db_schema_version)
            sys.exit(1)
    else:
        db.create_tables([Versions])
        InsertQuery(Versions, {
            Versions.key: 'schema_version',
            Versions.val: db_schema_version
        }).execute()
Exemplo n.º 31
0
def verify_database_schema():
    """Create missing tables and bump the stored schema version to 2 when
    the cache table is absent; abort on MySQL connection errors."""
    log.info('Connecting to MySQL database on {}:{}'.format(args.dbhost,
                                                            args.dbport))
    try:
        if not schema_version.table_exists():
            create_tables()
            # Existing installs (humans table already present) start at
            # version 1 so later migrations run.
            if humans.table_exists():
                InsertQuery(schema_version, {
                    schema_version.key: 'schema_version',
                    schema_version.val: 1}).execute()

        if not cache.table_exists():
            # Upgrade to schema 2: add the cache table.
            schema_version.update(val=2).where(
                schema_version.key == 'schema_version').execute()
            db.create_table(cache, safe=True)
            db.close()

    except OperationalError as e:
        log.critical("MySQL unhappy [ERROR]:% d: % s\n" % (
            e.args[0], e.args[1]))
        exit(1)
Exemplo n.º 32
0
def bulk_upsert(cls, data):
    """Upsert `data` values into model `cls` in DB-appropriate batch sizes,
    retrying a failed batch.

    Args:
        cls: peewee model class to insert into.
        data: mapping whose values are row dicts for `cls`.
    """
    # Materialize the values: on Python 3, dict_values cannot be sliced,
    # so the original `data.values()[i:j]` raised TypeError.
    rows = list(data.values())
    num_rows = len(rows)
    i = 0

    if args.db_type == 'mysql':
        step = 120
    else:
        # SQLite has a default max number of parameters of 999,
        # so we need to limit how many rows we insert for it.
        step = 50

    while i < num_rows:
        log.debug('Inserting items %d to %d', i, min(i + step, num_rows))
        try:
            InsertQuery(
                cls,
                rows=rows[i:min(i + step, num_rows)]).upsert().execute()
        except Exception as e:
            # NOTE(review): retries are unbounded — a permanently failing
            # batch loops forever; consider adding a retry cap.
            log.warning('%s... Retrying', e)
            continue

        i += step
Exemplo n.º 33
0
def verify_database_schema():
    """Create missing tables and apply incremental schema upgrades (up to
    version 5) for the MySQL database; aborts on connection errors.

    Upgrades, in order: v1 stamp for legacy installs, v2 cache table,
    v3 utf8/utf8mb4 re-encoding, v4 LONGTEXT description column, v5 optional
    'special' column on monsters.
    """
    log.info('Connecting to MySQL database on {}:{}'.format(
        args.dbhost, args.dbport))
    try:
        migrator = MySQLMigrator(db)
        if not schema_version.table_exists():
            # NOTE(review): 'Creting' is a typo in this log message; left
            # unchanged here (documentation-only edit).
            log.info('Creting database tables.')
            create_tables()
            # Existing installs (humans table present) start at version 1.
            if humans.table_exists():
                InsertQuery(schema_version, {
                    schema_version.key: 'schema_version',
                    schema_version.val: 1
                }).execute()

        if not cache.table_exists():
            log.info('Upgrading to Db schema 2.')
            schema_version.update(val=2).where(
                schema_version.key == 'schema_version').execute()
            db.create_table(cache, safe=True)
            db.close()
        if int(get_database_version()) < 3:
            # v3: convert database/table encodings for full unicode support.
            schema_version.update(val=3).where(
                schema_version.key == 'schema_version').execute()
            with db.execution_context():
                log.info('Upgrading to Db schema 3.')

                db.execute_sql('SET FOREIGN_KEY_CHECKS=0;')

                log.debug('changing db encoding')
                cmd_sql_db = '''ALTER DATABASE {} DEFAULT CHARACTER 
                    SET utf8 COLLATE utf8_unicode_ci;'''.format(args.database)
                db.execute_sql(cmd_sql_db)
                log.debug('changing collation of humans')

                cmd_sql_table = '''ALTER TABLE humans CONVERT TO 
                    CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'''
                db.execute_sql(cmd_sql_table)
                db.execute_sql('SET FOREIGN_KEY_CHECKS=1;')
        if int(get_database_version()) < 4:
            log.info('Upgrading to Db schema 4.')
            schema_version.update(val=4).where(
                schema_version.key == 'schema_version').execute()
            with db.execution_context():
                db.execute_sql(
                    'ALTER TABLE geocoded MODIFY description LONGTEXT;')

        if int(get_database_version()) < 5:
            log.info('Upgrading to Db schema 5.')
            schema_version.update(val=5).where(
                schema_version.key == 'schema_version').execute()
            # Only add the 'special' column if it isn't already present.
            columns = db.get_columns('monsters')
            special_filters_migrated = False
            for col in columns:
                if 'special' in col:
                    special_filters_migrated = True
            if not special_filters_migrated:
                special = Utf8mb4CharField(index=True,
                                           null=True,
                                           max_length=20)

                with db.transaction():
                    migrate(migrator.add_column('monsters', 'special',
                                                special))

    except OperationalError as e:
        log.critical("MySQL unhappy [ERROR]:% d: % s\n" %
                     (e.args[0], e.args[1]))
        exit(1)
Exemplo n.º 34
0
def bulk_upsert(cls, data, db):
    """Bulk-upsert `data` values into model `cls` using a hand-built MySQL
    INSERT ... ON DUPLICATE KEY UPDATE statement with executemany().

    Peewee's own upsert is REPLACE INTO (delete + insert), which is slow;
    this builds the faster ON DUPLICATE KEY UPDATE form directly, deriving
    column order and defaults from peewee's internals. Batches of `step`
    rows run inside one transaction; failing batches retry up to `max_fails`
    times unless the error looks unrecoverable.

    NOTE(review): this looks like Python 2 code — `rows[...]` slicing of
    `data.values()`, `map(...)` used as a list, and `filter(...)` truthiness
    all change behavior on Python 3. Confirm the target interpreter.
    """
    rows = data.values()
    num_rows = len(rows)
    i = 0

    max_fails = 3
    fails = 0

    # This shouldn't happen, ever, but anyways...
    if num_rows < 1:
        return

    # We used to support SQLite and it has a default max 999 parameters,
    # so we limited how many rows we insert for it.
    # Oracle: 64000
    # MySQL: 65535
    # PostgreSQL: 34464
    # Sqlite: 999
    step = 500

    # Prepare for our query.
    conn = db.get_conn()
    cursor = db.get_cursor()

    # We build our own INSERT INTO ... ON DUPLICATE KEY UPDATE x=VALUES(x)
    # query, making sure all data is properly escaped. We use
    # placeholders for VALUES(%s, %s, ...) so we can use executemany().
    # We use peewee's InsertQuery to retrieve the fields because it
    # takes care of peewee's internals (e.g. required default fields).
    query = InsertQuery(cls, rows=[rows[0]])
    # Take the first row. We need to call _iter_rows() for peewee internals.
    # Using next() for a single item is not considered "pythonic".
    first_row = {}
    for row in query._iter_rows():
        first_row = row
        break
    # Convert the row to its fields, sorted by peewee.
    row_fields = sorted(first_row.keys(), key=lambda x: x._sort_key)
    row_fields = map(lambda x: x.name, row_fields)
    # Translate to proper column name, e.g. foreign keys.
    db_columns = [peewee_attr_to_col(cls, f) for f in row_fields]

    # Store defaults so we can fall back to them if a value
    # isn't set.
    defaults = {}

    for f in cls._meta.fields.values():
        # Use DB column name as key.
        field_name = f.name
        field_default = cls._meta.defaults.get(f, None)
        defaults[field_name] = field_default

    # Assign fields, placeholders and assignments after defaults
    # so our lists/keys stay in order.
    table = '`' + conn.escape_string(cls._meta.db_table) + '`'
    escaped_fields = ['`' + conn.escape_string(f) + '`' for f in db_columns]
    placeholders = ['%s' for escaped_field in escaped_fields]
    assignments = [
        '{x} = VALUES({x})'.format(x=escaped_field)
        for escaped_field in escaped_fields
    ]

    # We build our own MySQL query because peewee only supports
    # REPLACE INTO for upserting, which deletes the old row before
    # adding the new one, giving a serious performance hit.
    query_string = ('INSERT INTO {table} ({fields}) VALUES' +
                    ' ({placeholders}) ON DUPLICATE KEY UPDATE' +
                    ' {assignments}')

    # Prepare transaction.

    with db.atomic():
        while i < num_rows:
            start = i
            end = min(i + step, num_rows)
            name = cls.__name__

            log.debug('Inserting items %d to %d for %s.', start, end, name)
            try:
                # Turn off FOREIGN_KEY_CHECKS on MySQL, because apparently it's
                # unable to recognize strings to update unicode keys for
                # foreign key fields, thus giving lots of foreign key
                # constraint errors.
                db.execute_sql('SET FOREIGN_KEY_CHECKS=0;')

                # Time to bulk upsert our data. Convert objects to a list of
                # values for executemany(), and fall back to defaults if
                # necessary.
                batch = []
                batch_rows = rows[i:min(i + step, num_rows)]

                # We pop them off one by one so we can gradually release
                # memory as we pass each item. No duplicate memory usage.
                while len(batch_rows) > 0:
                    row = batch_rows.pop()
                    row_data = []

                    # Parse rows, build arrays of values sorted via row_fields.
                    for field in row_fields:
                        # Take a default if we need it.
                        if field not in row:
                            default = defaults.get(field, None)

                            # peewee's defaults can be callable, e.g. current
                            # time. We only call when needed to insert.
                            if callable(default):
                                default = default()

                            row[field] = default

                        # Append to keep the exact order, and only these
                        # fields.
                        row_data.append(row[field])
                    # Done preparing, add it to the batch.
                    batch.append(row_data)

                # Format query and go.
                formatted_query = query_string.format(
                    table=table,
                    fields=', '.join(escaped_fields),
                    placeholders=', '.join(placeholders),
                    assignments=', '.join(assignments))

                cursor.executemany(formatted_query, batch)
                db.execute_sql('SET FOREIGN_KEY_CHECKS=1;')

            except Exception as e:
                # If there is a DB table constraint error, dump the data and
                # don't retry.
                #
                # Unrecoverable error strings:
                unrecoverable = [
                    'constraint', 'has no attribute',
                    'peewee.IntegerField object at'
                ]
                has_unrecoverable = filter(lambda x: x in str(e),
                                           unrecoverable)
                if has_unrecoverable:
                    log.warning('%s. Data is:', repr(e))
                    log.warning(data.items())
                else:
                    log.warning('%s... Retrying...', repr(e))
                    log.warning('%s', pprint.pformat(data))
                    time.sleep(1)
                    fails += 1
                    if fails > max_fails:
                        return
                    continue

            i += step
Exemplo n.º 35
0
def register(id, name):
    """Insert a new human row, then release the DB connection."""
    row = {humans.id: id, humans.name: name}
    InsertQuery(humans, row).execute()
    db.close()
Exemplo n.º 36
0
    def __init__(self, *args, **kwargs):
        """Initialize both bases explicitly: the insert-query machinery and
        the Future this object doubles as (multiple-inheritance init)."""
        BaseInsertQuery.__init__(self, *args, **kwargs)
        gen.Future.__init__(self)

        # Holds the in-flight future once execution starts; None until then.
        self._future = None