Example #1
0
def db_upgrade(old_version):
    """Apply incremental schema migrations, bringing the SQLite database
    from *old_version* up to the current ``_version``.

    Each ``if old_version < N`` branch performs the changes introduced in
    schema version N; the branches run in ascending order so a very old
    database receives every intermediate migration.  The stored schema
    version marker is updated at the end.
    """
    m = SqliteMigrator(_db)

    # v2: login table plus several clan-related columns.
    if old_version < 2:
        User_login.create_table()
        migrate(
            m.add_column('clan_member', 'remaining_status', TextField(null=True)),
            m.add_column('clan_challenge', 'message', TextField(null=True)),
            m.add_column('clan_group', 'boss_lock_type', IntegerField(default=0)),
            m.drop_column('user', 'last_save_slot'),
        )

    # v3: authentication cookies are no longer stored in the database.
    if old_version < 3:
        migrate(
            m.drop_column('user', 'auth_cookie'),
            m.drop_column('user', 'auth_cookie_expire_time'),
        )

    # v4: soft-delete flag for users.
    if old_version < 4:
        migrate(m.add_column('user', 'deleted', BooleanField(default=False)))

    # v5: force a password change for existing accounts.
    if old_version < 5:
        migrate(
            m.add_column('user', 'must_change_password',
                         BooleanField(default=True)))

    # v6: authority group 2 is merged into group 1 (data migration only).
    if old_version < 6:
        User.update({
            User.authority_group: 1
        }).where(User.authority_group == 2).execute()

    # v7: replace "comment" columns and add a per-group API key.
    if old_version < 7:
        migrate(
            m.drop_column('clan_challenge', 'comment'),
            m.add_column('clan_challenge', 'behalf', IntegerField(null=True)),
            m.drop_column('clan_subscribe', 'comment'),
            m.add_column('clan_subscribe', 'message', TextField(null=True)),
            m.add_column('clan_group', 'apikey',
                         CharField(max_length=16, default=rand_string)),
        )

    # v8: battle bookkeeping columns and a (bid, gid) lookup index.
    if old_version < 8:
        migrate(
            m.add_column('clan_group', 'deleted', BooleanField(default=False)),
            m.add_column('clan_group', 'battle_id', IntegerField(default=0)),
            m.add_column('clan_challenge', 'bid', IntegerField(default=0)),
            m.add_index('clan_challenge', ('bid', 'gid'), False),
        )

    # v9: speed up member lookups by QQ id.
    if old_version < 9:
        migrate(m.add_index('clan_member', ('qqid',), False))

    # Record that the schema is now at the current version.
    DB_schema.replace(key='version', value=str(_version)).execute()
Example #2
0
class JunctionsDatabase(object):
    """Thin wrapper around the peewee junctions cache database.

    Handles binding the module-level database to a file, (re)creating the
    Gene/Junction/Stats tables with their lookup indexes, and shutdown.
    """

    def __init__(self, db_name):
        # Bind the shared database object to the given file.
        self.db = database
        self.db.init(db_name)
        self.migrator = SqliteMigrator(self.db)

    def create_tables(self):
        """Drop any existing tables, recreate them, and add the indexes."""
        self.db.drop_tables([Stats, Junction, Gene])
        self.db.create_tables([Gene, Junction, Stats])
        self.create_indexes()

    def create_indexes(self):
        """Add composite lookup indexes on the id / foreign-key columns."""
        index_specs = [
            ('gene', ('id', 'gene_name', 'nm_number')),
            ('junction', ('id', 'gene_id')),
            ('stats', ('id', 'gene_id')),
        ]
        migrate(*[self.migrator.add_index(table, columns)
                  for table, columns in index_specs])

    def close_db(self):
        """Close the underlying database connection."""
        self.db.close()
Example #3
0
def setup_db():
    """Initialise the global SQLite database: connect, create the
    diffengine tables, and ensure the entryversion URL index exists.

    Index creation is best-effort: if it already exists SQLite raises
    OperationalError, which is logged and ignored.
    """
    global db
    db_file = config.get("db", home_path("diffengine.db"))
    logging.debug("connecting to db %s", db_file)
    db.init(db_file)
    db.connect()
    tables = [Feed, Entry, FeedEntry, EntryVersion, Diff]
    db.create_tables(tables, safe=True)
    try:
        migrate(SqliteMigrator(db).add_index("entryversion", ("url",), False))
    except OperationalError as e:
        logging.debug(e)
Example #4
0
def apply():
    """Create the initial user/snippet schema and backfill all columns.

    Both tables are first created with only a primary key; every other
    column is then added through peewee migrations.  The model fields are
    temporarily marked nullable so the new columns can be added to tables
    that may already contain rows.
    """
    database = SqliteDatabase(settings.DATABASE)
    migrator = SqliteMigrator(database)

    with database.transaction():
        database.execute_sql('CREATE TABLE user (pk INTEGER PRIMARY KEY)')
        database.execute_sql('CREATE TABLE snippet (pk INTEGER PRIMARY KEY)')

        # Relax NOT NULL before the columns are added.
        nullable_fields = (
            User.created_at,
            User.updated_at,
            User.name,
            User.passhash,
            Snippet.created_at,
            Snippet.updated_at,
            Snippet.syntax,
            Snippet.raw,
            Snippet.html,
        )
        for field in nullable_fields:
            field.null = True

        # user table: columns plus a unique name index and an
        # updated_at lookup index.
        user_ops = [
            migrator.add_column('user', 'created_at', User.created_at),
            migrator.add_column('user', 'updated_at', User.updated_at),
            migrator.add_column('user', 'name', User.name),
            migrator.add_column('user', 'passhash', User.passhash),
            migrator.add_index('user', ('name',), True),
            migrator.add_index('user', ('updated_at',), False),
        ]
        # snippet table: columns plus an updated_at lookup index.
        snippet_ops = [
            migrator.add_column('snippet', 'created_at', Snippet.created_at),
            migrator.add_column('snippet', 'updated_at', Snippet.updated_at),
            migrator.add_column('snippet', 'author_id', Snippet.author),
            migrator.add_column('snippet', 'name', Snippet.name),
            migrator.add_column('snippet', 'syntax', Snippet.syntax),
            migrator.add_column('snippet', 'raw', Snippet.raw),
            migrator.add_column('snippet', 'html', Snippet.html),
            migrator.add_index('snippet', ('updated_at',), False),
        ]
        migrate(*(user_ops + snippet_ops))
Example #5
0
def setup_db():
    """Connect the global database proxy to the configured database URL,
    create the diffengine tables, and — for SQLite backends only — ensure
    the entryversion URL index exists.

    Index creation is best-effort: an OperationalError (e.g. the index
    already exists) is logged and ignored.
    """
    global home, database
    database_url = config.get("db", "sqlite:///diffengine.db")
    logging.debug("connecting to db %s", database_url)
    handler = connect(database_url)
    database.initialize(handler)
    database.connect()
    models = [Feed, Entry, FeedEntry, EntryVersion, Diff]
    database.create_tables(models, safe=True)

    # The explicit index migration only applies to SQLite backends.
    if isinstance(handler, SqliteDatabase):
        try:
            migrate(
                SqliteMigrator(handler).add_index("entryversion", ("url",),
                                                  False))
        except OperationalError as e:
            logging.debug(e)
Example #6
0
def db_upgrade(old_version):
    """Apply incremental schema migrations, bringing the SQLite database
    from ``old_version`` up to the current ``_version``.

    Each ``if old_version < N`` branch contains the changes introduced in
    schema version N; the branches run in ascending order so an old
    database receives every intermediate migration.  The stored version
    marker is updated at the end.

    NOTE(review): there is no ``old_version < 6`` branch here although the
    numbering jumps from 5 to 7 — confirm the v6 data migration was
    intentionally dropped from this revision.
    """
    migrator = SqliteMigrator(_db)
    # v2: login table plus several clan-related columns.
    if old_version < 2:
        User_login.create_table()
        migrate(
            migrator.add_column('clan_member', 'remaining_status',
                                TextField(null=True)),
            migrator.add_column('clan_challenge', 'message',
                                TextField(null=True)),
            migrator.add_column('clan_group', 'boss_lock_type',
                                IntegerField(default=0)),
            migrator.drop_column('user', 'last_save_slot'),
        )
    # v3: authentication cookies are no longer stored in the database.
    if old_version < 3:
        migrate(
            migrator.drop_column('user', 'auth_cookie'),
            migrator.drop_column('user', 'auth_cookie_expire_time'),
        )
    # v4: soft-delete flag for users.
    if old_version < 4:
        migrate(
            migrator.add_column('user', 'deleted',
                                BooleanField(default=False)), )
    # v5: force a password change for existing accounts.
    if old_version < 5:
        migrate(
            migrator.add_column('user', 'must_change_password',
                                BooleanField(default=True)), )
    # v7: replace "comment" columns and add a per-group API key.
    if old_version < 7:
        migrate(
            migrator.drop_column('clan_challenge', 'comment'),
            migrator.add_column('clan_challenge', 'behalf',
                                IntegerField(null=True)),
            migrator.drop_column('clan_subscribe', 'comment'),
            migrator.add_column('clan_subscribe', 'message',
                                TextField(null=True)),
            migrator.add_column('clan_group', 'apikey',
                                CharField(max_length=16, default=rand_string)),
        )
    # v8: battle bookkeeping columns and a (bid, gid) lookup index.
    if old_version < 8:
        migrate(
            migrator.add_column('clan_group', 'deleted',
                                BooleanField(default=False)),
            migrator.add_column('clan_group', 'battle_id',
                                IntegerField(default=0)),
            migrator.add_column('clan_challenge', 'bid',
                                IntegerField(default=0)),
            migrator.add_index('clan_challenge', ('bid', 'gid'), False))
    # v9: speed up member lookups by QQ id.
    if old_version < 9:
        migrate(migrator.add_index('clan_member', ('qqid', ), False))
    # v10: additional lookup indexes for common queries.
    if old_version < 10:
        migrate(
            migrator.add_index('clan_member', ('group_id', ), False),
            migrator.add_index('clan_subscribe', ('gid', ), False),
            migrator.add_index('clan_challenge', ('qqid', ), False),
            migrator.add_index('clan_challenge', ('qqid', 'challenge_pcrdate'),
                               False),
            migrator.add_index('clan_challenge',
                               ('bid', 'gid', 'challenge_pcrdate'), False),
        )
    # v11: per-user notification preference.
    if old_version < 11:
        migrate(
            migrator.add_column('user', 'notify_preference',
                                IntegerField(default=1)), )
    # The v12 migration below was withdrawn; the ``== 12`` branch reverts
    # databases that already ran it back to the v11 schema.
    # if old_version < 12:
    #     migrate(
    #         migrator.alter_column_type('clan_group', 'challenging_member_qq_id',
    #                                    TextField(null=True)),
    #         migrator.add_column('clan_group', 'lock_member_qq_id',
    #                             IntegerField(null=True)),
    #     )
    if old_version == 12:
        # revert database version 12
        migrate(
            migrator.alter_column_type('clan_group',
                                       'challenging_member_qq_id',
                                       IntegerField(null=True)),
            migrator.drop_column('clan_group', 'lock_member_qq_id'),
        )

    # Record that the schema is now at the current version.
    DB_schema.replace(key='version', value=str(_version)).execute()
Example #7
0
class QstarzCSVParser(object):
    '''
    Parses Qstarz csv files and loads them to a cache database.

    :param config:   Survey configuration; must provide ``TIMEZONE`` and
                     ``SURVEY_NAME`` (the latter names the UUID cache file).
    :param database: Open Peewee connection the cache database
    '''
    def __init__(self, config, database):
        self.config = config
        self.db = database
        # playhouse migrator, used to drop/re-add indexes around bulk inserts
        self._migrator = SqliteMigrator(database.db)
        self.coordinates_csv = 'coordinates.csv'
        self.locations_csv = 'locations.csv'
        # Fixed column layout of the QStarz export; because this is
        # non-empty, _row_generator skips the file's own header row.
        self.headers = [
            'INDEX',
            'UTC_DATE',
            'UTC_TIME',
            'LOCAL_DATE',
            'LOCAL_TIME',
            'LATITUDE',
            'N/S',
            'LONGITUDE',
            'E/W',
            'ALTITUDE',
            'SPEED',
            'USER',
        ]
        # raw USER id -> generated UUID string; filled by _generate_uuids
        self.uuid_lookup = None

        # attach common functions
        self.load_subway_stations = _load_subway_stations
        # intialize survey timezone offset
        self.tz = pytz.timezone(self.config.TIMEZONE)

    @staticmethod
    def _value_or_none(row, key):
        '''
        Helper function to return the value stripped of whitespace or `None` for a 0-length string
        from a .csv cell value.
        '''
        v = row.get(key)
        if v and isinstance(v, str):
            return v.strip()
        # Falls through (returns None) for missing keys, empty strings
        # and non-string values.

    def _coordinates_row_filter(self, row):
        '''
        Converts one raw coordinates .csv row into a cache-database row
        dict, or returns ``None`` for rows without a usable lat/lon fix.
        '''
        lat, lon = self._value_or_none(row, 'LATITUDE'), self._value_or_none(
            row, 'LONGITUDE')
        if not lat or not lon:
            return
        lat, lon = float(lat), float(lon)
        # A (0, 0) fix is treated as "no GPS lock" and discarded.
        if int(lat) == 0 and int(lon) == 0:
            return
        # add sign to negative lat/lons depending on hemisphere
        if row.get('N/S') == 'S' and lat > 0:
            lat *= -1
        if row.get('E/W') == 'W' and lon > 0:
            lon *= -1

        # format date and time columns into Python datetime (NOTE: QStarz data returns a 2-digit year)
        year, month, day = row['UTC_DATE'].split('/')
        if len(year) == 2:
            year = int('20' + year)
        year, month, day = int(year), int(month), int(day)
        hour, minute, second = [int(i) for i in row['UTC_TIME'].split(':')]
        timestamp_UTC = datetime(year,
                                 month,
                                 day,
                                 hour,
                                 minute,
                                 second,
                                 tzinfo=pytz.utc)
        timestamp_epoch = int(timestamp_UTC.timestamp())
        # NOTE(review): 'HEADING', 'G-X', 'G-Y' and 'G-Z' are not in
        # self.headers, so these lookups return None unless the header list
        # is overridden from the file — confirm intended.
        db_row = {
            'user': self.uuid_lookup[row['USER']],
            'latitude': lat,
            'longitude': lon,
            'altitude': self._value_or_none(row, 'ALTITUDE'),
            'speed': self._value_or_none(row, 'SPEED'),
            'direction': self._value_or_none(row, 'HEADING'),
            'h_accuracy': None,
            'v_accuracy': None,
            'acceleration_x': self._value_or_none(row, 'G-X'),
            'acceleration_y': self._value_or_none(row, 'G-Y'),
            'acceleration_z': self._value_or_none(row, 'G-Z'),
            'point_type': None,
            'mode_detected': None,
            'timestamp_UTC': timestamp_UTC,
            'timestamp_epoch': timestamp_epoch,
        }
        return db_row

    # read .csv file, apply filter and yield row
    def _row_generator(self, csv_fp, filter_func=None):
        '''
        Yields one dict per data row of ``csv_fp``, optionally passed
        through ``filter_func``.  The first file row is consumed either as
        the header (when ``self.headers`` is empty) or skipped.
        '''
        with open(csv_fp, 'r', encoding='utf-8-sig') as csv_f:
            reader = csv.reader(
                csv_f)  # use zip() below instead of DictReader for speed
            if not self.headers:
                self.headers = next(reader)
            else:
                next(reader)

            # generate dictionaries to insert and apply row filter if exists
            for row in reader:
                dict_row = dict(zip(self.headers, row))
                yield filter_func(dict_row) if filter_func else dict_row

    def _generate_uuids(self, input_dir):
        '''
        Builds ``self.uuid_lookup``, mapping each raw user id found in the
        coordinates .csv to a generated UUID4 string.  The mapping is
        cached as JSON (keyed by SURVEY_NAME) and reloaded on later runs
        so ids stay stable.
        '''
        self.uuid_lookup = {}
        logger.info("Generating UUIDs for non-standard user ids...")
        lookup_fp = temp_path(f'{self.config.SURVEY_NAME}.json')
        if os.path.exists(lookup_fp):
            with open(lookup_fp, 'r') as json_f:
                self.uuid_lookup = json.load(json_f)
        else:
            coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
            with open(coordinates_fp, 'r', encoding='utf-8-sig') as csv_f:
                reader = csv.reader(csv_f)
                user_id_idx = self.headers.index('USER')
                for r in reader:
                    # skip first row if header
                    if 'USER' in r:
                        continue

                    user_id = r[user_id_idx]
                    if not user_id in self.uuid_lookup:
                        self.uuid_lookup[user_id] = str(uuid.uuid4())
            with open(lookup_fp, 'w') as json_f:
                json.dump(self.uuid_lookup, json_f)

    def generate_null_survey(self, input_dir):
        '''
        Wrapper function to generate null survey responses for each user in coordinates.

        :param input_dir: Directory containing input .csv data
        '''
        self._generate_uuids(input_dir)
        _generate_null_survey(input_dir,
                              self.coordinates_csv,
                              id_column='user',
                              uuid_lookup=self.uuid_lookup,
                              headers=self.headers)

    def load_export_coordinates(self, input_dir):
        '''
        Loads QStarz coordinates data to the cache database.

        :param input_dir: The directory containing the `self.coordinates_csv` data file.
        '''
        logger.info("Loading coordinates .csv to db...")
        # Drop the user-id index so the bulk insert is not slowed by index
        # maintenance; it is re-added after the insert.
        # NOTE(review): drop_index is passed the Coordinate model while
        # add_index is passed the table name 'coordinates' — confirm both
        # forms resolve to the same table for this migrator.
        migrate(self._migrator.drop_index(Coordinate, 'coordinate_user_id'))
        coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
        coordinates_rows = self._row_generator(coordinates_fp,
                                               self._coordinates_row_filter)
        self.db.bulk_insert(Coordinate, coordinates_rows)
        migrate(self._migrator.add_index('coordinates', ('user_id', ), False))

    def load_user_locations(self, input_dir):
        '''
        Loads QStarz user locations data to the cache database.

        Requires ``generate_null_survey`` to have run first so the UUID
        lookup exists; silently does nothing if the locations file is
        absent.

        :param input_dir: The directory containing the `self.locations_csv` data file.
        '''
        if not self.uuid_lookup:
            raise Exception(
                'QStarz cannot load user locations before null survey has been initialized.'
            )
        locations_fp = os.path.join(input_dir, self.locations_csv)
        if os.path.exists(locations_fp):
            _load_user_locations(locations_fp, uuid_lookup=self.uuid_lookup)
Example #8
0
        # (fragment — the enclosing function's header is outside this view)
        # Tail of a seed list: remaining shop names, inserted with
        # on_conflict_ignore so re-running the setup does not duplicate rows.
        {
            'name': 'zadowolenie'
        },
        {
            'name': 'combat'
        },
        {
            'name': 'amso'
        },
    ]).on_conflict_ignore().execute()
    migrator = SqliteMigrator(db)
    # TODO: improve migrations!
    migrations = [
        # Add date when promotion ends and add number of items in promotion
        migrator.add_column('promotion', 'end_date',
                            peewee.DateTimeField(default=None, null=True)),
        # NOTE(review): playhouse add_index normally takes a tuple/list of
        # column names; a bare string 'end_date' may not be accepted —
        # verify against the peewee version in use.
        migrator.add_index('promotion', 'end_date'),
        migrator.add_column('promotion', 'number_of_items',
                            peewee.IntegerField(default=None, null=True)),
        # Redesign items counts
        migrator.rename_column('promotion', 'number_of_items',
                               'items_available'),
        migrator.add_column('promotion', 'items_sold',
                            peewee.IntegerField(default=None, null=True)),
    ]
    # Apply each migration independently, swallowing OperationalError
    # (e.g. "column already exists") so the whole list is idempotent
    # across restarts.
    for migration in migrations:
        try:
            migrate(migration)
        except peewee.OperationalError:
            pass
Example #9
0
from playhouse.migrate import SqliteMigrator, migrate
from zhlib import zh

if __name__ == '__main__':
    # Rebuild the sentence index: drop the old single-column index on
    # sentence_chinese, then add a unique composite (sentence, pinyin) index.
    m = SqliteMigrator(zh.database)
    drop_old = m.drop_index('sentence', 'sentence_chinese')
    add_composite = m.add_index('sentence', ('sentence', 'pinyin'), True)
    migrate(drop_old, add_composite)
Example #10
0
class ItinerumCSVParser(object):
    '''
    Parses Itinerum platform csv files and loads them to a cache database.

    :param database: Open Peewee connection the cache database
    '''
    def __init__(self, database):
        self.db = database
        # playhouse migrator, used to drop/re-add indexes around bulk inserts
        self._migrator = SqliteMigrator(database.db)
        self.cancelled_prompt_responses_csv = 'cancelled_prompts.csv'
        self.coordinates_csv = 'coordinates.csv'
        self.prompt_responses_csv = 'prompt_responses.csv'
        self.survey_responses_csv = 'survey_responses.csv'
        # attach common functions
        self.load_subway_stations = _load_subway_stations

    def _get(self, row, key, cast_func=None):
        '''
        Returns the whitespace-stripped (and, when ``cast_func`` is given,
        cast) value for ``key`` in a .csv row dict; returns ``None`` for
        missing keys or falsy (empty) values.
        '''
        value = row.get(key)
        if value:
            value = value.strip()
            if cast_func:
                return cast_func(value)
            return value

    # read .csv file, apply filter and yield row
    @staticmethod
    def _row_generator(csv_fp, filter_func=None):
        '''
        Yields one dict per data row of ``csv_fp`` (keys taken from the
        file's header row), optionally passed through ``filter_func``.
        '''
        with open(csv_fp, 'r', encoding='utf-8-sig') as csv_f:
            reader = csv.reader(
                csv_f)  # use zip() below instead of DictReader for speed
            headers = next(reader)

            # generate dictionaries to insert and apply row filter if exists
            for row in reader:
                dict_row = dict(zip(headers, row))
                yield filter_func(dict_row) if filter_func else dict_row

    def generate_null_survey(self, input_dir):
        '''
        Wrapper function to generate null survey responses for each user in coordinates.

        :param input_dir: Directory containing input .csv data
        '''
        _generate_null_survey(input_dir, self.coordinates_csv)

    def load_export_survey_responses(self, input_dir):
        '''
        Loads Itinerum survey responses data to the cache database.

        :param input_dir: The directory containing the `self.survey_responses_csv` data file.
        '''
        survey_responses_fp = os.path.join(input_dir,
                                           self.survey_responses_csv)
        survey_responses_rows = self._row_generator(
            survey_responses_fp, _survey_response_row_filter)
        self.db.bulk_insert(UserSurveyResponse, survey_responses_rows)

    def load_export_coordinates(self, input_dir):
        '''
        Loads Itinerum coordinates data to the cache database.

        :param input_dir: The directory containing the `self.coordinates_csv` data file.
        '''
        logger.info("Loading coordinates .csv to db...")
        # Drop the user-id index so the bulk insert is not slowed by index
        # maintenance; it is re-added after the insert.
        # NOTE(review): drop_index is passed the Coordinate model while
        # add_index is passed the table name 'coordinates' — confirm both
        # forms resolve to the same table for this migrator.
        migrate(self._migrator.drop_index(Coordinate, 'coordinate_user_id'))
        coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
        coordinates_rows = self._row_generator(coordinates_fp,
                                               _coordinates_row_filter)
        self.db.bulk_insert(Coordinate, coordinates_rows)
        migrate(self._migrator.add_index('coordinates', ('user_id', ), False))

    def load_export_prompt_responses(self, input_dir):
        '''
        Loads Itinerum prompt responses data to the cache database. For each .csv row, the data 
        is fetched by column name if it exists and cast to appropriate types as set in the database.

        :param input_dir: The directory containing the `self.prompt_responses.csv` data file.
        '''
        logger.info("Loading prompt responses .csv to db...")
        # Same drop-insert-reindex pattern as load_export_coordinates;
        # NOTE(review): model class vs. table-name string mixing as above.
        migrate(
            self._migrator.drop_index(PromptResponse,
                                      'promptresponse_user_id'))
        prompt_responses_fp = os.path.join(input_dir,
                                           self.prompt_responses_csv)
        prompt_responses_rows = self._row_generator(prompt_responses_fp,
                                                    _prompts_row_filter)
        self.db.bulk_insert(PromptResponse, prompt_responses_rows)
        migrate(
            self._migrator.add_index('prompt_responses', ('user_id', ), False))

    def load_export_cancelled_prompt_responses(self, input_dir):
        '''
        Loads Itinerum cancelled prompt responses data to the cache database. For each .csv row, the data
        is fetched by column name if it exists and cast to appropriate types as set in the database.

        :param input_dir: The directory containing the `self.cancelled_prompt_responses.csv` data file.
        '''
        logger.info("Loading cancelled prompt responses .csv to db...")
        cancelled_prompt_responses_fp = os.path.join(
            input_dir, self.cancelled_prompt_responses_csv)
        cancelled_prompt_responses_rows = self._row_generator(
            cancelled_prompt_responses_fp, _cancelled_prompts_row_filter)
        self.db.bulk_insert(CancelledPromptResponse,
                            cancelled_prompt_responses_rows)

    def load_trips(self, trips_csv_fp):
        '''
        Loads trips processed by the web platform itself. This is mostly useful for comparing current algorithm
        results against the deployed platform's version.

        :param trips_csv_fp: The full filepath of the downloaded trips `.csv` file for a survey.
        '''
        logger.info("Loading detected trips .csv to db...")
        # Recreate the table from scratch so repeated loads replace rather
        # than append to earlier trip data.
        DetectedTripCoordinate.drop_table()
        DetectedTripCoordinate.create_table()
        self.db.bulk_insert(DetectedTripCoordinate, trips_csv_fp,
                            _trips_row_filter)