Example #1
from peewee import BooleanField, ForeignKeyField, IntegerField, TextField
from playhouse.migrate import SqliteMigrator, migrate

# `db` and `Constants` are assumed to come from the application's models module,
# like the model classes imported inside the individual version branches below.


def upgrade(version):
    if version < 13:
        raise Exception(
            f"Upgrade code for this version is >1 year old and is no longer in the active codebase to reduce clutter. You can still find this code on github, contact me for more info."
        )
    elif version == 13:
        from models import LocationUserOption, MultiLine, Polygon

        db.foreign_keys = False
        migrator = SqliteMigrator(db)

        migrate(migrator.drop_column("location_user_option", "active_filters"))

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 14:
        db.foreign_keys = False
        migrator = SqliteMigrator(db)

        from models import GridLayer, Layer

        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "base_rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)'
        )
        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "shape_type" ("shape_id" TEXT NOT NULL PRIMARY KEY, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)'
        )

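        # Rebuild every shape subtype table: copy each one to a temporary table, drop
        # it, recreate it with the new schema, and re-insert only the rows that still
        # reference an existing shape.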
        shape_types = [
            "asset_rect",
            "circle",
            "circular_token",
            "line",
            "multi_line",
            "polygon",
            "rect",
            "text",
        ]
        with db.atomic():
            for table in shape_types:
                db.execute_sql(
                    f"CREATE TEMPORARY TABLE _{table} AS SELECT * FROM {table}"
                )
                db.execute_sql(f"DROP TABLE {table}")
            for query in [
                'CREATE TABLE IF NOT EXISTS "asset_rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, "src" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "circle" ("shape_id" TEXT NOT NULL PRIMARY KEY, "radius" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "circular_token" ("shape_id" TEXT NOT NULL PRIMARY KEY, "radius" REAL NOT NULL, "text" TEXT NOT NULL, "font" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "line" ("shape_id" TEXT NOT NULL PRIMARY KEY, "x2" REAL NOT NULL, "y2" REAL NOT NULL, "line_width" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "multi_line" ("shape_id" TEXT NOT NULL PRIMARY KEY, "line_width" INTEGER NOT NULL, "points" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "polygon" ("shape_id" TEXT NOT NULL PRIMARY KEY, "vertices" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                'CREATE TABLE IF NOT EXISTS "text" ("shape_id" TEXT NOT NULL PRIMARY KEY, "text" TEXT NOT NULL, "font" TEXT NOT NULL, "angle" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
            ]:
                db.execute_sql(query)
            for table in shape_types:
                db.execute_sql(
                    f"INSERT INTO {table} SELECT _{table}.* FROM _{table} INNER JOIN shape ON shape.uuid = _{table}.uuid"
                )
        field = ForeignKeyField(Layer, Layer.id, null=True)
        with db.atomic():
            migrate(migrator.add_column("grid_layer", "layer_id", field))
            for gl in GridLayer.select():
                layer = Layer.get_or_none(id=gl.id)
                if layer:
                    gl.layer = layer
                    gl.save()
                else:
                    gl.delete_instance()
            migrate(migrator.add_not_null("grid_layer", "layer_id"))

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 15:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("room", "is_locked", BooleanField(default=False))
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 16:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column(
                    "location", "unit_size_unit", TextField(default="ft")
                )
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 17:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column(
                    "polygon", "open_polygon", BooleanField(default=False)
                ),
                migrator.add_column("polygon", "line_width", IntegerField(default=2)),
            )
            db.execute_sql(
                "INSERT INTO polygon (shape_id, line_width, vertices, open_polygon) SELECT shape_id, line_width, points, 1 FROM multi_line"
            )
            db.execute_sql("DROP TABLE multi_line")
            db.execute_sql(
                "UPDATE shape SET type_ = 'polygon' WHERE type_ = 'multiline'"
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 18:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(migrator.add_column("user", "email", TextField(null=True)))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 19:
        db.foreign_keys = False
        migrator = SqliteMigrator(db)

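        # Introduce a floor level between location and layer: every existing
        # location gets a single "ground" floor at index 0, and layers are
        # re-parented from their location onto that floor.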
        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "floor" ("id" INTEGER NOT NULL PRIMARY KEY, "location_id" INTEGER NOT NULL, "name" TEXT, "index" INTEGER NOT NULL, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
        )
        db.execute_sql(
            'INSERT INTO floor (location_id, name, "index") SELECT id, "ground", 0 FROM location'
        )

        with db.atomic():
            db.execute_sql("CREATE TEMPORARY TABLE _layer AS SELECT * FROM layer")
            db.execute_sql("DROP TABLE layer")
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "layer" ("id" INTEGER NOT NULL PRIMARY KEY, "floor_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "type_" TEXT NOT NULL, "player_visible" INTEGER NOT NULL, "player_editable" INTEGER NOT NULL, "selectable" INTEGER NOT NULL, "index" INTEGER NOT NULL, FOREIGN KEY ("floor_id") REFERENCES "floor" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'INSERT INTO layer (id, floor_id, name, type_, player_visible, player_editable, selectable, "index") SELECT _layer.id, floor.id, _layer.name, type_, player_visible, player_editable, selectable, _layer."index" FROM _layer INNER JOIN floor ON floor.location_id = _layer.location_id'
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 20:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("shape", "badge", IntegerField(default=1)),
                migrator.add_column("shape", "show_badge", BooleanField(default=False)),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 21:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("user", "invert_alt", BooleanField(default=False))
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 22:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "marker" ("id" INTEGER NOT NULL PRIMARY KEY, "shape_id" TEXT NOT NULL, "user_id" INTEGER NOT NULL, "location_id" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape"("uuid") ON DELETE CASCADE, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE, FOREIGN KEY ("user_id") REFERENCES "user"("id") ON DELETE CASCADE)'
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 23:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column(
                    "shape_owner", "edit_access", BooleanField(default=True)
                ),
                migrator.add_column(
                    "shape_owner", "vision_access", BooleanField(default=True)
                ),
                migrator.add_column(
                    "shape", "default_edit_access", BooleanField(default=False)
                ),
                migrator.add_column(
                    "shape", "default_vision_access", BooleanField(default=False)
                ),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 24:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'DELETE FROM "player_room" WHERE id IN (SELECT pr.id FROM "player_room" pr INNER JOIN "room" r ON r.id = pr.room_id WHERE r.creator_id = pr.player_id )'
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 25:
        # Move Room.dm_location and Room.player_location to PlayerRoom.active_location
        # Add PlayerRoom.role
        # Add order index on location
        from models import Location

        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column(
                    "player_room",
                    "active_location_id",
                    ForeignKeyField(
                        Location,
                        Location.id,
                        backref="players",
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
                migrator.add_column("player_room", "role", IntegerField(default=0)),
                migrator.add_column("location", "index", IntegerField(default=0)),
            )
            db.execute_sql(
                "UPDATE player_room SET active_location_id = (SELECT location.id FROM room INNER JOIN location ON room.id = location.room_id WHERE location.name = room.player_location AND room.id = player_room.room_id)"
            )
            db.execute_sql(
                "INSERT INTO player_room (role, player_id, room_id, active_location_id) SELECT 1, u.id, r.id, l.id FROM room r INNER JOIN user u ON u.id = r.creator_id INNER JOIN location l ON l.name = r.dm_location AND l.room_id = r.id"
            )
            db.execute_sql(
                "UPDATE location SET 'index' = (SELECT COUNT(*) + 1 FROM location l INNER JOIN room r WHERE location.room_id = r.id AND l.room_id = r.id AND l.'index' != 0) "
            )
            migrate(
                migrator.drop_column("room", "player_location"),
                migrator.drop_column("room", "dm_location"),
                migrator.add_not_null("player_room", "active_location_id"),
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 26:
        # Move Location settings to a separate LocationSettings table
        # Add a default_settings field to Room that refers to such a LocationSettings row
        from models import LocationOptions

        migrator = SqliteMigrator(db)

        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "location_options" ("id" INTEGER NOT NULL PRIMARY KEY, "unit_size" REAL DEFAULT 5, "unit_size_unit" TEXT DEFAULT "ft", "use_grid" INTEGER DEFAULT 1, "full_fow" INTEGER DEFAULT 0, "fow_opacity" REAL DEFAULT 0.3, "fow_los" INTEGER DEFAULT 0, "vision_mode" TEXT DEFAULT "triangle", "vision_min_range" REAL DEFAULT 1640, "vision_max_range" REAL DEFAULT 3281, "grid_size" INTEGER DEFAULT 50)'
            )
            migrate(
                migrator.add_column(
                    "location",
                    "options_id",
                    ForeignKeyField(
                        LocationOptions,
                        LocationOptions.id,
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
                migrator.add_column(
                    "room",
                    "default_options_id",
                    ForeignKeyField(
                        LocationOptions,
                        LocationOptions.id,
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
            )
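            # Gather the current per-location settings together with each location's
            # grid size, taken from the grid layer on one of the location's floors.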
            data = db.execute_sql(
                """SELECT l.id, r.id, l.unit_size, l.unit_size_unit, l.use_grid, l.full_fow, l.fow_opacity, l.fow_los, l.vision_mode, l.vision_min_range, l.vision_max_range, g.size AS grid_size
                FROM location l
                INNER JOIN room r
                INNER JOIN floor f ON f.id = (SELECT id FROM floor f2 WHERE f2.location_id = l.id LIMIT 1)
                INNER JOIN layer la
                INNER JOIN grid_layer g
                WHERE r.id = l.room_id AND la.floor_id = f.id AND la.name = 'grid' AND g.layer_id = la.id"""
            )
            room_options = {}
            descr = data.description
            mapping = {
                "unit_size": 0,
                "unit_size_unit": 1,
                "use_grid": 2,
                "full_fow": 3,
                "fow_opacity": 4,
                "fow_los": 5,
                "vision_mode": 6,
                "vision_min_range": 7,
                "vision_max_range": 8,
                "grid_size": 9,
            }
            default_row = [5, "ft", True, False, 0.3, False, "triangle", 1640, 3281, 50]
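            # Only values that differ from the defaults are kept per location; the
            # remaining columns are inserted as NULL, presumably so they fall back
            # to the room's default options row created above.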
            for row in data.fetchall():
                new_row = [None, None, None, None, None, None, None, None, None, None]

                if row[1] not in room_options:
                    room_options[row[1]] = db.execute_sql(
                        "INSERT INTO location_options (unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, grid_size) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                        default_row,
                    ).lastrowid
                    db.execute_sql(
                        f"UPDATE room SET default_options_id = {room_options[row[1]]} WHERE id = {row[1]}"
                    )
                for col, val in zip(descr, row):
                    if col[0] in ["id", "room_id"]:
                        continue
                    idx = mapping[col[0]]
                    if val != default_row[idx]:
                        new_row[idx] = val

                loc_id = db.execute_sql(
                    "INSERT INTO location_options (unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, grid_size) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                    new_row,
                ).lastrowid
                db.execute_sql(
                    f"UPDATE location SET options_id = {loc_id} WHERE id = {row[0]}"
                )

            migrate(
                migrator.add_not_null("room", "default_options_id"),
                migrator.drop_column("location", "unit_size"),
                migrator.drop_column("location", "unit_size_unit"),
                migrator.drop_column("location", "use_grid"),
                migrator.drop_column("location", "full_fow"),
                migrator.drop_column("location", "fow_opacity"),
                migrator.drop_column("location", "fow_los"),
                migrator.drop_column("location", "vision_mode"),
                migrator.drop_column("location", "vision_min_range"),
                migrator.drop_column("location", "vision_max_range"),
                migrator.drop_index("location", "location_room_id_name"),
            )
            db.execute_sql("DROP TABLE 'grid_layer'")

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    else:
        raise Exception(f"No upgrade code for save format {version} was found.")
Example #2
import csv
import json
import os
import uuid
from datetime import datetime

import pytz
from playhouse.migrate import SqliteMigrator, migrate

# `logger`, `temp_path`, `Coordinate`, `_load_subway_stations`,
# `_generate_null_survey` and `_load_user_locations` are assumed to come from
# the surrounding package rather than from third-party libraries.


class QstarzCSVParser(object):
    '''
    Parses Qstarz .csv files and loads them to a cache database.

    :param config:   Application configuration object (provides SURVEY_NAME and TIMEZONE)
    :param database: Open Peewee connection to the cache database

    The directories containing the input .csv data are supplied per-call to the load methods.
    '''
    def __init__(self, config, database):
        self.config = config
        self.db = database
        self._migrator = SqliteMigrator(database.db)
        self.coordinates_csv = 'coordinates.csv'
        self.locations_csv = 'locations.csv'
        self.headers = [
            'INDEX',
            'UTC_DATE',
            'UTC_TIME',
            'LOCAL_DATE',
            'LOCAL_TIME',
            'LATITUDE',
            'N/S',
            'LONGITUDE',
            'E/W',
            'ALTITUDE',
            'SPEED',
            'USER',
        ]
        self.uuid_lookup = None

        # attach common functions
        self.load_subway_stations = _load_subway_stations
        # initialize survey timezone offset
        self.tz = pytz.timezone(self.config.TIMEZONE)

    @staticmethod
    def _value_or_none(row, key):
        '''
        Return the .csv cell value stripped of whitespace, or `None` when the value
        is missing or an empty string.
        '''
        v = row.get(key)
        if v and isinstance(v, str):
            return v.strip()

    def _coordinates_row_filter(self, row):
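        # Normalize a raw .csv row into a coordinate db row; rows with missing or
        # (0, 0) lat/lon are dropped by returning None.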
        lat = self._value_or_none(row, 'LATITUDE')
        lon = self._value_or_none(row, 'LONGITUDE')
        if not lat or not lon:
            return
        lat, lon = float(lat), float(lon)
        if int(lat) == 0 and int(lon) == 0:
            return
        # add sign to negative lat/lons depending on hemisphere
        if row.get('N/S') == 'S' and lat > 0:
            lat *= -1
        if row.get('E/W') == 'W' and lon > 0:
            lon *= -1

        # format date and time columns into Python datetime (NOTE: QStarz data returns a 2-digit year)
        year, month, day = row['UTC_DATE'].split('/')
        if len(year) == 2:
            year = int('20' + year)
        year, month, day = int(year), int(month), int(day)
        hour, minute, second = [int(i) for i in row['UTC_TIME'].split(':')]
        timestamp_UTC = datetime(year,
                                 month,
                                 day,
                                 hour,
                                 minute,
                                 second,
                                 tzinfo=pytz.utc)
        timestamp_epoch = int(timestamp_UTC.timestamp())
        db_row = {
            'user': self.uuid_lookup[row['USER']],
            'latitude': lat,
            'longitude': lon,
            'altitude': self._value_or_none(row, 'ALTITUDE'),
            'speed': self._value_or_none(row, 'SPEED'),
            'direction': self._value_or_none(row, 'HEADING'),
            'h_accuracy': None,
            'v_accuracy': None,
            'acceleration_x': self._value_or_none(row, 'G-X'),
            'acceleration_y': self._value_or_none(row, 'G-Y'),
            'acceleration_z': self._value_or_none(row, 'G-Z'),
            'point_type': None,
            'mode_detected': None,
            'timestamp_UTC': timestamp_UTC,
            'timestamp_epoch': timestamp_epoch,
        }
        return db_row

    # read .csv file, apply filter and yield row
    def _row_generator(self, csv_fp, filter_func=None):
        with open(csv_fp, 'r', encoding='utf-8-sig') as csv_f:
            # use zip() below instead of DictReader for speed
            reader = csv.reader(csv_f)
            if not self.headers:
                self.headers = next(reader)
            else:
                next(reader)

            # generate dictionaries to insert and apply row filter if exists
            for row in reader:
                dict_row = dict(zip(self.headers, row))
                yield filter_func(dict_row) if filter_func else dict_row

    def _generate_uuids(self, input_dir):
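        # Map raw USER ids to stable UUIDs, caching the lookup in a temporary JSON
        # file so repeated runs of the same survey reuse identical UUIDs.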
        self.uuid_lookup = {}
        logger.info("Generating UUIDs for non-standard user ids...")
        lookup_fp = temp_path(f'{self.config.SURVEY_NAME}.json')
        if os.path.exists(lookup_fp):
            with open(lookup_fp, 'r') as json_f:
                self.uuid_lookup = json.load(json_f)
        else:
            coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
            with open(coordinates_fp, 'r', encoding='utf-8-sig') as csv_f:
                reader = csv.reader(csv_f)
                user_id_idx = self.headers.index('USER')
                for r in reader:
                    # skip first row if header
                    if 'USER' in r:
                        continue

                    user_id = r[user_id_idx]
                    if user_id not in self.uuid_lookup:
                        self.uuid_lookup[user_id] = str(uuid.uuid4())
            with open(lookup_fp, 'w') as json_f:
                json.dump(self.uuid_lookup, json_f)

    def generate_null_survey(self, input_dir):
        '''
        Wrapper function to generate null survey responses for each user in coordinates.

        :param input_dir: Directory containing input .csv data
        '''
        self._generate_uuids(input_dir)
        _generate_null_survey(input_dir,
                              self.coordinates_csv,
                              id_column='user',
                              uuid_lookup=self.uuid_lookup,
                              headers=self.headers)

    def load_export_coordinates(self, input_dir):
        '''
        Loads QStarz coordinates data to the cache database.

        :param input_dir: The directory containing the `self.coordinates_csv` data file.
        '''
        logger.info("Loading coordinates .csv to db...")
        migrate(self._migrator.drop_index(Coordinate, 'coordinate_user_id'))
        coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
        coordinates_rows = self._row_generator(coordinates_fp,
                                               self._coordinates_row_filter)
        self.db.bulk_insert(Coordinate, coordinates_rows)
        migrate(self._migrator.add_index('coordinates', ('user_id', ), False))

    def load_user_locations(self, input_dir):
        '''
        Loads QStarz user locations data to the cache database.

        :param input_dir: The directory containing the `self.locations_csv` data file.        
        '''
        if not self.uuid_lookup:
            raise Exception(
                'QStarz cannot load user locations before null survey has been initialized.'
            )
        locations_fp = os.path.join(input_dir, self.locations_csv)
        if os.path.exists(locations_fp):
            _load_user_locations(locations_fp, uuid_lookup=self.uuid_lookup)
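
A short usage sketch for the parser above. SimpleNamespace stands in for the real config object and CacheDatabase is a hypothetical wrapper; the only assumptions made about either are the attributes the class itself uses (SURVEY_NAME, TIMEZONE, .db, .bulk_insert()).

from types import SimpleNamespace

# Hypothetical wiring: any objects exposing the attributes the parser uses will do.
config = SimpleNamespace(SURVEY_NAME='demo', TIMEZONE='America/Toronto')
database = CacheDatabase('cache.sqlite')  # hypothetical cache database wrapper

parser = QstarzCSVParser(config, database)
parser.generate_null_survey('./input')     # builds the user UUID lookup first
parser.load_export_coordinates('./input')  # bulk-inserts coordinates.csv rows
parser.load_user_locations('./input')      # optional; requires the UUID lookup from above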
Example #3
import json

from peewee import BooleanField, ForeignKeyField, IntegerField, TextField
from playhouse.migrate import SqliteMigrator, migrate
from zhlib import zh

# `db`, `Constants`, and `OldVersionException` are assumed to come from the
# application's own modules.

if __name__ == '__main__':
    migrator = SqliteMigrator(zh.database)
    migrate(migrator.drop_index('sentence', 'sentence_chinese'),
            migrator.add_index('sentence', ('sentence', 'pinyin'), True))
def upgrade(version):
    if version < 16:
        raise OldVersionException(
            f"Upgrade code for this version is >1 year old and is no longer in the active codebase to reduce clutter. You can still find this code on github, contact me for more info."
        )
    elif version == 16:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("location", "unit_size_unit",
                                    TextField(default="ft")))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 17:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("polygon", "open_polygon",
                                    BooleanField(default=False)),
                migrator.add_column("polygon", "line_width",
                                    IntegerField(default=2)),
            )
            db.execute_sql(
                "INSERT INTO polygon (shape_id, line_width, vertices, open_polygon) SELECT shape_id, line_width, points, 1 FROM multi_line"
            )
            db.execute_sql("DROP TABLE multi_line")
            db.execute_sql(
                "UPDATE shape SET type_ = 'polygon' WHERE type_ = 'multiline'")
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 18:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(migrator.add_column("user", "email", TextField(null=True)))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 19:
        db.foreign_keys = False

        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "floor" ("id" INTEGER NOT NULL PRIMARY KEY, "location_id" INTEGER NOT NULL, "name" TEXT, "index" INTEGER NOT NULL, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
        )
        db.execute_sql(
            'INSERT INTO floor (location_id, name, "index") SELECT id, "ground", 0 FROM location'
        )

        with db.atomic():
            db.execute_sql(
                "CREATE TEMPORARY TABLE _layer AS SELECT * FROM layer")
            db.execute_sql("DROP TABLE layer")
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "layer" ("id" INTEGER NOT NULL PRIMARY KEY, "floor_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "type_" TEXT NOT NULL, "player_visible" INTEGER NOT NULL, "player_editable" INTEGER NOT NULL, "selectable" INTEGER NOT NULL, "index" INTEGER NOT NULL, FOREIGN KEY ("floor_id") REFERENCES "floor" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'INSERT INTO layer (id, floor_id, name, type_, player_visible, player_editable, selectable, "index") SELECT _layer.id, floor.id, _layer.name, type_, player_visible, player_editable, selectable, _layer."index" FROM _layer INNER JOIN floor ON floor.location_id = _layer.location_id'
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 20:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("shape", "badge", IntegerField(default=1)),
                migrator.add_column("shape", "show_badge",
                                    BooleanField(default=False)),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 21:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("user", "invert_alt",
                                    BooleanField(default=False)))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 22:
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "marker" ("id" INTEGER NOT NULL PRIMARY KEY, "shape_id" TEXT NOT NULL, "user_id" INTEGER NOT NULL, "location_id" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape"("uuid") ON DELETE CASCADE, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE, FOREIGN KEY ("user_id") REFERENCES "user"("id") ON DELETE CASCADE)'
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 23:
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("shape_owner", "edit_access",
                                    BooleanField(default=True)),
                migrator.add_column("shape_owner", "vision_access",
                                    BooleanField(default=True)),
                migrator.add_column("shape", "default_edit_access",
                                    BooleanField(default=False)),
                migrator.add_column("shape", "default_vision_access",
                                    BooleanField(default=False)),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 24:
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'DELETE FROM "player_room" WHERE id IN (SELECT pr.id FROM "player_room" pr INNER JOIN "room" r ON r.id = pr.room_id WHERE r.creator_id = pr.player_id )'
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 25:
        # Move Room.dm_location and Room.player_location to PlayerRoom.active_location
        # Add PlayerRoom.role
        # Add order index on location
        from models import Location

        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column(
                    "player_room",
                    "active_location_id",
                    ForeignKeyField(
                        Location,
                        Location.id,
                        backref="players",
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
                migrator.add_column("player_room", "role",
                                    IntegerField(default=0)),
                migrator.add_column("location", "index",
                                    IntegerField(default=0)),
            )
            db.execute_sql(
                "UPDATE player_room SET active_location_id = (SELECT location.id FROM room INNER JOIN location ON room.id = location.room_id WHERE location.name = room.player_location AND room.id = player_room.room_id)"
            )
            db.execute_sql(
                "INSERT INTO player_room (role, player_id, room_id, active_location_id) SELECT 1, u.id, r.id, l.id FROM room r INNER JOIN user u ON u.id = r.creator_id INNER JOIN location l ON l.name = r.dm_location AND l.room_id = r.id"
            )
            db.execute_sql(
                "UPDATE location SET 'index' = (SELECT COUNT(*) + 1 FROM location l INNER JOIN room r WHERE location.room_id = r.id AND l.room_id = r.id AND l.'index' != 0) "
            )
            migrate(
                migrator.drop_column("room", "player_location"),
                migrator.drop_column("room", "dm_location"),
                migrator.add_not_null("player_room", "active_location_id"),
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 26:
        # Move Location settings to a separate LocationSettings table
        # Add a default_settings field to Room that refers to such a LocationSettings row
        from models import LocationOptions

        migrator = SqliteMigrator(db)

        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "location_options" ("id" INTEGER NOT NULL PRIMARY KEY, "unit_size" REAL DEFAULT 5, "unit_size_unit" TEXT DEFAULT "ft", "use_grid" INTEGER DEFAULT 1, "full_fow" INTEGER DEFAULT 0, "fow_opacity" REAL DEFAULT 0.3, "fow_los" INTEGER DEFAULT 0, "vision_mode" TEXT DEFAULT "triangle", "vision_min_range" REAL DEFAULT 1640, "vision_max_range" REAL DEFAULT 3281, "grid_size" INTEGER DEFAULT 50)'
            )
            migrate(
                migrator.add_column(
                    "location",
                    "options_id",
                    ForeignKeyField(
                        LocationOptions,
                        LocationOptions.id,
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
                migrator.add_column(
                    "room",
                    "default_options_id",
                    ForeignKeyField(
                        LocationOptions,
                        LocationOptions.id,
                        on_delete="CASCADE",
                        null=True,
                    ),
                ),
            )
            data = db.execute_sql(
                """SELECT l.id, r.id, l.unit_size, l.unit_size_unit, l.use_grid, l.full_fow, l.fow_opacity, l.fow_los, l.vision_mode, l.vision_min_range, l.vision_max_range, g.size AS grid_size
                FROM location l
                INNER JOIN room r
                INNER JOIN floor f ON f.id = (SELECT id FROM floor f2 WHERE f2.location_id = l.id LIMIT 1)
                INNER JOIN layer la
                INNER JOIN grid_layer g
                WHERE r.id = l.room_id AND la.floor_id = f.id AND la.name = 'grid' AND g.layer_id = la.id"""
            )
            room_options = {}
            descr = data.description
            mapping = {
                "unit_size": 0,
                "unit_size_unit": 1,
                "use_grid": 2,
                "full_fow": 3,
                "fow_opacity": 4,
                "fow_los": 5,
                "vision_mode": 6,
                "vision_min_range": 7,
                "vision_max_range": 8,
                "grid_size": 9,
            }
            default_row = [
                5, "ft", True, False, 0.3, False, "triangle", 1640, 3281, 50
            ]
            for row in data.fetchall():
                new_row = [
                    None, None, None, None, None, None, None, None, None, None
                ]

                if row[1] not in room_options:
                    room_options[row[1]] = db.execute_sql(
                        "INSERT INTO location_options (unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, grid_size) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                        default_row,
                    ).lastrowid
                    db.execute_sql(
                        f"UPDATE room SET default_options_id = {room_options[row[1]]} WHERE id = {row[1]}"
                    )
                for col, val in zip(descr, row):
                    if col[0] in ["id", "room_id"]:
                        continue
                    idx = mapping[col[0]]
                    if val != default_row[idx]:
                        new_row[idx] = val

                loc_id = db.execute_sql(
                    "INSERT INTO location_options (unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, grid_size) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                    new_row,
                ).lastrowid
                db.execute_sql(
                    f"UPDATE location SET options_id = {loc_id} WHERE id = {row[0]}"
                )

            migrate(
                migrator.add_not_null("room", "default_options_id"),
                migrator.drop_column("location", "unit_size"),
                migrator.drop_column("location", "unit_size_unit"),
                migrator.drop_column("location", "use_grid"),
                migrator.drop_column("location", "full_fow"),
                migrator.drop_column("location", "fow_opacity"),
                migrator.drop_column("location", "fow_los"),
                migrator.drop_column("location", "vision_mode"),
                migrator.drop_column("location", "vision_min_range"),
                migrator.drop_column("location", "vision_max_range"),
                migrator.drop_index("location", "location_room_id_name"),
            )
            db.execute_sql("DROP TABLE 'grid_layer'")

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 27:
        # Fix broken schemas from older save upgrades
        db.foreign_keys = False
        with db.atomic():
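            # SQLite's ALTER TABLE cannot change existing columns or constraints, so
            # each affected table is rebuilt: copy the rows to a temporary table, drop
            # the original, recreate it with the corrected schema and indexes, and
            # copy the rows back.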
            db.execute_sql(
                "CREATE TEMPORARY TABLE _floor AS SELECT * FROM floor")
            db.execute_sql("DROP TABLE floor")
            db.execute_sql(
                'CREATE TABLE "floor" ("id" INTEGER NOT NULL PRIMARY KEY, "location_id" INTEGER NOT NULL, "index" INTEGER NOT NULL, "name" TEXT NOT NULL, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "floor_location_id" ON "floor" ("location_id")'
            )
            db.execute_sql(
                'INSERT INTO floor (id, location_id, "index", name) SELECT id, location_id, "index", name FROM _floor'
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _label AS SELECT * FROM label")
            db.execute_sql("DROP TABLE label")
            db.execute_sql(
                'CREATE TABLE "label" ("uuid" TEXT NOT NULL PRIMARY KEY, "user_id" INTEGER NOT NULL, "category" TEXT, "name" TEXT NOT NULL, "visible" INTEGER NOT NULL, FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "label_user_id" ON "label" ("user_id")'
            )
            db.execute_sql(
                "INSERT INTO label (uuid, user_id, category, name, visible) SELECT uuid, user_id, category, name, visible FROM _label"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _layer AS SELECT * FROM layer")
            db.execute_sql("DROP TABLE layer")
            db.execute_sql(
                'CREATE TABLE "layer" ("id" INTEGER NOT NULL PRIMARY KEY, "floor_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "type_" TEXT NOT NULL, "player_visible" INTEGER NOT NULL, "player_editable" INTEGER NOT NULL, "selectable" INTEGER NOT NULL, "index" INTEGER NOT NULL, FOREIGN KEY ("floor_id") REFERENCES "floor" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "layer_floor_id" ON "layer" ("floor_id")'
            )
            db.execute_sql(
                'CREATE UNIQUE INDEX "layer_floor_id_index" ON "layer" ("floor_id", "index")'
            )
            db.execute_sql(
                'CREATE UNIQUE INDEX "layer_floor_id_name" ON "layer" ("floor_id", "name")'
            )
            db.execute_sql(
                'INSERT INTO layer (id, floor_id, name, type_, player_visible, player_editable, selectable, "index") SELECT id, floor_id, name, type_, player_visible, player_editable, selectable, "index" FROM _layer'
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _location AS SELECT * FROM location")
            db.execute_sql("DROP TABLE location")
            db.execute_sql(
                'CREATE TABLE "location" ("id" INTEGER NOT NULL PRIMARY KEY, "room_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "options_id" INTEGER, "index" INTEGER NOT NULL, FOREIGN KEY ("room_id") REFERENCES "room" ("id") ON DELETE CASCADE, FOREIGN KEY ("options_id") REFERENCES "location_options" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_room_id" ON "location" ("room_id")'
            )
            db.execute_sql(
                'INSERT INTO location (id, room_id, name, options_id, "index") SELECT id, room_id, name, options_id, "index" FROM _location'
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _location_options AS SELECT * FROM location_options"
            )
            db.execute_sql("DROP TABLE location_options")
            db.execute_sql(
                'CREATE TABLE "location_options" ("id" INTEGER NOT NULL PRIMARY KEY, "unit_size" REAL, "unit_size_unit" TEXT, "use_grid" INTEGER, "full_fow" INTEGER, "fow_opacity" REAL, "fow_los" INTEGER, "vision_mode" TEXT, "grid_size" INTEGER, "vision_min_range" REAL, "vision_max_range" REAL)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_options_id" ON "location" ("options_id")'
            )
            db.execute_sql(
                "INSERT INTO location_options (id, unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, grid_size, vision_min_range, vision_max_range) SELECT id, unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, grid_size, vision_min_range, vision_max_range FROM _location_options"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _location_user_option AS SELECT * FROM location_user_option"
            )
            db.execute_sql("DROP TABLE location_user_option")
            db.execute_sql(
                'CREATE TABLE "location_user_option" ("id" INTEGER NOT NULL PRIMARY KEY, "location_id" INTEGER NOT NULL, "user_id" INTEGER NOT NULL, "pan_x" INTEGER NOT NULL, "pan_y" INTEGER NOT NULL, "zoom_factor" REAL NOT NULL, "active_layer_id" INTEGER, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE, FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE, FOREIGN KEY ("active_layer_id") REFERENCES "layer" ("id"))'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_user_option_location_id" ON "location_user_option" ("location_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_user_option_active_layer_id" ON "location_user_option" ("active_layer_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_user_option_user_id" ON "location_user_option" ("user_id")'
            )
            db.execute_sql(
                'CREATE UNIQUE INDEX "location_user_option_location_id_user_id" ON "location_user_option" ("location_id", "user_id")'
            )
            db.execute_sql(
                "INSERT INTO location_user_option (id, location_id, user_id, pan_x, pan_y, zoom_factor, active_layer_id) SELECT id, location_id, user_id, pan_x, pan_y, zoom_factor, active_layer_id FROM _location_user_option"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _marker AS SELECT * FROM marker")
            db.execute_sql("DROP TABLE marker")
            db.execute_sql(
                'CREATE TABLE "marker" ("id" INTEGER NOT NULL PRIMARY KEY, "shape_id" TEXT NOT NULL, "user_id" INTEGER NOT NULL, "location_id" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE, FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "marker_location_id" ON "marker" ("location_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "marker_shape_id" ON "marker" ("shape_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "marker_user_id" ON "marker" ("user_id")'
            )
            db.execute_sql(
                "INSERT INTO marker (id, shape_id, user_id, location_id) SELECT id, shape_id, user_id, location_id FROM _marker"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _player_room AS SELECT * FROM player_room"
            )
            db.execute_sql("DROP TABLE player_room")
            db.execute_sql(
                'CREATE TABLE "player_room" ("id" INTEGER NOT NULL PRIMARY KEY, "role" INTEGER NOT NULL, "player_id" INTEGER NOT NULL, "room_id" INTEGER NOT NULL, "active_location_id" INTEGER NOT NULL, FOREIGN KEY ("player_id") REFERENCES "user" ("id") ON DELETE CASCADE, FOREIGN KEY ("room_id") REFERENCES "room" ("id") ON DELETE CASCADE, FOREIGN KEY ("active_location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "player_room_active_location_id" ON "player_room" ("active_location_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "player_room_player_id" ON "player_room" ("player_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "player_room_room_id" ON "player_room" ("room_id")'
            )
            db.execute_sql(
                "INSERT INTO player_room (id, role, player_id, room_id, active_location_id) SELECT id, role, player_id, room_id, active_location_id FROM _player_room"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _polygon AS SELECT * FROM polygon")
            db.execute_sql("DROP TABLE polygon")
            db.execute_sql(
                'CREATE TABLE "polygon" ("shape_id" TEXT NOT NULL PRIMARY KEY, "vertices" TEXT NOT NULL, "line_width" INTEGER NOT NULL, "open_polygon" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)'
            )
            db.execute_sql(
                "INSERT INTO polygon (shape_id,vertices, line_width, open_polygon) SELECT shape_id,vertices, line_width, open_polygon FROM _polygon"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _room AS SELECT * FROM room")
            db.execute_sql("DROP TABLE room")
            db.execute_sql(
                'CREATE TABLE "room" ("id" INTEGER NOT NULL PRIMARY KEY, "name" TEXT NOT NULL, "creator_id" INTEGER NOT NULL, "invitation_code" TEXT NOT NULL, "is_locked" INTEGER NOT NULL, "default_options_id" INTEGER NOT NULL, FOREIGN KEY ("creator_id") REFERENCES "user" ("id") ON DELETE CASCADE, FOREIGN KEY ("default_options_id") REFERENCES "location_options" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "room_creator_id" ON "room" ("creator_id")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "room_default_options_id" ON "room" ("default_options_id")'
            )
            db.execute_sql(
                'CREATE UNIQUE INDEX "room_invitation_code" ON "room" ("invitation_code")'
            )
            db.execute_sql(
                'CREATE UNIQUE INDEX "room_name_creator_id" ON "room" ("name", "creator_id")'
            )
            db.execute_sql(
                "INSERT INTO room (id, name, creator_id, invitation_code, is_locked, default_options_id) SELECT id, name, creator_id, invitation_code, is_locked, default_options_id FROM _room"
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _shape AS SELECT * FROM shape")
            db.execute_sql("DROP TABLE shape")
            db.execute_sql(
                'CREATE TABLE "shape" ("uuid" TEXT NOT NULL PRIMARY KEY, "layer_id" INTEGER NOT NULL, "type_" TEXT NOT NULL, "x" REAL NOT NULL, "y" REAL NOT NULL, "name" TEXT, "name_visible" INTEGER NOT NULL, "fill_colour" TEXT NOT NULL, "stroke_colour" TEXT NOT NULL, "vision_obstruction" INTEGER NOT NULL, "movement_obstruction" INTEGER NOT NULL, "is_token" INTEGER NOT NULL, "annotation" TEXT NOT NULL, "draw_operator" TEXT NOT NULL, "index" INTEGER NOT NULL, "options" TEXT, "badge" INTEGER NOT NULL, "show_badge" INTEGER NOT NULL, "default_edit_access" INTEGER NOT NULL, "default_vision_access" INTEGER NOT NULL, FOREIGN KEY ("layer_id") REFERENCES "layer" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "shape_layer_id" ON "shape" ("layer_id")'
            )
            db.execute_sql(
                'INSERT INTO shape (uuid, layer_id, type_, x, y, name, name_visible, fill_colour, stroke_colour, vision_obstruction, movement_obstruction, is_token, annotation, draw_operator, "index", options, badge, show_badge, default_edit_access, default_vision_access) SELECT uuid, layer_id, type_, x, y, name, name_visible, fill_colour, stroke_colour, vision_obstruction, movement_obstruction, is_token, annotation, draw_operator, "index", options, badge, show_badge, default_edit_access, default_vision_access FROM _shape'
            )

            db.execute_sql(
                "CREATE TEMPORARY TABLE _user AS SELECT * FROM user")
            db.execute_sql("DROP TABLE user")
            db.execute_sql(
                'CREATE TABLE "user" ("id" INTEGER NOT NULL PRIMARY KEY, "name" TEXT NOT NULL, "email" TEXT, "password_hash" TEXT NOT NULL, "fow_colour" TEXT NOT NULL, "grid_colour" TEXT NOT NULL, "ruler_colour" TEXT NOT NULL, "invert_alt" INTEGER NOT NULL)'
            )
            db.execute_sql(
                "INSERT INTO user (id, name, email, password_hash, fow_colour, grid_colour, ruler_colour, invert_alt) SELECT id, name, email, password_hash, fow_colour, grid_colour, ruler_colour, invert_alt FROM _user"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 28:
        # Add invisibility toggle to shapes
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "ALTER TABLE shape ADD COLUMN is_invisible INTEGER NOT NULL DEFAULT 0"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 29:
        # Add movement access permission
        migrator = SqliteMigrator(db)

        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "ALTER TABLE shape ADD COLUMN default_movement_access INTEGER NOT NULL DEFAULT 0"
            )
            db.execute_sql(
                "ALTER TABLE shape_owner ADD COLUMN movement_access INTEGER")
            db.execute_sql(
                "UPDATE shape_owner SET movement_access = CASE WHEN edit_access = 0 THEN 0 ELSE 1 END"
            )

            migrate(migrator.add_not_null("shape_owner", "movement_access"), )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 30:
        # Add spawn locations
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                'ALTER TABLE location_options ADD COLUMN spawn_locations TEXT NOT NULL DEFAULT "[]"'
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 31:
        # Add shape movement lock
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "ALTER TABLE shape ADD COLUMN is_locked INTEGER NOT NULL DEFAULT 0"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 32:
        # Add Shape.angle and Shape.stroke_width
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "ALTER TABLE shape ADD COLUMN angle INTEGER NOT NULL DEFAULT 0"
            )
            db.execute_sql(
                "ALTER TABLE shape ADD COLUMN stroke_width INTEGER NOT NULL DEFAULT 2"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 33:
        # Add Floor.player_visible
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "ALTER TABLE floor ADD COLUMN player_visible INTEGER NOT NULL DEFAULT 1"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 34:
        # Fix Floor.index
        db.foreign_keys = False
        with db.atomic():
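            # Renumber floors per location: each floor's index becomes the number of
            # floors in the same location with an id at or below its own, minus one.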
            data = db.execute_sql("SELECT id FROM location")
            for location_id in data.fetchall():
                db.execute_sql(
                    f"UPDATE floor SET 'index' = (SELECT COUNT(*)-1 FROM floor f WHERE f.location_id = {location_id[0]} AND f.id <= floor.id ) WHERE location_id = {location_id[0]}"
                )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 35:
        # Move grid size to client options
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "CREATE TEMPORARY TABLE _location_options AS SELECT * FROM location_options"
            )
            db.execute_sql("DROP TABLE location_options")
            db.execute_sql(
                'CREATE TABLE "location_options" ("id" INTEGER NOT NULL PRIMARY KEY, "unit_size" REAL, "unit_size_unit" TEXT, "use_grid" INTEGER, "full_fow" INTEGER, "fow_opacity" REAL, "fow_los" INTEGER, "vision_mode" TEXT, "vision_min_range" REAL, "vision_max_range" REAL, "spawn_locations" TEXT NOT NULL DEFAULT "[]")'
            )
            db.execute_sql(
                'CREATE INDEX IF NOT EXISTS "location_options_id" ON "location" ("options_id")'
            )
            db.execute_sql(
                "INSERT INTO location_options (id, unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, spawn_locations) SELECT id, unit_size, unit_size_unit, use_grid, full_fow, fow_opacity, fow_los, vision_mode, vision_min_range, vision_max_range, spawn_locations FROM _location_options"
            )
            db.execute_sql(
                "ALTER TABLE user ADD COLUMN grid_size INTEGER NOT NULL DEFAULT 50"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version +
                               1).execute()
    elif version == 36:
        # Change polygon vertices format from { x: number, y: number } to number[]
        db.foreign_keys = False
        with db.atomic():
            data = db.execute_sql("SELECT shape_id, vertices FROM polygon")
            for row in data.fetchall():
                try:
                    vertices = json.loads(row[1])
                    if len(vertices) == 0 or isinstance(vertices[0], list):
                        continue
                    vertices = json.dumps([[v["x"], v["y"]] for v in vertices])
                    db.execute_sql(
                        f"UPDATE 'polygon' SET 'vertices' = '{vertices}' WHERE 'shape_id' = '{row[0]}'"
                    )
                except json.decoder.JSONDecodeError:
                    print(f"Failed to update polygon vertices! {row}")

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 37:
        # Change shape.angle from integer field to float field
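        # SQLite cannot alter a column's declared type in place, so shape is
        # rebuilt with angle declared as REAL and the rows copied back.  The text
        # table is rebuilt too, dropping its old per-shape angle column.  Note
        # that the shape INSERT below selects 'index' in single quotes, which
        # SQLite evaluates as the string literal 'index' rather than the column;
        # version 39 cleans that up.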
        db.foreign_keys = False
        with db.atomic():
            db.execute_sql(
                "CREATE TEMPORARY TABLE _shape AS SELECT * FROM shape")
            db.execute_sql("DROP TABLE shape")
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "shape" ("uuid" TEXT NOT NULL PRIMARY KEY, "layer_id" INTEGER NOT NULL, "type_" TEXT NOT NULL, "x" REAL NOT NULL, "y" REAL NOT NULL, "name" TEXT, "name_visible" INTEGER NOT NULL, "fill_colour" TEXT NOT NULL, "stroke_colour" TEXT NOT NULL, "vision_obstruction" INTEGER NOT NULL, "movement_obstruction" INTEGER NOT NULL, "is_token" INTEGER NOT NULL, "annotation" TEXT NOT NULL, "draw_operator" TEXT NOT NULL, "index" INTEGER NOT NULL, "options" TEXT, "badge" INTEGER NOT NULL, "show_badge" INTEGER NOT NULL, "default_edit_access" INTEGER NOT NULL, "default_vision_access" INTEGER NOT NULL, is_invisible INTEGER NOT NULL DEFAULT 0, default_movement_access INTEGER NOT NULL DEFAULT 0, is_locked INTEGER NOT NULL DEFAULT 0, angle REAL NOT NULL DEFAULT 0, stroke_width INTEGER NOT NULL DEFAULT 2, FOREIGN KEY ("layer_id") REFERENCES "layer" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'CREATE INDEX "shape_layer_id" ON "shape" ("layer_id")')
            db.execute_sql(
                "INSERT INTO shape (uuid, layer_id, type_, x, y, name, name_visible, fill_colour, stroke_colour, vision_obstruction, movement_obstruction, is_token, annotation, draw_operator, 'index', options, badge, show_badge, default_edit_access, default_vision_access, is_invisible, default_movement_access, is_locked, angle, stroke_width) SELECT uuid, layer_id, type_, x, y, name, name_visible, fill_colour, stroke_colour, vision_obstruction, movement_obstruction, is_token, annotation, draw_operator, 'index', options, badge, show_badge, default_edit_access, default_vision_access, is_invisible, default_movement_access, is_locked, angle, stroke_width FROM _shape"
            )
            db.execute_sql(
                "CREATE TEMPORARY TABLE _text AS SELECT * FROM text")
            db.execute_sql("DROP TABLE text")
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "text" ("shape_id" TEXT NOT NULL PRIMARY KEY, "text" TEXT NOT NULL, "font" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE);'
            )
            db.execute_sql(
                "INSERT INTO text (shape_id, text, font) SELECT shape_id, text, font FROM _text"
            )

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 38:
        # Change polygon vertices format from { x: number, y: number } to number[]
        # (re-run of version 36 with correctly quoted identifiers)
        db.foreign_keys = False
        with db.atomic():
            data = db.execute_sql("SELECT shape_id, vertices FROM polygon")
            for row in data.fetchall():
                try:
                    vertices = json.loads(row[1])
                    if len(vertices) == 0 or isinstance(vertices[0], list):
                        continue
                    vertices = json.dumps([[v["x"], v["y"]] for v in vertices])
                    db.execute_sql(
                        f"UPDATE polygon SET vertices = '{vertices}' WHERE shape_id = '{row[0]}'"
                    )
                except json.decoder.JSONDecodeError:
                    print(f"Failed to update polygon vertices! {row}")

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 39:
        # Fix Shape.index being set to 'index'
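        # Version 37's table copy set every shape's index to the literal string
        # 'index'; re-number the shapes of each layer sequentially to restore
        # usable indices.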
        from models import Layer

        db.foreign_keys = False
        with db.atomic():
            for layer in Layer.select():
                shapes = layer.shapes.select()
                for i, shape in enumerate(shapes):
                    shape.index = i
                    shape.save()

        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    else:
        raise UnknownVersionException(
            f"No upgrade code for save format {version} was found.")
Example #5
class ItinerumCSVParser(object):
    '''
    Parses Itinerum platform csv files and loads them to a cache database.

    :param database: Open Peewee connection to the cache database
    '''
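    # Typical use (the `database` handle is whatever the surrounding tool
    # passes in; the paths are illustrative):
    #
    #   parser = ItinerumCSVParser(database)
    #   parser.load_export_survey_responses('./input')
    #   parser.load_export_coordinates('./input')
    #   parser.load_export_prompt_responses('./input')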
    def __init__(self, database):
        self.db = database
        self._migrator = SqliteMigrator(database.db)
        self.cancelled_prompt_responses_csv = 'cancelled_prompts.csv'
        self.coordinates_csv = 'coordinates.csv'
        self.prompt_responses_csv = 'prompt_responses.csv'
        self.survey_responses_csv = 'survey_responses.csv'
        # attach common functions
        self.load_subway_stations = _load_subway_stations

    def _get(self, row, key, cast_func=None):
        value = row.get(key)
        if value:
            value = value.strip()
            if cast_func:
                return cast_func(value)
            return value

    # read .csv file, apply filter and yield row
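    # A filter_func, when given, receives the dict built from one row and
    # returns the (possibly transformed) dict that gets yielded, e.g.
    # (hypothetical): lambda row: {k.strip(): v for k, v in row.items()}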
    @staticmethod
    def _row_generator(csv_fp, filter_func=None):
        with open(csv_fp, 'r', encoding='utf-8-sig') as csv_f:
            # use zip() below instead of DictReader for speed
            reader = csv.reader(csv_f)
            headers = next(reader)

            # generate dictionaries to insert and apply row filter if exists
            for row in reader:
                dict_row = dict(zip(headers, row))
                yield filter_func(dict_row) if filter_func else dict_row

    def generate_null_survey(self, input_dir):
        '''
        Wrapper function to generate null survey responses for each user in coordinates.

        :param input_dir: Directory containing input .csv data
        '''
        _generate_null_survey(input_dir, self.coordinates_csv)

    def load_export_survey_responses(self, input_dir):
        '''
        Loads Itinerum survey responses data to the cache database.

        :param input_dir: The directory containing the `self.survey_responses_csv` data file.
        '''
        survey_responses_fp = os.path.join(input_dir,
                                           self.survey_responses_csv)
        survey_responses_rows = self._row_generator(
            survey_responses_fp, _survey_response_row_filter)
        self.db.bulk_insert(UserSurveyResponse, survey_responses_rows)

    def load_export_coordinates(self, input_dir):
        '''
        Loads Itinerum coordinates data to the cache database.

        :param input_dir: The directory containing the `self.coordinates_csv` data file.
        '''
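        # The user_id index is dropped before the bulk insert and re-created
        # afterwards, presumably to avoid paying for index maintenance on
        # every inserted row.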
        logger.info("Loading coordinates .csv to db...")
        migrate(self._migrator.drop_index(Coordinate, 'coordinate_user_id'))
        coordinates_fp = os.path.join(input_dir, self.coordinates_csv)
        coordinates_rows = self._row_generator(coordinates_fp,
                                               _coordinates_row_filter)
        self.db.bulk_insert(Coordinate, coordinates_rows)
        migrate(self._migrator.add_index('coordinates', ('user_id', ), False))

    def load_export_prompt_responses(self, input_dir):
        '''
        Loads Itinerum prompt responses data to the cache database. For each .csv row, the data
        is fetched by column name if it exists and cast to the appropriate types as set in the database.

        :param input_dir: The directory containing the `self.prompt_responses_csv` data file.
        '''
        logger.info("Loading prompt responses .csv to db...")
        migrate(
            self._migrator.drop_index(PromptResponse,
                                      'promptresponse_user_id'))
        prompt_responses_fp = os.path.join(input_dir,
                                           self.prompt_responses_csv)
        prompt_responses_rows = self._row_generator(prompt_responses_fp,
                                                    _prompts_row_filter)
        self.db.bulk_insert(PromptResponse, prompt_responses_rows)
        migrate(
            self._migrator.add_index('prompt_responses', ('user_id', ), False))

    def load_export_cancelled_prompt_responses(self, input_dir):
        '''
        Loads Itinerum cancelled prompt responses data to the cache database. For each .csv row, the data
        is fetched by column name if it exists and cast to the appropriate types as set in the database.

        :param input_dir: The directory containing the `self.cancelled_prompt_responses_csv` data file.
        '''
        logger.info("Loading cancelled prompt responses .csv to db...")
        cancelled_prompt_responses_fp = os.path.join(
            input_dir, self.cancelled_prompt_responses_csv)
        cancelled_prompt_responses_rows = self._row_generator(
            cancelled_prompt_responses_fp, _cancelled_prompts_row_filter)
        self.db.bulk_insert(CancelledPromptResponse,
                            cancelled_prompt_responses_rows)

    def load_trips(self, trips_csv_fp):
        '''
        Loads trips processed by the web platform itself. This is mostly useful for comparing current algorithm
        results against the deployed platform's version.

        :param trips_csv_fp: The full filepath of the downloaded trips `.csv` file for a survey.
        '''
        logger.info("Loading detected trips .csv to db...")
        DetectedTripCoordinate.drop_table()
        DetectedTripCoordinate.create_table()
        self.db.bulk_insert(DetectedTripCoordinate, trips_csv_fp,
                            _trips_row_filter)