def part_two(db: sqlite3.Connection) -> int:
    """Multiply the per-slope tree counts entirely inside SQLite.

    SQLite has no MUL() aggregate, so the product is emulated with
    exp(sum(log(val))):
    https://blog.jooq.org/2018/09/21/how-to-write-a-multiplication-aggregate-function-in-sql/
    """
    # LOG/EXP are not built into SQLite; back them with Python's math module.
    db.create_function("LOG", 1, math.log)
    db.create_function("EXP", 1, math.exp)
    query = """
        WITH slopes AS (
            SELECT 1 as R, 1 as D
            UNION SELECT 3 as R, 1 as D
            UNION SELECT 5 as R, 1 as D
            UNION SELECT 7 as R, 1 as D
            UNION SELECT 1 as R, 2 as D
        )
        SELECT CAST(ROUND(EXP(SUM(LOG(trees)))) as int)
        FROM (
            SELECT count(*) trees
            FROM tiles
            CROSS JOIN slopes
            WHERE tile = '#'
              AND x = (R * (y/D)) % (SELECT count(*) FROM tiles WHERE y = 0)
              AND y % D = 0
            GROUP BY R, D
        ) inn
    """
    # Connection.execute() opens an implicit cursor; one row, one column.
    (product,) = db.execute(query).fetchone()
    return product
def freeze_database_time(conn: Connection):
    """Pin SQLite's CURRENT_TIMESTAMP to "2020-01-01 01:01:01".

    Registers `_return_fake_timestamp` as a deterministic zero-argument
    function under the name CURRENT_TIMESTAMP on *conn*. Intended for
    testing only — do not call this on a production connection.
    """
    # deterministic=True lets SQLite use the function in indexes and
    # constant-fold it inside a single statement.
    conn.create_function(
        "CURRENT_TIMESTAMP", 0, _return_fake_timestamp, deterministic=True
    )
def update_from_13_to_14(db: sqlite3.Connection) -> None:  # pragma: no cover
    """Schema migration 13 -> 14: add and backfill entries.feed_order.

    feed_order is computed as the difference, in the units produced by
    `_datetime_to_us`, between the newest last_updated in the table and
    each row's own last_updated; rows where that difference is NULL get 0.
    """
    # `_datetime_to_us` is a Python helper exposed to SQL as a UDF.
    db.create_function('_datetime_to_us', 1, _datetime_to_us)
    db.execute("""
        ALTER TABLE entries
        ADD COLUMN feed_order INTEGER;
    """)
    db.execute("""
        UPDATE entries
        SET feed_order = COALESCE(
            (SELECT _datetime_to_us(MAX(last_updated)) FROM entries)
                - _datetime_to_us(last_updated),
            0
        );
    """)
def sql(self, conn: sqlite3.Connection = None):
    """Render this query's SQL fragment as a string.

    Starts from the stored predicate (`self._sql`, defaulting to the
    always-true "1") and appends ORDER BY / DESC / LIMIT / OFFSET clauses
    when the corresponding attributes are set. If the query uses a regex
    and a connection is supplied, the REGEXP function is registered on it.
    """
    clause = self._sql or "1"
    if self._order_by:
        clause += f" ORDER BY {self._order_by}"
        if self._desc:
            clause += " DESC"
    if self._limit:
        clause += f" LIMIT {self._limit}"
    if self._offset:
        clause += f" OFFSET {self._offset}"
    if self.has_regex and conn:
        # SQLite has no built-in REGEXP; bind our matcher on demand.
        conn.create_function("REGEXP", 2, self._regexp)
    return clause
def _query_items(db: sqlite3.Connection) -> pa.Table:
    """Return a table; raise sqlite3.ProgrammingError if queries fail."""
    db.create_function("comment_yaml_to_text", 1, comment_yaml_to_text)
    with contextlib.closing(db.cursor()) as cursor:
        cursor.execute(ITEMS_SQL)
        items = _cursor_to_table(cursor)

    labels_by_id = _query_team_status_labels_lookup(db)
    # dictionary_encode() makes the pylist take less RAM because strings aren't
    # duplicated. (Each duplicated Python string costs 50 bytes overhead.)
    raw_ids = items["item_status"].dictionary_encode().to_pylist()
    # Map each status id to its human label, keeping NULLs and falling back
    # to the raw id when no label is known.
    labels = pa.array(
        [None if sid is None else labels_by_id.get(sid, sid) for sid in raw_ids],
        pa.utf8(),
    ).dictionary_encode()

    status_index = items.column_names.index("item_status")
    return items.set_column(status_index, "item_status", labels)
def migrate(db: sqlite3.Connection, version: int) -> None:
    """Upgrade the nodes table from schema version 1 to 2.

    Rebuilds `nodes` with typed trashed/created/modified columns, converting
    the old status string and ISO timestamps via SQL UDFs, then recreates
    the indexes and bumps PRAGMA user_version to 2.

    NOTE(review): `version` is currently unused here — confirm whether the
    caller gates on it before invoking this migration.
    """
    # UDFs used by the INSERT...SELECT below.
    db.create_function('IS_TRASHED', 1, lambda status: status == 'TRASH')
    db.create_function('ISO_TO_INT', 1, lambda iso: arrow.get(iso).timestamp)
    statements = [
        'ALTER TABLE nodes RENAME TO old_nodes;',
        '''
            CREATE TABLE nodes (
                id TEXT NOT NULL,
                name TEXT,
                trashed BOOLEAN,
                created INTEGER,
                modified INTEGER,
                PRIMARY KEY (id),
                UNIQUE (id)
            );
        ''',
        '''
            INSERT INTO nodes
            (id, name, trashed, created, modified)
            SELECT id, name, IS_TRASHED(status), ISO_TO_INT(created), ISO_TO_INT(modified)
            FROM old_nodes
            ;''',
        'DROP TABLE old_nodes;',
        'CREATE INDEX ix_nodes_trashed ON nodes(trashed);',
        'CREATE INDEX ix_nodes_created ON nodes(created);',
        'CREATE INDEX ix_nodes_modified ON nodes(modified);',
        'PRAGMA user_version = 2;',
    ]
    with ReadWrite(db) as query:
        for statement in statements:
            query.execute(statement)
def _query_tasks(db: sqlite3.Connection) -> pa.Table:
    """Run TASKS_SQL against *db* and return the final result as a table.

    Registers the YAML/annotation-formatting UDFs the SQL relies on, then
    executes each statement in turn; the last statement's result set is
    what `_cursor_to_table` materializes.
    """
    udfs = (
        ("task_yaml_to_label", 1, build_task_yaml_to_label()),
        ("comment_yaml_to_text", 1, comment_yaml_to_text),
        ("format_dynamic_annotation_field_value", 5, format_dynamic_annotation_field_value),
    )
    for name, arity, fn in udfs:
        db.create_function(name, arity, fn)
    with contextlib.closing(db.cursor()) as cursor:
        # Earlier statements create indexes to speed up the full query; the
        # final statement is the one _cursor_to_table() iterates over.
        for statement in TASKS_SQL.split(";\n"):
            cursor.execute(statement)
        return _cursor_to_table(cursor)