def add(cursor: sqlite3.Cursor, solution: str, usages: str, keywords: _KeywordsType):
    cmd = 'INSERT INTO clues VALUES (?, ?, ?)'
    try:
        cursor.execute(cmd, (solution, usages, _format_keyword_counts(keywords)))
    except (sqlite3.OperationalError, sqlite3.IntegrityError):
        print(cmd, solution, usages, _format_keywords(keywords))
        raise
def load(self, cursor: sqlite3.Cursor) -> None:
    """Load the properties from the provided database cursor."""
    cursor.execute('select mu, sigma, games_count, wins_count from players '
                   'where name=? and ladder=?', [self.name, self.ranking.ladder])
    loaded = cursor.fetchone()
    logging.info('Fetched player %s skill %s', self.name, loaded['mu'])
    self.rating = self.ranking.tsh.create_rating(loaded['mu'], loaded['sigma'])
    self.games_count = loaded['games_count']
    self.wins_count = loaded['wins_count']
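# A minimal, self-contained sketch (assumed setup, not taken from this project): load() indexes
# the fetched row by column name, which with the sqlite3 module requires the connection's
# row_factory to be sqlite3.Row.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute('CREATE TABLE players (name TEXT, ladder TEXT, mu REAL, sigma REAL, '
            'games_count INTEGER, wins_count INTEGER)')
cur.execute('INSERT INTO players VALUES (?, ?, ?, ?, ?, ?)', ('ann', 'main', 25.0, 8.333, 0, 0))
row = cur.execute('select mu, sigma from players where name=? and ladder=?',
                  ['ann', 'main']).fetchone()
print(row['mu'], row['sigma'])  # 25.0 8.333 -- name-based access works because of sqlite3.Row
conn.close()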
async def import_characters(conn: asyncpg.Connection, c: sqlite3.Cursor, new_ids: Dict[int, int]):
    log.info("Importing characters...")
    # Dictionary that maps character names to their SQL ID
    old_ids = {}
    chars = []
    log.debug("Gathering character records from sqlite...")
    c.execute("""SELECT id, user_id, name, level, vocation, world, guild FROM chars ORDER BY id ASC""")
    rows = c.fetchall()
    for char_id, user_id, name, level, vocation, world, guild in rows:
        chars.append((user_id, name, level, vocation, world, guild))
        old_ids[name] = char_id
    log.debug(f"Collected {len(chars):,} records from old database.")
    log.info("Copying records to character table")
    res = await conn.copy_records_to_table("character", records=chars,
                                           columns=["user_id", "name", "level", "vocation", "world", "guild"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    new_chars = await conn.fetch('SELECT id, name FROM "character"')
    log.debug("Generating old id to new id mapping...")
    new_ids.clear()
    for new_id, name in new_chars:
        new_ids[old_ids[name]] = new_id
    log.debug("Old id to new id mapping generated.")
    ids = 1
    deaths = []
    killers = []
    log.debug("Gathering death records from sqlite...")
    c.execute("""SELECT char_id, level, killer, date, byplayer FROM char_deaths ORDER BY date ASC""")
    rows = c.fetchall()
    # This doesn't seem very safe to do, maybe it would be better to import deaths the old way
    for char_id, level, killer, date, byplayer in rows:
        byplayer = byplayer == 1
        date = datetime.datetime.fromtimestamp(date, datetime.timezone.utc)
        deaths.append((new_ids[char_id], level, date))
        killers.append((ids, killer, byplayer))
        ids += 1
    log.debug(f"Collected {len(deaths):,} records from old database.")
    log.info("Copying records to deaths table.")
    res = await conn.copy_records_to_table("character_death", records=deaths,
                                           columns=["character_id", "level", "date"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Copying records to death killers table.")
    res = await conn.copy_records_to_table("character_death_killer", records=killers,
                                           columns=["death_id", "name", "player"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.debug("Gathering level up records from sqlite...")
    c.execute("""SELECT char_id, level, date FROM char_levelups ORDER BY date ASC""")
    rows = c.fetchall()
    levelups = []
    for char_id, level, date in rows:
        date = datetime.datetime.fromtimestamp(date, datetime.timezone.utc)
        levelups.append((new_ids[char_id], level, date))
    log.debug(f"Collected {len(levelups):,} records from old database.")
    log.info("Copying records to level ups table.")
    res = await conn.copy_records_to_table("character_levelup", records=levelups,
                                           columns=["character_id", "level", "date"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Finished importing characters.")
def str2int(s: str, cursor: sqlite3.Cursor) -> int:
    """
    Query the database: check whether the string appears either in the phonology
    column or in the spelling column, and return the matching integer.

    :param s: input string
    :param cursor: database cursor
    :return: the integer associated with the input string, False otherwise
    """
    recherche = "SELECT integer FROM Lexique WHERE ortho = ? OR phon = ?"
    cursor.execute(recherche, (s,) * 2)
    row = cursor.fetchone()
    return row[0] if row is not None else False
async def import_ignored_channels(conn: asyncpg.Connection, c: sqlite3.Cursor):
    log.info("Importing ignored channels...")
    channels = []
    log.debug("Gathering ignored channels from sqlite...")
    c.execute("SELECT server_id, channel_id FROM ignored_channels")
    rows = c.fetchall()
    for server_id, channel_id in rows:
        channels.append((server_id, channel_id))
    log.debug(f"Collected {len(channels):,} records from old database.")
    log.info("Copying records to ignored channels table")
    res = await conn.copy_records_to_table("ignored_entry", records=channels,
                                           columns=["server_id", "entry_id"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Finished importing channels.")
def update_grammar_with_table_values(grammar_dictionary: Dict[str, List[str]],
                                     schema: Dict[str, List[TableColumn]],
                                     cursor: Cursor) -> None:
    for table_name, columns in schema.items():
        for column in columns:
            cursor.execute(f'SELECT DISTINCT {table_name}.{column.name} FROM {table_name}')
            results = [x[0] for x in cursor.fetchall()]
            if column_has_string_type(column):
                productions = sorted([f'"{str(result)}"' for result in results], reverse=True)
                grammar_dictionary["string"].extend(productions)
            elif column_has_numeric_type(column):
                productions = sorted([f'"{str(result)}"' for result in results], reverse=True)
                grammar_dictionary["number"].extend(productions)
async def import_events(conn: asyncpg.Connection, c: sqlite3.Cursor, new_char_ids: Dict[int, int]):
    log.info("Importing events...")
    events = []
    subscribers = []
    participants = []
    new_event_ids = {}
    i = 1
    log.debug("Gathering event records from sqlite...")
    c.execute("SELECT id, creator, name, start, active, status, description, server, joinable, slots FROM events")
    rows = c.fetchall()
    for event_id, creator, name, start, active, status, description, server, joinable, slots in rows:
        new_event_ids[event_id] = i
        start = datetime.datetime.fromtimestamp(start, datetime.timezone.utc)
        active = bool(active)
        joinable = bool(joinable)
        status = 4 - status
        events.append((creator, name, start, active, description, server, joinable, slots, status))
        i += 1
    log.debug(f"Collected {len(events):,} records from old database.")
    log.info("Copying records to events table")
    res = await conn.copy_records_to_table("event", records=events,
                                           columns=["user_id", "name", "start", "active", "description",
                                                    "server_id", "joinable", "slots", "reminder"])
    log.debug(f"Copied {get_affected_count(res):,} records successfully.")
    log.debug("Gathering event subscribers from sqlite...")
    c.execute("SELECT event_id, user_id FROM event_subscribers")
    rows = c.fetchall()
    for event_id, user_id in rows:
        subscribers.append((new_event_ids[event_id], user_id))
    log.debug(f"Collected {len(subscribers):,} records from old database.")
    log.info("Copying records to event subscribers table")
    res = await conn.copy_records_to_table("event_subscriber", records=subscribers,
                                           columns=["event_id", "user_id"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.debug("Gathering event participants from sqlite...")
    c.execute("SELECT event_id, char_id FROM event_participants")
    rows = c.fetchall()
    for event_id, char_id in rows:
        participants.append((new_event_ids[event_id], new_char_ids[char_id]))
    log.debug(f"Collected {len(participants):,} records from old database.")
    log.info("Copying records to event participants table")
    res = await conn.copy_records_to_table("event_participant", records=participants,
                                           columns=["event_id", "character_id"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Finished importing events.")
def query(cursor: sqlite3.Cursor, clue: str) -> List[Tuple[str, int, _KeywordsType]]:
    results = []
    cmd = """
    SELECT solution, usages, keywords FROM clues WHERE keywords LIKE ?
    """
    like = '%' + _format_keywords_query(clue_keywords(clue)) + '%'
    for solution, usages, keywords in cursor.execute(cmd, (like,)):
        keywords = _query_keywords(keywords)
        results.append((solution, usages, keywords))
    return results
async def import_server_properties(conn: asyncpg.Connection, c: sqlite3.Cursor):
    properties = []
    prefixes = []
    times = []
    log.debug("Gathering server property records from sqlite...")
    log.info("Importing server properties...")
    c.execute("SELECT server_id, name, value FROM server_properties")
    rows = c.fetchall()
    for server_id, key, value in rows:
        server_id = int(server_id)
        if key == "prefixes":
            prefixes.append((server_id, json.loads(value)))
            continue
        if key == "times":
            value = json.loads(value)
            for entry in value:
                times.append((server_id, entry["timezone"], entry["name"]))
            continue
        elif key in ["events_channel", "levels_channel", "news_channel", "welcome_channel",
                     "ask_channel", "announce_channel", "announce_level"]:
            value = int(value)
        elif key in ["watched_message", "watched_channel"]:
            continue
        elif key == "commandsonly":
            value = bool(value)
        properties.append((server_id, key, value))
    log.debug(f"Collected {len(properties):,} properties, {len(times):,} timezones and {len(prefixes):,} prefixes"
              f" from old database.")
    log.info("Copying records to server property table")
    res = await conn.copy_records_to_table("server_property", records=properties,
                                           columns=["server_id", "key", "value"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Copying records to server prefixes table")
    res = await conn.copy_records_to_table("server_prefixes", records=prefixes,
                                           columns=["server_id", "prefixes"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Copying records to server timezone table")
    res = await conn.copy_records_to_table("server_timezone", records=times,
                                           columns=["server_id", "zone", "name"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Finished importing server properties.")
def _Check_Table_Fields(self, cursor: Cursor, table_name: str, table_info: tuple) -> bool:
    """
    _Check_Table_Fields(cursor: Cursor, table_name: str, table_info: tuple) -> bool

    Check whether the table structure matches the expected one.

    :param cursor: database cursor
    :param table_name: table name
    :param table_info: expected table structure (columns in left-to-right order)
    :return: bool
    """
    sql = f'''PRAGMA TABLE_INFO({table_name})'''
    result = cursor.execute(sql)
    temp_name, temp_type = [], []
    for temp in result:
        temp_name.append(temp[1])
        temp_type.append(temp[2])
    return (tuple(temp_name), tuple(temp_type)) == table_info
def _Exist_Table(self, cursor: Cursor, table_name: str) -> bool:
    """
    _Exist_Table(cursor: Cursor, table_name: str) -> bool

    Check whether the table exists.

    :param cursor: database cursor
    :param table_name: table name
    :return: bool
    """
    sql = '''SELECT name FROM sqlite_master WHERE type='table' AND name=? COLLATE NOCASE;'''
    data = (table_name,)
    result = cursor.execute(sql, data)
    try:
        next(result)
        return True
    except StopIteration:
        return False
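# Self-contained sketch (assumed demo table, not from the original project) of the
# sqlite_master existence check used by _Exist_Table; COLLATE NOCASE makes the lookup
# case-insensitive.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE demo (id INTEGER PRIMARY KEY)')
cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=? COLLATE NOCASE;", ('DEMO',))
print(cur.fetchone() is not None)  # True -- 'DEMO' matches 'demo' case-insensitively
conn.close()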
def get_table_info(self, cursor_name: str, table_name: str, cursor: sqlite3.Cursor = None):
    if not cursor or not isinstance(cursor, sqlite3.Cursor):
        if cursor_name not in self.env:
            raise ValueError
        cursor = self.env[cursor_name]
        if not isinstance(cursor, sqlite3.Cursor):
            raise TypeError
    index_list = [
        data[1] for data in cursor.execute('PRAGMA index_list({});'.format(table_name)).fetchall()
    ]
    indexed_list = {
        cursor.execute('pragma index_info({});'.format(col)).fetchone()[2]
        for col in index_list
    }
    table = DatabaseTableInfo()
    table.tableName = table_name
    cursor.execute("PRAGMA table_info('{}')".format(table_name))
    for col_info in cursor.fetchall():
        column = DatabaseColumnInfo()
        column.columnName = col_info[1]
        column.columnType = col_info[2]
        column.notNull = bool(col_info[3])
        column.default = col_info[4] or '<span class="null">null</span>'
        column.defaultRaw = col_info[4]
        column.primaryKey = bool(col_info[5])
        column.indexed = column.columnName in indexed_list
        table.columns.append(column)
    cursor.execute('SELECT COUNT(*) FROM {}'.format(table_name))
    table.rowCount = cursor.fetchone()[0]
    return table
def rename_assets_in_db(cursor: Cursor, rename_pairs: List[Tuple[str, str]]) -> None:
    """
    Renames assets in all the relevant tables in the Database.

    Takes a list of tuples in the form:
    [(from_name_1, to_name_1), (from_name_2, to_name_2), ...]

    Good from DB version 1 until now.
    """
    # [(to_name_1, from_name_1), (to_name_2, from_name_2), ...]
    changed_symbols = [(e[1], e[0]) for e in rename_pairs]
    cursor.executemany(
        'UPDATE multisettings SET value=? WHERE value=? and name="ignored_asset";',
        changed_symbols,
    )
    rename_assets_in_timed_balances(cursor, rename_pairs)

    replaced_symbols = [e[0] for e in rename_pairs]
    replaced_symbols_q = ['pair LIKE "%' + s + '%"' for s in replaced_symbols]
    query_str = (
        f'SELECT id, pair, fee_currency FROM trades WHERE fee_currency IN '
        f'({",".join("?"*len(replaced_symbols))}) OR ('
        f'{" OR ".join(replaced_symbols_q)})'
    )
    cursor.execute(query_str, replaced_symbols)
    updated_trades = []
    for q in cursor:
        new_pair = q[1]
        for rename_pair in rename_pairs:
            from_asset = rename_pair[0]
            to_asset = rename_pair[1]
            if from_asset not in q[1] and from_asset != q[2]:
                # It's not this rename pair
                continue
            if from_asset in q[1]:
                new_pair = q[1].replace(from_asset, to_asset)
            new_fee_currency = q[2]
            if from_asset == q[2]:
                new_fee_currency = to_asset
            updated_trades.append((new_pair, new_fee_currency, q[0]))
    cursor.executemany(
        'UPDATE trades SET pair=?, fee_currency=? WHERE id=?',
        updated_trades,
    )
def procedure_corpus2(corpus, cursor: sqlite3.Cursor, debug: bool = False):
    for (j, (classes, sequence)) in corpus:
        if debug:
            print("procedure: ", len(corpus) - j, sequence, file=stderr)
        add_feature_cmd = "INSERT OR IGNORE INTO Features(feature) VALUES (?);"
        add_classes_cmd = "INSERT OR IGNORE INTO Classes(classe) VALUES (?);"
        cursor.execute('BEGIN TRANSACTION;')
        cursor.execute(add_feature_cmd, (sequence,))
        cursor.executemany(add_classes_cmd, zip(classes))
        generate_regex(
            OptimString(sequence, Point('.'), pointee=None, control=None),
            classes=classes,
            cursor=cursor
        )
def _update_tags(cursor: sqlite3.Cursor, bookmarks: List[Bookmark]) -> None:
    for bookmark in bookmarks:
        cursor.execute(
            "SELECT name FROM tag WHERE bookmarkId IS ?",
            (str(bookmark.id), ),
        )
        old_tags = cursor.fetchall()
        if old_tags and {tag[0] for tag in old_tags} != set(bookmark.tags):
            cursor.execute(
                "DELETE FROM tag WHERE bookmarkId IS ?",
                (str(bookmark.id), ),
            )
            cursor.executemany(
                "INSERT INTO tag (name, bookmarkId) VALUES (?, ?)",
                [(tag, str(bookmark.id)) for tag in bookmark.tags],
            )
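# Minimal sketch of the delete-then-bulk-insert replacement pattern used by _update_tags,
# with a hypothetical stand-in for the Bookmark objects (only an id and a tag list).
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE tag (name TEXT, bookmarkId TEXT)')
bookmark_id, new_tags = '42', ['python', 'sqlite']
cur.execute('DELETE FROM tag WHERE bookmarkId IS ?', (bookmark_id,))
cur.executemany('INSERT INTO tag (name, bookmarkId) VALUES (?, ?)',
                [(tag, bookmark_id) for tag in new_tags])
print(cur.execute('SELECT name FROM tag WHERE bookmarkId IS ?', (bookmark_id,)).fetchall())
# [('python',), ('sqlite',)]
conn.close()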
def get_db_info(self, cursor_name: str, cursor: sqlite3.Cursor = None) -> DatabaseInfo:
    if not cursor or not isinstance(cursor, sqlite3.Cursor):
        if cursor_name not in self.env:
            raise ValueError
        cursor = self.env[cursor_name]
        if not isinstance(cursor, sqlite3.Cursor):
            raise TypeError
    result = DatabaseInfo()
    table_names = cursor.execute(
        "SELECT name FROM sqlite_master WHERE type='table';").fetchall()
    table_names = [table[0] for table in table_names]
    for table_name in table_names:
        result.tables.append(self.get_table_info('', table_name, cursor))
    return result
def all_guest_appearances_by_id(cur: sqlite3.Cursor, guest_id: int) -> List[Tuple[int, int]]:
    '''Finds all appearances of a given guest id

    Args:
        cur (Cursor): DB cursor
        guest_id (int): Guest id in the DB

    Returns:
        List[Tuple[int, int]]: List of appearances (tuples) in the format:
            (show_id, episode_num)
    '''
    return cur.execute(
        '''
        select show, episode from appearances
        where appearances.guest_id in (
            select guest_id from guests where guests.guest_id = ?
        )
        order by episode desc
        ''', (guest_id, )).fetchall()
def get_cache_game(cur: sqlite3.Cursor, get_total: bool = False) -> list:
    '''Get the cached free games.'''
    cur.execute('SELECT COUNT(*) FROM "cache"')
    total = cur.fetchone()[0]
    print(f'There are {total} free games in the cache')
    if get_total:
        return total
    appids = []
    for i in range(0, total + 1, 1000):
        cur.execute('SELECT "appid" FROM "cache" LIMIT ?,1000', (i,))
        t = [x[0] for x in cur.fetchall()]
        appids.extend(t)
    return appids
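# Self-contained sketch (assumed appid-only table) of the "LIMIT offset,1000" paging loop used
# by get_cache_game and get_owned_game below.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE "cache" (appid INTEGER)')
cur.executemany('INSERT INTO "cache" VALUES (?)', [(i,) for i in range(2500)])
cur.execute('SELECT COUNT(*) FROM "cache"')
total = cur.fetchone()[0]
appids = []
for offset in range(0, total + 1, 1000):
    cur.execute('SELECT "appid" FROM "cache" LIMIT ?,1000', (offset,))
    appids.extend(x[0] for x in cur.fetchall())
print(len(appids))  # 2500 -- every page of at most 1000 rows is collected
conn.close()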
def test_v1_to_v2(test_name: str, input, expected, db: sqlite3.Connection, cursor: sqlite3.Cursor):
    print(test_name)
    cursor.execute("CREATE TABLE mod (" +
                   "id TEXT UNIQUE, " +
                   "site TEXT, " +
                   "site_id TEXT, " +
                   "site_slug TEXT, " +
                   "upload_time INTEGER, " +
                   "active INTEGER)")
    cursor.execute(
        "INSERT INTO mod (id, site, site_id, site_slug, upload_time, active) VALUES (?, ?, ?, ?, ?, ?)",
        input)
    db.commit()
    upgrader = SqliteUpgrader(db, cursor)
    upgrader._v1_to_v2()
    cursor.execute("SELECT * FROM mod")
    result = cursor.fetchall()
    assert [expected] == result
def _create_schema(self, c: sqlite3.Cursor) -> None:
    self._create_schemaversion(c)
    c.execute(
        """create table if not exists Deployments(
               uuid text primary key
           );"""
    )
    c.execute(
        """create table if not exists DeploymentAttrs(
               deployment text not null,
               name text not null,
               value text not null,
               primary key(deployment, name),
               foreign key(deployment) references Deployments(uuid) on delete cascade
           );"""
    )
    c.execute(
        """create table if not exists Resources(
               id integer primary key autoincrement,
               deployment text not null,
               name text not null,
               type text not null,
               foreign key(deployment) references Deployments(uuid) on delete cascade
           );"""
    )
    c.execute(
        """create table if not exists ResourceAttrs(
               machine integer not null,
               name text not null,
               value text not null,
               primary key(machine, name),
               foreign key(machine) references Resources(id) on delete cascade
           );"""
    )
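# Note (an assumption about the calling code, which is not shown here): sqlite3 only enforces the
# "on delete cascade" clauses above when foreign-key support is enabled on the connection.
# A minimal, self-contained sketch of that behaviour:
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('PRAGMA foreign_keys = ON')  # off by default in the sqlite3 module
cur = conn.cursor()
cur.execute('create table if not exists Deployments(uuid text primary key);')
cur.execute('''create table if not exists DeploymentAttrs(
    deployment text not null, name text not null, value text not null,
    primary key(deployment, name),
    foreign key(deployment) references Deployments(uuid) on delete cascade);''')
cur.execute("insert into Deployments values ('d1')")
cur.execute("insert into DeploymentAttrs values ('d1', 'k', 'v')")
cur.execute("delete from Deployments where uuid = 'd1'")
print(cur.execute('select count(*) from DeploymentAttrs').fetchone()[0])  # 0 -- the attrs row was cascaded away
conn.close()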
def get_account_address(cursor: sqlite3.Cursor, account: str) -> str:
    cursor.execute('insert or ignore into account_wallet(account) values(?)', (account, ))
    cursor.execute(
        'select address from account_address where account == ? and time > ?',
        (account, int(time.time()) - 60 * 60 * 24 * 7))
    address = cursor.fetchone()
    if address is None:
        address = coinrpc.call('getnewaddress')
        cursor.execute(
            'insert into account_address(account, address, time) values(?, ?, ?)',
            (account, address, int(time.time())))
    else:
        address = address[0]
    return address
def get_owned_game(cur: sqlite3.Cursor, bot: str, get_total: bool = False) -> list:
    '''Get the free games the bot already owns.'''
    cur.execute('SELECT COUNT(*) FROM "owned" WHERE bot=?', (bot,))
    total = cur.fetchone()[0]
    print(f'{bot} already owns {total} free games')
    if get_total:
        return total
    appids = []
    for i in range(0, total + 1, 1000):
        cur.execute('SELECT "appid" FROM "owned" WHERE bot=? LIMIT ?,1000', (bot, i))
        t = [x[0] for x in cur.fetchall()]
        appids.extend(t)
    return appids
def inner_insert_node(query: sqlite3.Cursor, node: Node) -> None:
    # add this node
    query.execute('''
        INSERT OR REPLACE INTO nodes
        (id, name, trashed, created, modified)
        VALUES
        (?, ?, ?, ?, ?)
    ;''', (node.id_, node.name, node.trashed, node.created.timestamp, node.modified.timestamp))

    # add file information
    if not node.is_folder:
        query.execute('''
            INSERT OR REPLACE INTO files
            (id, md5, size)
            VALUES
            (?, ?, ?)
        ;''', (node.id_, node.md5, node.size))

    # add parentage
    if node.parents:
        query.execute('''
            DELETE FROM parentage
            WHERE child=?
        ;''', (node.id_, ))
        for parent in node.parents:
            query.execute('''
                INSERT OR REPLACE INTO parentage
                (parent, child)
                VALUES
                (?, ?)
            ;''', (parent, node.id_))
def all_episode_guests(cur: sqlite3.Cursor, show: str, ep_num: int) -> List[Tuple[int, str]]:
    '''Gets all guests that were on a given episode

    Args:
        cur (Cursor): DB cursor.
        show (str): Show name.
        ep_num (int): Episode number.

    Returns:
        List[Tuple[int, str]]: List of guests (tuples) in the format:
            (guest_id, name)
    '''
    show_id = utils.get_show_id(show)
    return cur.execute(
        '''
        select guest_id, name from guests
        where guest_id in (
            select guest_id from appearances
            where appearances.show = ? and appearances.episode = ?
        )
        ''', (show_id, ep_num)).fetchall()
def guest_name_search(cur: sqlite3.Cursor, search_str: str) -> List[Dict]:
    '''Queries the database for guests whose name contains the given string

    Args:
        cur (Cursor): DB cursor
        search_str (str): Search query

    Returns:
        List of dicts with the format:
        {
            id: guest_id
            name: guest_name
        }
    '''
    wildcard_name = f'%{search_str}%'
    all_results = cur.execute(
        '''
        select guest_id, name from guests
        where name like ?
        ''', (wildcard_name, )).fetchall()
    return [{'id': res[0], 'name': res[1]} for res in all_results]
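# Hedged usage sketch for guest_name_search, assuming a guests(guest_id, name) table shaped
# the way the query above implies.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE guests (guest_id INTEGER PRIMARY KEY, name TEXT)')
cur.executemany('INSERT INTO guests VALUES (?, ?)', [(1, 'Kyle'), (2, 'Woody')])
print(guest_name_search(cur, 'ky'))  # [{'id': 1, 'name': 'Kyle'}] -- LIKE is case-insensitive for ASCII
conn.close()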
def store_transformation_into_DB(transID: int, baseNode: Node, addedNodes: List[Node],
                                 replacedNodes: List[Node], cursor: sqlite3.Cursor,
                                 fullScopeName: str) -> None:
    """A helper function to store nodes transformations into database.

    Args:
        transID: int. The ID for this stored transformation.
        baseNode: Node. The base node that changes its operands.
        addedNodes: List[Node]. A list of added nodes in this transformation.
        replacedNodes: List[Node]. A list of replaced nodes in this transformation.
        cursor: sqlite3.Cursor. Cursor of the sqlite3 database.
        fullScopeName: str. The full scope name of this transformation.
    """
    cursor.execute(
        """INSERT INTO Log_Transformation VALUES (
            ?, 'OPERATOR_BASE', ?, ?, ?
        )""", (transID, baseNode.get_name(), baseNode.get_kind_name(), fullScopeName))
    for an in addedNodes:
        cursor.execute(
            """INSERT INTO Log_Transformation VALUES (
                ?, 'ADD_OPERAND', ?, ?, ?
            )""", (transID, an.get_name(), an.get_kind_name(), fullScopeName))
    for rn in replacedNodes:
        cursor.execute(
            """INSERT INTO Log_Transformation VALUES (
                ?, 'REMOVE_OPERAND', ?, ?, ?
            )""", (transID, rn.get_name(), rn.get_kind_name(), fullScopeName))
def hmp_write_techmax(cur: sqlite3.Cursor, newmax: int):
    try:
        # enable writing
        cur.execute("insert into serial_prikaz(cas, prikaz, typ_dotazu, iswrite) values(?, ?, ?, 1)",
                    (datetime.now(), "FF A5 00 3F 03 19 E1 00 00 F4 2B", APP_TYP_DOTAZU.HMP_ZAPIS.value))
        # write techmax
        hx = telegram_create(Jednotka.HMP.value, Telegram.BEZNY, b'\xA2\x22\x00' + int_to_bytes_data(newmax))
        cur.execute("insert into serial_prikaz(cas, prikaz, typ_dotazu, iswrite) values(?, ?, ?, 1)",
                    (datetime.now(), conv_hex_to_str(hx), APP_TYP_DOTAZU.HMP_ZAPIS.value))
        # revoke write permission
        cur.execute("insert into serial_prikaz(cas, prikaz, typ_dotazu, iswrite) values(?, ?, ?, 1)",
                    (datetime.now(), "FF A5 00 3F 03 19 E1 00 00 F0 2F", APP_TYP_DOTAZU.HMP_ZAPIS.value))
    except Exception as e:
        print("hmp_write_techmax(): Ex: {}".format(str(e)), flush=True)
    hmp_call_techmax(cur)
def geo_locate(cursor: sqlite3.Cursor):
    """Generate location data for each city then feed to location_cache table"""
    cursor.execute('''DELETE FROM location_cache''')  # Scrub previous results to start over
    geo_code = Nominatim(user_agent="capstone_project")
    cursor.execute("""SELECT location FROM combined_jobs""")
    jobs = cursor.fetchall()  # Set to .fetchall once development is complete
    for location in jobs:
        try:
            full_loc = geo_code.geocode(location[0])
            print(location[0])
            cursor.execute(
                """INSERT INTO location_cache(location, latitude, longitude) VALUES (?,?,?)""",
                (location[0], full_loc.latitude, full_loc.longitude))
        except AttributeError as err:
            print(err)
        except sqlite3.IntegrityError as err:
            print(err)
def encode_flower_data(flower_id, since_time, cursor: sqlite3.Cursor):
    print(f'Device: {flower_id}, since_time: {since_time}')
    query = 'SELECT * FROM readings WHERE flower_id = ? AND timestamp > ?'
    rows = cursor.execute(query, (flower_id, since_time))
    rows = [row for row in rows]
    print(rows)
    timestamps = [timestamp for (_, _, _, timestamp) in rows]
    lights = [light for (_, light, _, _) in rows]
    moistures = [moisture for (_, _, moisture, _) in rows]
    print(f'Timestamps: {timestamps}')
    print(f'Lights: {lights}')
    print(f'Moistures: {moistures}')
    response = plantcare_pb2.Response()
    response.flower_data.flower_id = flower_id
    response.flower_data.measurement_timestamps.extend(timestamps)
    response.flower_data.light_measurements.extend(lights)
    response.flower_data.moisture_measurements.extend(moistures)
    response.flower_data.watering_timestamps.append(45)
    return response
def fetch_all_jobs(cursor: sqlite3.Cursor):
    jobs_list = []
    actual_jobs = cursor.execute("SELECT * FROM api_jobs")
    for job in actual_jobs:
        new_job_object = {
            "job_id": job[0],
            "company": job[1],
            "company_logo": job[2],
            "company_url": job[3],
            "created_at": job[4],
            "description": job[5],
            "how_to_apply": job[6],
            "location": job[7] if job[7] else None,
            "title": job[8],
            "type": job[9],
            "url": job[10],
            "lat": None,
            "long": None
        }
        jobs_list.append(new_job_object)
    return jobs_list
def get_event_by_id(cur: sqlite3.Cursor, event_id: int) -> Dict:
    '''Get an event based on its id

    Args:
        cur (Cursor): Database cursor
        event_id: Id of event

    Returns:
        dict: Dict representing an event
    '''
    event = cur.execute(
        '''
        select event_id, show, episode, timestamp, description from events
        where event_id = ?
        ''', (event_id, )).fetchone()
    return {
        'id': event[0],
        'show': 'PKA' if event[1] == 1 else 'PKN',
        'episode': event[2],
        'timestamp': event[3],
        'description': event[4],
    }
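# Hedged usage sketch for get_event_by_id, assuming an events table shaped like the query above
# (show 1 maps to 'PKA', anything else to 'PKN').
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE events (event_id INTEGER PRIMARY KEY, show INTEGER, '
            'episode INTEGER, timestamp INTEGER, description TEXT)')
cur.execute('INSERT INTO events VALUES (?, ?, ?, ?, ?)', (7, 1, 300, 5400, 'Intro bit'))
print(get_event_by_id(cur, 7))
# {'id': 7, 'show': 'PKA', 'episode': 300, 'timestamp': 5400, 'description': 'Intro bit'}
conn.close()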
def analyse_exam(cursor: Cursor, exam_id: int) -> dict:
    '''
    Gives all stats about the exam

    Args:
        cursor (Cursor): sqlite3 Cursor object
        exam_id (int): the unique id of the exam

    Returns:
        dict: all the stats
    '''
    logger.info('Analyzing exam with id %s', exam_id)
    cursor.execute(f'SELECT AVG(marks) FROM marks WHERE exam={exam_id}')
    average = cursor.fetchone()[0]
    cursor.execute(f'SELECT MAX(marks) FROM marks WHERE exam={exam_id}')
    highest = cursor.fetchone()[0]
    return {'average': average, 'highest': highest}
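# Hedged usage sketch for analyse_exam, assuming a marks(exam, marks) table matching the
# queries above and a module-level logger that is already configured.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE marks (exam INTEGER, marks REAL)')
cur.executemany('INSERT INTO marks VALUES (?, ?)', [(1, 60.0), (1, 80.0), (1, 70.0)])
print(analyse_exam(cur, 1))  # {'average': 70.0, 'highest': 80.0}
conn.close()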
def fetch_all_jobs_with_lat_long(cursor: sqlite3.Cursor, actual_jobs):
    jobs_list_with_updated_location = []
    for job in actual_jobs:
        if job['location'] is not None:
            converted_city = convert_city(job['location'])
            if converted_city.split()[0] != 'remote':
                cache_city = cursor.execute(
                    "SELECT * FROM api_jobs_location_cache WHERE location = ?",
                    (converted_city, ))
                lat_and_long = cache_city.fetchone()
                if lat_and_long is not None:
                    lat_from_cache = lat_and_long[1]
                    long_from_cache = lat_and_long[2]
                else:
                    lat_from_cache = None
                    long_from_cache = None
                job['lat'] = lat_from_cache
                job['long'] = long_from_cache
        jobs_list_with_updated_location.append(job)
    return jobs_list_with_updated_location
def get_balance(self, client_id: int, db_cur: sqlite3.Cursor) -> float:
    """
    Select the user's balance from the database by client id.
    The pending request is a tuple, which contains the client's balance in the first position.

    :param db_cur: Object - Cursor to the database.
    :param client_id: Integer - Id to find user in the database.
    :return: Float - User's balance from the database
    """
    try:
        result = db_cur.execute(
            f'SELECT BALANCE FROM BALANCES WHERE CLIENTS_CLIENT_ID={client_id}'
        ).fetchone()
    except sqlite3.Error as e:
        raise Exception(
            f'An error occurred while accessing the service "Getting client\'s balance from the '
            f'database": {e}')
    assert result
    try:
        return result[0]
    except IndexError as e:
        raise Exception(
            f'An error occurred while accessing the service "Getting client\'s balance from the '
            f'database", got empty response: {e}')
def execute(cursor: sqlite3.Cursor, sql: str, tableInfo: blockSQL.sql.tool.table_info_module.TableInfo,
            hashFunc=hashlib.sha256, timeFunc=time.time) -> sqlite3.Cursor:
    sql = blockSQL.sql.sql_module.string_module.removeHeadNoise(sql)
    sql = blockSQL.sql.sql_module.string_module.removeHeadNoise(sql, '\n')
    sql = blockSQL.sql.sql_module.string_module.removeHeadNoise(sql)
    funcName = blockSQL.sql.sql_module.findFunc(sql)
    if funcName == "CREATE TABLE":
        tableName = blockSQL.sql.sql_module.getInfo_create(sql)
        blockSQL.sql.currentTable_module.create(cursor, sql, tableName)
        blockSQL.sql.historyTable_module.create(cursor, tableName)
    elif funcName == "INSERT INTO":
        tableName, columns = blockSQL.sql.sql_module.getInfo_insert(sql, tableInfo)
        blockSQL.sql.currentTable_module.insert(cursor, sql, tableName)
        blockSQL.sql.historyTable_module.insert(cursor, tableName, columns, timeFunc)
        blockSQL.sql.block_module.insert(cursor, tableName, hashFunc, timeFunc)
        blockSQL.sql.currentTable_module.done(cursor, tableName)
    elif funcName == "UPDATE":
        tableName, columns, where = blockSQL.sql.sql_module.getInfo_update(sql, tableInfo)
        blockSQL.sql.currentTable_module.update(cursor, tableName, where, sql)
        blockSQL.sql.historyTable_module.insert(cursor, tableName, columns, timeFunc)
        blockSQL.sql.block_module.insert(cursor, tableName, hashFunc, timeFunc)
        blockSQL.sql.currentTable_module.done(cursor, tableName)
    else:
        return cursor.execute(sql)
    return cursor
def inner_delete_node_by_id(query: sqlite3.Cursor, node_id: Text) -> None:
    # disconnect parents
    query.execute('''
        DELETE FROM parentage
        WHERE child=? OR parent=?
    ;''', (node_id, node_id))

    # remove from files
    query.execute('''
        DELETE FROM files
        WHERE id=?
    ;''', (node_id, ))

    # remove from nodes
    query.execute('''
        DELETE FROM nodes
        WHERE id=?
    ;''', (node_id, ))
def sql_update_setting(guild: int, setting: str, value: str,
                       cursor: sqlite3.Cursor, connection: sqlite3.Connection) -> bool:
    sql = """SELECT value from settings WHERE guildID == ? and setting = ?;"""
    cursor.execute(sql, (guild, setting))
    results = cursor.fetchall()
    update = False
    for result in results:
        if value in result:
            raise SettingAlreadySet()
        update = True
    if update:
        sql = """UPDATE settings SET value = ? WHERE setting = ? AND guildID = ?;"""
        cursor.execute(sql, (value, setting, guild))
    else:
        sql = """INSERT INTO settings (guildID, setting, value) VALUES (?, ?, ?);"""
        cursor.execute(sql, (guild, setting, value))
    connection.commit()
    return True
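# Hedged usage sketch for sql_update_setting, assuming a settings(guildID, setting, value)
# table; SettingAlreadySet is only raised when the exact value is already stored.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE settings (guildID INTEGER, setting TEXT, value TEXT)')
sql_update_setting(1234, 'prefix', '!', cur, conn)   # no existing row -> INSERT
sql_update_setting(1234, 'prefix', '?', cur, conn)   # existing different value -> UPDATE
print(cur.execute('SELECT value FROM settings WHERE guildID = 1234').fetchall())  # [('?',)]
conn.close()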
def write_intron_feature_annotation(c: sqlite3.Cursor, intron_feature: str, genes: str,
                                    intron_feature_name: str) -> None:
    query = str(
        "select sample_type, all_map_sample_count, all_map_sample_pct " +
        " from intron_sample_type_counts " +
        " where intron = ? and db_class = ? and sample_type != 'total' " +
        " order by all_map_sample_pct desc")

    TCGA_vals = list()
    c.execute(query, (intron_feature, "TCGA"))
    rows = c.fetchall()
    for row in rows:
        (sample_type, sample_count, sample_fraction) = row
        sample_info = ":".join([
            sample_type,
            str(sample_count),
            "{:.2f}".format(sample_fraction * 100)
        ])
        TCGA_vals.append(sample_info)
    if not TCGA_vals:
        TCGA_vals = ["NA"]

    GTEx_vals = list()
    c.execute(query, (intron_feature, "GTEx"))
    rows = c.fetchall()
    for row in rows:
        (sample_type, sample_count, sample_fraction) = row
        sample_info = ":".join([
            sample_type,
            str(sample_count),
            "{:.2f}".format(sample_fraction * 100)
        ])
        GTEx_vals.append(sample_info)
    if not GTEx_vals:
        GTEx_vals = ["NA"]

    print("\t".join([
        intron_feature, genes,
        ",".join(TCGA_vals),
        ",".join(GTEx_vals),
        intron_feature_name
    ]))
    return
def req_list(cur: Cursor = None) -> dict:
    """Query the list of player applications."""
    page: int = int(request.args["page"])
    page_size: int = int(request.args.get("pageSize") or "10")
    cur.execute("""
        SELECT COUNT(*) FROM apply_player
    """)
    count: int = cur.fetchone()[0]
    page = max(1, min(page, math.ceil(count / page_size)))
    cur.execute(
        """
        SELECT * FROM apply_player ORDER BY req_time DESC LIMIT %d OFFSET %d
        """, (page_size, (page - 1) * page_size))
    res = cur.fetchall()
    data = [
        {
            "playerName": r[0],
            "password": r[1],  # TODO: stop returning this to the frontend once accounts are created automatically
            "reqTime": r[2],
            "applyTime": r[3],
            "applyOP": r[4],
            "status": r[5],
            "type": r[6],
            "ip": r[7],
            "qq": r[8],
            "oldPlayerName": r[9],
            "opName": r[10]
        }
        for r in res
    ]
    return success(pager_data(page, count, data, page_size))
async def import_roles(conn: asyncpg.Connection, c: sqlite3.Cursor):
    log.info("Importing roles...")
    auto_roles = []
    joinable_roles = []
    log.debug("Gathering auto roles from sqlite...")
    c.execute("SELECT server_id, role_id, guild FROM auto_roles")
    rows = c.fetchall()
    for server_id, role_id, guild in rows:
        auto_roles.append((server_id, role_id, guild))
    log.debug(f"Collected {len(auto_roles):,} records from old database.")
    log.info("Copying records to auto roles table")
    res = await conn.copy_records_to_table("role_auto", records=auto_roles,
                                           columns=["server_id", "role_id", "rule"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.debug("Gathering joinable roles from sqlite...")
    c.execute("SELECT server_id, role_id FROM joinable_roles")
    rows = c.fetchall()
    for server_id, role_id in rows:
        joinable_roles.append((server_id, role_id))
    log.debug(f"Collected {len(joinable_roles):,} records from old database.")
    log.info("Copying records to joinable roles table")
    res = await conn.copy_records_to_table("role_joinable", records=joinable_roles,
                                           columns=["server_id", "role_id"])
    log.info(f"Copied {get_affected_count(res):,} records successfully.")
    log.info("Finished importing roles.")
def __init__(self, db_):
    self.db_gate = connect(db_)
    Cursor.__init__(self, self.db_gate)
def clean_up_old_db(c: sqlite3.Cursor):
    log.info("Cleaning up old database")

    # Clean up characters
    c.execute("SELECT min(id), name as id FROM chars GROUP BY name HAVING COUNT(*) > 1")
    rows = c.fetchall()
    log.debug("Removing duplicate characters...")
    for char_id, name in rows:
        c.execute("""UPDATE char_levelups SET char_id = ?
                     WHERE char_id IN (SELECT id FROM chars WHERE name = ? ORDER BY id LIMIT 1)""",
                  (char_id, name))
        c.execute("""UPDATE char_deaths SET char_id = ?
                     WHERE char_id IN (SELECT id FROM chars WHERE name = ? ORDER BY id LIMIT 1)""",
                  (char_id, name))
        c.execute("""UPDATE event_participants SET char_id = ?
                     WHERE char_id IN (SELECT id FROM chars WHERE name = ? ORDER BY id LIMIT 1)""",
                  (char_id, name))
        c.execute("DELETE FROM chars WHERE name = ? AND id != ?", (name, char_id))
    log.info(f"Removed {len(rows):,} duplicate characters")

    # Clean up deaths
    log.debug("Removing duplicate deaths...")
    c.execute("""DELETE FROM char_deaths
                 WHERE rowid NOT IN (SELECT min(rowid) FROM char_deaths GROUP BY char_id, date)""")
    log.info(f"Removed {c.rowcount:,} duplicate deaths")
    log.debug("Removing orphaned deaths...")
    c.execute("""DELETE FROM char_deaths
                 WHERE char_id NOT IN (SELECT id FROM chars)""")
    log.info(f"Removed {c.rowcount:,} orphaned deaths")

    # Clean up level ups
    log.debug("Removing duplicate level ups...")
    c.execute("""SELECT min(rowid), min(date), max(date)-min(date) as diff, count() as c, char_id, level
                 FROM char_levelups
                 GROUP BY char_id, level
                 HAVING c > 1 AND diff < 30""")
    rows = c.fetchall()
    count = 0
    for rowid, date, diff, _count, char_id, level in rows:
        c.execute("""DELETE FROM char_levelups
                     WHERE rowid != ? AND char_id = ? AND level = ? AND date-30 < ? AND date+30 > ?""",
                  (rowid, char_id, level, date, date))
        count += c.rowcount
    log.info(f"Removed {count:,} duplicate level ups")
    log.debug("Removing orphaned level ups...")
    c.execute("""DELETE FROM char_levelups
                 WHERE char_id NOT IN (SELECT id FROM chars)""")
    log.info(f"Removed {c.rowcount:,} orphaned levelups")

    # Clean up event participants
    log.debug("Removing duplicate event participants...")
    c.execute("""DELETE FROM event_participants
                 WHERE rowid NOT IN (SELECT min(rowid) FROM event_participants GROUP BY event_id, char_id)""")
    log.info(f"Removed {c.rowcount:,} duplicate event participants")
    log.debug("Removing orphaned event participants...")
    c.execute("""DELETE FROM event_participants
                 WHERE char_id NOT IN (SELECT id FROM chars)""")
    log.info(f"Removed {c.rowcount:,} orphaned event participants")

    # Clean up event subscribers
    log.debug("Removing duplicate event subscribers...")
    c.execute("""DELETE FROM event_subscribers
                 WHERE rowid NOT IN (SELECT min(rowid) FROM event_subscribers GROUP BY event_id, user_id)""")
    log.info(f"Removed {c.rowcount:,} duplicate event subscribers")

    # Remove server properties
    log.debug("Removing duplicate server properties...")
    c.execute("""DELETE FROM server_properties
                 WHERE rowid NOT IN (SELECT min(rowid) FROM server_properties GROUP BY server_id, name)""")
    log.info(f"Removed {c.rowcount:,} duplicate server properties")
def save(self, cursor: sqlite3.Cursor) -> None:
    """Save the properties to the provided database cursor."""
    cursor.execute('update players set mu=?, sigma=?, games_count=?, '
                   'wins_count=? where name=? and ladder=?',
                   [self.rating.mu, self.rating.sigma, self.games_count,
                    self.wins_count, self.name, self.ranking.ladder])
def setup(c: sqlite3.Cursor) -> None:
    for q in QUERIES:
        c.execute(q)
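# Hedged sketch of how setup() is meant to be used; the QUERIES tuple below is an assumption
# standing in for the project's real DDL statements, which are not shown here.
import sqlite3

QUERIES = (
    'CREATE TABLE IF NOT EXISTS example (id INTEGER PRIMARY KEY, name TEXT)',
    'CREATE INDEX IF NOT EXISTS idx_example_name ON example (name)',
)

conn = sqlite3.connect(':memory:')
setup(conn.cursor())
print(conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall())  # [('example',)]
conn.close()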