def user_from_table(connection: sqlite3.Connection, username: str) -> "User":
    """Return the (username, password) row for *username*, or None if absent.

    Args:
        connection: open SQLite connection; it is always closed before
            returning.
        username: name of the user to look up.

    Raises:
        WrongDataBaseNameException: when *connection* is None.
    """
    try:
        if connection is None:
            raise WrongDataBaseNameException(
                'Неправильное название базы данных.')
        cursor = connection.cursor()
        # One parameterized query is enough: fetchone() returns None when no
        # row matches. (The original issued a preliminary SELECT and tested
        # `fetchall() is None`, which never triggers -- fetchall() returns an
        # empty list, not None.)
        cursor.execute(
            """SELECT username, password FROM "users" WHERE username = ?""",
            (username,))
        return cursor.fetchone()
    except sqlite3.Error as e:
        print(e)
    finally:
        # Guard against connection being None: closing it unconditionally
        # raised AttributeError and masked WrongDataBaseNameException.
        if connection is not None:
            connection.close()
def fetch_unprocessed(self):
    """Claim one unprocessed row.

    Picks a single row whose status is 'unprocessed', flips its status to
    'processing', and returns it as a tuple (id, a, b, c, d, e, f, g, h).
    Returns None when no unprocessed row exists.
    """
    db = Connection(self.dbname)
    cur = db.cursor()
    cur.execute(
        """
        SELECT * FROM 'data'
        WHERE status = 'unprocessed'
        LIMIT 1
        """)
    row = cur.fetchone()
    if row is not None:
        # Mark the row as claimed before handing it out.
        cur.execute(
            """
            UPDATE data
            SET status = 'processing'
            WHERE id = :id
            """,
            {'id': row[0]},
        )
        db.commit()
    db.close()
    if row is not None:
        return row[:9]
def show_all_created_tables_content(connection: sqlite3.Connection) -> None:
    """Print every user-created table in the database together with its rows.

    Args:
        connection: open SQLite connection; it is always closed before
            returning.

    Raises:
        WrongDataBaseNameException: when *connection* is None.
    """
    try:
        if connection is None:
            raise WrongDataBaseNameException(
                'Неправильное название базы данных. Введите существующую')
        cursor = connection.cursor()
        cursor.execute("""SELECT name FROM sqlite_master WHERE type='table'""")
        tables_name = [table[0] for table in cursor.fetchall()]
        print("Список всех созданных таблиц и их содержание:")
        for table in tables_name:
            # Identifiers cannot be bound as parameters; quote the name so
            # unusual table names cannot break the statement. (Names come
            # from sqlite_master, so they are not attacker-controlled.)
            cursor.execute(f'SELECT * FROM "{table}"')
            print(table)
            for e, content in enumerate(cursor.fetchall()):
                print(' ', e + 1, ' -- ', sep='', end='')
                print(*content, sep=', ')
        # NOTE: the original committed here; SELECTs change nothing, so the
        # commit was dropped.
    except sqlite3.Error as e:
        print(e)
    finally:
        # Guard: closing a None connection raises AttributeError and masks
        # the WrongDataBaseNameException raised above.
        if connection is not None:
            connection.close()
def _update_dispersy(self):
    """Remove all SearchCommunity and MetadataCommunity data from the
    dispersy database, if that database exists on disk."""
    db_path = os.path.join(self.state_dir, u"sqlite", u"dispersy.db")
    if not os.path.isfile(db_path):
        return

    doomed = (u"SearchCommunity", u"MetadataCommunity")
    connection = Connection(db_path)
    cursor = connection.cursor()

    dirty = False
    for classification in doomed:
        try:
            rows = list(cursor.execute(
                u"SELECT id FROM community WHERE classification == ?",
                (classification,)))
            for community_id, in rows:
                self._logger.info(
                    u"deleting all data for community %s...", community_id)
                cursor.execute(u"DELETE FROM community WHERE id == ?",
                               (community_id,))
                cursor.execute(u"DELETE FROM meta_message WHERE community == ?",
                               (community_id,))
                cursor.execute(u"DELETE FROM sync WHERE community == ?",
                               (community_id,))
                dirty = True
        except StopIteration:
            continue

    # Only commit when something was actually deleted.
    if dirty:
        connection.commit()
    cursor.close()
    connection.close()
def edit_info_in_table(connection: sqlite3.Connection, table_name: str,
                       _id: int, data: dict) -> None:
    """Update columns of one record in *table_name*.

    Args:
        connection: open SQLite connection; always closed before returning.
        table_name: table whose record is edited (lower-cased before use).
        _id: primary-key (`id` column) value of the record to change.
        data: mapping of column name -> new value.

    Raises:
        WrongDataBaseNameException: when *connection* is None.
        TypeError: when table_name/_id/data have the wrong type.
    """
    try:
        if connection is None:
            raise WrongDataBaseNameException(
                'Неправильное название базы данных. Введите существующую')
        if not isinstance(table_name, str):
            raise TypeError('Название таблицы может быть только строковое')
        if not isinstance(_id, int):
            raise TypeError('Идентификатор записи в таблице должен быть числом')
        if not isinstance(data, dict):
            raise TypeError('данные должны быть в формате словаря')
        cursor = connection.cursor()
        # Bind the values as parameters instead of interpolating them into
        # the SQL text: the original `{col} = "{value}"` form broke on values
        # containing double quotes and was injectable. Column names cannot be
        # bound as parameters, so they are still interpolated.
        assignments = ', '.join(f'{column} = ?' for column in data)
        cursor.execute(
            f"""UPDATE {table_name.lower()} SET {assignments} WHERE id = ?""",
            (*data.values(), _id))
        connection.commit()
    except sqlite3.Error as e:
        print(e)
    finally:
        # Guard: closing a None connection would raise AttributeError and
        # mask the WrongDataBaseNameException raised above.
        if connection is not None:
            connection.close()
def save_crashstats_packages(statsid: int, packages: List[str],
                             con: sqlite3.Connection = None) -> None:
    """Record the packages belonging to one crash-stats task.

    Each package NVRA is parsed; unparsable names are skipped. A row is
    inserted into `packages` when the (name, version-release) pair is new,
    and the package is linked to *statsid* through `packages_tasks`.

    Args:
        statsid: id of the task row the packages belong to.
        packages: RPM package names to record.
        con: optional open DB connection; when omitted a fresh one is opened
            and closed again before returning.
    """
    opened_here = con is None
    if opened_here:
        con = init_crashstats_db()

    cursor = con.cursor()
    for package in packages:
        pkgdata = parse_rpm_name(package)
        if pkgdata["name"] is None:
            continue

        ver = "%s-%s" % (pkgdata["version"], pkgdata["release"])
        cursor.execute("SELECT id FROM packages WHERE name = ? AND version = ?",
                       (pkgdata["name"], ver))
        row = cursor.fetchone()
        if row:
            pkgid = row[0]
        else:
            cursor.execute("INSERT INTO packages (name, version) VALUES (?, ?)",
                           (pkgdata["name"], ver))
            pkgid = cursor.lastrowid
        cursor.execute(
            """
            INSERT INTO packages_tasks (taskid, pkgid)
            VALUES (?, ?)
            """, (statsid, pkgid))

    con.commit()
    if opened_here:
        con.close()
def _update_dispersy(self):
    """Delete every SearchCommunity / MetadataCommunity record (and its
    meta_message / sync rows) from the session's dispersy database."""
    db_path = os.path.join(self.session.get_state_dir(), u"sqlite", u"dispersy.db")
    if not os.path.isfile(db_path):
        return

    connection = Connection(db_path)
    cursor = connection.cursor()

    data_updated = False
    for name in (u"SearchCommunity", u"MetadataCommunity"):
        try:
            ids = [row[0] for row in cursor.execute(
                u"SELECT id FROM community WHERE classification == ?",
                (name,))]
            for cid in ids:
                self._logger.info(u"deleting all data for community %s...", cid)
                for stmt in (u"DELETE FROM community WHERE id == ?",
                             u"DELETE FROM meta_message WHERE community == ?",
                             u"DELETE FROM sync WHERE community == ?"):
                    cursor.execute(stmt, (cid,))
                data_updated = True
        except StopIteration:
            continue

    if data_updated:
        connection.commit()
    cursor.close()
    connection.close()
def _purge_old_search_metadata_communities(self):
    """Purge SearchCommunity, MetadataCommunity and TunnelCommunity data
    (community, meta_message and sync rows) from the dispersy database."""
    db_path = os.path.join(self.session.get_state_dir(), u"sqlite", u"dispersy.db")
    if not os.path.isfile(db_path):
        return

    connection = Connection(db_path)
    cursor = connection.cursor()

    for classification in (u"SearchCommunity", u"MetadataCommunity",
                           u"TunnelCommunity"):
        try:
            matches = list(cursor.execute(
                u"SELECT id FROM community WHERE classification == ?;",
                (classification,)))
            for community_id, in matches:
                cursor.execute(u"DELETE FROM community WHERE id == ?;",
                               (community_id,))
                cursor.execute(u"DELETE FROM meta_message WHERE community == ?;",
                               (community_id,))
                cursor.execute(u"DELETE FROM sync WHERE community == ?;",
                               (community_id,))
        except StopIteration:
            continue

    cursor.close()
    connection.commit()
    connection.close()
def selectByDateRange(prec_no, block_no, fromDate, toDate):
    """Return all WeatherInfo rows whose date lies in [fromDate, toDate].

    Returns a numpy array of the matching rows (1-D when a single row
    matched, 2-D otherwise, empty when nothing matched); returns None
    implicitly when an error occurred. prec_no / block_no are accepted for
    interface compatibility but are not used in the query.
    """
    __mylogger.info('selectByDateRange start!')
    # Defined up front so the except/finally blocks never see an unbound
    # name when Connection() itself raises.
    db = None
    try:
        db = Connection(dbFilePath)
        # Bind the dates as parameters; the original interpolated them into
        # the SQL text with str.format, which was injectable.
        statement = "SELECT * FROM WeatherInfo WHERE date BETWEEN ? AND ?"
        result = np.array([])
        for item in db.cursor().execute(statement, (fromDate, toDate)):
            if len(result) > 0:
                result = np.vstack([result, item])
            else:
                result = np.array(item)
        return result
    except Exception as ex:
        __mylogger.error('error has occured.')
        __mylogger.error(ex)
        if db is not None:
            db.rollback()
    finally:
        if db is not None:
            db.close()
        __mylogger.info('selectByDateRange ended')
def main(connection: Connection):
    """Drive the crawl: repeatedly take the next user from the DB, fetch all
    of their monthly game archives, store games and newly-seen users, and
    mark each user started/finished. Closes the connection when no users
    remain.
    """
    user, last_year, last_month = get_starting_user(connection)
    while user:
        print(f'Consuming {user}...')
        try:
            with connection:
                connection.execute('update users set started = 1 where id = ?',
                                   [user])
        except Exception as e:
            print(e)
            sys.exit(1)
        today = datetime.today()
        starting_year = last_year or 1999
        for year in range(starting_year, today.year + 1):
            # Resume from last_month only in the recorded year. (The original
            # evaluated `year > last_year` before the `not last_month` guard,
            # which raises TypeError whenever last_year is None.)
            if last_month and last_year and year == last_year:
                starting_month = last_month
            else:
                starting_month = 1
            for month in range(starting_month, 13):
                # Never look into the future.
                if year == today.year and month > today.month:
                    break
                print(f'Looking for {user} in {year}-{month}...')
                games = reversed(get_games(user, year, month))
                for game in games:
                    add_user(connection, game.black)
                    add_user(connection, game.white)
                    if game.id:
                        add_game(connection, game)
                update_user_progress(connection, user, year, month)
                cursor = connection.cursor()
                cursor.execute('select count(*) from games')
                count = cursor.fetchone()[0]
                print(
                    f'Finished {year}-{month}. There are now {count} games.\n'
                )
        print()
        count = count_games(connection, user)
        print(
            f'Finished consuming {user}. This user has {count} viewable games.'
        )
        try:
            with connection:
                connection.execute(
                    'update users set finished = 1 where id = ?', [user])
        except Exception as e:
            print(e)
            sys.exit(1)
        user, last_year, last_month = get_starting_user(connection)
    connection.close()
    print('Done!')
def _close_db(conn: sqlite3.Connection) -> None:
    """Close *conn*.

    Kept as a free function (rather than having callers invoke
    ``conn.close()`` directly) purely for testability: the ``close``
    attribute of the connection object is read-only and therefore cannot
    be patched, whereas this wrapper can.
    """
    conn.close()
class Graph(object):
    """A graph persisted in an SQLite database.

    :param uri: The URI of the SQLite db.
    :param graphs: Graphs to create.
    """

    def __init__(self, uri, graphs=()):
        self.uri = uri
        self.db = Connection(database=uri)
        self.setup_sql(graphs)

    def setup_sql(self, graphs):
        """Create the backing table plus its indexes for every graph in
        *graphs*, then commit.

        :param graphs: The graphs to create.
        """
        with closing(self.db.cursor()) as cur:
            for table in graphs:
                cur.execute(SQL.CREATE_TABLE % (table))
                for index in SQL.INDEXES:
                    cur.execute(index % (table))
            self.db.commit()

    def close(self):
        """Close the SQLite connection."""
        self.db.close()

    # Closing on garbage collection mirrors an explicit close().
    __del__ = close

    def __contains__(self, edge):
        """Return True when an edge with *edge*'s src/rel/dst exists.

        :param edge: The edge to query.
        """
        with closing(self.db.cursor()) as cur:
            cur.execute(*SQL.select_one(edge.src, edge.rel, edge.dst))
            return bool(cur.fetchall())

    def find(self, edge_query):
        """Return a Query object over this graph for *edge_query*."""
        return Query(self.db)(edge_query)

    def transaction(self):
        """Return a Transaction object; all modifying operations
        (``store``, ``delete``) must be performed through it."""
        return Transaction(self.db)
def importCsv(csvName):
    """Bulk-import the rows of the given weather CSV file into WeatherInfo.

    Rows that fail to insert are logged and skipped; the remainder are
    committed in one transaction. Errors before/after the insert loop are
    logged and trigger a rollback.
    """
    __mylogger.info('importing start!')
    # Defined up front so the except/finally blocks never see an unbound
    # name when read_csv or Connection() raises before db is assigned.
    db = None
    try:
        df = pd.read_csv(csvName, encoding='utf-8', parse_dates=True, header=0)

        # Force these INTEGER columns to dtype object: int64 values end up
        # stored in sqlite as BLOBs, and int8/int32 come back garbled, while
        # object columns are auto-detected and stored as plain ints.
        df['県番号'] = df['県番号'].astype('object')
        df['地区番号'] = df['地区番号'].astype('object')
        df['時刻'] = df['時刻'].astype('object')
        df['湿度(%)'] = df['湿度(%)'].astype('object')

        db = Connection(dbFilePath)
        # Statement is loop-invariant; build it once.
        statement = "INSERT INTO WeatherInfo VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) "
        for idx in range(0, len(df)):
            __mylogger.debug("\n" + str(df.iloc[idx]))
            try:
                db.cursor().execute(statement, df.iloc[idx])
            except DatabaseError as insertEx:
                # Log and keep going: one bad row must not abort the import.
                __mylogger.error('error has occured.')
                __mylogger.error(insertEx)
        db.commit()
    except Exception as ex:
        __mylogger.error('error has occured.')
        __mylogger.error(ex)
        if db is not None:
            db.rollback()
    finally:
        if db is not None:
            db.close()
        __mylogger.info('importing ended')
def close_db(db: sqlite3.Connection) -> None:
    """Close the given database connection.

    Args:
        db: the open :class:`sqlite3.Connection` to shut down.
    """
    db.close()
def close_db(connection: sqlite3.Connection) -> bool:
    """Close *connection* and report success.

    :param connection: the open SQLite connection to close
    :type connection: sqlite3.Connection
    :return: always True once the connection has been closed
    :rtype: bool
    """
    connection.close()
    return True
def make_memory_database(design_names, design_data, out_connection: Connection):
    """Build the run database in memory, then back it up into *out_connection*.

    Creates the design/output/day/run tables (these should ensure third
    normal form, although that has not been checked in detail), writes one
    row per design into design_table, and copies the finished in-memory
    database to *out_connection* via sqlite's backup API. Both the scratch
    database and *out_connection* are closed before returning.

    Args:
        design_names: column names; element 0 is the primary-key column.
        design_data: one row (list) per design; the trailing element of each
            row is dropped before insertion.
        out_connection: destination connection for the on-disk copy.
    """
    # Table idents: 5 is enough to stitch the entire ward output together
    # with the hypercube designs.
    design_table_name: str = "design_table"
    output_table_name: str = "output_table"
    day_table_name: str = "day_table"
    run_table_name: str = "run_table"

    key_column = design_names[0]
    design_table_schema = make_design_table_schema(
        key_column, design_names[1:], design_table_name)
    output_channel_schema: str = (
        f"create table {output_table_name} "
        f"(id integer not null primary key, name text);")
    day_table_schema: str = (
        f"create table {day_table_name}"
        f"(day integer not null primary key,date text not null);")
    run_table_schema: str = (
        f"create table {run_table_name}(run_index integer not null primary key,"
        f"design_index integer not null,end_day integer not null,"
        f"mw_folder text not null,"
        f"foreign key (design_index) references {design_table_name}({key_column}));")

    # Defined before the try so the finally block never sees an unbound
    # name when connect() itself raises.
    database = None
    try:
        # Create in memory first (small database, + we want to ensure it is
        # well-formed before touching the output file).
        database = connect(":memory:")
        cursor: Cursor = database.cursor()

        # Flags / pragmas (plain strings: the original used pointless
        # f-prefixes with nothing to interpolate).
        cursor.execute('PRAGMA encoding = "UTF-8";')
        cursor.execute("PRAGMA foreign_keys = ON;")

        # Create table structure.
        cursor.execute(design_table_schema)
        cursor.execute(output_channel_schema)
        cursor.execute(day_table_schema)
        cursor.execute(run_table_schema)

        # Write the design table; values are bound as parameters.
        for i, row in enumerate(design_data):
            vals = tuple([i] + row[:-1])
            cursor.execute(
                f"insert into design_table ({','.join(design_names)}) "
                f"values ({','.join(['?'] * len(design_names))})", vals)
        database.commit()

        # Now save to disk.
        database.backup(out_connection)
    finally:
        if database:
            database.close()
        if out_connection:
            out_connection.close()
async def release(self, conn: sqlite3.Connection):
    """Return *conn* to the pool of available connections.

    A connection handed back while still inside a transaction is stale:
    it is discarded and replaced with a brand-new connection, so the pool
    only ever holds clean connections.
    """
    async with threadpool():
        if conn.in_transaction:
            # Drop anything stale left in the DB instead of rolling back.
            conn.close()
            conn = self._new_connection()
        self.queue.put_nowait(conn)
def __close_connection(self, cur: sqlite3.Cursor, conn: sqlite3.Connection):
    """Close the given cursor and connection, warning when either is missing."""
    if not cur:
        print("Warning: No cursor defined.")
    else:
        cur.close()
    if not conn:
        print("Warning: No connection defined.")
    else:
        conn.close()
def close(con: sqlite3.Connection) -> None:
    """Commit any pending transaction on *con*, then close it.

    Parameters
    ----------
    con : sqlite3.Connection
        The connection to the db.
    """
    con.commit()
    con.close()
def close_db(self, db_connection: sqlite3.Connection):
    """Close the database connection.

    :param db_connection: Object - Connection to the database.
    :raises Exception: wraps any sqlite3.Error raised while closing.
    """
    try:
        db_connection.close()
    except sqlite3.Error as e:
        raise Exception(
            f'An error occurred while accessing the service "Closing the database": {e}'
        )
def add_to_db(articles: list):
    """Insert each article (an iterable of column values) into `articles`.

    Values are bound as query parameters; the original quoted and
    concatenated them directly into the SQL text, which broke on values
    containing double quotes and was injectable.
    """
    connect = Connection(DB_FILENAME)
    cursor = connect.cursor()
    for article in articles:
        placeholders = ', '.join('?' * len(article))
        cursor.execute(f'INSERT INTO articles VALUES ({placeholders})',
                       tuple(article))
    connect.commit()
    connect.close()
def __isExistsDb():
    """Return True when the configured db file exists and can be opened.

    NOTE(review): sqlite3 opens almost any path without validating the
    content, so in practice this mostly checks for file existence.
    """
    if not os.path.exists(dbFilePath):
        return False
    db = None
    try:
        db = Connection(dbFilePath)
        return True
    except Exception:
        return False
    finally:
        if db is not None:
            db.close()
def create_schema(conn: Connection):
    """Create the main database schema.

    Should only be called the first time the DB is created. On failure the
    error is printed and logged, the connection is closed, and the process
    exits with status -1.
    """
    try:
        cursor = conn.cursor()
        for statement in __schema:
            cursor.execute(statement)
        conn.commit()
        cursor.close()
    except Error as e:
        print(e)
        logging.critical('Could not create the database schema')
        conn.close()
        sys.exit(-1)
def _finalize_instance(
    cls: typing.Type["CacheDict[KT, VT]"],
    *,
    self_repr: str,
    conn: sqlite3.Connection,
) -> None:
    """Best-effort close of *conn* while finalizing a CacheDict instance.

    Close failures are logged and otherwise swallowed: there is nothing we
    could do about them here, and re-raising from a finalizer would only
    spill onto stderr anyway.
    """
    log.info("_finalize_instance [%s] conn: [%r]", self_repr, conn)
    try:
        conn.close()
    except Exception:  # pragma: no cover
        log.error("exception when closing conn: [%r]", conn, exc_info=True)
def mutator(mac):
    """Look up the vendor for a MAC/OUI, trying common byte mutations.

    Returns (oui, matched_oui, vendor, category) where category names the
    mutation that matched ('Registered OUI', 'Second nibble - 2', ..., or
    the default 'Variations on first byte'), or an
    (oui, 'None', 'No Vendor Found', 'Unknown - Possibly random') tuple
    when nothing matches.
    """
    # Category of some common variations; anything past index 3 falls back
    # to "Variations on first byte" below.
    category = {
        0: 'Registered OUI',
        1: 'Second nibble - 2',
        2: 'Second nibble - 6',
        3: 'Second nibble - 10',
    }

    # macvendors database taken from oui.txt from IEEE.
    oui_con = Connection('./databases/macvendors.db')
    oui_cur = oui_con.cursor()

    # Strip the input mac/oui down to just 6 hex digits, uppercased.
    oui = mac.replace(':', '').replace('-', '').replace('.', '').upper()[0:6]

    # First and second nibbles of the leading byte.
    n1 = int(oui[0], 16) * 16
    n2 = int(oui[1], 16)

    # All first-byte variations to try, in category order. Renamed from the
    # original 'bytes', which shadowed the builtin.
    candidates = [n1 + n2, (n1 + n2) - 2, (n1 + n2) - 6, (n1 + n2) - 10,
                  n2 - 2, (0x10 + n2) - 2, (0x20 + n2) - 2, (0x30 + n2) - 2,
                  (0x40 + n2) - 2, (0x50 + n2) - 2, (0x60 + n2) - 2,
                  (0x70 + n2) - 2, (0x80 + n2) - 2, (0x90 + n2) - 2,
                  (0xa0 + n2) - 2, (0xb0 + n2) - 2, (0xc0 + n2) - 2,
                  (0xd0 + n2) - 2, (0xe0 + n2) - 2, (0xf0 + n2) - 2]
    print(candidates)  # py3 print(); the original used py2 print statements

    # Uppercase hex, re-attach the rest of the oui, pad leading zeroes back.
    candidates = [(hex(b)[2:].upper() + oui[2:]).zfill(6) for b in candidates]
    print(candidates)

    # enumerate keeps a running index so the category dict above can be
    # used without a chain of if statements.
    for index, candidate in enumerate(candidates):
        # Parameterized query -- the original formatted the value straight
        # into the SQL text.
        oui_cur.execute('SELECT vendor FROM macvendors WHERE mac = ?',
                        (candidate,))
        hit = oui_cur.fetchone()
        if hit is not None:
            result, oui_match = hit[0].rstrip(), candidate
            oui_con.close()
            return (oui, oui_match, result,
                    category.get(index, 'Variations on first byte'))

    # Close on the no-match path too; the original leaked the connection here.
    oui_con.close()
    return (oui, 'None', 'No Vendor Found', 'Unknown - Possibly random')
def insert_parsed_data(self, parsed_data, primary_key):
    """Write the parsed fit-calculator results into the row *primary_key*
    and mark that row 'processed'.

    parsed_data maps measurement names (e.g. 'Top Tube') to (min, max)
    pairs. NOTE(review): seatpost_setback is derived from the truthiness of
    parsed_data['Saddle Setback'] -- possibly a distinct 'Seatpost Setback'
    key was intended; confirm against the parser.
    """
    db = Connection(self.dbname)
    cur = db.cursor()
    cur.execute(
        """
        UPDATE data
        SET top_tube_min = :top_tube_min,
            top_tube_max = :top_tube_max,
            seat_tube_cc_min = :seat_tube_cc_min,
            seat_tube_cc_max = :seat_tube_cc_max,
            seat_tube_ct_min = :seat_tube_ct_min,
            seat_tube_ct_max = :seat_tube_ct_max,
            stem_min = :stem_min,
            stem_max = :stem_max,
            bb_saddle_min = :bb_saddle_min,
            bb_saddle_max = :bb_saddle_max,
            saddle_handlebar_min = :saddle_handlebar_min,
            saddle_handlebar_max = :saddle_handlebar_max,
            saddle_setback_min = :saddle_setback_min,
            saddle_setback_max = :saddle_setback_max,
            seatpost_setback = :seatpost_setback,
            status = :status
        WHERE id = :id
        """, {
            'top_tube_min': parsed_data['Top Tube'][0],
            'top_tube_max': parsed_data['Top Tube'][1],
            'seat_tube_cc_min': parsed_data['Seat Tube Range CC'][0],
            'seat_tube_cc_max': parsed_data['Seat Tube Range CC'][1],
            'seat_tube_ct_min': parsed_data['Seat Tube Range CT'][0],
            'seat_tube_ct_max': parsed_data['Seat Tube Range CT'][1],
            'stem_min': parsed_data['Stem Length'][0],
            'stem_max': parsed_data['Stem Length'][1],
            'bb_saddle_min': parsed_data['BB Saddle Position'][0],
            'bb_saddle_max': parsed_data['BB Saddle Position'][1],
            'saddle_handlebar_min': parsed_data['Saddle Handlebar'][0],
            'saddle_handlebar_max': parsed_data['Saddle Handlebar'][1],
            'saddle_setback_min': parsed_data['Saddle Setback'][0],
            'saddle_setback_max': parsed_data['Saddle Setback'][1],
            'seatpost_setback': 1 if parsed_data['Saddle Setback'] else 0,
            'status': 'processed',
            'id': primary_key,
        })
    db.commit()
    db.close()
class Decoder:
    """Decodes opcodes using the `instruction` table in nes.sqlite."""

    def __init__(self):
        self.conn = Connection('nes.sqlite')
        # Row factory gives named access so rows convert cleanly to dicts.
        self.conn.row_factory = Row

    def __del__(self):
        self.conn.close()

    def decode(self, opcode):
        """Return the instruction row for *opcode* as a plain dict.

        Raises NotImplementedError for opcodes absent from the table.
        """
        cursor = self.conn.cursor()
        cursor.execute('select * from instruction where opcode=?', [opcode])
        row = cursor.fetchone()
        if row is None:
            raise NotImplementedError('Undocumented Opcode: ' + str(opcode))
        return dict(zip(row.keys(), row))
def heal(self):
    """Reset every row stuck in 'processing' back to 'unprocessed'.

    Called after a failed attempt so interrupted rows are picked up again.
    """
    db = Connection(self.dbname)
    cursor = db.cursor()
    # String literals use single quotes: SQLite treats "..." as a string
    # only via a compatibility fallback when no matching column exists,
    # which silently changes meaning if such a column is ever added.
    cursor.execute("""
        UPDATE "data"
        SET status = 'unprocessed'
        WHERE status = 'processing'
        """)
    db.commit()
    db.close()
def get_unprocessed_count(self):
    """Return the number of rows whose status is 'unprocessed'."""
    db = Connection(self.dbname)
    cursor = db.cursor()
    # Single-quoted string literal (SQLite's double-quoted-string fallback
    # is a misfeature); the commit the original issued after this
    # read-only query was dropped.
    cursor.execute("""
        SELECT count(*) FROM "data"
        WHERE status = 'unprocessed'
        """)
    count = cursor.fetchone()[0]
    db.close()
    return count
def insert_input_data(self, inseam, trunk, forearm, arm, thigh, leg, notch,
                      height):
    """Insert one set of body measurements as a new 'unprocessed' row.

    All arguments are measurement values; `leg` maps to the lower_leg
    column and `notch` to sternal_notch.
    """
    db = Connection(self.dbname)
    cur = db.cursor()
    cur.execute(
        """
        INSERT INTO data (
            inseam, trunk, forearm, arm, thigh,
            lower_leg, sternal_notch, height, status
        )
        VALUES (
            :inseam, :trunk, :forearm, :arm, :thigh,
            :leg, :notch, :height, :status
        )
        """, {
            'inseam': inseam,
            'trunk': trunk,
            'forearm': forearm,
            'arm': arm,
            'thigh': thigh,
            'leg': leg,
            'notch': notch,
            'height': height,
            'status': 'unprocessed',
        })
    db.commit()
    db.close()
def add_user_to_table(connection: sqlite3.Connection, data: tuple) -> None:
    """Insert a user row into the existing users table.

    Args:
        connection: open SQLite connection; always closed before returning.
        data: (username, password) tuple, the password already hashed.

    Raises:
        WrongDataBaseNameException: when *connection* is None.
    """
    # Return annotation corrected from NoReturn to None: NoReturn means the
    # function never returns normally, which is not the case here.
    try:
        if connection is None:
            raise WrongDataBaseNameException(
                'Неправильное название базы данных.')
        cursor = connection.cursor()
        cursor.execute("""INSERT INTO users VALUES (?, ?)""", data)
        connection.commit()
    except sqlite3.Error as e:
        print(e)
    finally:
        # Guard: closing a None connection raises AttributeError and masks
        # the WrongDataBaseNameException raised above.
        if connection is not None:
            connection.close()
def convert_weatherdata(con: sqlite3.Connection, input_filepath: str):
    """Load the weather CSV at *input_filepath* into a new `weather` table.

    The first two CSV fields (date and time) are merged into a single
    datetime column, empty fields become NULL, and rows are numbered from 1.
    The connection is committed and closed before returning.
    """
    con.execute("""
    CREATE TABLE weather (
        id INTEGER PRIMARY KEY,
        datetime TEXT,
        temperature REAL,
        humidity INTEGER,
        wind_speed REAL,
        precipitation REAL,
        condition TEXT,
        fog INTEGER,
        rain INTEGER,
        snow INTEGER,
        hail INTEGER,
        thunder INTEGER
    )
    """)

    with con, open(input_filepath, 'rt') as f:
        # An n-field line needs n-1 splits; using one fewer (splitting from
        # the right) leaves date+time joined in the first element. The
        # header is consumed here and only used for the field count.
        split_len = len(next(f).split(',')) - 2
        for row_id, raw in enumerate(f, start=1):
            fields = raw.rsplit(',', maxsplit=split_len)
            # Only the first element can still contain a comma: turn the
            # date,time pair into one space-separated datetime field.
            fields = [field.replace(',', ' ') for field in fields]
            # Replace empty string with NULL.
            fields = [field if field != '' else None for field in fields]
            con.execute(
                "INSERT INTO weather VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                (row_id, *fields),
            )
    con.close()