def _sqlite_version(): return tuple( map( int, sqlite3.connect(":memory:").execute( "select sqlite_version()").fetchone()[0].split("."), ))
def get_database():
    """Open the phonebook database.

    Returns:
        tuple: ``(connection, cursor)`` on success.
        bool: ``False`` when the database cannot be opened.
    """
    try:
        conn = pysqlite3.connect('db/phonebook.db')
        cursor = conn.cursor()
        return conn, cursor
    # Bug fix: connect() raises OperationalError (not FileNotFoundError) when
    # the 'db/' directory is missing, so the original handler never fired.
    # FileNotFoundError is kept for backward compatibility.
    except (pysqlite3.OperationalError, FileNotFoundError):
        return False
def max_sql_variables():
    """Get the maximum number of arguments allowed in a query by the current
    sqlite3 implementation.

    ESV amendment: Report that on CentOS the following error occurs:
    "sqlite3.OperationalError: too many terms in compound SELECT"
    This is another limit, likely lower: SQLITE_LIMIT_COMPOUND_SELECT

    Returns
    -------
    int
        inferred SQLITE_MAX_VARIABLE_NUMBER
    """
    db = sqlite3.connect(':memory:')
    cur = db.cursor()
    cur.execute('CREATE TABLE t (test)')
    # Binary search: `low` is always a known-good variable count, `high` a
    # known-bad one; converge until they are adjacent.
    low, high = 0, 100000
    while high - low > 1:
        guess = (high + low) // 2
        placeholders = ','.join('(?)' for _ in range(guess))
        values = [str(i) for i in range(guess)]
        try:
            cur.execute('INSERT INTO t VALUES ' + placeholders, values)
        except sqlite3.OperationalError as exc:
            message = str(exc)
            if ("too many SQL variables" in message
                    or "too many terms in compound SELECT" in message):
                high = guess
            else:
                raise
        else:
            low = guess
    cur.close()
    db.close()
    return low
def getConnection(self):
    """Open a connection to the database, creating the schema on first run."""
    db_path = self.getDatabaseName()
    if not os.path.exists(db_path):
        # First run: the file does not exist yet, so build the table first.
        self.createOriginalDb(db_path)
    return pysqlite3.connect(db_path)
def detect_json1(conn=None):
    """Report whether the SQLite build behind `conn` supports the JSON1 extension.

    When `conn` is None an in-memory database is probed instead.
    """
    if conn is None:
        conn = sqlite3.connect(":memory:")
    try:
        conn.execute("SELECT json('{}')")
    except Exception:
        # Any failure (missing extension, older build) means "not available".
        return False
    return True
def update_database_file(wallet_path: str) -> None:
    """Run migrations on the wallet database that backs `wallet_path`.

    Raises:
        DatabaseMigrationError: if `wallet_path` already carries the database
            extension, or if the database file does not exist.
    """
    if wallet_path.endswith(DATABASE_EXT):
        raise DatabaseMigrationError("wallet path is not base path")
    db_path = wallet_path + DATABASE_EXT
    if not os.path.exists(db_path):
        raise DatabaseMigrationError("wallet database does not exist")
    db = sqlite3.connect(db_path)
    try:
        update_database(db)
    finally:
        # Close the connection even when the migration raises, so the
        # database file is not left open/locked by a failed run.
        db.close()
def createOriginalDb(self, database):
    """Create the observations database at `database` with its single `obs` table."""
    conn = pysqlite3.connect(database)
    cursor = conn.cursor()
    # Example row:
    # ('AN', 1258570800.0, None, 4, 'sea_water_salinity', 'PSU', 38.9636, -76.4468, 0.0)
    cursor.execute('''CREATE TABLE obs (station text, obs_time integer , obs_value real, qc_code integer, varname text, units text, lat real, lon real, elevation real)''')
    # Commit before closing so the schema is not lost.
    conn.commit()
    conn.close()
def increase_connection_pool(self) -> None:
    """Adds 1 more connection to the pool."""
    connection = sqlite3.connect(self._db_path, check_same_thread=False,
        isolation_level=None)
    connection.execute("PRAGMA busy_timeout=5000;")
    connection.execute("PRAGMA foreign_keys=ON;")
    # We do not enable journaling for in-memory databases. It resulted in 'database is locked'
    # errors. Perhaps it works now with the locking and backoff retries.
    if not self.is_special_path(self._db_path):
        self._ensure_journal_mode(connection)
    # Bug fix: only count the connection once it is fully configured and
    # queued; previously the counter was bumped before connect(), so a
    # failed connect/PRAGMA left SQLITE_CONN_POOL_SIZE overstating the pool.
    self.SQLITE_CONN_POOL_SIZE += 1
    self._connection_pool.put(connection)
def build_database(dbpath="cryptids.db"):
    """Create the cryptids Spatialite database and load every .geojson record.

    Raises:
        RuntimeError: when no Spatialite extension could be loaded from
            any candidate in `try_these`.
    """
    conn = sqlite3.connect(dbpath)
    # Enable Spatialite
    conn.enable_load_extension(True)
    spatialite_found = False
    for extension in try_these:
        try:
            conn.load_extension(extension)
            conn.execute('select InitSpatialMetadata(1)')
            spatialite_found = True
            break  # stop at the first candidate that loads
        except Exception:
            # This candidate is absent/unloadable; try the next one.
            continue
    if not spatialite_found:
        # Raise instead of assert: assert is stripped under `python -O`.
        raise RuntimeError('Did not find spatialite - looked in {}'.format(
            repr(try_these)))
    # Create the database table
    conn.execute('''
    create table cryptids (
        id integer primary key,
        name text,
        wikipedia_url text,
        additional_url text,
        description text,
        copyright text,
        first_sighted integer,
        last_sighted integer
    )
    ''')
    conn.execute(
        "SELECT AddGeometryColumn('cryptids', 'geom', 4326, 'MULTIPOLYGON', 2);"
    )
    conn.execute("SELECT CreateSpatialIndex('cryptids', 'geom');")
    conn.execute(
        "SELECT AddGeometryColumn('cryptids', 'range', 4326, 'MULTIPOLYGON', 2);"
    )
    conn.execute("SELECT CreateSpatialIndex('cryptids', 'range');")
    # Loop through and insert the records
    for filename in os.listdir(os.path.join(BASE_DIR, 'cryptids')):
        filepath = os.path.join(BASE_DIR, 'cryptids', filename)
        if filepath.endswith(".geojson"):
            record_id = filename.split(".")[0]  # renamed: `id` shadows the builtin
            # `with` ensures the file handle is closed (json.load(open(...)) leaked it).
            with open(filepath) as geojson_file:
                insert_record(conn, record_id, json.load(geojson_file))
    conn.commit()
    conn.close()
def create_database_file(wallet_path: str) -> None:
    """Create, initialise and migrate a new wallet database for `wallet_path`.

    Raises:
        DatabaseMigrationError: if `wallet_path` already carries the database
            extension, the MIGRATION_FIRST constant is out of sync, or the
            database file already exists.
    """
    if wallet_path.endswith(DATABASE_EXT):
        raise DatabaseMigrationError("wallet path is not base path")
    if 22 != MIGRATION_FIRST:
        raise DatabaseMigrationError(
            "constant MIGRATION_FIRST differs from local version")
    db_path = wallet_path + DATABASE_EXT
    if os.path.exists(db_path):
        raise DatabaseMigrationError("wallet database already exists")
    # Python sqlite bindings automatically enter a transaction which prevents the PRAGMA from
    # exiting, which is why we use no isolation level.
    db = sqlite3.connect(db_path, check_same_thread=False, isolation_level=None)
    try:
        # Plain string: the original used an f-string with no placeholders.
        db.execute("PRAGMA journal_mode=WAL;")
        create_database(db)
    finally:
        # Close even on failure so a half-created database is not left locked.
        db.close()
    update_database_file(wallet_path)
def max_sql_variables(self):
    """Infer SQLITE_MAX_VARIABLE_NUMBER via binary search on an in-memory DB.

    Prefers pysqlite3 and falls back to the stdlib sqlite3 module. Returns
    the inferred limit, or a conservative 512 when SQLite is unusable.
    """
    low, high = 0, 100000
    try:
        try:
            import pysqlite3 as sqlite3
        except Exception as e:
            self.log.logger.warning(
                "Unable to load pysqlite3 will try with sqlite3")
            self.log.logger.warning(e)
            import sqlite3
        db = sqlite3.connect(':memory:')
        cur = db.cursor()
        cur.execute('CREATE TABLE t (test)')
        while (high - 1) > low:
            guess = (high + low) // 2
            query = 'INSERT INTO t VALUES ' + ','.join(
                ['(?)' for _ in range(guess)])
            args = [str(i) for i in range(guess)]
            try:
                cur.execute(query, args)
            except sqlite3.OperationalError as e:
                es = str(e)
                # Consistency fix: some builds (e.g. CentOS) hit the
                # SQLITE_LIMIT_COMPOUND_SELECT limit first and report
                # "too many terms in compound SELECT"; treat it like the
                # variable limit instead of re-raising (matches the
                # module-level max_sql_variables helper).
                if ("too many SQL variables" in es
                        or "too many terms in compound SELECT" in es):
                    high = guess
                else:
                    raise
            else:
                low = guess
        cur.close()
        db.close()
        return low
    except Exception as e:
        self.log.logger.warning(
            "Unable to load SQLite3 library binary, can't download backtrade to SQLite3"
        )
        self.log.logger.warning(e)
        return 512
def __init__(self, dbname):
    """Open the SQLite database `dbname` and keep the connection on `self.con`."""
    self.con = sqlite.connect(dbname)
def pytest_report_header(config):
    """Show the SQLite library version in the pytest report header."""
    version = sqlite3.connect(":memory:").execute(
        "select sqlite_version()").fetchone()[0]
    return "SQLite: {}".format(version)
import os
import pysqlite3

# Location of the default Chrome profile's history database on Windows.
data_path = os.path.expanduser('~') + r"\AppData\Local\Google\Chrome\User Data\Default"
files = os.listdir(data_path)
history_db = os.path.join(data_path, 'history')

c = pysqlite3.connect(history_db)
cursor = c.cursor()

# Join urls to visits so each visited URL appears with its visit count.
select_statement = "SELECT urls.url, urls.visit_count FROM urls, visits WHERE urls.id = visits.url;"
cursor.execute(select_statement)
results = cursor.fetchall()
import pysqlite3
import requests

conn = pysqlite3.connect('db/phonebook5.db')
cursor = conn.cursor()


def remove_spaces(text):
    """Return `text` with all space characters removed."""
    return text.replace(" ", "")


def call_postcode_api(postcode):
    """Look up `postcode` on postcodes.io and store its coordinates."""
    postcode_short = remove_spaces(postcode)
    endpoint = "https://api.postcodes.io/postcodes/"
    response = requests.get(endpoint + postcode_short)
    if response.status_code == 200:
        # Bug fix: parse the body only on success — the original called
        # response.json() before checking the status, which crashes on
        # error responses that are not valid JSON.
        data = response.json()
        insert_query = "INSERT INTO postcodes(postcode, latitude, longitude) VALUES(?, ? , ?)"
        insert_values = (postcode, data['result']['latitude'], data['result']['longitude'])
        cursor.execute(insert_query, insert_values)
        conn.commit()
    return


def check_postcode_exists(postcode):
    """Fetch and cache coordinates for `postcode` unless already stored."""
    select_query = "SELECT * FROM postcodes WHERE postcode = ?"
    cursor.execute(select_query, (postcode, ))
    if not cursor.fetchone():
        call_postcode_api(postcode)
def __init__(self, path):
    """Open the SQLite database at `path` and create a cursor on it."""
    try:
        self.conn = pysqlite3.connect(path)
        self.cursor = self.conn.cursor()
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
    # propagate; also surface the actual failure instead of hiding it.
    except Exception as exc:
        print("Unable to initialize database")
        print(exc)
def db_init(db_path):
    """Open the database at `db_path` (resolved to an absolute path).

    Rows are returned as sqlite3.Row so columns are addressable by name.

    Returns
    -------
    tuple
        (cursor, connection)
    """
    connection = sqlite3.connect(rel2abs(db_path))
    connection.row_factory = sqlite3.Row
    return connection.cursor(), connection
def getdb():
    """Return a cursor on the phonebook database.

    The underlying connection stays reachable via `cursor.connection`.
    """
    connection = pysqlite3.connect("db/phonebook5.db")
    return connection.cursor()
# Standard library
import datetime
import json
import math
import operator
import os
import string
from collections import Counter
from string import punctuation

# Third party (duplicated numpy/pandas imports collapsed; nothing removed)
import nltk
import numpy as np
import pandas as pd
import pysqlite3
from flask import Flask, render_template, session, redirect, url_for, request
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from num2words import num2words

app = Flask(__name__)
app.debug = True
app.use_reloader = True
app.config[
    'SECRET_KEY'] = 'hard to guess string for app security adgsdfsadfdflsdfsj'

# Load the full Books table once at startup so handlers can reuse the frame.
conn = pysqlite3.connect('books.sqlite')
query = "SELECT * FROM Books;"
df_books = pd.read_sql_query(query, conn)
def get_connection() -> Cursor:
    """Open scores.db with declared-type parsing and dict-style rows.

    Returns a fresh cursor; the connection is reachable via `cursor.connection`.
    """
    connection = connect("scores.db", detect_types=PARSE_DECLTYPES)
    connection.row_factory = _dict_factory
    return connection.cursor()
def get_db():
    """Return the per-request SQLite connection, creating and caching it on `g`."""
    db = getattr(g, "_database", None)
    if db is None:
        db = sqlite3.connect(DATABASE)
        db.row_factory = sqlite3.Row  # name-addressable columns
        g._database = db
    return db