Example 1
import json

from src.db import get_connection

CONNECTION = get_connection()


def teardown_db():
    """Drops currently used database"""
    cursor = CONNECTION.cursor()
    with open("config/config.json") as data:
        cfg = json.load(data)
    cursor.execute("DROP DATABASE {}".format(cfg["database"]["db"]))
    # cursor.execute("DROP DATABASE %s", (db_name[0]),)
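The commented-out parameterized form is left disabled for a reason: DB-API placeholders bind values, not identifiers, so a database name cannot be passed as a %s parameter and has to be interpolated into the DDL string. If that name ever came from somewhere less trusted than the config file, a small guard could be added first; a minimal sketch (the helper and its regex are hypothetical, not part of the original snippet):

import re


def safe_db_name(name):
    # Hypothetical guard: accept only plain identifier characters before
    # interpolating the name into a DROP/CREATE DATABASE statement.
    if not re.fullmatch(r"[A-Za-z0-9_]+", name):
        raise ValueError("unexpected database name: {!r}".format(name))
    return name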
Example 2
    class Meta:
        database = get_connection()
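The inner Meta class with a database attribute matches the peewee ORM convention (an assumption here; the fragment does not name the library), where Meta.database tells a model which connection to run its queries against. In context the fragment would typically sit inside a model definition, roughly as in this sketch, assuming get_connection() returns a peewee-compatible database object:

import peewee

from src.db import get_connection


class BaseModel(peewee.Model):
    # Hypothetical surrounding model; only the Meta block comes from the example.
    class Meta:
        database = get_connection()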
Example 3
def conn():
    g.dbconn = get_connection()
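The g object is presumably Flask's application-context global (an assumption based on the name; the fragment does not show the import). Under that reading, conn() would normally be registered as a request hook and paired with a teardown that releases the connection; a minimal self-contained sketch:

from flask import Flask, g

from src.db import get_connection

app = Flask(__name__)


@app.before_request
def conn():
    # Open one connection per request and stash it on the context global.
    g.dbconn = get_connection()


@app.teardown_appcontext
def close_conn(exc):
    # Release the connection when the request/app context ends, if one was opened.
    dbconn = g.pop("dbconn", None)
    if dbconn is not None:
        dbconn.close()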
Example 4
import json

from src.db import get_connection


def create_database():
    """Create the configured database with utf8mb4 encoding."""
    with open('config/config.json') as data:
        cfg = json.load(data)
    db = get_connection()
    cursor = db.cursor()
    cursor.execute("CREATE DATABASE {} CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci".format(cfg['database']['db']))
Example 5
from time import time
import psutil
import pyodbc

from src.db import get_connection
"""
https://github.com/mkleehammer/pyodbc/issues/802
"""

# when 'varchar_limit' is '100', memory usage stays constant; when it is 'MAX', memory grows with every iteration
varchar_limit = "MAX"
print(f"using varchar({varchar_limit})")

print(f"pyodbc {pyodbc.version}")
cnxn = get_connection()
process = psutil.Process()


def print_status(msg, _t0=None):
    s = f"{msg}: "
    mb = process.memory_info().vms / 1048576
    s += f"vms {mb:0.1f} MiB"
    if _t0:
        _diff = time() - _t0
        s += f", {_diff*1000:,.2f} ms - {_diff:0.1f} seg."
    print(s)


print_status("startup")
num_rows = 10_000
data = [(i + 1, f"col{i + 1:06}", 3.14159265 * (i + 1))