def initialize(profile='dev', config=None):
    """Bind the module-level ``database_proxy`` to a concrete database.

    Args:
        profile: ``'dev'`` selects an in-memory SQLite database (``config``
            is ignored); any other value selects a pooled database built
            from *config*.
        config: object exposing ``.name`` (engine identifier) and
            ``.db_file`` (SQLite path); required for non-dev profiles.

    Raises:
        ValueError: if a non-dev profile is requested without a config,
            or ``config.name`` is not a supported engine.
    """
    if profile == 'dev':
        database = SqliteDatabase(':memory:')
    else:
        # Fail fast with a clear message instead of an AttributeError
        # on ``config.name`` below.
        if config is None:
            raise ValueError('config is required for non-dev profiles.')
        if config.name == 'sqlite':
            database = PooledSqliteExtDatabase(
                config.db_file,
                max_connections=32,
                stale_timeout=600,
                pragmas={
                    'journal_mode': 'wal',
                    'cache_size': -1 * 64000,  # 64MB
                    'foreign_keys': 1,
                    'ignore_check_constraints': 0,
                    'synchronous': 0})
        else:
            raise ValueError(f'Not supported {config.name}.')
    database_proxy.initialize(database)
from playhouse.migrate import *
from wtfpeewee.orm import model_form
import wtforms, os, inspect, csv, json, pygsheets, time
from os import environ, path
from dotenv import load_dotenv
from flask_wtf import FlaskForm
from wtforms import Form, FieldList, FormField, SelectField, HiddenField, validators

# Load environment variables from the .env file at the project root
# (one directory above this file's directory).
basedir = path.abspath(path.dirname(path.dirname(__file__)))
load_dotenv(path.join(basedir, '.env'))

# Builds absolute path relative to this models.py file so other directories (like bots) can find the same database when importing
db_path = path.join(basedir + '/shared/', 'mlg.s')
# Pooled SQLite handle shared by the whole app; foreign keys enforced.
db = PooledSqliteExtDatabase(db_path, check_same_thread=False, pragmas={'foreign_keys': 1}, max_connections=30, stale_timeout=500)

# Access Google Sheets for updates
# Service-account credentials live next to the database under shared/.
secret_path = basedir + '/shared/client_secret.json'
gSheet = pygsheets.authorize(service_file=secret_path)
# Spreadsheet key comes from the P_MASTER_LOG environment variable.
p_master_log = gSheet.open_by_key(environ.get('P_MASTER_LOG'))
prev_pas_sh = p_master_log.worksheet_by_title("All_PAs_1-5")
s5_pas_sh = p_master_log.worksheet_by_title("All_PAs_6")
persons_sh = p_master_log.worksheet_by_title("Persons")
#test_sh = p_master_log.worksheet_by_title("Test")

# NOTE(review): main()'s loop body continues beyond this chunk; only the
# header of the infinite loop is visible here.
def main():
    while True:
# Query the sample table, then bulk-load demo rows and extend the schema.
# ``c`` (cursor), ``conn`` (connection) and ``t`` come from earlier in the
# script.
c.execute('SELECT * FROM Stocks3 WHERE symbol=?', t)

rows = [
    ('2006-03-28', 'BUY', 'IBM', 1000, 45.00, ''),
    ('2006-04-05', 'BUY', 'MSFT', 1000, 72.00, ''),
    ('2006-04-06', 'SELL', 'IBM', 500, 53.00, ''),
    ('2006-09-09', 'SELL', 'MSFT', 900, 54.00, ''),
    ('2006-04-05', 'BUY', 'MSFT', 1000, 72.00, 'HARI'),
]
c.executemany('INSERT INTO Stocks3 VALUES (?,?,?,?,?,?)', rows)
c.execute("ALTER TABLE Stocks3 ADD COLUMN name text")
conn.commit()
conn.close()

# Pooled peewee handle plus the ORM models bound to it.
db = PooledSqliteExtDatabase('pandu.db')


class BaseModel(Model):
    """Shared base attaching every model to the pooled database."""

    class Meta:
        database = db


class User(BaseModel):
    """One stock-transaction record."""

    date = CharField()
    trans = CharField()
    symbol = CharField()
    qty = FloatField()
    price = FloatField()
from decouple import config
from peewee import SqliteDatabase
from playhouse.pool import PooledSqliteExtDatabase, PooledPostgresqlExtDatabase

# Plain (non-pooled) variant, kept for reference:
# db = SqliteDatabase(config('DATABASE_PATH', default='sentiment_analysis.db'))

# Pooled SQLite database; the path is read from the environment via
# python-decouple, falling back to a local file.
db = PooledSqliteExtDatabase(
    config('DATABASE_PATH', default='sentiment_analysis.db'),
    pragmas=[('journal_mode', 'wal')],  # WAL mode for better concurrent reads
    max_connections=50,
    stale_timeout=3600,  # recycle idle pooled connections after an hour
    check_same_thread=False)

# In case PostgreSQL is used as the database instead:
# db = PooledPostgresqlExtDatabase(
#     'database',
#     max_connections=32,
#     stale_timeout=300,  # 5 minutes.
#     host='localhost',
#     user='******',
#     password='******')
''' 1、用户查询、创建、修改、删除、激活 2、工资查询、创建、修改、删除 ''' import os import json import datetime from peewee import * from playhouse.pool import PooledSqliteExtDatabase # db = SqliteDatabase("poet.db") db = PooledSqliteExtDatabase("poet.db", max_connections=8, stale_timeout=300) def db_add(act): def func(): db.connect() act() db.close() return func class BaseModel(Model): class Meta: database = db class Poet(BaseModel): ''' 1、用户创建、用户查询、用户修改、用户删除
    # NOTE(review): this string closes a c.execute(...) call whose opening
    # parenthesis is above this chunk — only the argument is visible here.
    "INSERT INTO OverlayIcon1 VALUES ('/home/harikumar/Desktop', 'syncing')")

# Larger example that inserts many records at a time
purchases = [
    ('/home/harikumar/Desktop', 'syncerror'),
    ('/home/harikumar/Desktop', 'syncerror'),
    ('/home/harikumar/Desktop', 'syncdone'),
    ('/home/harikumar/Desktop', 'syncdone'),
]
c.executemany('INSERT INTO OverlayIcon1 VALUES (?, ?)', purchases)
conn.commit()
conn.close()

# Pooled peewee handle and the model mirroring the OverlayIcon data.
db = PooledSqliteExtDatabase('harikumar1.db')

class BaseModel(Model):
    # All models share the pooled database above.
    class Meta:
        database = db

class User(BaseModel):
    # path: filesystem path being tracked; status: sync state string.
    path = CharField()
    status = CharField()

# True => fail silently if the table already exists.
User.create_table(True)
# User.insert(path="/home/harikumar/Desktop",
#             status="syncerror").execute()
# Demo script: probe the table for one symbol, bulk-insert sample trades,
# then widen the schema. ``c`` and ``conn`` are created earlier in the file.
t = ('RHAT', )
c.execute('SELECT * FROM Stocks1 WHERE symbol=?', t)

trades = [
    ('2006-03-28', 'BUY', 'IBM', 1000, 45.00),
    ('2006-04-05', 'BUY', 'MSFT', 1000, 72.00),
    ('2006-04-06', 'SELL', 'IBM', 500, 53.00),
    ('2006-09-09', 'SELL', 'MSFT', 900, 54.00),
]
c.executemany('INSERT INTO Stocks1 VALUES (?,?,?,?,?)', trades)
c.execute("ALTER TABLE Stocks1 ADD COLUMN name text")
conn.commit()
conn.close()

# Pooled peewee handle plus ORM models over the same data.
db = PooledSqliteExtDatabase('stock.db')


class BaseModel(Model):
    """Shared base binding every model to the pooled database."""

    class Meta:
        database = db


class User(BaseModel):
    """A single stock trade record."""

    date = CharField()
    trans = CharField()
    symbol = CharField()
    qty = FloatField()
    price = FloatField()
import os  # FIX: os.getcwd() is used below but was never imported

from decouple import config
# from peewee import SqliteDatabase
from playhouse.pool import PooledSqliteExtDatabase  # , PooledPostgresqlExtDatabase

# Database file name, overridable via the DATABASE_PATH environment variable.
db_name = config("DATABASE_PATH", default="covid19_tracker.db")

# Resolve the project root: everything in the current working directory's
# path up to (and including) the "code" segment.
path = os.getcwd().split("code")[0]
path = f"{path}code"

# db = SqliteDatabase(config('DATABASE_PATH', default='covid19_tracker.db'))
db = PooledSqliteExtDatabase(
    f"{path}/{db_name}",
    pragmas={
        "journal_mode": "wal",  # WAL-mode.
        "cache_size": -64 * 1000,  # 64MB cache.
        "foreign_keys": 1,
        "synchronous": 0,
    },
    max_connections=150,
    stale_timeout=3600,  # recycle idle pooled connections after an hour
    check_same_thread=False,
)

# In case PostgreSQL is used as the database instead:
# db = PooledPostgresqlExtDatabase(
#     config("DATABASE_PATH", default="covid19_tracker.db"),
#     max_connections=32,
#     stale_timeout=300,  # 5 minutes.
#     host='localhost',
#     user='******',
#     password='******')