def database():
    """Test-database fixture: connect, build the schema, hand the db to the
    test, then tear everything down (close + delete the sqlite file)."""
    db.connect()
    db.create_tables([Account, Transaction])
    # Hand control to the test body; cleanup runs after it finishes.
    yield db
    db.close()
    os.remove('test.sqlite3')
def __init__(self):
    """Initialise the application: ensure DB tables exist, reset the
    in-memory caches, wire up routing, then warm the flash cache."""
    db.create_tables()
    # In-memory caches, rebuilt on startup.
    self.tags = []
    self.recent_posts = []
    # Delegate routing/settings wiring to the framework base class.
    super(PaaSApplication, self).__init__(
        handlers=routers.route,
        **settings)
    self.flash_cache()
import click from database import db as sqldb from wiktionary import Wiktionary, Declensions from frequencies import Frequencies from models import Form, Lemma, Translation, MODELS wikitionary = Wiktionary("articles.xml") frequencies = Frequencies("frequency.csv") d = Declensions(wikitionary) sqldb.drop_tables(MODELS) sqldb.create_tables(MODELS) known_failures = [ 'Mið-Afríkulýðveldið', 'mar', 'endurnýjanleg orka', 'Garðabær' ] with open("failures.txt", "w") as out: failures = [] count = 0 with click.progressbar(wikitionary.pages, label="populating") as pages: for page in pages: for entry in page.get_entries(): try: if entry.name in known_failures: continue if not entry.is_icelandic: continue
from JTF2 import JTF2
from CSOR import CSOR
from database import db

if __name__ == "__main__":
    # Build the backing tables for both models in a single pass.
    db.create_tables([JTF2, CSOR])
import database
from database import Member, Message, Thanks, db

# One-shot schema setup: open the connection, create all three tables,
# then release the connection.
db.connect()
db.create_tables([Member, Message, Thanks])
db.close()
def main():
    """
    Main function in batch script, where eternal while loop is running.
    Script uses modules dataMining and database to retrieve and store data.
    """
    # Logger
    # Rotates the log file at midnight; logs/ directory is resolved
    # relative to this script's location.
    logHandler = TimedRotatingFileHandler(os.path.join(
        os.path.dirname(__file__), "logs", "batch.log"), when="midnight")
    logFormatter = logging.Formatter('%(asctime)s: %(levelname)s; %(message)s')
    logHandler.setFormatter(logFormatter)
    logger = logging.getLogger('batch logger')
    logger.addHandler(logHandler)
    logger.setLevel(logging.DEBUG)

    # Signals
    def signal_handler(sig, frame):
        """ Method that handles received kill signals. """
        logger.warning('Caught signal: %s, killing process.', sig)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    # Search setup
    countries = tweetSearch.getCountries()
    words = tweetSearch.getSearchWords()

    # Database setup
    # Tables are only created when the sqlite file does not exist yet.
    create_tables = False
    if not os.path.isfile(config.DATABASE_LOCATION):
        create_tables = True
    engine = db.init_db(config.DATABASE_LOCATION)
    if create_tables:
        db.create_tables(engine)
    db_session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=engine))
    logger.info('Started script (PID: %s).' % os.getpid())

    def get_and_store_data(search_terms, search_function, save_function):
        """
        Method used when getting and storing data. Method is made in a
        semi-generic manner, so that it the batch script can evolve into a
        dynamic script that can be tailored in the configuration file.
        """
        skipped_terms = []
        for terms in search_terms:
            results = None
            try:
                results = search_function(terms)
            except SearchEngineLimitError:
                # Rate-limited: back off, remember the terms for a retry.
                logger.warning('Search engine limit exceeded,'
                               ' sleeping for %s seconds.' % config.SLEEP_TIME)
                time.sleep(config.SLEEP_TIME)
                skipped_terms.append(terms)
            if results:
                # Save tweets
                logger.info('Found %s results when searching for %s.'
                            % (len(results), ' and '.join(terms)))
                save_function(db_session, results, terms)
        # NOTE(review): indentation reconstructed from collapsed source —
        # the retry recursion is placed after the full pass over
        # search_terms. If every term keeps hitting the limit this
        # recursion has no depth bound; confirm against the original.
        if skipped_terms:
            get_and_store_data(skipped_terms, search_function, save_function)

    # Starting datamining loop
    while 1:
        try:
            search_terms = [(a, b) for b in words for a in countries]
            search_function = tweetSearch.getTweets
            save_function = db_functions.saveTweets
            get_and_store_data(search_terms, search_function, save_function)
        # NOTE(review): bare except re-raises after logging, so nothing is
        # swallowed — but it also catches SystemExit/KeyboardInterrupt;
        # `except Exception:` would be the conventional choice.
        except:
            logger.exception('Exception caught!')
            raise
def create_tables():
    """Create the Anime table, using the db as a context manager so the
    connection is opened and released around the DDL."""
    with db:
        db.create_tables([Anime])
def migrate():
    """Run table creation for the database named in the request args and
    translate the outcome into an HTTP response."""
    migrated = create_tables(request.args.get(db_arg))
    if migrated:
        return success("migrated")
    return server_error()
logging.info("Tweet not found! :(") event_db = Event(event="withheld", user=User.get_or_create(user_id=str(data["status_withheld"]["user_id"]))[0], tweet=instance) event_db.save() if instance: # mark this tweet as deleted instance.is_withheld = True instance.save() # add tweet to job queue jobs_db = Job(tweet=instance) try: jobs_db.save() except IntegrityError: logging.error("Tweet is already marked for restoration") def on_error(self, status_code, data): logging.error("Error while processing stream: {}".format(status_code)) if __name__ == '__main__': logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO) db.connect() db.create_tables([Tweet, User, Event, Job, ], safe=True) stream = init_detwtr_stream() stream.user() db.close()
def main():
    """
    Main function in batch script, where eternal while loop is running.
    Script uses modules dataMining and database to retrieve and store data.
    """
    # Logger
    # Rotates the log file at midnight; logs/ directory is resolved
    # relative to this script's location.
    logHandler = TimedRotatingFileHandler(os.path.join(
        os.path.dirname(__file__), "logs", "batch.log"), when="midnight")
    logFormatter = logging.Formatter('%(asctime)s: %(levelname)s; %(message)s')
    logHandler.setFormatter(logFormatter)
    logger = logging.getLogger('batch logger')
    logger.addHandler(logHandler)
    logger.setLevel(logging.DEBUG)

    # Signals
    def signal_handler(sig, frame):
        """ Method that handles received kill signals. """
        logger.warning('Caught signal: %s, killing process.', sig)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    # Search setup
    countries = tweetSearch.getCountries()
    words = tweetSearch.getSearchWords()

    # Database setup
    # Tables are only created when the sqlite file does not exist yet.
    create_tables = False
    if not os.path.isfile(config.DATABASE_LOCATION):
        create_tables = True
    engine = db.init_db(config.DATABASE_LOCATION)
    if create_tables:
        db.create_tables(engine)
    db_session = scoped_session(sessionmaker(autocommit=False,
                                             autoflush=False,
                                             bind=engine))
    logger.info('Started script (PID: %s).' % os.getpid())

    def get_and_store_data(search_terms, search_function, save_function):
        """
        Method used when getting and storing data. Method is made in a
        semi-generic manner, so that it the batch script can evolve into a
        dynamic script that can be tailored in the configuration file.
        """
        skipped_terms = []
        for terms in search_terms:
            results = None
            try:
                results = search_function(terms)
            except SearchEngineLimitError:
                # Rate-limited: back off, remember the terms for a retry.
                logger.warning('Search engine limit exceeded,'
                               ' sleeping for %s seconds.' % config.SLEEP_TIME)
                time.sleep(config.SLEEP_TIME)
                skipped_terms.append(terms)
            if results:
                # Save tweets
                logger.info('Found %s results when searching for %s.'
                            % (len(results), ' and '.join(terms)))
                save_function(db_session, results, terms)
        # NOTE(review): indentation reconstructed from collapsed source —
        # the retry recursion is placed after the full pass over
        # search_terms. If every term keeps hitting the limit this
        # recursion has no depth bound; confirm against the original.
        if skipped_terms:
            get_and_store_data(skipped_terms, search_function, save_function)

    # Starting datamining loop
    while 1:
        try:
            search_terms = [(a, b) for b in words for a in countries]
            search_function = tweetSearch.getTweets
            save_function = db_functions.saveTweets
            get_and_store_data(search_terms, search_function, save_function)
        # NOTE(review): bare except re-raises after logging, so nothing is
        # swallowed — but it also catches SystemExit/KeyboardInterrupt;
        # `except Exception:` would be the conventional choice.
        except:
            logger.exception('Exception caught!')
            raise
from database import db
from models import Quote

if __name__ == '__main__':
    # Read every setting up front; order preserved because config() may
    # raise for required keys (TOKEN, WEBHOOK_URL).
    DEBUG = config("DEBUG", default=False, cast=bool)
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.DEBUG if DEBUG else logging.INFO)
    token = config("TOKEN", cast=str)
    webhook_url = config('WEBHOOK_URL', cast=str)
    port = config('PORT', default=5000, cast=int)
    url = config('URL', default="127.0.0.1")
    workers = config('WORKERS', default=4, cast=int)
    private_key = config('PRIVATE_KEY', default=None)
    certificate = config('CERTIFICATE', default=None)

    logging.warning('Preparing database ...')
    db.create_tables((Quote, ), safe=True)

    logging.warning('Starting...')
    bot = Bot(token,
              url,
              webhook_url,
              port=port,
              workers=workers,
              private_key=private_key,
              certificate=certificate,
              debug=DEBUG)
    bot.run()
def create_models():
    """Create the table backing the Person model."""
    db.create_tables([Person])
def test_saveTweets():
    """Smoke test: saveTweets should report success on the fixture data."""
    # TODO: Move to 'test setup'
    db.create_tables(engine)
    # TODO: Make test more complicated
    assert db_functions.saveTweets(db_session, tweets)
from database import Contact, db

# Open the connection and materialise the Contact table.
db.connect()
db.create_tables([Contact])
def step1(db):
    """First migration step: create the Config and Dump tables on *db*."""
    db.create_tables([Config, Dump])
# ACIT 2515 - Assignment 4
# create_tables.py
# Group 19
"""
Creates the table structure of the database
"""
from database import db
from books import Books
from multimedia import Multimedia

if __name__ == "__main__":
    # Build both model tables in one call.
    db.create_tables([Books, Multimedia])
from database import Contact, Role, User, UserRoles, db

db.connect()
# safe=True makes this idempotent: tables that already exist are skipped.
db.create_tables([Contact, User, Role, UserRoles], safe=True)
import bcrypt
import json
from database import db, User, Business, Service, OwnerBusiness, Reservation, ReservationService

if __name__ == '__main__':
    # Load the password salt from the local settings file.
    with open("./settings.json") as f:
        settings = json.load(f)
    SALT = settings["SALT"].encode("utf-8")

    # Create the full schema, then interactively create the admin account.
    db.connect()
    db.create_tables([
        User, Business, Service, OwnerBusiness, Reservation, ReservationService
    ])

    print("- Creazione amministratore: ")
    # NOTE(review): these prompt lines were corrupted in the source
    # ('"******"' scrubbing artifacts); reconstructed from the surrounding
    # variable names and the other Italian prompts — confirm against the
    # original file.
    username = input("Inserisci username: ")
    email = input("Inserisci email: ")
    fullname = input("Inserisci nome e cognome: ")
    # NOTE(review): consider getpass.getpass() so the password is not
    # echoed to the terminal.
    password1 = input("Inserisci password: ")
    # bcrypt.hashpw requires bytes for both password and salt.
    hashed = bcrypt.hashpw(password1.encode("utf-8"), SALT)
    admin = User.create(username=username, email=email, fullname=fullname,
                        password=hashed, is_admin=True)
import database
from database import Member, Message, db

# One-shot schema setup: open the connection, create both tables,
# then release the connection.
db.connect()
db.create_tables([Member, Message])
db.close()
get_prefix, set_prefix, ) sentry_dsn = os.environ.get("SENTRY_DSN") if sentry_dsn: sentry_sdk.init( dsn=sentry_dsn, attach_stacktrace=True, shutdown_timeout=5, integrations=[AioHttpIntegration(), SqlalchemyIntegration()], release=f"morpheushelper@{VERSION}", ) db.create_tables() async def fetch_prefix(_, message: Message) -> Iterable[str]: if message.guild is None: return "" return await get_prefix(), f"<@!{bot.user.id}> ", f"<@{bot.user.id}> " bot = Bot(command_prefix=fetch_prefix, case_insensitive=True, description=translations.description) def get_owner() -> Optional[User]: owner_id = os.getenv("OWNER_ID")
from database import Contact, Role, Tweeb, User, UserRoles, db

db.connect()
# safe=True makes this idempotent: tables that already exist are skipped.
db.create_tables([User, Role, UserRoles, Contact, Tweeb], safe=True)
def create():
    """Create the tables for the User and Pixels models.

    Folded the two separate create_tables() calls into one: a single call
    lets the backend order the DDL for any inter-model dependencies, and
    matches the convention used elsewhere in this codebase.
    """
    db.create_tables([User, Pixels])
from markups import * import sys import os from dotenv import load_dotenv, find_dotenv from telebot import TeleBot from plotters import GraphPlotter, FigurePlotter from database import db, User, State, GraphPlotterModel, FigurePlotterModel from utils import is_number, parse_shape if not load_dotenv(find_dotenv()): sys.exit(1) db.create_tables([User, GraphPlotterModel, FigurePlotterModel]) bot = TeleBot(os.getenv('BOT_TOKEN')) @bot.message_handler(commands=['start']) def start(msg): user = User.get_or_none(User.uid == msg.from_user.id) if not user: User.create(uid=msg.from_user.id) resp = "Welcome! I will help you with plotting graphs and geometric figures. ✍️" else: resp = "Welcome back! Try plotting something! 🎓" bot.send_message( msg.from_user.id, resp,