def resetDB(self):
    """Tear the database down to nothing and rebuild it with fixture data."""
    self.db.session.remove()
    self.db.drop_all()
    models.create_tables(self.app)
    fixtures.install(self.app, *fixtures.all_data)
    # Re-bind the freshly initialized database handle.
    self.db = models.init_app(self.app)
def process_staged_urls():
    """Query/download all staged URLs, updating the Catalog and Document tables.

    For each staged URL: look up its Place and Event, then either reference an
    existing Catalog entry or download the media, catalog it, and add a
    Document reference.
    """
    engine = db_connect()
    create_tables(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
    try:
        for url_record in session.query(UrlStage).all():
            place_record = session.query(Place). \
                filter(Place.ocd_division_id == url_record.ocd_division_id).first()
            event_record = session.query(Event). \
                filter(Event.ocd_division_id == url_record.ocd_division_id,
                       Event.record_date == url_record.event_date,
                       Event.name == url_record.event).first()
            # BUG FIX: dereferencing .id on a missing Place/Event raised
            # AttributeError and aborted the whole run; skip such rows instead.
            if place_record is None or event_record is None:
                print(f'skipping {url_record.url_hash}: no matching place or event record')
                continue
            print(f'place id: {place_record.id}\n event_id:{event_record.id}')
            catalog_entry = session.query(Catalog). \
                filter(Catalog.url_hash == url_record.url_hash).first()
            if catalog_entry:
                # Document already exists in catalog: just add a reference.
                catalog_id = catalog_entry.id
                print(f'catalog_id---------{catalog_id}')
                document = map_document(
                    url_record, place_record.id, event_record.id, catalog_id)
                save_record(document)
                print("existing in catalog adding reference to document")
            else:
                print("Does not exist")
                # Download and save document.
                catalog = Catalog(
                    url=url_record.url,
                    url_hash=url_record.url_hash,
                    location='placeholder',
                    filename=f'{url_record.url_hash}.pdf'
                )
                doc = Media(url_record)
                result = doc.gather()  # download; falsy result means failure
                if result:
                    catalog.location = result
                    catalog_id = save_record(catalog)
                    document = map_document(
                        url_record, place_record.id, event_record.id, catalog_id)
                    doc_id = save_record(document)
                    print(f'Added {url_record.url_hash} doc_id: {doc_id}')
    finally:
        # Always release the session, even on partial failure.
        session.close()
def import_typeform(filename):
    """Load a Typeform CSV export into the Registration table (upsert on id)."""
    import csv
    fields = ["id"] + typeform.FIELDS + [
        "start_time", "submit_time", "network_id"
    ]
    # Columns exported as "1"/"0" that must become real booleans.
    flag_columns = ("first_hack", "sponsor", "swag",
                    "code_of_conduct", "terms_and_conditions")
    vals = []
    with open(filename, newline='') as csvfile:
        for row in csv.DictReader(csvfile, fields):
            for column in flag_columns:
                row[column] = row[column] == "1"
            vals.append(row)
    create_tables()
    with db.database.atomic():
        # vals[0] is the CSV header row, hence the slice.
        (Registration.insert_many(vals[1:])
         .on_conflict("update",
                      conflict_target=[Registration.id],
                      preserve=Registration.hacker_discord)
         .execute())
    click.echo("Uploaded {0} entries to the Registration table".format(
        len(vals) - 1))
def create():
    """Create the database schema, then run the bootstrap module."""
    from models import create_tables
    print(INFO + '%s : Creating the database ...' % current_time())
    create_tables()
    print(INFO + '%s : Bootstrapping the database ...' % current_time())
    # Importing the module runs the bootstrap as a side effect.
    import setup.bootstrap
def test_process_inserts(self):
    """One staged insert should yield one playground and one revision."""
    models.delete_tables()
    models.create_tables()
    new_playgrounds, revision_group = data.process_changes(
        'tests/data/test_inserts.json')
    self.assertEqual(len(new_playgrounds), 1)
    playground = Playground.select().where(
        Playground.id == new_playgrounds[0].id)[0]
    self.assertEqual(playground.name, 'NEW NAME')
    revisions = Revision.select().where(
        Revision.revision_group == revision_group)
    self.assertEqual(revisions.count(), 1)
    revision = revisions[0]
    self.assertEqual(revision.playground.id, playground.id)
    log = revision.get_log()
    self.assertEqual(len(log), 1)
    entry = log[0]
    self.assertEqual(entry['field'], 'name')
    self.assertEqual(entry['from'], '')
    self.assertEqual(entry['to'], 'NEW NAME')
    headers = revision.get_headers()
    self.assertEqual(headers['content_length'], '18')
    self.assertEqual(headers['host'], 'localhost')
    cookies = revision.get_cookies()
def __init__(self): """ Initialize database connection and create tables. """ engine = db_connect() create_tables(engine) self.Session = sessionmaker(bind=engine)
def test_process_inserts(self):
    """Processing a single insert creates exactly one playground and revision."""
    models.delete_tables()
    models.create_tables()
    new_playgrounds, revision_group = data.process_changes(
        'tests/data/test_inserts.json')
    self.assertEqual(len(new_playgrounds), 1)
    playground = Playground.select().where(
        Playground.id == new_playgrounds[0].id)[0]
    self.assertEqual(playground.name, 'NEW NAME')
    revisions = Revision.select().where(
        Revision.revision_group == revision_group)
    self.assertEqual(revisions.count(), 1)
    revision = revisions[0]
    self.assertEqual(revision.playground.id, playground.id)
    change_log = revision.get_log()
    self.assertEqual(len(change_log), 1)
    self.assertEqual(change_log[0]['field'], 'name')
    self.assertEqual(change_log[0]['from'], '')
    self.assertEqual(change_log[0]['to'], 'NEW NAME')
    headers = revision.get_headers()
    self.assertEqual(headers['content_length'], '18')
    self.assertEqual(headers['host'], 'localhost')
    cookies = revision.get_cookies()
def initdb(create=False, drop=False, force=False, audit=True, verbose=False):
    """Initialize the database."""
    # Destructive drop requires BOTH flags as a safety interlock.
    if drop and force:
        models.drop_tables()
    if verbose:
        # Echo all peewee SQL statements for debugging.
        logger = logging.getLogger("peewee")
        if logger:
            logger.setLevel(logging.DEBUG)
            logger.addHandler(logging.StreamHandler())
    try:
        models.create_tables()
    except Exception:
        # Log and continue: tables may already exist.
        app.logger.exception("Failed to create tables...")
    if audit:
        app.logger.info("Creating audit tables...")
        models.create_audit_tables()
    super_user, created = models.User.get_or_create(
        email="*****@*****.**", roles=models.Role.SUPERUSER)
    if not created:
        # NOTE(review): an existing superuser short-circuits the remaining
        # seeding (name/confirmed update and organisation creation) --
        # confirm this early return is intentional.
        return
    super_user.name = "The University of Auckland"
    super_user.confirmed = True
    super_user.save()
    org, _ = models.Organisation.get_or_create(
        name="The University of Auckland",
        tuakiri_name="University of Auckland",
        confirmed=True)
def __init__(self):
    """Set up logging, canned reply data, channel lists, the DB, and bot handlers."""
    # Enable logging
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)
    self.logger = logging.getLogger(__name__)
    self.TIME_TO_REPOST = dict()
    # Canned replies (Russian) sent for the "fochi" trigger; runtime strings,
    # intentionally left untranslated.
    self.fochi_replyes = ["Ко ко ко", "Воу воу, палехчи паринь",
                          "Смотрите все! Fochi бушует!",
                          "Слушайте его, он херни не скажет",
                          "Круто сказанул, ыыыыы"]
    self.boobs_channels = ['@superboobs', '@BestTits', '@boobsblog']
    # Regex matching Russian slang word forms that trigger a repost.
    self.boobs_regexp = re.compile('(сис(ек|ьки|ечки|и|яндры))|(ти(тьки|течки|тюли|ти|тяндры))', re.IGNORECASE)
    self.ass_channels = ['@bigasianasses', '@BestAss']
    self.ass_regexp = re.compile('(поп(ка|ец))|(жоп(ка|ища|уля))', re.IGNORECASE)
    # init main logic
    self.bot_db = db.sql_database()
    models.create_tables(self.bot_db.get_engine())
    # NOTE(review): addHandler/addErrorHandler is the legacy
    # python-telegram-bot API -- confirm the pinned library version.
    self.updater = Updater(config.TOKEN)
    self.dp = self.updater.dispatcher
    self.dp.addHandler(CommandHandler("start", self.start))
    self.dp.addHandler(CommandHandler("users", self.users))
    self.dp.addHandler(CommandHandler("groups", self.groups))
    self.dp.addHandler(CommandHandler("help", self.help))
    self.dp.addHandler(MessageHandler([Filters.text], self.echo))
    self.dp.addErrorHandler(self.error)
def create_app():
    """Build and configure the notification-service Flask app.

    Returns:
        The configured Flask application.
    """
    app = Flask(__name__)
    cors = CORS(app)
    # BUG FIX: abspath(__file__) is the file itself, not its directory;
    # dirname() gives the intended base directory.
    basedir = os.path.dirname(os.path.abspath(__file__))
    app.config.update(
        dict(
            # NOTE(review): hard-coded secrets -- move to environment/config.
            SECRET_KEY="powerful secretkey",
            WTF_CSRF_SECRET_KEY="a csrf secret key",
            SQLALCHEMY_DATABASE_URI='mysql+mysqlconnector://root:test@flexigym-notification-api-db/notification',
            SQLALCHEMY_TRACK_MODIFICATIONS=False,
            JSON_SORT_KEYS=False))
    models.init_app(app)
    models.create_tables(app)
    app.register_blueprint(notification_api_blueprint)
    swagger_ui_blueprint = get_swaggerui_blueprint(
        SWAGGER_URL,
        API_URL,
    )
    app.register_blueprint(swagger_ui_blueprint, url_prefix=SWAGGER_URL)
    return app
def create():
    """Create and bootstrap the application database."""
    from models import create_tables
    print(INFO + '%s : Creating the database ...' % current_time())
    create_tables()
    print(INFO + '%s : Bootstrapping the database ...' % current_time())
    import setup.bootstrap  # import runs the bootstrap script
def create(options, *args, **kwargs):
    """Create the database schema and bootstrap its initial contents."""
    from libs.ConfigManager import ConfigManager  # Sets up logging
    from models import create_tables, boot_strap
    print(INFO + '%s : Creating the database ...' % current_time())
    create_tables()
    print(INFO + '%s : Bootstrapping the database ...' % current_time())
    boot_strap()
def bootstrap():
    """Recreate the 'breaking' database and seed it with test data."""
    with settings(warn_only=True):
        # warn_only: dropdb fails harmlessly when the DB doesn't exist yet.
        local('dropdb breaking')
        local('createdb breaking')
    models.create_tables()
    data.load_test_event()
    data.load_test_facts()
def getfeeds(account): """ Download and save articles from feeds """ create_tables(account) with session_scope(account) as session: helper = FeedSetHelper(session, account) helper.get_pages_from_feeds()
def __init__(self): """ Initializes database connection and sessionmaker. Creates nfl_te_games_2015 table if it doesn't exist. """ engine = db_connect() create_tables(engine) self.Session = sessionmaker(bind=engine)
def __init__(self): """ Initializes database connection and sessionmaker. Creates nfl_rosters_2015 table. """ engine = db_connect() create_tables(engine) self.Session = sessionmaker(bind=engine)
def setUp(self):
    """Create the test app, build its schema, and open a fresh DB session."""
    self.app = mvp.create_app('./settings/test.cfg')
    models.create_tables(self.app.engine)
    session_factory = sessionmaker(bind=self.app.engine)
    self.db = session_factory()
def __init__(self): """ Initializes database connection and sessionmaker. Creates all tables. """ engine = db_connect() create_tables(engine) self.Session = sessionmaker(bind=engine)
def create(options, *args, **kwargs):
    """Create the database tables and load the bootstrap data."""
    from libs.ConfigManager import ConfigManager  # Sets up logging
    from models import create_tables, boot_strap
    print(INFO+'%s : Creating the database ...' % current_time())
    create_tables()
    print(INFO+'%s : Bootstrapping the database ...' % current_time())
    boot_strap()
def __init__(self): """ Initializes database connection and sessionmaker. Creates tables. """ self.engine = db_connect() create_tables(self.engine) self.Session = sessionmaker(bind=self.engine)
def __dbinit__(self):
    """Create the SQLite database file and its tables on first run."""
    logging.info("Initializing SQLite db ...")
    if not os.path.exists(DBFILE_NAME):
        logging.info("Creating SQLite tables")
        # Touch the database file by opening and closing a connection.
        connection = sqlite3.connect(DBFILE_NAME)
        connection.close()
        create_tables()
def bootstrap():
    """Drop and recreate the 'breaking' database, then load test fixtures."""
    with settings(warn_only=True):
        local("dropdb breaking")
        local("createdb breaking")
    models.create_tables()
    data.load_test_event()
    data.load_test_facts()
def init_db(request):
    """Test fixture: build the schema and seed data; drop all tables on teardown."""
    create_tables()
    init_data.init_all()

    def fin():
        # Teardown function, runs when the test function exits.
        drop_tables()

    request.addfinalizer(fin)
def __init__(self):
    """Initialize the database connection and sessionmaker; create all tables."""
    store_engine = db_connect()
    create_tables(store_engine)
    self.Session = sessionmaker(bind=store_engine)
def on_novo_menu_item_activate(self, widget):
    """Handle the "New" menu item: pick a file name and initialize a fresh DB."""
    self.file_chooser.set_action(Gtk.FileChooserAction.SAVE)
    response = self.file_chooser.run()
    if response == Gtk.ResponseType.OK:
        self.filename = self.file_chooser.get_filename()
        models.init(self.filename)
        models.open()
        models.create_tables()
    self.file_chooser.hide()
def main():
    """Run the VK long-poll loop, answering new messages addressed to the bot."""
    longpoll = VkLongPoll(API)
    create_tables()
    clean_up = DBCleanUp(CLEAN_UP_INTERVAL)
    print("STARTED")
    for event in longpoll.listen():
        # Only react to fresh incoming messages directed at us.
        if event.type == VkEventType.MESSAGE_NEW and event.to_me:
            make_bot_response(event)
def create():
    """Create the database; bootstrap only when 'bootstrap'/'-b' is on argv."""
    from libs.ConfigManager import ConfigManager  # Sets up logging
    from models import create_tables, boot_strap
    print(INFO + '%s : Creating the database ... ' % current_time())
    create_tables()
    if len(argv) == 3 and argv[2] in ('bootstrap', '-b'):
        print('\n\n\n' + INFO +
              '%s : Bootstrapping the database ... \n' % current_time())
        boot_strap()
def create_app() -> Flask:
    """Build the Flask app, creating the SQLite schema on first run."""
    if not os.path.exists(DATABASE):
        create_tables()
    flask_app = Flask(__name__)
    flask_app.config.from_pyfile('config.py')
    views.init_app(flask_app)
    return flask_app
def create(): """ Creates/bootstraps the database """ from libs.ConfigManager import ConfigManager # Sets up logging from models import create_tables, boot_strap print(INFO + "%s : Creating the database ... " % current_time()) create_tables() if len(argv) == 3 and (argv[2] == "bootstrap" or argv[2] == "-b"): print("\n\n\n" + INFO + "%s : Bootstrapping the database ... \n" % current_time()) boot_strap()
def create():
    """Create and bootstrap the database; tolerate an already-bootstrapped DB."""
    from libs.ConfigManager import ConfigManager  # Sets up logging
    from models import create_tables, boot_strap
    print(INFO + '%s : Creating the database ...' % current_time())
    create_tables()
    print(INFO + '%s : Bootstrapping the database ...' % current_time())
    try:
        boot_strap()
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit.
        print(WARN + "%s : Database has already been bootstrapped" % current_time())
def test_db(self): """tests weather data can be stored in an empty database""" print("testing database operations") models.drop_tables() models.create_tables() lottery = models.Lottery.create(name="test") self.assertIsNotNone(lottery) owner = models.Participant.create(name="test") self.assertIsNotNone(owner) ticket = models.Ticket.create(lottery=lottery, owner=owner) self.assertIsNotNone(ticket)
def context():
    """Yield inside an in-memory SQLite DB pre-seeded with two tasks."""
    memory_db = SqliteDatabase(':memory:')
    create_tables(memory_db)
    with memory_db:
        memory_db.create_tables([
            Task,
        ])
        Task.create(name='First Task')
        Task.create(name='Second Task')
        yield
def __init__(self): """Initializes database connection and sessionmaker Create: users table reviews table violations table restaurants table """ engine = db_connect() # engine.echo = True #prints out SQL we are loading create_tables(engine) self.Session = sessionmaker(bind=engine)
def init_test_data(self, create=True, drop=True):
    """Optionally drop/recreate the schema, then load the shared test fixtures."""
    from models import create_tables, drop_tables
    if drop:
        logging.info("drop table ...")
        drop_tables()
    if create:
        logging.info("create table ...")
        create_tables()
    logging.info("init test data ...")
    from tests import init_data
    init_data.init_all()
def __init__(self, url):
    """Bind the store to the database and validate subclass configuration.

    Args:
        url: resource URL this store operates on.
    Raises:
        NotImplementedError: if the subclass did not set the `model` class
            variable.
    """
    self.url = url
    engine = db_connect()
    try:
        create_tables(engine)
    except Exception as e:
        # FIX: was a bare `except:` that logged only the exception *type*
        # via sys.exc_info()[0]; log the actual exception and keep going
        # (tables may already exist).
        log.error("Unable to create tables. %s" % e)
    self.Session = sessionmaker(bind=engine)
    if not self.model:
        log.warning("BaseStore instantiated without model class variable")
        raise NotImplementedError("Subclasses must set model class!")
def get_players(): """Gets player listings from db""" engine = models.db_connect() models.create_tables(engine) Session = sessionmaker(bind=engine) session = Session() players = {} for player in session.query(models.NFL_Player_2015): players[str(player.name)] = 0 session.close() return players
def syncdb(options, reset=False, create=False):
    """Reset, create, or open an interactive shell on the database.

    BUG FIX: the original called `create_tables()()` -- invoking the return
    value of create_tables, a guaranteed TypeError at runtime (in both the
    reset and create branches).
    """
    from models import DB_FILE, database, create_tables, populate_tables
    import os
    if reset:
        os.unlink(DB_FILE)
        database.connect()
        create_tables()
        populate_tables()
    elif create:
        database.connect()
        create_tables()
    else:
        dbshell(options)
def init_db_con(): """Initializes db connection session""" try: engine = db_connect() create_tables(engine) Session = sessionmaker(bind=engine) session = Session() logger.info('SUCCESS: Connection to mysql instance succeeded') return session except Exception as e: logger.error('ERROR: Could not connect to mysql instance.') logger.error(e) sys.exit()
def main():
    """Scrape ads from the enabled providers and post new ones to Trello."""
    logging.basicConfig(level=logging.INFO, format='%(asctime)s :: %(levelname)s :: %(message)s')
    # Run relative to the script's own directory so data files resolve.
    os.chdir(os.path.dirname(sys.argv[0]))
    models.create_tables()
    # Load search parameters from the JSON file
    with open("parameters.json", encoding='utf-8') as parameters_data:
        parameters = json.load(parameters_data)
    # get proxies
    proxies = []
    if parameters['use-proxy']:
        proxies = get_proxies()
    # Search each enabled provider and insert results into the database
    if "leboncoin" in parameters['ad-providers']:
        try:
            logging.info("Retrieving from leboncoin")
            LeBonCoinSearch(parameters, proxies).search()
        except ConnectionError:
            logging.error("Error while retrieving from leboncoin")
    if "pap" in parameters['ad-providers']:
        try:
            logging.info("Retrieving from pap")
            PAPSearch(parameters, proxies).search()
        except ConnectionError:
            logging.error("Error while retrieving from pap")
    if "logic_immo" in parameters['ad-providers']:
        try:
            logging.info("Retrieving from logic_immo")
            LogicImmoSearch(parameters, proxies).search()
        except ConnectionError:
            logging.error("Error while retrieving from logic_immo")
    if "seloger" in parameters['ad-providers']:
        try:
            logging.info("Retrieving from seloger")
            SeLogerSearch(parameters, proxies).search()
        except ConnectionError:
            logging.error("Error while retrieving from seloger")
    logging.info("Posting ads to trello ")
    # Send the collected ads to Trello
    posted = TrelloModule().post()
    logging.info("%s new ads posted to Trello" % posted)
def import_devpost(filename):
    """Load a Devpost CSV export into the Projects table (upsert on table number)."""
    import csv
    fields = ["table", "name", "url"]
    with open(filename, newline='') as csvfile:
        vals = list(csv.DictReader(csvfile, fields))
    create_tables()
    with db.database.atomic():
        # vals[0] is the CSV header row, hence the slice.
        (Project.insert_many(vals[1:])
         .on_conflict("update",
                      conflict_target=[Project.table],
                      preserve=[Project.active, Project.mu, Project.sigma_sq])
         .execute())
    click.echo(
        "Uploaded {0} projects to the Projects table".format(len(vals) - 1))
def fetch():
    """Serve rows of the uploaded CSV, converting 1-based ?n= to 0-based slicing.

    /fetch?n=1 means row 0 is inserted (iloc[0:1]); a range like
    /fetch?n=3-10 maps to lower=3, upper=11 (iloc[3:11]).
    """
    if request.args.get('n') is None:
        abort(404)
    bounds = list(map(int, request.args.get('n').split("-")))
    if len(bounds) > 1:
        lower_limit, upper_limit = bounds[0], bounds[1] + 1
    else:
        lower_limit, upper_limit = bounds[0] - 1, bounds[0]
    filename = 'data.csv'
    path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
    print(lower_limit)
    print(upper_limit)
    api_result = create_tables(con, meta, session, path, lower_limit,
                               upper_limit)
    return jsonify({'data': api_result})
def __init__(self, db_url, **settings):
    """Wire up the database (when db_url is given), routes, and asset paths."""
    if db_url is not None:
        # sslmode=require: refuse unencrypted connections.
        engine = create_engine(db_url, connect_args={'sslmode': 'require'})
        create_tables(engine)
        self.db = scoped_session(sessionmaker(bind=engine))
    handlers = [
        (r"/", MainHandler),
    ] + repo_crud.handlers + github_handlers.handlers
    base_dir = os.path.dirname(__file__)
    settings['template_path'] = os.path.join(base_dir, "templates")
    settings['static_path'] = os.path.join(base_dir, "static")
    super().__init__(handlers, **settings)
def validate_args():
    # Handle one-shot maintenance flags; each flag branch exits the process.
    # NOTE: Python 2 code (print statements, xrange, Set) -- kept as-is.
    if args.clear_db:
        drop_tables(db)
        create_tables(db)
        log.info("Drop and create complete.")
        exit(0)
    if args.list:
        # List every stored authorization token and its owner.
        print "--- Authorization keys: ---"
        query = Authorizations.select(Authorizations.token, Authorizations.name)
        for token in query:
            print ("Token: " + token.token + "\tAssigned: " + token.name)
        exit(0)
    if args.generate:
        query = Authorizations.select(Authorizations.token).where(
            Authorizations.name == args.generate)
        if len(query):
            print "Token already exists for " + args.generate
            exit(0)
        print "Generating authorization token for " + args.generate
        # 32-char random alphanumeric token.
        # NOTE(review): random.choice is not cryptographically secure --
        # consider os.urandom-based generation for auth tokens.
        lst = [random.choice(string.ascii_letters + string.digits)
               for n in xrange(32)]
        token = "".join(lst)
        authorization = {'token': token, 'name': args.generate}
        bulk_upsert(Authorizations, {0: authorization}, db)
        print ("Authorization token for " + args.generate + " is: " + token)
        exit(0)
    if args.revoke:
        query = Authorizations.delete().where(
            Authorizations.token == args.revoke)
        if query.execute():
            print "Token revoked."
        else:
            print "No token found."
        exit(0)
    if args.no_gyms:
        # Suppressing gyms implies suppressing gym detail too.
        args.no_gymdetail = True
    # NOTE(review): assumed to run unconditionally (not inside the no_gyms
    # branch) -- confirm against the original indentation.
    args.ignore_pokemon = Set([int(i) for i in args.ignore_pokemon])
def create_app():
    """Build and configure the product-service Flask app.

    BUG FIX: the function previously fell off the end without `return app`,
    so callers received None.

    Returns:
        The configured Flask application.
    """
    app = Flask(__name__)
    app.config.update(
        dict(
            # NOTE(review): empty secret keys -- set real values via config.
            SECRET_KEY="",
            WTF_CSRF_SECRET_KEY="",
            SQLALCHEMY_DATABASE_URI='mysql+mysqlconnector://root:test@product_db/product',
            SQLALCHEMY_TRACK_MODIFICATIONS=False))
    models.init_app(app)
    models.create_tables(app)
    app.register_blueprint(product_api_blueprint)
    swaggerui_blueprint = get_swaggerui_blueprint(SWAGGER_URL, API_URL)
    app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL)
    return app
def log_list():
    """Render the vehicle list page with distance and flight-time statistics."""
    if no_settings_file:
        return render_template(
            'errors.html',
            errors=['No settings file (settings.py) importable.'])
    # Check if the database has the required tables and create if not.
    tables = models.db.get_tables()
    if len(tables) == 0:
        models.create_tables()
    vehicles = models.Vehicle.select().order_by(
        models.Vehicle.date_first_heard.desc())
    system = models.SystemStatistic.select().first()
    for vehicle in vehicles:
        vehicle.show_tracker_link = False
        # If we have coords, get the haversine distance.
        if vehicle.first_latitude and vehicle.first_longitude and vehicle.last_latitude and vehicle.last_longitude:
            l1 = (vehicle.first_latitude, vehicle.first_longitude)
            l2 = (vehicle.last_latitude, vehicle.last_longitude)
            vehicle.flight_distance_km = haversine.haversine(l1, l2)
        # BUG FIX: the original tested `date_last_heard and date_last_heard`
        # (same field twice); the subtraction below needs BOTH timestamps.
        if vehicle.date_last_heard and vehicle.date_first_heard:
            vehicle.flight_time_minutes = (
                vehicle.date_last_heard -
                vehicle.date_first_heard).total_seconds() / 60
        # Show the tracker link if we've heard this vehicle in the last hour.
        if (datetime.utcnow() - vehicle.date_last_heard
            ).total_seconds() < 60 * 60:  # 1 Hour.
            vehicle.show_tracker_link = True
            vehicle.tracker_url = utils.get_tracker_url(
                vehicle.last_latitude, vehicle.last_longitude,
                vehicle.vehicle_id)
    return render_template('list.html',
                           vehicles=vehicles,
                           sondehub_url_prefix=settings.sondehub_url_prefix,
                           system=system,
                           auto_rx_url=settings.auto_rx_url)
def __init__(self):
    """Set up the DB session factory and ensure the product-type tree has a root."""
    engine = db_connect()
    create_tables(engine)
    self.Session = sessionmaker(bind=engine)
    session = self.Session()
    # make sure the root of the product type tree exists
    ptt = session.query(ProductTypeTree).filter_by(lft=0).first()
    if ptt is None:
        # Nested-set root node: lft=0, rgt=1 brackets an empty tree.
        ptt = ProductTypeTree(name='Root', lft=0, rgt=1)
        try:
            session.add(ptt)
            session.commit()
        except:
            session.rollback()
            raise
        finally:
            # NOTE(review): session is only closed on this branch; when the
            # root already exists the session is left open -- confirm intended.
            session.close()
def test_add_playground(self):
    """Posting a new playground redirects to search and records one insert."""
    models.delete_tables()
    models.create_tables()
    response = self.client.post(url_for('insert_playground'),
                                data={'name': 'NEW PLAYGROUND'})
    self.assertEqual(response.status_code, 302)
    redirect_url = app_config.S3_BASE_URL
    self.assertEqual(response.headers['Location'].split('?')[0],
                     redirect_url + '/search.html')
    with open('data/changes.json') as f:
        inserts = json.load(f)
    self.assertEqual(len(inserts), 1)
    first_change = inserts[0]
    self.assertEqual(first_change['action'], 'insert')
    self.assertEqual(first_change['playground']['name'], 'NEW PLAYGROUND')
def test_add_playground(self):
    """A POST of a new playground should 302 to search and log a single insert."""
    models.delete_tables()
    models.create_tables()
    response = self.client.post(url_for('insert_playground'),
                                data={'name': 'NEW PLAYGROUND'})
    self.assertEqual(response.status_code, 302)
    redirect_url = app_config.S3_BASE_URL
    location = response.headers['Location'].split('?')[0]
    self.assertEqual(location, redirect_url + '/search.html')
    with open('data/changes.json') as f:
        inserts = json.load(f)
    self.assertEqual(len(inserts), 1)
    self.assertEqual(inserts[0]['action'], 'insert')
    self.assertEqual(inserts[0]['playground']['name'], 'NEW PLAYGROUND')
def __init__(self): """ Initializes database connection and sessionmaker Create tables """ # this is weird, sorry. # allows access to file from this level and notebooks level fname = "authentication.json" if os.path.isfile(fname): with open(fname) as f: auth = json.load(f) else: with open("../" + fname) as f: auth = json.load(f) self.engine = mod.db_connect(auth["database"]) mod.create_tables(self.engine) Session = sessionmaker(bind=self.engine) self.session = Session()
def __init__(self, db_url, **settings):
    """Configure the database (when db_url is given), URL routes, and assets."""
    if db_url is not None:
        # echo=True logs emitted SQL; sslmode=require forces encryption.
        engine = create_engine(db_url, echo=True,
                               connect_args={'sslmode': 'require'})
        create_tables(engine)
        self.db = scoped_session(sessionmaker(bind=engine))
    handlers = [
        (r"/", MainHandler),
        URLSpec(r"/repos", RepoList, name='repo_list'),
        URLSpec(r"/repos/add", RepoAdd, name='repo_add'),
        URLSpec(r"/repos/(\d+)", RepoEdit, name='repo_edit'),
    ]
    here = os.path.dirname(__file__)
    settings['template_path'] = os.path.join(here, "templates")
    settings['static_path'] = os.path.join(here, "static")
    super().__init__(handlers, **settings)
def main():
    """Run the test suite against a throwaway database whose name contains 'test'."""
    if 'test' not in settings.KCING_DB:
        logger.error(
            'Database for testing should contain the word "test" in it')
        logger.error(
            'Please specify a testing database passing KCING_DB env var when running tests'
        )
        return -1
    # Delete any leftover db so every run starts from a fresh file.
    try:
        os.unlink(settings.KCING_DB)
    except OSError:
        pass
    os.mknod(settings.KCING_DB)
    init()
    create_tables()
    rc = unittest.main()
    end()
    return rc
def initialize():
    """Best-effort schema creation; failures are ignored (tables may exist).

    FIX: narrowed from a bare `except:`, which also swallowed
    KeyboardInterrupt and SystemExit.
    """
    try:
        create_tables()
    except Exception:
        pass
# Create a parser for each low-level module module_basename = module.__name__.split('.')[-1] module_parser = command_subparsers.add_parser(module_basename) # Add default arguments for each fetcher (database configuration) module_parser.add_argument( '--db', default='sqlite', help="which type of database to use (postgres, sqlite). Defaults to sqlite." ) module_parser.add_argument( '--db-config', help="Name of file containing database configuration." ) # Each module defines additional arguments module.configure_parser(module_parser) module_parser.set_defaults(func=module.main) # Parse arguments args = parser.parse_args() # Initialize database init_database(args.db, config_filename=args.db_config) create_tables() # Invoke the main program that was specified by the submodule if args.func is not None: args.func(**vars(args))
def __init__(self):
    """Prepare the DB session factory and clear previously stored Habrahabr rows."""
    habr_engine = db_connect()
    create_tables(habr_engine)
    self.Session = sessionmaker(bind=habr_engine)
    delete_from_model(Habrahabr, habr_engine, self.Session)
def setUpClass(self):
    """One-time setup: build the schema, install fixtures, bind the DB handle."""
    models.create_tables(self.app)
    fixtures.install(self.app, *fixtures.all_data)
    self.db = models.init_app(self.app)
def initialize_db(db_name='scatdat.db'):
    """Open (or create) the named SQLite DB and publish a module-level session.

    Args:
        db_name: database file name; defaults to 'scatdat.db'.
    """
    global session
    db_engine = models.get_engine(db_name)
    session = models.get_session(db_engine)
    models.create_tables(db_engine)
location=url, title=soup.title.text if soup.title is not None else '' ) code_nodes = soup.select(selector) for node in code_nodes: save_snippet(node, page) page_index += 1 pbar.update(page_index) pbar.finish() if __name__ == '__main__': parser = argparse.ArgumentParser(description="Fetch snippets and save code and text") args = parser.parse_args() if Page.table_exists(): print "Data exists! Truncate tables to continue? (y/n): ", decision = raw_input() if decision == 'y': print "Truncating tables." models.drop_tables() else: raise SystemExit("Leaving tables alone. Now exiting.") models.create_tables() main()
ProductLocation, Stock, ) from sqlalchemy.orm import sessionmaker from sqlalchemy import desc from database_settings import DATABASE import random import datetime import time import string if __name__ == "__main__": engine = db_connect() create_tables(engine) Session = sessionmaker(bind=engine) session = Session() stores = session.query(Store).all() for store in stores: print "Stocking store %d" % store.id product_results = session.execute( """ SELECT DISTINCT upc FROM vendor_purchase WHERE store_id = %d ORDER BY upc""" % store.id