def save_game_data():
    """Rebuild the schema and persist every loaded match as a Game row.

    Drops and recreates all tables, reads the raw 'match_details' records,
    converts each one into a Game model instance, and commits them in a
    single bulk insert before closing the session.
    """
    db.drop_all()
    db.create_all()

    games = []
    for record in load_data('match_details'):
        games.append(
            Game(
                game_id=record['game_id'],
                country=record['country'],
                sport=record['sport_name'],
                league=record['league_name'],
                away_team=record['away_team'],
                home_team=record['home_team'],
                # Raw timestamp is normalized before storage.
                starting_time=extract_time(record['starting_time']),
                odds_url=record['odds_url'],
            )
        )

    db.session.add_all(games)
    db.session.commit()
    db.session.close()
def create_db():
    """Drop every table and recreate the schema from the models.

    Destructive: all existing data is lost. Intended to be run once,
    when standing up a fresh database.
    """
    db.drop_all()
    db.create_all()
    # Flush the (empty) transaction so the new schema is committed.
    db.session.commit()
def recreate_db():
    """Destroy and rebuild the database schema.

    Only for bootstrapping a brand-new database instance — never use
    this in place of a proper migration, since it wipes all data.
    """
    db.drop_all()
    db.create_all()
    db.session.commit()
def remake_db(really=False):
    """Drop and recreate all tables, but only when explicitly confirmed.

    Without ``--really`` this is a no-op that prints a warning pointing
    at the migration workflow and returns exit code 0.
    """
    if really:
        # Confirmed: wipe the schema and rebuild it from the models.
        db.drop_all()
        db.create_all()
        return
    # Refuse by default — migrations are the supported path.
    print("You should probably use 'python manage.py db upgrade' instead.")
    print("If you really want to use remake_db, provide option --really.")
    print("")
    print("(See https://flask-migrate.readthedocs.org/en/latest/ for"
          " details.)")
    return 0
def _load_json(path):
    """Load a JSON document from *path*, closing the file handle afterwards."""
    with open(path) as f:
        return json.load(f)


def init():
    """Initializes the service.

    Creates the Elasticsearch glossary and similarity indexes, applies
    their mappings (and the similarity index settings), then creates the
    SQL database, a ``bit_count`` SQL helper function, and all tables.

    In the 'test' configuration the indexes and tables are dropped first
    so every run starts from a clean slate.
    """
    # Create ES indexes.
    es = Elasticsearch(app.config['ELASTICSEARCH_URL'])
    for key in ['ELASTICSEARCH_GLOSSARY', 'ELASTICSEARCH_SIMILARITY']:
        try:
            if config_name == 'test':
                # Tests start clean; ignore "bad request"/"not found" on delete.
                es.indices.delete(index=app.config[key], ignore=[400, 404])
            es.indices.create(index=app.config[key])
        except TransportError as e:
            # An already-existing index is fine; anything else is fatal.
            if e.error == 'resource_already_exists_exception':
                pass
            else:
                raise
    # Apply mappings. (The original used bare json.load(open(...)) here,
    # which leaked the file handles — _load_json closes them.)
    es.indices.put_mapping(
        doc_type='_doc',
        body=_load_json('./elasticsearch/alegre_glossary.json'),
        index=app.config['ELASTICSEARCH_GLOSSARY']
    )
    es.indices.put_mapping(
        doc_type='_doc',
        body=_load_json('./elasticsearch/alegre_similarity.json'),
        index=app.config['ELASTICSEARCH_SIMILARITY']
    )
    # Non-dynamic settings can only be applied while the index is closed.
    es.indices.close(index=app.config['ELASTICSEARCH_SIMILARITY'])
    es.indices.put_settings(
        body=_load_json('./elasticsearch/alegre_similarity_settings.json'),
        index=app.config['ELASTICSEARCH_SIMILARITY']
    )
    es.indices.open(index=app.config['ELASTICSEARCH_SIMILARITY'])
    # Create database.
    with app.app_context():
        if not database_exists(db.engine.url):
            create_database(db.engine.url)
        if config_name == 'test':
            db.drop_all()
        # Install a bit_count() SQL function before tables are created,
        # by hooking the metadata's before_create event.
        sqlalchemy.event.listen(
            db.metadata,
            'before_create',
            DDL("""
            CREATE OR REPLACE FUNCTION bit_count(value bigint)
            RETURNS integer
            AS $$ SELECT length(replace(value::bit(64)::text,'0','')); $$
            LANGUAGE SQL IMMUTABLE STRICT;
            """)
        )
        db.create_all()
def populate():
    """Reset the database and seed it with 3 users, each owning one post."""
    db.drop_all()
    db.create_all()
    db.session.commit()

    # (image, description) pairs — one post per freshly-created user.
    seed_posts = [
        (image1, 'this is my favorite screenshot'),
        (image2, 'what a cool pic'),
        (image1, 'look at this cool shot i took!'),
    ]
    for image, description in seed_posts:
        user = User(
            username='******',
            password='******',
            registered_on=datetime.datetime.utcnow()
        )
        save_changes(user)
        upload_new_post(
            dict(image=image, description=description),
            current_user=user
        )
def run_test_server(): """Used by the phantomjs tests to run a live testing server""" # running the server in debug mode during testing fails for some reason app.config['DEBUG'] = False app.config['TESTING'] = True port = app.config['TESTSERVER_PORT'] # Don't use the production database but a temporary test database. app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///test.db" db.drop_all() db.create_all() db.session.commit() # Add a route that allows the test code to shutdown the server, this allows # us to quit the server without killing the process thus enabling coverage # to work. app.add_url_rule('/shutdown', 'shutdown', shutdown, methods=['POST', 'GET']) main.use_log_file_handler() app.run(port=port, use_reloader=False, threaded=True) db.session.remove() db.drop_all()
def clear_db():
    """Wipe the database by dropping and recreating every table."""
    db.drop_all()
    db.create_all()
    # Commit so the rebuilt (empty) schema is persisted.
    db.session.commit()
def tearDownClass(cls):
    """Class-level teardown: release the session, then drop the schema."""
    db.session.remove()
    db.drop_all()
    log.debug("DB destroyed.")
def tearDown(self):
    """Per-test teardown: detach the session and drop every table."""
    db.session.remove()
    db.drop_all()
def recreate_db():
    """Destroy all tables and rebuild the schema from scratch."""
    db.drop_all()
    db.create_all()
    db.session.commit()
def tearDown(self):
    """Per-test cleanup, performed inside an application context."""
    with app.app_context():
        # Release the scoped session before tearing the schema down.
        db.session.remove()
        db.drop_all()
def tearDown(self):
    """Clean up after each test: close the session and drop all tables."""
    db.session.remove()
    db.drop_all()
def drop_db():
    """Drop every table, discarding the database's entire contents."""
    db.drop_all()
def tearDown(self):
    """Tear down the test fixture: session, schema, then app context."""
    db.session.remove()
    db.drop_all()
    # Pop the context pushed in setUp so state doesn't leak across tests.
    self.app_context.pop()
def db_drop():
    """Drop all database tables and report completion on stdout."""
    db.drop_all()
    print('Database Dropped!')
def tearDown(self):
    """Run after every test method: drop the session and all tables."""
    db.session.remove()
    db.drop_all()
def create_db():
    """Recreate the schema: drop whatever exists, then build all tables."""
    db.drop_all()
    db.create_all()
def tearDown(self):
    """Dispose of the session and drop all tables after each test."""
    db.session.remove()
    db.drop_all()
def drop_db():
    """Drops the db tables."""
    # Destructive: removes every table defined on the metadata.
    db.drop_all()