def create_app(test=False):
    """Application factory: build and configure the Flask app.

    :param bool test: when True, point SQLAlchemy at the throwaway test
        database instead of the real one.
    :return Flask: the fully configured application.
    """
    app = Flask(__name__)
    app.config.update(SECRET_KEY=setup.secret_key)
    api = Api(app)

    # Register urls
    register_app_urls(app)
    register_api_urls(api)

    # Choose the database once so every consumer below agrees on it.
    # Tests get an isolated SQLite file and never touch real data.
    database_uri = 'sqlite:///test.db' if test else 'sqlite:///manul.db'
    app.config['SQLALCHEMY_DATABASE_URI'] = database_uri
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.init_app(app)
    # BUG FIX: the original inverted the test check here, so test runs
    # created an engine for manul.db and real runs for test.db. Reusing
    # the URI selected above keeps config and engine consistent.
    db.create_engine(database_uri)
    app.app_context().push()

    core.auth.login_manager.init_app(app)
    core.auth.login_manager.login_view = 'login'
    return app
def append_datasource(cls, dataset, user_schema_name, user_table_name):
    """Append the rows of *dataset* to an existing user table.

    :param pd.DataFrame dataset: rows to persist
    :param str user_schema_name: schema the table lives in
    :param str user_table_name: destination table name
    """
    # Build an engine scoped to the user's own schema.
    uri = '{}/{}'.format(os.environ.get('DATABASE_URI_USER'),
                         user_schema_name)
    user_engine = db.create_engine(uri)
    dataset.to_sql(
        name=user_table_name,
        con=user_engine,
        index=False,
        if_exists='append',
    )
def db(app, request):
    """
    A fixture to create a test DB for our session and destroy it after
    running the tests
    :param Flask app:
    :param Request request:
    :return SQLAlchemy:
    """
    def teardown():
        _db.session.remove()
        _db.drop_all()

    _db.app = app
    _db.create_all()

    # Seed the fresh schema from a raw SQL script.
    # NOTE: populating via an SQL file is not ideal; inserting fixture
    # data programmatically from yaml/json descriptions would be cleaner.
    sql_path = os.path.join(app.root_path, 'tests/fixtures/test_data.sql')
    with open(sql_path) as sql_file:
        seed_engine = _db.create_engine(
            app.config.get('SQLALCHEMY_DATABASE_URI'), {})
        seed_conn = seed_engine.connect()
        seed_conn.execute(text(sql_file.read()))
        seed_conn.close()

    request.addfinalizer(teardown)
    return _db
def new_datasource(self, dataset_dataframe):
    """Create the user's schema if needed and load a DataFrame into it.

    Appends *dataset_dataframe* into ``self.user_table_name`` inside
    ``self.user_schema_name``, then ensures the table carries an
    auto-incrementing ``id`` primary key as its first column.

    :param pd.DataFrame dataset_dataframe: rows to persist
    """
    # Make sure the user's schema exists before connecting to it.
    db.engine.execute('USE {};'.format('data_science'))
    db.engine.execute('CREATE SCHEMA IF NOT EXISTS {};'.format(
        self.user_schema_name))
    db.session.commit()
    # SECURITY NOTE: schema and table names are interpolated straight
    # into SQL throughout this method — they must come from trusted,
    # validated sources (identifiers cannot be bound as parameters).
    engine = db.create_engine('{}/{}'.format(
        os.environ.get('DATABASE_URI_USER'), self.user_schema_name))
    connection = engine.connect()
    dataset_dataframe.to_sql(name=self.user_table_name, con=engine,
                             index=False, if_exists='append')
    connection.execute('USE {};'.format(self.user_schema_name))
    # Check whether the freshly written table already has an "id" column.
    result = connection.execute('''SELECT * FROM INFORMATION_SCHEMA.COLUMNS
        WHERE table_name = "{}" AND table_schema = "{}"
        AND column_name = "id";'''.format(self.user_table_name,
                                          self.user_schema_name))
    # Idiom fix: the original materialised the rows and compared to [].
    if not result.fetchall():
        connection.execute('USE {};'.format(self.user_schema_name))
        connection.execute(
            'ALTER TABLE {} ADD id INT PRIMARY KEY AUTO_INCREMENT FIRST;'.
            format(self.user_table_name))
    connection.close()
    # Switch the shared engine back to the application schema.
    db.engine.execute('USE {};'.format('data_science'))
    db.session.commit()
def get_columns(cls, user_schema_name, user_table_name):
    """Return the name and data type of every column of a user table.

    :param str user_schema_name: schema the table lives in
    :param str user_table_name: table to inspect
    :return list: ``[{'column_name': ..., 'data_type': ...}, ...]``
    """
    # NOTE(review): the query is built by string formatting, so the
    # schema/table names must come from trusted, validated sources.
    query = "select COLUMN_NAME, DATA_TYPE from INFORMATION_SCHEMA.COLUMNS where TABLE_SCHEMA='{}' AND TABLE_NAME='{}'".format(
        user_schema_name, user_table_name)
    # Fix: the original also built a per-user engine here but never used
    # it — the query always ran on db.engine. The dead engine creation is
    # removed. TODO confirm db.engine reaches the same server as
    # DATABASE_URI_USER.
    result = db.engine.execute(query)
    return [{'column_name': row[0], 'data_type': row[1]} for row in result]
def create_table():
    """Create the application database (and all mapped tables) if absent."""
    database = "EmergencyT04"
    # SECURITY NOTE: credentials are hard-coded here — move them into
    # configuration / environment variables.
    engine = db.create_engine("mysql+mysqlconnector://root:mysql@localhost",
                              {})  # connect to server
    existing_databases = [row[0] for row in engine.execute("SHOW DATABASES")]
    if database not in existing_databases:
        # Typo fix: the original issued "CREATE DATABASE iF NOT EXISTS";
        # MySQL tolerates the case, but it should read IF.
        engine.execute("CREATE DATABASE IF NOT EXISTS {}".format(database))
    db.create_all()
def get_datasource_pages(self, page_size):
    """Return pagination info for the user's table.

    :param page_size: rows per page
    :return tuple: (number of pages, total row count) — the row count
        comes from information_schema.tables (TABLE_ROWS).
    """
    user_engine = db.create_engine('{}/{}'.format(
        os.environ.get('DATABASE_URI_USER'), self.user_schema_name))
    stats = pd.read_sql_query(
        """
        SELECT TABLE_ROWS
        FROM information_schema.tables
        WHERE table_schema=DATABASE() AND table_name='{}';
        """.format(self.user_table_name), user_engine)
    total_rows = int(stats['TABLE_ROWS'][0])
    page_count = ceil(total_rows / int(page_size))
    return (page_count, total_rows)
def get_datasource_per_page(self, page, page_size):
    """Fetch one page of rows from the user's table, ordered by id.

    :param page: 1-based page number
    :param page_size: rows per page
    :return pd.DataFrame: the requested page
    """
    zero_based_page = int(page) - 1
    size = int(page_size)
    user_engine = db.create_engine('{}/{}'.format(
        os.environ.get('DATABASE_URI_USER'), self.user_schema_name))
    offset = zero_based_page * size
    query = """
        SELECT *
        FROM {}
        WHERE id > {}
        ORDER BY id
        LIMIT {}
        """.format(self.user_table_name, str(offset), str(size))
    return pd.read_sql_query(query, user_engine)
import uuid
from www.models import User, Blog, Comment
import www.constant as Constant
import smtplib

# Commented-out guard (kept as a bare string by the original author):
# would reuse an existing `app` instead of creating a new Flask instance.
"""
if 'app' in locals().keys():
    logging.info("app already exits")
else:
    app = Flask(__name__)
"""

# Lazily create the DB engine only if one does not already exist.
# NOTE(review): this keyword signature (user/password/database) is not the
# standard SQLAlchemy create_engine API — presumably a project wrapper;
# verify against the definition of `db`.
if db.engine:
    logging.info("db engine already exists")
else:
    db.create_engine(user='******', password='******', database='yecheng')


class APIError(StandardError):
    # Base class for API errors carrying a machine-readable error code,
    # optional payload, and a human-readable message.
    # NOTE(review): StandardError exists only in Python 2; on Python 3
    # this raises NameError — confirm the target interpreter.
    def __init__(self, error, data='', message=''):
        super(APIError, self).__init__(message)
        self.error = error    # machine-readable error identifier
        self.data = data      # offending field / extra context
        self.message = message  # human-readable description


class APIValueError(APIError):
    '''
    Indicate the input value has error or invalid.
    The data specifies the error field of input form.
    '''
    # NOTE(review): the body of __init__ is truncated at this chunk
    # boundary — the remainder lies outside the visible excerpt.
    def __init__(self, field, message=''):
from db import db, database_file

# Module-level engine/connection shared by every query below.
engine = db.create_engine(database_file)
connection = engine.connect()
metadata = db.MetaData()
# Reflect the existing "data" table from the database.
data = db.Table('data', metadata, autoload=True, autoload_with=engine)


def _distinct_values(column_index):
    """Return the set of distinct values in one column of "data".

    Shared helper: the original duplicated this fetch-all-then-set logic
    in both get_markets and get_years.

    :param int column_index: positional index of the column to collect
    :return set: distinct values found in that column
    """
    rows = connection.execute(db.select([data])).fetchall()
    return {row[column_index] for row in rows}


def get_markets():
    """Return the set of distinct markets (first column of "data")."""
    return _distinct_values(0)


def get_years():
    """Return the set of distinct years (second column of "data")."""
    return _distinct_values(1)


def get_seasons():
    """Return the column names from the third column onward."""
    return data.columns.keys()[2:]
def delete_datasource(cls, user_schema_name, user_table_name):
    """Drop the user's table from their schema.

    :param str user_schema_name: schema the table lives in
    :param str user_table_name: table to drop
    """
    engine = db.create_engine('{}/{}'.format(
        os.environ.get('DATABASE_URI_USER'), user_schema_name))
    connection = engine.connect()
    try:
        # SECURITY NOTE: the table name is interpolated into the DDL —
        # identifiers cannot be bound as parameters, so it must come
        # from a trusted, validated source.
        connection.execute('DROP TABLE {};'.format(user_table_name))
    finally:
        # Fix: the original leaked the connection if DROP TABLE raised.
        connection.close()
def get_datasource(self):
    """Load the user's entire table into a DataFrame.

    :return pd.DataFrame: all rows of ``self.user_table_name``
    """
    uri = '{}/{}'.format(os.environ.get('DATABASE_URI_USER'),
                         self.user_schema_name)
    user_engine = db.create_engine(uri)
    return pd.read_sql_table(self.user_table_name, user_engine)