def makeDbEngine(self): ''' function to establish engine with PostgreSQl database so that additional tables can be made ''' try: ## connect to Postgres dbname = self.getDbName() username = self.getUserName() print dbname print username ## create and set engine = create_engine('postgres://%s@localhost/%s'%(username, dbname)) self.setDbEngine(engine) ## test if it exists db_exist = database_exists(engine.url) if not db_exist: create_database(engine.url) db_exist = database_exists(engine.url) self.setDbExist(db_exist) return 0 except: return 1
def create_postgres_db():
    '''Create (if needed) the 'beer_db_2' PostgreSQL database and load the
    brewery and beer-review CSVs into the 'breweries' and 'reviews' tables.
    '''
    dbname = 'beer_db_2'
    username = '******'
    mypassword = '******'
    ## Here, we're using postgres, but sqlalchemy can connect to other things too.
    engine = create_engine('postgres://%s:%s@localhost/%s'%(username,mypassword,dbname))
    print "Connecting to",engine.url
    if not database_exists(engine.url):
        create_database(engine.url)
    print "Does database exist?",(database_exists(engine.url))
    # load a database from CSV
    # NOTE(review): DataFrame.from_csv is deprecated in modern pandas
    # (use pd.read_csv) -- confirm the pinned pandas version.
    brewery_data = pd.DataFrame.from_csv('clean_data_csv/brewery_information_rescrape.csv')
    ## insert data into database from Python (proof of concept - this won't be useful for big data, of course)
    ## df is any pandas dataframe
    brewery_data.to_sql('breweries', engine, if_exists='replace')
    #dbname = 'beer_review_db'
    # load a database from CSV
    beer_data = pd.DataFrame.from_csv('clean_data_csv/beer_review_information_rescrape.csv')
    #engine_2 = create_engine('postgres://%s:%s@localhost/%s'%(username,mypassword,dbname))
    #print "connecting to",engine.url
    #if not database_exists(engine_2.url):
    #    create_database(engine_2.url)
    #print "Does database exist?",(database_exists(engine_2.url))
    beer_data.to_sql('reviews',engine,if_exists='replace')
    print "database",dbname,"has been created"
    return
def create_findmyride_database(database_name):
    """Create the named PostgreSQL database if missing and return its engine."""
    uri = 'postgresql://%s:%s@localhost/%s' % ('dianeivy', password, database_name)
    engine = create_engine(uri)
    print(engine.url)
    # Only create when the database is absent.
    if not database_exists(engine.url):
        create_database(engine.url)
    print(database_exists(engine.url))
    return engine
def book_uri(request):
    """Fixture: yield the parametrized book URI, dropping any pre-existing
    database before the test and cleaning up again afterwards."""
    uri = request.param
    # Start each test from a clean slate.
    if uri and database_exists(uri):
        drop_database(uri)
    yield uri
    # Remove whatever the test created.
    if uri and database_exists(uri):
        drop_database(uri)
def createdb():
    """Create the application database, optionally dropping a stale copy
    first when settings.DROP_DB_ON_RESTART is enabled."""
    print "Connecting to %s" % settings.SQLALCHEMY_DATABASE_URI
    engine = create_engine(settings.SQLALCHEMY_DATABASE_URI)
    # Dev convenience: wipe the old database on every restart when configured.
    if settings.DROP_DB_ON_RESTART and database_exists(engine.url):
        print "Dropping old database... (because DROP_DB_ON_RESTART=True)"
        drop_database(engine.url)
    if not database_exists(engine.url):
        print "Creating databases..."
        create_database(engine.url)
def create_database(dbname):
    """Create a PostgreSQL database named *dbname* using the hardcoded user.

    :param dbname: name of the database to create (str)
    :returns: None
    """
    # BUGFIX: this function shadows sqlalchemy_utils.create_database at module
    # scope, so the inner call `create_database(engine.url)` recursed into this
    # function instead of creating the database. Import the library function
    # under an alias so the real implementation is called.
    from sqlalchemy_utils import create_database as _sau_create_database
    #create a database with name "dbname" using lordluen ad username.
    #dbname = 'legislatr'
    username = '******'
    engine = create_engine('postgres://%s@localhost/%s'%(username,dbname))
    print(engine.url)
    if not database_exists(engine.url):
        _sau_create_database(engine.url)
    print(database_exists(engine.url))
    return
def initialize(re_createTable= False):
    # Build (or rebuild) the schema. When re_createTable is True the database
    # is created if missing and all tables are dropped first.
    # NOTE(review): the existence check reads `engine.url` but creation uses
    # `DATABASE.url` -- these look like they should be the same connection;
    # confirm which global is live.
    if re_createTable :
        if not database_exists(engine.url):
            create_database(DATABASE.url)
        print(database_exists(engine.url))
        Base.metadata.drop_all(DATABASE, checkfirst = True)
    # checkfirst avoids errors for tables that already exist.
    Base.metadata.create_all(DATABASE, checkfirst = True)
def new_book_USD(request):
    """Fixture: yield a fresh USD-denominated book at the parametrized URI,
    removing any existing database both before and after the test."""
    uri = request.param
    if uri and database_exists(uri):
        drop_database(uri)
    with create_book(uri_conn=uri, currency="USD", keep_foreign_keys=False) as book:
        yield book
    if uri and database_exists(uri):
        drop_database(uri)
def read_user_features():
    """Load every row of the `user_features_combine` table into a DataFrame.

    Side effect: creates the database behind the `local_weave_pair` engine
    when it does not yet exist.
    """
    ## create a database (if it doesn't exist)
    if not database_exists(local_weave_pair.url):
        create_database(local_weave_pair.url)
    print(database_exists(local_weave_pair.url))
    # connect:
    con = psycopg2.connect(database = 'weave_pair', user = '******')
    # query:
    sql_query = """ SELECT * FROM user_features_combine; """
    user_features = pd.read_sql_query(sql_query,con)
    return user_features
def book_db_config(request):
    """Fixture: yield the parametrized db_config dict, dropping the target
    database (except for in-memory sqlite) before and after the test."""
    from piecash.core.session import build_uri
    backend, cfg = request.param
    uri = build_uri(**cfg)
    is_persistent = backend != "sqlite_in_mem"
    if is_persistent and database_exists(uri):
        drop_database(uri)
    yield cfg
    if is_persistent and database_exists(uri):
        drop_database(uri)
def scan():
    """Run the scanner after verifying database, schema, and seed data exist.

    Exits with status 1 (after printing a failure message) when any
    precondition is missing; returns 0 on completion.
    """
    # Precondition 1: the database itself must exist.
    if not database_exists(Engine.url):
        display_failure('database does not exist.')
        sys.exit(1)
    # Precondition 2: the schema must have been created (any table at all).
    inspector = reflection.Inspector.from_engine(Engine)
    if not inspector.get_table_names():
        display_failure('no table(s) were found.')
        sys.exit(1)
    with session_scope() as session:
        # Precondition 3: both seed tables must contain at least one row.
        q1 = session.query(DNSList)
        q2 = session.query(IPRange)
        if not(session.query(q1.exists()).scalar() and session.query(q2.exists()).scalar()):
            display_failure( 'scan requires records in both `dns_list` and `ip_range`.')
            sys.exit(1)
        banner()
        display_info('starting scan ...')
        Scanner().scan(session)
    return 0
def __init__(self, chembl_version=20):
    """Bind to the ChEMBL database, loading the dump on first use.

    :param chembl_version: ChEMBL release number to target (default 20).
    """
    self.chembl_version = chembl_version
    url = self.database_url
    # Populate the database only when it has never been created.
    if not database_exists(url):
        self.load_chembl()
    self.engine = create_engine(url)
def app(request):
    """The Flask API (scope = Session).

    Points the application at a dedicated test database (created on demand),
    pushes an application context for the whole session, and pops it on
    teardown.
    """
    config.DB_NAME = DB_NAME
    DATABASE_URI = config.DATABASE_URI.format(**config.__dict__)
    if not database_exists(DATABASE_URI):
        create_database(DATABASE_URI)
    print "Test Database: %s" % DATABASE_URI
    # Config the app
    _app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URI
    _app.config["SQLALCHEMY_ECHO"] = True  # Toggle SQL Alchemy output
    _app.config["DEBUG"] = True
    _app.config["TESTING"] = True
    # Establish an application context before running the tests.
    ctx = _app.app_context()
    ctx.push()
    # Initialize a null cache
    cache.config = {}
    cache.init_app(_app)
    def teardown():
        # Pop the context pushed above once the test session ends.
        ctx.pop()
    request.addfinalizer(teardown)
    return _app
def create_ctfd(ctf_name="CTFd", name="admin", email="*****@*****.**", password="******", setup=True):
    """Build a CTFd test application with a freshly recreated database,
    optionally running the initial /setup flow to seed the admin account.

    :param setup: when True, POST the setup form to initialise the instance.
    :returns: the configured Flask app.
    """
    app = create_app('CTFd.config.TestingConfig')
    url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
    # SQLAlchemy deprecated the bare 'postgres' driver name; normalise it.
    if url.drivername == 'postgres':
        url.drivername = 'postgresql'
    # Always start each test run from an empty database.
    if database_exists(url):
        drop_database(url)
    create_database(url)
    with app.app_context():
        app.db.create_all()
    if setup:
        with app.app_context():
            with app.test_client() as client:
                data = {}
                # GET /setup first so the session receives a CSRF nonce.
                r = client.get('/setup')  # Populate session with nonce
                with client.session_transaction() as sess:
                    data = {
                        "ctf_name": ctf_name,
                        "name": name,
                        "email": email,
                        "password": password,
                        "nonce": sess.get('nonce')
                    }
                client.post('/setup', data=data)
    return app
def clean_db(ctx: click.Context):
    """Removes Postgres database."""
    uri = make_url(ctx.obj['db_uri'])
    uri_text = uri.__to_string__()
    # Nothing to do when the database was never created.
    if not database_exists(uri):
        return
    logging.info(f'Cleaning "{uri_text}" database.')
    drop_database(uri)
def init_data():
    from imports import (
        Widget,Article,Page,
        User,Setting,Type,
        Template,Tag,Role,
        Category,Block,Profile,
        ContactMessage)
    """Fish data for project"""
    # Interactive guard: only rebuild the database when the operator confirms.
    if prompt_bool('Do you want to kill your db?'):
        # Drop the whole database (if present), then recreate it empty.
        if squ.database_exists(db.engine.url):
            squ.drop_database(db.engine.url)
        try:
            db.drop_all()
        except:
            # Best-effort: the tables may already be gone with the database.
            pass
        try:
            squ.create_database(db.engine.url)
            db.create_all()
        except:
            # Best-effort: ignore races/permissions; later queries fail loudly.
            pass
    # Seed a default user when absent.
    user = User.query.filter(User.email=='*****@*****.**').first()
    if user is None:
        user = User(username='******', email='*****@*****.**', password='******')
        user.save()
def dropDatabase(dbName):
    """Drop specified database."""
    cfg = CONFIG_DB
    # Build the connection string from the shared DB configuration.
    connectString = "postgresql://{0}:{1}@{2}:{3}/{4}".format(
        cfg["username"], cfg["password"], cfg["host"], cfg["port"], dbName)
    if sqlalchemy_utils.database_exists(connectString):
        sqlalchemy_utils.drop_database(connectString)
def add_db():  # pragma: no cover
    """Interactively create the database named in the service config.

    Rebinds the module-level `engine` to the configured URI.

    :returns: True if the database was created; False if it already existed
        or the operator declined.
    """
    db_url = config['service']['db_uri']
    global engine
    engine = create_engine(db_url)
    if database_exists(engine.url):
        print('!!! DATABASE ALREADY EXISTS !!!')
        return False
    print()
    print('!!! DATABASE NOT DETECTED !!!')
    print()
    try:
        confirm = input('Create database designated in the config file? [Y/n]') or 'Y'
    except KeyboardInterrupt:
        confirm = ''
        print()
    # BUGFIX: the prompt advertises [Y/n] but the old check only accepted an
    # uppercase 'Y'; accept either case. Also fixed the 'createing' typo.
    if confirm.strip().upper() != 'Y':
        print('Not creating DB. Exiting.')
        return False
    create_database(engine.url)
    return True
def setup():
    """Recreate the application database and install the fuzzystrmatch extension."""
    uri = app.config['SQLALCHEMY_DATABASE_URI']
    print(uri)
    engine = create_engine(uri)
    # Drop any stale copy before creating a fresh one.
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    # Provides levenshtein/soundex functions used by the app.
    engine.execute('create extension if not exists fuzzystrmatch')
def init_db(self):
    """
    Initializes the database connection based on the configuration parameters.

    Builds the connection string (sqlite path or network URI), creates the
    database when needed, creates all tables, and binds the global Session.
    """
    db_type = self.config['db_type']
    db_name = self.config['db_name']
    if db_type == 'sqlite':
        # we can ignore host, username, password, etc
        sql_lite_db_path = os.path.join(os.path.split(CONFIG)[0], db_name)
        self.db_connection_string = 'sqlite:///{}'.format(sql_lite_db_path)
    else:
        username = self.config['username']
        password = self.config['password']
        host_string = self.config['host_string']
        self.db_connection_string = '{}://{}:{}@{}/{}'.format(db_type, username, password, host_string, db_name)
    self.db_engine = create_engine(self.db_connection_string)
    # If db not present AND type is not SQLite, create the DB.
    # (Idiom fix: was `if not self.config['db_type'] == 'sqlite'`; reuse the
    # local `db_type` and the direct `!=` comparison.)
    if db_type != 'sqlite':
        if not database_exists(self.db_engine.url):
            create_database(self.db_engine.url)
    Base.metadata.bind = self.db_engine
    Base.metadata.create_all()
    # Bind the global Session to our DB engine
    global Session
    Session.configure(bind=self.db_engine)
def drop_db(app): from project.core.db import db from sqlalchemy_utils import database_exists, drop_database if database_exists(db.engine.url): print '====> Drop database' drop_database(db.engine.url) else: print '====> database not exist'
def create_db(app): from project.core.db import db from sqlalchemy_utils import database_exists, create_database if not database_exists(db.engine.url): print '====> Create database' create_database(db.engine.url) else: print '====> database exist'
def clear(**kwargs):
    """Clear the specified names from the specified databases.

    This can be highly destructive as it destroys tables and when all
    names are removed from a database, the database itself.
    """
    # When truthy, also drop the default database itself (not just tables).
    database = kwargs.pop('database', False)
    # Per-table drop operation; `test` short-circuits when the database or
    # the table is already gone.
    expression = lambda target, table: table.drop(target)
    test = lambda x, tab: not database_exists(x.url) or not tab.exists(x)
    # TODO: Iterate through all engines in name set.
    if database and database_exists(engine['default'].url):
        drop_database(engine['default'].url)
        clear_cache()
    # Drop in reverse dependency order so FK-referencing tables go first.
    op(expression, reversed(metadata.sorted_tables), test=test, primary='clear', secondary='drop', **kwargs)
def create_db(username,dbname,dbpassword): ''' Returns a tuple (<bool>,database_engine_handle), such that the user can check to see if the database was created sucessfully, and if so, then access th sql_alchemy engine via the database_engine_handle ''' # Here, we're using postgres, but sqlalchemy can connect to other things too. engine = create_engine('postgres://%s:%s@localhost/%s'%(username,dbpassword,dbname)) print "Connecting to",engine.url if not database_exists(engine.url): create_database(engine.url) else: drop_database(engine.url) create_database(engine.url) database_exists_check = database_exists(engine.url) print "Database created successfully?:",database_exists_check return (database_exists_check,engine)
def create_database(dbname):
    """
    Will create a new database. One of the earlier functions, so it creates
    it's own connection engine. Be cautious, it uses the default user
    (generally set to 'postgres').
    INPUT: dbname = name of database (str)
    OUTPUT: None
    """
    # BUGFIX: this function shadows sqlalchemy_utils.create_database at module
    # scope, so the inner call `create_database(engine.url)` recursed into this
    # function instead of creating the database. Import the library function
    # under an alias so the real implementation is called.
    from sqlalchemy_utils import create_database as _sau_create_database
    #create a database with name "dbname" using postgres and USERNAME.
    engine = create_engine('postgres://%s@localhost/%s'%(USERNAME,dbname))
    print(engine.url)
    if not database_exists(engine.url):
        _sau_create_database(engine.url)
    print(database_exists(engine.url))
    return
def createDatabase(dbName):
    """Create specified database if it doesn't exist."""
    cfg = CONFIG_DB
    connectString = "postgresql://{0}:{1}@{2}:{3}/{4}".format(
        cfg["username"],
        cfg["password"],
        cfg["host"],
        cfg["port"],
        dbName,
    )
    if not sqlalchemy_utils.database_exists(connectString):
        sqlalchemy_utils.create_database(connectString)
def drop():
    """
    Drop the database if it exists; log an error when there is nothing to drop.
    :return:
    """
    app.logger.debug('Dropping the database!')
    if database_exists(db.engine.url):
        drop_database(db.engine.url)
    else:
        # BUGFIX: this error used to be logged unconditionally -- even right
        # after a successful drop. It belongs in the missing-database branch.
        app.logger.error('Database does not exists!')
def setUpClass(cls):
    """Create (if needed) the PostGIS-templated test database and prime a
    StatusMonitor with the expected-rates fixture."""
    cls.engine = create_engine('postgresql+psycopg2://monitor@localhost/monitor_test')
    if not database_exists(cls.engine.url):
        # template_postgis provides the PostGIS extensions for the new DB.
        create_database(cls.engine.url, template='template_postgis')
    # NOTE: model.create_database builds the schema (drop=True resets tables);
    # it is distinct from sqlalchemy_utils.create_database above.
    model.create_database(cls.engine, drop=True)
    cls.monitor = StatusMonitor(cls.engine)
    cls.monitor.read_expected_csv(os.path.join(test_dir, 'data', 'expected-rates.csv'))
def create_sweography_db():
    """Recreate the sweography database from scratch and build all tables."""
    engine = create_engine(SQLALCHEMY_DATABASE_URI)
    # Drop any stale copy before creating a fresh, empty database.
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    # Build every table declared on the ORM base.
    Base.metadata.create_all(engine)
def setUp(self):
    """Create the test app and a freshly recreated, empty database."""
    import sqlalchemy_utils as squ
    self.app = self.create_app()
    self.db = DB(engine,session,meta)
    url = self.db.engine.url
    # Recreate the database from scratch for each test.
    if squ.database_exists(url):
        squ.drop_database(url)
    squ.create_database(url)
    meta.bind = self.db.engine
    meta.create_all()
def create_app(config='CTFd.config.Config'):
    """Application factory: build and fully initialise the CTFd Flask app.

    Creates the database if needed, runs/verifies migrations, wires up the
    cache, registers all blueprints, and loads plugins.
    """
    app = Flask(__name__)
    with app.app_context():
        app.config.from_object(config)
        app.jinja_loader = ThemeLoader(os.path.join(app.root_path, 'themes'), followlinks=True)
        from CTFd.models import db, Teams, Solves, Challenges, WrongKeys, Keys, Tags, Files, Tracking
        url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
        # SQLAlchemy deprecated the bare 'postgres' driver name.
        if url.drivername == 'postgres':
            url.drivername = 'postgresql'
        if url.drivername.startswith('mysql'):
            url.query['charset'] = 'utf8mb4'
        # Creates database if the database database does not exist
        if not database_exists(url):
            if url.drivername.startswith('mysql'):
                create_database(url, encoding='utf8mb4')
            else:
                create_database(url)
        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config['SQLALCHEMY_DATABASE_URI'] = str(url)
        # Register database
        db.init_app(app)
        # Register Flask-Migrate
        migrate.init_app(app, db)
        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith('sqlite'):
            db.create_all()
        else:
            if 'alembic_version' not in db.engine.table_names():
                # This creates tables instead of db.create_all()
                # Allows migrations to happen properly
                migrate_upgrade()
        app.db = db
        cache.init_app(app)
        app.cache = cache
        version = utils.get_config('ctf_version')
        if not version:
            # Upgrading from an unversioned CTFd
            utils.set_config('ctf_version', __version__)
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            # Upgrading from an older version of CTFd
            print("/*\\ CTFd has updated and must update the database! /*\\")
            print("/*\\ Please backup your database before proceeding! /*\\")
            print("/*\\ CTFd maintainers are not responsible for any data loss! /*\\")
            if input('Run database migrations (Y/N)').lower().strip() == 'y':
                migrate_stamp()
                migrate_upgrade()
                utils.set_config('ctf_version', __version__)
            else:
                print('/*\\ Ignored database migrations... /*\\')
                exit()
        if not utils.get_config('ctf_theme'):
            utils.set_config('ctf_theme', 'original')
        from CTFd.views import views
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin, admin_statistics, admin_challenges, admin_pages, admin_scoreboard, admin_keys, admin_teams
        from CTFd.utils import init_utils, init_errors, init_logs
        init_utils(app)
        init_errors(app)
        init_logs(app)
        app.register_blueprint(views)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(admin)
        app.register_blueprint(admin_statistics)
        app.register_blueprint(admin_challenges)
        app.register_blueprint(admin_teams)
        app.register_blueprint(admin_scoreboard)
        app.register_blueprint(admin_keys)
        app.register_blueprint(admin_pages)
        from CTFd.plugins import init_plugins
        init_plugins(app)
        return app
def create_schema():
    """Ensure the application's database exists, then release the engine."""
    uri = application.config['SQLALCHEMY_DATABASE_URI']
    engine = create_engine(uri)
    if not database_exists(engine.url):
        create_database(engine.url)
    # Only a short-lived connection was needed; dispose of the pool.
    engine.dispose()
def exists(self):
    """Return True when the database at this object's URL already exists."""
    url = self._url
    return database_exists(url)
def create_app(config='CTFd.config.Config'):
    """Application factory for CTFd: configure, create/migrate the database,
    wire up cache/version checks, and register all blueprints and plugins."""
    app = CTFdFlask(__name__)
    app.wsgi_app = ProxyFix(app.wsgi_app)
    with app.app_context():
        app.config.from_object(config)
        theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'), followlinks=True)
        app.jinja_loader = theme_loader
        from CTFd.models import db, Teams, Solves, Challenges, WrongKeys, Keys, Tags, Files, Tracking
        url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
        # SQLAlchemy deprecated the bare 'postgres' driver name.
        if url.drivername == 'postgres':
            url.drivername = 'postgresql'
        if url.drivername.startswith('mysql'):
            url.query['charset'] = 'utf8mb4'
        # Creates database if the database database does not exist
        if not database_exists(url):
            if url.drivername.startswith('mysql'):
                create_database(url, encoding='utf8mb4')
            else:
                create_database(url)
        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config['SQLALCHEMY_DATABASE_URI'] = str(url)
        # Register database
        db.init_app(app)
        # Register Flask-Migrate
        migrate.init_app(app, db)
        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith('sqlite'):
            db.create_all()
        else:
            if len(db.engine.table_names()) == 0:
                # This creates tables instead of db.create_all()
                # Allows migrations to happen properly
                migrate_upgrade()
            elif 'alembic_version' not in db.engine.table_names():
                # There is no alembic_version because CTFd is from before it had migrations
                # Stamp it to the base migration
                if confirm_upgrade():
                    migrate_stamp(revision='cb3cfcc47e2f')
                    run_upgrade()
                else:
                    exit()
        app.db = db
        app.VERSION = __version__
        cache.init_app(app)
        app.cache = cache
        update_check(force=True)
        version = utils.get_config('ctf_version')
        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()
        if not version:
            utils.set_config('ctf_version', __version__)
        if not utils.get_config('ctf_theme'):
            utils.set_config('ctf_theme', 'core')
        from CTFd.views import views
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin, admin_statistics, admin_challenges, admin_pages, admin_scoreboard, admin_keys, admin_teams
        from CTFd.utils import init_utils, init_errors, init_logs
        init_utils(app)
        init_errors(app)
        init_logs(app)
        app.register_blueprint(views)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(admin)
        app.register_blueprint(admin_statistics)
        app.register_blueprint(admin_challenges)
        app.register_blueprint(admin_teams)
        app.register_blueprint(admin_scoreboard)
        app.register_blueprint(admin_keys)
        app.register_blueprint(admin_pages)
        from CTFd.plugins import init_plugins
        init_plugins(app)
        return app
def init_db():
    """Create the database if missing, build all tables, and seed privileges."""
    global engine, Base, session
    url = engine.url
    if not database_exists(url):
        create_database(url)
    Base.metadata.create_all()
    populate_privleges(Base, session)
# --- Flask application wiring: uploads, mail, sessions, admin UI ---
app.config['UPLOADED_IMAGES_URL'] = 'templates/images/'
admin_emai = 'djamelsbargoud'  # NOTE(review): likely a typo for `admin_email`
admin_password = '******'
mail = Mail()
mail.init_app(app)
sess = Session()
db.init_app(app)
# Image upload handling; request size capped at 16 MiB.
images = UploadSet('images', IMAGES)
configure_uploads(app, (images))
patch_request_class(app, 16 * 1024 * 1024)
admin = Admin(app, template_mode='bootstrap3')
# First run only: build the schema and seed demo products/categories/users.
if not database_exists(DB_URI):
    with app.app_context():
        db.drop_all()
        db.create_all()
        for i in range(1, 10):
            db.session.add(
                Product(name=f"Robe {i}", size='XL', price=1500 + (i * 10)))
            db.session.add(
                Category(name=f"categorie de Robe {i}", size='XL', price=1500))
        db.session.add(User(name="Djamel"))
        db.session.add(User(name="Yahya"))
        db.session.commit()
""" route for frent end """
def connect():
    """Build a PostgreSQL engine from the [AUTH] section of config.ini.

    NOTE(review): the `def connect():` header was lost when this file was
    collapsed; it is reconstructed here from the `engine = connect()` call in
    the retry loop below -- confirm against the original source.
    """
    # Read from the configuration file.
    config = configparser.ConfigParser()
    config.read('config.ini')
    config_auth = config['AUTH']
    user = config_auth['PostgreSQL_User']
    password = config_auth['PostgreSQL_Password']
    host = config_auth['PostgreSQL_Host']
    db = config_auth['PostgreSQL_DBName']
    postgre_db_addr = "postgresql://%s:%s@%s:5432/%s" % (user, password, host, db)
    engine = create_engine(postgre_db_addr, client_encoding='utf8')
    return engine

# Block until PostgreSQL is reachable, then create the DB and schema.
# NOTE(review): no sleep between retries -- this loop spins hot while the
# server is down; consider a time.sleep() in the except branch.
while True:
    try:
        engine = connect()
        if not database_exists(engine.url):
            create_database(engine.url)
        Base.metadata.create_all(engine)
        break
    except Exception:
        print("[x] Auth Service PostgreSQL Not Ready Yet...")
def __init__(self) -> None:
    """Ensure the backing database exists, building it on first run."""
    if database_exists(ENGINE.url):
        return
    self.make_database()
def _DB_conn(user, passwd, db):
    """Return a MySQL engine for `db`, creating the database when missing."""
    uri = 'mysql+mysqldb://{0}:{1}@localhost/{2}'.format(user, passwd, db)
    conn = create_engine(uri)
    if not database_exists(conn.url):
        create_database(conn.url)
    return conn
def setUpModule():
    """Create the test database once before any tests in this module run."""
    url = models.engine.url
    if not database_exists(url):
        create_database(url)
def recreate_db(db_url):
    """Drop the database at ``db_url`` when present, then create a fresh one."""
    already_there = database_exists(url=db_url)
    if already_there:
        drop_database(url=db_url)
    create_database(url=db_url)
def auto_migrate(engine: sqlalchemy.engine.Engine, models: [sqlalchemy.sql.schema.MetaData]):
    """
    Compares a database with a list of defined orm models and applies the diff.

    Prints executed SQL statements to stdout.

    Based on `alembic automigrations`_, but doesn't require intermediate
    migration files. Use with care, does not work in many cases.

    Args:
        engine: the database to use
        models: A list of orm models

    Returns:
        True in case of no failures

    .. _alembic automigrations: http://alembic.zzzcomputing.com/en/latest/autogenerate.html
    """
    import alembic.runtime.migration
    import alembic.autogenerate
    import sqlalchemy_utils
    try:
        # create database if it does not exist
        if not sqlalchemy_utils.database_exists(engine.url):
            sqlalchemy_utils.create_database(engine.url)
            print(f'Created database "{engine.url}"\n')
    except Exception as e:
        print(f'Could not access or create database "{engine.url}":\n{e}', file=sys.stderr)
        return False
    # merge all models into a single metadata object
    combined_meta_data = MetaData()
    for model in models:
        model.metadata.tables[model.__tablename__].tometadata(combined_meta_data)
    # create diff between models and current db and translate to ddl
    ddl = []
    with engine.connect() as connection:
        output = io.StringIO()
        diff_context = alembic.runtime.migration.MigrationContext(connection.dialect, connection, opts={})
        autogen_context = alembic.autogenerate.api.AutogenContext(
            diff_context,
            opts={'sqlalchemy_module_prefix': 'sqlalchemy.', 'alembic_module_prefix': 'executor.'})
        execution_context = alembic.runtime.migration.MigrationContext(
            connection.dialect, connection,
            opts={'output_buffer': output, 'as_sql': True})
        # needed for the eval below
        executor = alembic.operations.Operations(execution_context)
        # Step 1: create a diff between the meta data and the data base
        # operations is a list of MigrateOperation instances, e.g. a DropTableOp
        operations = alembic.autogenerate.produce_migrations(diff_context, combined_meta_data).upgrade_ops.ops
        for operation in operations:
            # Step 2: autogenerate a python statement from the operation, e.g. "executor.drop_table('bar')"
            renderer = alembic.autogenerate.renderers.dispatch(operation)
            statements = renderer(autogen_context, operation)
            if not isinstance(statements, list):
                statements = [statements]
            for statement in statements:
                # Step 3: "execute" python statement and get sql from buffer, e.g. "DROP TABLE bar;"
                # NOTE: eval runs alembic-renderer output only -- trusted,
                # locally generated code, never external input.
                try:
                    eval(statement)
                except Exception as e:
                    print('statement: ' + statement)
                    raise (e)
                ddl.append(output.getvalue())
                output.truncate(0)
                output.seek(0)
    # Apply the collected DDL in one transaction, echoing each statement.
    with engine.begin() as connection:
        for statement in ddl:
            sys.stdout.write('\033[1;32m' + statement + '\033[0;0m')
            connection.execute(statement)
    return True
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    # settings['sqlalchemy.url'] = settings['cn.dialect'] + quote_plus(settings['sqlalchemy.url'])
    # settings['sqlalchemy.url'] = settings['cn.dialect'] + quote_plus(settings['sqlalchemy.url'])
    engine = engine_from_config(settings, 'sqlalchemy.')
    print (engine.url)
    # Create the database on first run.
    if not database_exists(engine.url) :
        print("la base n'existe pas")
        create_database(engine.url)
    else :
        print("la base existe")
    dbConfig['url'] = settings['sqlalchemy.url']
    # (Disabled: creation of photo/report upload folders.)
    # """ Creation repertoire pour photos """
    # dbConfig['bspipes'] = {}
    # dbConfig['bspipes']['folder'] = settings['bspipes.folder']
    #
    # if(os.path.exists(dbConfig['bspipes']['folder']) ):
    #     try :
    #         os.access( dbConfig['bspipes']['folder'], os.W_OK)
    #         print("folder : %s exist" %(dbConfig['bspipes']['folder']))
    #     except :
    #         print("app cant write in this directory ask your admin %s" %(dbConfig['bspipes']['folder']) )
    #         raise
    #         # trigger the error
    # else:
    #     print ("folder %s doesn't exist we gonna try to create it" %(dbConfig['bspipes']['folder']))
    #     try:
    #         os.makedirs(dbConfig['bspipes']['folder'])
    #         print("folder created : %s" %(dbConfig['bspipes']['folder']))
    #     except OSError as exception:
    #         if exception.errno != errno.EEXIST:
    #             raise
    #
    # dbConfig['bspipes']['folderReports'] = settings['bspipes.folderReports']
    # if(os.path.exists(dbConfig['bspipes']['folderReports']) ):
    #     try :
    #         os.access( dbConfig['bspipes']['folderReports'], os.W_OK)
    #         print("folder : %s exist" %(dbConfig['bspipes']['folderReports']))
    #     except :
    #         print("app cant write in this directory ask your admin %s" %(dbConfig['bspipes']['folderReports']) )
    #         raise
    #         # trigger the error
    # else:
    #     print ("folder %s doesn't exist we gonna try to create it" %(dbConfig['bspipes']['folderReports']))
    #     try:
    #         os.makedirs(dbConfig['bspipes']['folderReports'])
    #         print("folder created : %s" %(dbConfig['bspipes']['folderReports']))
    #     except OSError as exception:
    #         if exception.errno != errno.EEXIST:
    #             raise
    # Configure the database connection and build/reflect the schema.
    """ Configuration de la connexion à la BDD """
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    Base.metadata.reflect(views=True)
    # Configure the Pyramid server.
    """ Configuration du serveur pyramid"""
    print(settings)
    config = Configurator(settings=settings , autocommit=True)
    config.include('pyramid_chameleon')
    # Add renderer for datetime objects
    json_renderer = JSON()
    json_renderer.add_adapter(datetime, datetime_adapter)
    json_renderer.add_adapter(Decimal, decimal_adapter)
    json_renderer.add_adapter(bytes, bytes_adapter)
    config.add_renderer('json', json_renderer)
    # Set up authentication and authorization
    # config.set_authorization_policy(ACLAuthorizationPolicy())
    #Enable JWT authentification
    # config.include('pyramid_jwt')
    #config.set_jwt_authentication_policy('secret' , expiration='')
    # includeme(config)
    # config.set_jwt_authentication_policy('secret', http_header='Auth-Header-Secure' , expiration=3600)
    # Set the default permission level to 'read'
    # config.set_default_permission('read')
    config.include('pyramid_tm')
    # config.add_subscriber(add_cors_headers_response_callback, NewRequest)
    add_routes(config)
    config.scan()
    print("init complete")
    return config.make_wsgi_app()
def drop_database(self):
    """Drop the RDF database if it exists.

    NOTE(review): `dba` is presumably an alias for sqlalchemy_utils'
    drop_database (this method shadows that name) -- confirm the import.
    """
    if database_exists(config.RDF_DATABASE_URI):
        dba(config.RDF_DATABASE_URI)
        print("Database dropped")
# Flask-SQLAlchemy setup and one-time schema creation for the offers table.
db = SQLAlchemy(app)

class Offer(db.Model):
    # NOTE(review): unique=True on both columns prevents repeated dates or
    # placements across rows -- confirm that is the intended constraint.
    __tablename__ = 'offers'
    date = db.Column(db.String(80), unique=True, nullable=False, primary_key=True)
    placement = db.Column(db.String(3), unique=True, nullable=False, primary_key=False)

# Build the schema on first run only.
if not database_exists(database_file):
    db.create_all()
    db.session.commit()

def init_db():
    """Initialise the database from schema.sql inside an app context."""
    with app.app_context():
        db = get_db()
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        db.commit()

# NOTE(review): the body of this view continues beyond this chunk.
@app.route('/', methods=["GET", "POST"])
def home():
    # (Tail of an applicant-serialisation method; the enclosing `def` is
    # outside this chunk.) Returns the applicant's fields as a plain dict.
    return {
        "nric": self.nric,
        "applicant_name": self.applicant_name,
        "sex": self.sex,
        "race": self.race,
        "nationality": self.nationality,
        "dob": self.dob,
        "email": self.email,
        "mobile_no": self.mobile_no,
        "address": self.address,
        "grades": self.grades,
        "userid": self.userid
    }

# Create new database if it does not exist
if not database_exists(app.config['SQLALCHEMY_DATABASE_URI']):
    create_database(app.config['SQLALCHEMY_DATABASE_URI'])
    print("New database created: " + str(database_exists(app.config['SQLALCHEMY_DATABASE_URI'])))
    print("Database location: " + app.config['SQLALCHEMY_DATABASE_URI'])
else:
    print("Database at " + app.config['SQLALCHEMY_DATABASE_URI'] + " already exists")

# Create new table if it does not exist
engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])  # Access the DB Engine
if not engine.dialect.has_table(engine, tablename):  # If table don't exist, Create.
    db.drop_all()
    db.create_all()

# NOTE(review): the body of this view continues beyond this chunk.
@app.route("/applicant_details")
def get_all():
def create_or_verify_database(url, engine_options={}, app=None):
    """Create the install database if missing, then bring its schema up to the
    current migrate version (or verify it is already there).

    NOTE(review): the mutable default `engine_options={}` is shared across
    calls; it is not mutated here, but a None default would be safer.
    """
    # Create engine and metadata
    if not database_exists(url):
        message = "Creating database for URI [%s]" % url
        log.info(message)
        create_database(url)
    engine = create_engine(url, **engine_options)

    def migrate():
        # Bring the schema under version control and upgrade it to current.
        try:
            # Declare the database to be under a repository's version control
            db_schema = schema.ControlledSchema.create(engine, migrate_repository)
        except Exception:
            # The database is already under version control
            db_schema = schema.ControlledSchema(engine, migrate_repository)
        # Apply all scripts to get to current version
        migrate_to_current_version(engine, db_schema)

    meta = MetaData(bind=engine)
    if app and getattr(app.config, 'database_auto_migrate', False):
        migrate()
        return
    # Try to load tool_shed_repository table
    try:
        Table("tool_shed_repository", meta, autoload=True)
    except NoSuchTableError:
        # No table means a completely uninitialized database. If we
        # have an app, we'll set its new_installation setting to True
        # so the tool migration process will be skipped.
        log.info("Creating install database from scratch, skipping migrations")
        mapping.init(url=url, create_tables=True)
        current_version = migrate_repository.version().version
        schema.ControlledSchema.create(engine, migrate_repository, version=current_version)
        db_schema = schema.ControlledSchema(engine, migrate_repository)
        assert db_schema.version == current_version
        migrate()
        return
    try:
        Table("migrate_version", meta, autoload=True)
    except NoSuchTableError:
        # The database exists but is not yet under migrate version control, so init with version 1
        log.info("Adding version control to existing database")
        try:
            Table("metadata_file", meta, autoload=True)
            schema.ControlledSchema.create(engine, migrate_repository, version=2)
        except NoSuchTableError:
            schema.ControlledSchema.create(engine, migrate_repository, version=1)
    # Verify that the code and the DB are in sync
    db_schema = schema.ControlledSchema(engine, migrate_repository)
    if migrate_repository.versions.latest != db_schema.version:
        # NOTE(review): exception_msg is built but never raised or logged in
        # this branch -- confirm whether a raise was dropped.
        exception_msg = "Your database has version '%d' but this code expects version '%d'. " % (db_schema.version, migrate_repository.versions.latest)
        exception_msg += "Back up your database and then migrate the schema by running the following from your Galaxy installation directory:"
        exception_msg += "\n\nsh manage_db.sh upgrade install\n"
    else:
        log.info("At database version %d" % db_schema.version)
def is_database_exists():
    """Return True when the database already existed.

    Side effect: when it did not exist, the database is created before
    returning False (despite the `is_` name).
    """
    if database_exists(engine.url):
        return True
    create_database(engine.url)
    return False
def reset_db():
    """Recreate the application database from scratch."""
    # Remove any existing database first, then delegate creation.
    already_there = database_exists(DB_URI)
    if already_there:
        drop_database(DB_URI)
    create_db()
def pytest_sessionfinish(session, exitstatus):
    """Called exactly once when the pytest session ends: drop the test DB."""
    test_db_url = get_env().test_database_url
    if database_exists(test_db_url):
        drop_database(test_db_url)
def create_database(self):
    """Create the RDF database when it does not exist yet.

    NOTE(review): `cd` is presumably an alias for sqlalchemy_utils'
    create_database (this method shadows that name) -- confirm the import.
    """
    if not database_exists(config.RDF_DATABASE_URI):
        cd(config.RDF_DATABASE_URI)
        print("Database created")
def create_book(sqlite_file=None, uri_conn=None, currency="EUR", overwrite=False, keep_foreign_keys=False,
                db_type=None, db_user=None, db_password=None, db_name=None, db_host=None, db_port=None,
                version_format="2.6", **kwargs):
    """Create a new empty GnuCash book. If both sqlite_file and uri_conn are None, then an "in memory"
    sqlite book is created.

    :param str sqlite_file: a path to an sqlite3 file (only used if uri_conn is None)
    :param str uri_conn: a sqlalchemy connection string
    :param str currency: the ISO symbol of the default currency of the book
    :param bool overwrite: True if book should be deleted and recreated if it exists already
    :param bool keep_foreign_keys: True if the foreign keys should be kept (may not work at all with GnuCash)
    :param str db_type: type of database in ["postgres","mysql"]
    :param str db_user: username of database
    :param str db_password: password for the use of database
    :param str db_name: name of database
    :param str db_host: host of database
    :param str db_port: port of database
    :param str version_format: the format (2.6 or 2.7) for the schema tables to generate
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if document already exists and overwrite is False
    """
    # Local import of the sqlalchemy_utils helpers used below.
    from sqlalchemy_utils.functions import database_exists, create_database, drop_database

    uri_conn = build_uri(sqlite_file, uri_conn, db_type, db_user, db_password, db_name, db_host, db_port)

    _db_created = False
    # create database (if DB is not a sqlite in memory)
    if uri_conn != "sqlite:///:memory:":
        if database_exists(uri_conn):
            if overwrite:
                drop_database(uri_conn)
            else:
                raise GnucashException("'{}' db already exists".format(uri_conn))
        create_database(uri_conn)
        _db_created = True

    engine = create_piecash_engine(uri_conn, **kwargs)

    # Do any special setup we need to do the first time the database is created
    if _db_created:
        # For postgresql, GnuCash needs the standard_conforming_strings database variable set to 'on' in order to
        # find the gnclock table as expected. (Probably would break some other stuff too.)
        match = re.match('postgres://([^:]+):([^@]+)@([^/]+)/(.+)', uri_conn)
        if match:
            # TODO: figure out how to use sqlalchemy.sql.expression.literal to make this slightly SQL injection safer.
            # t = text('ALTER DATABASE :db_name SET standard_conforming_string TO on')
            # engine.execute(t, db_name="blah")
            # produces: ALTER DATABASE 'blah' SET standard_conforming_string TO on
            # we need:  ALTER DATABASE blah SET standard_conforming_string TO on
            t = text('ALTER DATABASE {} SET standard_conforming_strings TO on'.format(match.group(4)))
            engine.execute(t)

    # drop constraints if we do not want to keep them (keep_foreign_keys=False), the default
    if not keep_foreign_keys:
        for n, tbl in DeclarativeBase.metadata.tables.items():
            # drop index constraints
            for idx in tbl.indexes:
                event.listen(tbl, "after_create", DropIndex(idx), once=True)
            # drop FK constraints (primary keys are kept)
            for cstr in tbl.constraints:
                if isinstance(cstr, PrimaryKeyConstraint):
                    continue
                else:
                    event.listen(tbl, "before_drop", DropConstraint(cstr), once=True)

    # create all (tables, fk, ...)
    DeclarativeBase.metadata.create_all(engine)

    s = Session(bind=engine)

    # create all rows in version table
    assert version_format in version_supported, "The 'version_format'={} is not supported. " \
                                                "Choose one of {}".format(version_format,
                                                                          list(version_supported.keys()))
    for table_name, table_version in version_supported[version_format].items():
        s.add(Version(table_name=table_name, table_version=table_version))

    # create book and merge with session
    b = Book()
    s.add(b)
    adapt_session(s, book=b, readonly=False)

    # create commodities and initial accounts
    from .account import Account
    b.root_account = Account(name="Root Account", type="ROOT", commodity=None, book=b)
    b.root_template = Account(name="Template Root", type="ROOT", commodity=None, book=b)
    b["default-currency"] = b.currencies(mnemonic=currency)
    b.save()
    s.create_lock()
    b._acquire_lock = True
    return b
# TODO: split this into a package later (psql.py + __init__.py)?
# Requires: pip install psycopg2
# Docker quick start:
#   docker volume create postgresqldata
#   docker run -d -v postgresqldata:/data/db --name postgresql -p 5432:5432 postgresql
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import create_database, database_exists

from settings import PSQL_DATABASE_URL

# Make sure the database itself exists before building the engine.
if not database_exists(PSQL_DATABASE_URL):
    create_database(PSQL_DATABASE_URL)

SQLALCHEMY_DATABASE_URL = PSQL_DATABASE_URL

engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def open_book(sqlite_file=None, uri_conn=None, readonly=True, open_if_lock=False, overwrite_lock_if_lock=False,
              do_backup=True, db_type=None, db_user=None, db_password=None, db_name=None, db_host=None,
              db_port=None, **kwargs):
    """Open an existing GnuCash book.

    :param str sqlite_file: a path to an sqlite3 file (only used if uri_conn is None)
    :param str uri_conn: a sqlalchemy connection string
    :param bool readonly: open the file as readonly (useful to play with and avoid any unwanted save)
    :param bool open_if_lock: open the file even if it is locked by another user
        (using open_if_lock=True with readonly=False is not recommended)
    :param bool overwrite_lock_if_lock: remove any existing lock from another user and replace it with
        our own (only relevant with open_if_lock=True and readonly=False. WARNING: this option should
        only be used if you know the existing lock is in error and no other client actually has the
        file locked!!!)
    :param bool do_backup: do a backup if the file written in RW (i.e. readonly=False)
        (this only works with the sqlite backend and copy the file with
        .{:%Y%m%d%H%M%S}.gnucash appended to it)
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if the document does not exist
    :raises GnucashException: if there is a lock on the file and open_if_lock is False
    """
    uri_conn = build_uri(sqlite_file, uri_conn, db_type, db_user, db_password, db_name, db_host, db_port)

    if uri_conn == "sqlite:///:memory:":
        raise ValueError("An in memory sqlite gnucash databook cannot be opened, it should be created")

    # the database must already exist (use create_book for new books)
    if not database_exists(uri_conn):
        raise GnucashException("Database '{}' does not exist (please use create_book to create "
                               "GnuCash books from scratch)".format(uri_conn))

    engine = create_piecash_engine(uri_conn, **kwargs)

    # backup database if readonly=False and do_backup=True (sqlite only)
    if not readonly and do_backup:
        if engine.name != "sqlite":
            raise GnucashException(
                "Cannot do a backup for engine '{}'. Do yourself a backup and then specify do_backup=False".format(
                    engine.name))
        url = uri_conn[len("sqlite:///"):]
        url_backup = url + ".{:%Y%m%d%H%M%S}.gnucash".format(datetime.datetime.now())
        shutil.copyfile(url, url_backup)

    locks = list(engine.execute(gnclock.select()))

    # ensure the file is not locked by GnuCash itself
    if locks and not open_if_lock:
        raise GnucashException("Lock on the file")

    s = Session(bind=engine)

    # check the versions in the table versions is consistent with the API
    version_book = {v.table_name: v.table_version for v in s.query(Version).all() if "Gnucash" not in v.table_name}
    assert any(version_book == {k: v for k, v in vt.items() if "Gnucash" not in k}
               for version, vt in version_supported.items()), "Unsupported table versions"

    book = s.query(Book).one()
    adapt_session(s, book=book, readonly=readonly)

    if not readonly:
        # We assume open_if_lock is true at this point because we raise an exception if not and there is a lock
        if not locks or overwrite_lock_if_lock:
            if locks:
                # The delete_lock() function created by adapt_session() only deletes our own lock,
                # so remove the foreign lock row(s) directly.
                engine.execute(gnclock.delete())
            s.create_lock()
            book._acquire_lock = True
        else:
            # In this case (existing lock, opening in read/write mode, not overwriting lock) we don't change gnclock.
            # This is potentially a dangerous state to be in because we can write to the DB while another client can
            # write to the DB, but we assume the user knows what they are doing if they get here.
            pass

    return book
# Celery/Flask configuration: JSON serialization throughout, local RabbitMQ
# broker, and a sqlite database that is rebuilt on every start-up.
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
app.config['CELERY_TIMEZONE'] = 'Asia/Seoul'
app.config['CELERY_BROKER_URL'] = 'amqp://guest@localhost//'
app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
app.config['VIRTUAL_ENV'] = DEFAULT_PYTHON
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/feedback.db'

celery = Celery(app.name)
celery.conf.update(app.config)

db.app = app
db.init_app(app)
# Ensure the database exists, then wipe and recreate all tables —
# drop_all + create_all destroys any previous data on every start.
if not database_exists(app.config['SQLALCHEMY_DATABASE_URI']):
    create_database(app.config['SQLALCHEMY_DATABASE_URI'])
db.drop_all()
db.create_all()

monkeypatch_db_celery(app, celery)
Markdown(app)


def task_judge(problemset, problem, filename):
    # NOTE(review): this function appears truncated in this chunk — `feedback`
    # is unused, there is no return, and no task decorator is visible;
    # confirm against the full file.
    PROBLEMS = problems.get_testcase_for_judging(problemset, problem)
    # One judging signature per testcase.
    # NOTE(review): `chain` may be celery.chain or itertools.chain — confirm imports.
    subtasks = chain([
        subtask_judge.s(filename=filename, idx=idx, json=tc['json'])
        for idx, tc in enumerate(PROBLEMS['testcases'])
    ])
    feedback = Feedback()
def run(self, context, *args, **kwargs):
    """Tear down the analysis schema when the target database exists.

    Drops the dependent view first, then every table declared on
    ``analysis.Base.metadata``.
    """
    target = context.config.target
    if not database_exists(target):
        return
    engine = create_engine(target)
    engine.execute('drop view if exists vw_project_access_requests;')
    analysis.Base.metadata.drop_all(engine)
def run(self, context, *args, **kwargs):
    """Ensure the target database exists, then create all analysis tables."""
    target = context.config.target
    if not database_exists(target):
        create_database(target)
    engine = create_engine(target)
    analysis.Base.metadata.create_all(engine)
def create_db():
    """HR endpoint: create a per-user database, build its tables, and seed
    the default status/date/work lookup rows.

    #todo API 1: HR adds a user
    :return: JSON response plus an HTTP status code.
    """
    try:
        data = request.form
        user_id = data.get("user_id")
        user_name = data.get("username")
        # Same content as the former "role" field; the name now means job/post.
        user_role = data.get("work")
        user_job_id = data.get("numVal")
        user_department_id = data.get("department_id")
        # Reject blank or whitespace-only fields.
        for i in [user_id, user_name, user_role, user_job_id, user_department_id]:
            if i.isspace() or len(i) == 0:
                res = {'status': 404, "data": "数据格式不合法,请重新请求"}
                return jsonify(res), 404
        # Database name derived from request fields.
        # NOTE(review): these values flow straight into the connection string —
        # potential injection / malformed-URL risk; confirm upstream validation.
        database = str(user_id) + ":" + user_name + ":" + str(user_job_id) + ":" + user_role
        conn_str = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(user, password, host, port, database)
        engine = sqlalchemy.create_engine(conn_str, echo=True)
        if database_exists(engine.url):
            print(engine.url)
            # Base.metadata.drop_all(engine)
            return jsonify("数据库已经存在,"), 200
        else:
            create_database(engine.url)
            Base.metadata.create_all(engine)
            insert_manage_table(database, user_department_id, user_name)
            Session = sessionmaker(bind=engine)
            session = Session()
            # Seed the default lookup rows.
            s = Status(status_name="草")
            s1 = Status(status_name="报")
            s2 = Status(status_name="副")
            s3 = Status(status_name="垃")
            s4 = Status(status_name="收")
            d1 = Date_name(date_name="日")
            d2 = Date_name(date_name="周")
            d3 = Date_name(date_name="旬")
            d4 = Date_name(date_name="月")
            d5 = Date_name(date_name="季")
            d6 = Date_name(date_name="半")
            d7 = Date_name(date_name="年")
            w1 = Work_name(work_name="人")
            w2 = Work_name(work_name="机")
            w3 = Work_name(work_name="物")
            w4 = Work_name(work_name="法")
            try:
                session.add_all([s, s1, s2, s3, s4, d1, d2, d3, d4, d5, d6, d7, w1, w2, w3, w4])
                session.commit()
                res = {'status': 200, "data": "ok"}
                return jsonify(res), 200
            except Exception as e:
                # Roll back the partial seed on any insert/commit failure.
                session.rollback()
                print(e, "记录日志")
                res = {'status': 404, "data": "not ok"}
                return jsonify(res), 404
    except Exception as e:
        # Top-level guard: logs the error but returns no response on this
        # path — NOTE(review): confirm whether that is intended.
        current_app.logger.error("error_msg: %s remote_ip: %s user_agent: %s ",
                                 e, request.remote_addr, request.user_agent.browser)
        print(e)
def check_database_created(database_uri="postgres://localhost/starlette"):
    """Create the database behind *database_uri* when missing, then print its status."""
    engine = create_engine(database_uri)
    missing = not database_exists(engine.url)
    if missing:
        print(f"Database {database_uri} not found, creating...")
        create_database(engine.url)
    print(f"Database {database_uri} found: {database_exists(engine.url)}")
def create_postgres_db():
    """Create the database at SQLALCHEMY_URI unless it already exists."""
    engine = create_engine(SQLALCHEMY_URI, echo=False)
    if database_exists(engine.url):
        return
    create_database(engine.url)