Example 1
def init_data():
    """Fish data for project"""
    from imports import (
        Widget, Article, Page,
        User, Setting, Type,
        Template, Tag, Role,
        Category, Block, Profile,
        ContactMessage)
    if prompt_bool('Do you want to kill your db?'):
        if squ.database_exists(db.engine.url):
            squ.drop_database(db.engine.url)
    try:
        db.drop_all()
    except Exception:
        pass
    try:
        squ.create_database(db.engine.url)
        db.create_all()
    except Exception:
        pass

    user = User.query.filter(User.email == '*****@*****.**').first()
    if user is None:
        user = User(username='******', email='*****@*****.**', password='******')
    user.save()
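
Neither prompt_bool nor squ is defined in this snippet; a minimal sketch of the assumed context (prompt_bool resembling Flask-Script's helper of the same name):

# Assumed context for the snippet above; hypothetical, not from the original project.
import sqlalchemy_utils as squ  # the `squ` alias used above

def prompt_bool(question):
    # crude stand-in for Flask-Script's prompt_bool
    return input('%s [y/N]: ' % question).strip().lower() in ('y', 'yes')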
Example 2
def app(request):
    """The Flask API (scope = Session)."""
    config.DB_NAME = DB_NAME
    DATABASE_URI = config.DATABASE_URI.format(**config.__dict__)

    if not database_exists(DATABASE_URI):
        create_database(DATABASE_URI)

    print "Test Database: %s" % DATABASE_URI

    # Config the app
    _app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URI
    _app.config["SQLALCHEMY_ECHO"] = True # Toggle SQL Alchemy output
    _app.config["DEBUG"] = True
    _app.config["TESTING"] = True

    # Establish an application context before running the tests.
    ctx = _app.app_context()
    ctx.push()

    # Initialize a null cache
    cache.config = {}
    cache.init_app(_app)

    def teardown():
        ctx.pop()

    request.addfinalizer(teardown)
    return _app
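
A hedged sketch of how a test might consume this session-scoped fixture (test body hypothetical):

# Hypothetical pytest test using the fixture above:
def test_app_is_in_testing_mode(app):
    # the fixture returns the configured Flask app with an app context pushed
    assert app.config['TESTING'] is True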
Example 3
def create_ctfd(ctf_name="CTFd", name="admin", email="*****@*****.**", password="******", setup=True):
    app = create_app('CTFd.config.TestingConfig')

    url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
    if url.drivername == 'postgres':
        url.drivername = 'postgresql'

    if database_exists(url):
        drop_database(url)
    create_database(url)
    with app.app_context():
        app.db.create_all()

    if setup:
        with app.app_context():
            with app.test_client() as client:
                data = {}
                r = client.get('/setup')  # Populate session with nonce
                with client.session_transaction() as sess:
                    data = {
                        "ctf_name": ctf_name,
                        "name": name,
                        "email": email,
                        "password": password,
                        "nonce": sess.get('nonce')
                    }
                client.post('/setup', data=data)
    return app
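
A hedged sketch of how create_ctfd() is typically consumed in a test (route and assertion hypothetical):

# Hypothetical test using create_ctfd() above:
def test_index():
    app = create_ctfd()
    with app.app_context():
        with app.test_client() as client:
            r = client.get('/')
            assert r.status_code in (200, 302)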
Example 4
    def makeDbEngine(self):
        '''
        Establish an engine with the PostgreSQL database
        so that additional tables can be made.
        '''
        try:
            ## connect to Postgres
            dbname = self.getDbName()
            username = self.getUserName()
            print(dbname)
            print(username)

            ## create and set the engine
            engine = create_engine('postgresql://%s@localhost/%s' % (username, dbname))
            self.setDbEngine(engine)

            ## test if the database exists and create it if not
            db_exist = database_exists(engine.url)
            if not db_exist:
                create_database(engine.url)
            db_exist = database_exists(engine.url)
            self.setDbExist(db_exist)
            return 0
        except Exception:
            return 1
Example 5
def add_db():  # pragma: no cover
    db_url = config['service']['db_uri']
    global engine
    engine = create_engine(db_url)

    if database_exists(engine.url):
        print('!!! DATABASE ALREADY EXISTS !!!')
        return False

    print()
    print('!!! DATABASE NOT DETECTED !!!')
    print()

    try:
        confirm = input('Create database designated in the config file? [Y/n]') or 'Y'
    except KeyboardInterrupt:
        confirm = ''
        print()

    if confirm.strip() != 'Y':
        print('Not creating DB. Exiting.')
        return False

    create_database(engine.url)
    return True
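
The config object above is read as config['service']['db_uri']; a minimal hypothetical shape:

# Hypothetical shape of the `config` mapping read by add_db() above:
config = {
    'service': {
        'db_uri': 'postgresql://user:secret@localhost/service_db',
    },
}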
Example 6
    def init_db(self):
        """
        Initializes the database connection based on the configuration parameters
        """
        db_type = self.config['db_type']
        db_name = self.config['db_name']
        if db_type == 'sqlite':
            # we can ignore host, username, password, etc
            sql_lite_db_path = os.path.join(os.path.split(CONFIG)[0], db_name)
            self.db_connection_string = 'sqlite:///{}'.format(sql_lite_db_path)
        else:
            username = self.config['username']
            password = self.config['password']
            host_string = self.config['host_string']
            self.db_connection_string = '{}://{}:{}@{}/{}'.format(db_type, username, password, host_string, db_name)

        self.db_engine = create_engine(self.db_connection_string)
        # If the DB is not present and the type is not SQLite, create the DB
        if db_type != 'sqlite':
            if not database_exists(self.db_engine.url):
                create_database(self.db_engine.url)
        Base.metadata.bind = self.db_engine
        Base.metadata.create_all()
        # Bind the global Session to our DB engine
        global Session
        Session.configure(bind=self.db_engine)
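
A hedged sketch of the configuration keys init_db() expects (all values hypothetical):

# Hypothetical config consumed by init_db() above:
config = {
    'db_type': 'mysql',               # or 'sqlite', 'postgresql', ...
    'db_name': 'my_database',
    'username': 'dbuser',             # ignored for sqlite
    'password': 'dbpass',             # ignored for sqlite
    'host_string': 'localhost:3306',  # ignored for sqlite
}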
Example 7
def create_postgres_db():
    dbname = 'beer_db_2'
    username = '******'
    mypassword = '******'

    ## Here, we're using postgres, but sqlalchemy can connect to other things too.
    engine = create_engine('postgresql://%s:%s@localhost/%s' % (username, mypassword, dbname))
    print("Connecting to", engine.url)

    if not database_exists(engine.url):
        create_database(engine.url)
    print("Does database exist?", database_exists(engine.url))

    # load a table from CSV (pd.DataFrame.from_csv is long deprecated)
    brewery_data = pd.read_csv('clean_data_csv/brewery_information_rescrape.csv', index_col=0)

    ## insert data into database from Python (proof of concept - this won't be useful for big data, of course)
    ## df is any pandas dataframe
    brewery_data.to_sql('breweries', engine, if_exists='replace')

    #dbname = 'beer_review_db'
    # load a table from CSV
    beer_data = pd.read_csv('clean_data_csv/beer_review_information_rescrape.csv', index_col=0)
    #engine_2 = create_engine('postgresql://%s:%s@localhost/%s' % (username, mypassword, dbname))
    #print("connecting to", engine.url)

    #if not database_exists(engine_2.url):
    #    create_database(engine_2.url)

    #print("Does database exist?", database_exists(engine_2.url))
    beer_data.to_sql('reviews', engine, if_exists='replace')
    print("database", dbname, "has been created")
    return
Example 8
def setup():
    print(app.config['SQLALCHEMY_DATABASE_URI'])
    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)
    engine.execute('create extension if not exists fuzzystrmatch')
Example 9
def create_db(app):
    from project.core.db import db
    from sqlalchemy_utils import database_exists, create_database
    if not database_exists(db.engine.url):
        print('====> Create database')
        create_database(db.engine.url)
    else:
        print('====> database exists')
Example 10
def create_db_test():
    create_database(url_db)  # url_db: module-level database URL defined elsewhere
    engine = create_engine(url_db)
    session = sessionmaker()
    session.configure(bind=engine)
    Base.metadata.create_all(engine)

    return engine
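
A matching teardown would drop the test database again; a hedged sketch using the same module-level url_db:

# Hypothetical companion teardown for create_db_test() above:
from sqlalchemy_utils import database_exists, drop_database

def drop_db_test():
    if database_exists(url_db):
        drop_database(url_db)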
Example 11
def create_findmyride_database(database_name):
    engine = create_engine('postgresql://%s:%s@localhost/%s'%('dianeivy', password, database_name))
    print(engine.url)

    if not database_exists(engine.url):
        create_database(engine.url)
    print(database_exists(engine.url))
    return engine
Example 12
    def start_fixture(self):
        """Create necessary temp files and do the config dance."""

        global CONF

        data_tmp_dir = tempfile.mkdtemp(prefix='gnocchi')
        coordination_dir = os.path.join(data_tmp_dir, 'tooz')
        os.mkdir(coordination_dir)
        coordination_url = 'file://%s' % coordination_dir

        conf = service.prepare_service([])

        CONF = self.conf = conf
        self.tmp_dir = data_tmp_dir

        # Use the indexer set in the conf, unless we have set an
        # override via the environment.
        if 'GNOCCHI_TEST_INDEXER_URL' in os.environ:
            conf.set_override('url',
                              os.environ.get("GNOCCHI_TEST_INDEXER_URL"),
                              'indexer')

        # TODO(jd) It would be cool if Gabbi was able to use the null://
        # indexer, but that makes the API return a lot of 501 errors, which
        # Gabbi does not want to see, so let's just disable it.
        if conf.indexer.url is None or conf.indexer.url == "null://":
            raise case.SkipTest("No indexer configured")

        # Use the presence of DEVSTACK_GATE_TEMPEST as a semaphore
        # to signal we are not in a gate driven functional test
        # and thus should override conf settings.
        if 'DEVSTACK_GATE_TEMPEST' not in os.environ:
            conf.set_override('driver', 'file', 'storage')
            conf.set_override('coordination_url', coordination_url, 'storage')
            conf.set_override('policy_file',
                              os.path.abspath('etc/gnocchi/policy.json'),
                              group="oslo_policy")
            conf.set_override('file_basepath', data_tmp_dir, 'storage')

        # NOTE(jd) All of that is still very SQL centric but we only support
        # SQL for now so let's say it's good enough.
        url = sqlalchemy_url.make_url(conf.indexer.url)

        url.database = url.database + str(uuid.uuid4()).replace('-', '')
        db_url = str(url)
        conf.set_override('url', db_url, 'indexer')
        sqlalchemy_utils.create_database(db_url)

        index = indexer.get_driver(conf)
        index.connect()
        index.upgrade()

        conf.set_override('pecan_debug', False, 'api')

        # Turn off any middleware.
        conf.set_override('middlewares', [], 'api')

        self.index = index
Example 13
File: core.py Project: occrp/osoba
def createdb():
    print("Connecting to %s" % settings.SQLALCHEMY_DATABASE_URI)
    engine = create_engine(settings.SQLALCHEMY_DATABASE_URI)
    if settings.DROP_DB_ON_RESTART and database_exists(engine.url):
        print("Dropping old database... (because DROP_DB_ON_RESTART=True)")
        drop_database(engine.url)
    if not database_exists(engine.url):
        print("Creating databases...")
        create_database(engine.url)
Example 14
    def _create_new_database(cls, url):
        """Used by testing to create a new database."""
        purl = sqlalchemy_url.make_url(
            cls.dress_url(
                url))
        purl.database = purl.database + str(uuid.uuid4()).replace('-', '')
        new_url = str(purl)
        sqlalchemy_utils.create_database(new_url)
        return new_url
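
Note that on SQLAlchemy 1.4+ URL objects are immutable, so the purl.database assignment above raises an error there; a hedged sketch of the same helper for newer versions:

# Variant for SQLAlchemy 1.4+, where URL is immutable (sketch):
import uuid
import sqlalchemy_utils
from sqlalchemy.engine import make_url

def create_new_database(url):
    purl = make_url(url)
    purl = purl.set(database=purl.database + uuid.uuid4().hex)  # derive a copy
    sqlalchemy_utils.create_database(str(purl))
    return str(purl)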
Example 15
    def start_fixture(self):
        """Set up config."""

        self.conf = None
        self.conn = None

        # Determine the database connection.
        db_url = os.environ.get(
            'AODH_TEST_STORAGE_URL', "").replace(
                "mysql://", "mysql+pymysql://")
        if not db_url:
            self.fail('No database connection configured')

        conf = service.prepare_service([], config_files=[])
        # NOTE(jd): prepare_service() is called twice: first by load_app() for
        # Pecan, then Pecan calls pastedeploy, which starts the app, which has
        # no way to pass the conf object, so the Paste app calls
        # prepare_service again. In real life that's not a problem, but here we
        # want to be sure that the second time the same conf object is
        # returned, since we tweaked it. To do that, once we have called
        # prepare_service() we mock it so it returns the same conf object.
        self.prepare_service = service.prepare_service
        service.prepare_service = mock.Mock()
        service.prepare_service.return_value = conf
        conf = fixture_config.Config(conf).conf
        self.conf = conf
        opts.set_defaults(self.conf)

        conf.set_override('policy_file',
                          os.path.abspath(
                              'aodh/tests/open-policy.json'),
                          group='oslo_policy',
                          enforce_type=True)
        conf.set_override(
            'paste_config',
            os.path.abspath('aodh/tests/functional/gabbi/gabbi_paste.ini'),
            group='api',
        )

        conf.set_override('pecan_debug', True, group='api',
                          enforce_type=True)

        parsed_url = urlparse.urlparse(db_url)
        if parsed_url.scheme != 'sqlite':
            parsed_url = list(parsed_url)
            parsed_url[2] += '-%s' % str(uuid.uuid4()).replace('-', '')
            db_url = urlparse.urlunparse(parsed_url)

        conf.set_override('connection', db_url, group='database',
                          enforce_type=True)

        if (parsed_url[0].startswith("mysql")
           or parsed_url[0].startswith("postgresql")):
            sqlalchemy_utils.create_database(conf.database.connection)

        self.conn = storage.get_connection_from_config(self.conf)
        self.conn.upgrade()
Example 16
def createDatabase(dbName):
    """Create specified database if it doesn't exist."""
    config = CONFIG_DB
    connectString = "postgresql://{}:{}@{}:{}/{}".format(
        config["username"], config["password"], config["host"], config["port"], dbName
    )

    if not sqlalchemy_utils.database_exists(connectString):
        sqlalchemy_utils.create_database(connectString)
Example 17
def create_tables(db_url=None):
    if db_url is None:
        db_url = generate_url()
    print("Connecting to database at %s" % db_url)
    engine = create_engine(db_url)
    if not database_exists(engine.url):
        print("Creating database %s" % DB_NAME)
        create_database(engine.url)
    Base.metadata.create_all(create_engine(db_url))
    print("Created tables! Open up mysql to see.")
Example 18
def check_schema(schema):
    url = db.engine.url

    if url.database != schema:
        copy_url = copy.copy(url)
        copy_url.database = schema
        url = copy_url

    if not database_exists(url):
        create_database(url)
Example 19
def app():
    """
    Create test database and application
    """
    app = create_app(testing=True)
    sqlalchemy_url = app.config['SQLALCHEMY_DATABASE_URI']
    if database_exists(sqlalchemy_url):
        drop_database(sqlalchemy_url)
    create_database(sqlalchemy_url)
    yield app
Example 20
def initialize(re_createTable=False):
    # `DATABASE` and `engine` were used interchangeably in the original
    # listing; they are taken to be the same engine object here.
    if re_createTable:
        if not database_exists(engine.url):
            create_database(engine.url)

        print(database_exists(engine.url))
        Base.metadata.drop_all(engine, checkfirst=True)

    Base.metadata.create_all(engine, checkfirst=True)
Example 21
    def setUp(self):
        self.app = self.create_app()

        self.db = DB(engine, session, meta)
        import sqlalchemy_utils as squ
        if squ.database_exists(self.db.engine.url):
            squ.drop_database(self.db.engine.url)
        squ.create_database(self.db.engine.url)
        meta.bind = self.db.engine
        meta.create_all()
Example 22
def create_database(dbname):
    # create a database with name "dbname" using lordluen as username.
    # dbname = 'legislatr'
    # NOTE: this function shadows sqlalchemy_utils.create_database, so the
    # utility is called through the module to avoid infinite recursion.
    import sqlalchemy_utils
    username = '******'
    engine = create_engine('postgresql://%s@localhost/%s' % (username, dbname))
    print(engine.url)
    if not database_exists(engine.url):
        sqlalchemy_utils.create_database(engine.url)
    print(database_exists(engine.url))
    return
Example 23
def create_sweography_db():

    engine = create_engine(SQLALCHEMY_DATABASE_URI)

    if database_exists(engine.url):
        drop_database(engine.url)
    create_database(engine.url)

    ##creates all tables in database
    Base.metadata.create_all(engine)
Example 24
    def setUpClass(cls):
        cls.engine = create_engine('postgresql+psycopg2://monitor@localhost/monitor_test')

        if not database_exists(cls.engine.url):
            create_database(cls.engine.url, template='template_postgis')

        model.create_database(cls.engine, drop=True)

        cls.monitor = StatusMonitor(cls.engine)
        cls.monitor.read_expected_csv(os.path.join(test_dir, 'data', 'expected-rates.csv'))
Example 25
def create():
    """
    Create database and all tables
    """
    if not database_exists(db.engine.url):
        app.logger.debug('Creating a fresh database to work with')
        create_database(db.engine.url)
        alembic_stamp()
        db.create_all()
    else:
        app.logger.debug('Database already exists, please drop the database and try again!')
Example 26
# click decorators assumed here; the original listing receives a click context
@click.command()
@click.pass_context
def db_init(ctx):
    """Create db."""
    url = config.DATABASE_URL
    if database_exists(url):
        click.secho('Database %s already exists...' % url, fg='yellow')
        ctx.abort()
    click.secho('Creating %s...' % url, fg='blue')
    create_database(url)
    click.secho('Creating tables...', fg='blue')
    Base.metadata.create_all(engine)
    click.secho('Database initialized!', fg='green')
Example 27
    def setUp(self):
        self.dao_url = make_url('sqlite:///community_app.db')
        if not database_exists(self.dao_url):
            create_database(self.dao_url)
        self.dao = DataAccessObject(self.dao_url)
        self.photos = [
            Photo(owner_id=-129836227, object_id=431928280, album_id=-7, album='wall',
                  date_time=datetime.datetime(2016, 9, 30, 23, 55, 7), user_id=100,
                  text=None,
                  link='http://cs638122.vk.me/v638122248/1c41/SnfoaFP-Hfk.jpg')
        ]
Example 28
def test_client(request):
    db_uri = dci_app.config['DCI_DATABASE_URI']
    sqlalchemy_utils.create_database(db_uri)
    mydci.server.engine = sqlalchemy.create_engine(db_uri, echo=False)
    models.metadata.create_all(mydci.server.engine)

    def drop_db():
        sqlalchemy_utils.drop_database(db_uri)
    request.addfinalizer(drop_db)
    dci_app.config['TESTING'] = True
    return dci_app.test_client()
Example 29
def create_if_not_exists(force_creation=False):
    connect_db()

    db_exists = database_exists(_engine.url)
    if not db_exists or force_creation:
        get_module_logger().info("Creating database '{}'. ({}, {})".format(
            _engine.url, not db_exists, force_creation))
        create_database(_engine.url)
        Base.metadata.create_all(bind=_engine)

    _session.close()
Example 30
    def setUp(self):
        self.engine = sqlalchemy.create_engine(
            CONFIG.get('database', 'test_conn_string'),
            isolation_level="AUTOCOMMIT"
        )
        if not database_exists(self.engine.url):
            create_database(self.engine.url)

        self.maxDiff = None
        self.connection = self.engine.connect()
        Base.metadata.create_all(self.engine, checkfirst=True)
        self.conn = AlchemyConnection(self.engine.url)
Example 31
def postgresql(host, rows, queries, include_hits):
    """Insert rows and run search queries in postgresql.

    :param host: PostgreSQL server location
    :type host: str
    :param rows: Rows to insert in the logs table
    :type rows: list(dict(str))
    :param queries: Queries to execute
    :type queries: list(str)
    :param include_hits: Include hits in results
    :type include_hits: bool
    :returns: Insert and query timers
    :rtype: dict(str, contexttimer.Timer)

    """
    table_name = 'logs'

    url = 'postgres://postgres@{}/database'.format(host)
    logging.debug('Connecting to postgresql in: %r...', url)
    engine = create_engine(url)
    if not database_exists(url):
        create_database(url)

    metadata = MetaData()
    metadata.bind = engine
    table = Table(
        table_name,
        metadata,
        Column('timestamp', TIMESTAMP, nullable=False),
        Column('message', TEXT, nullable=False),
        Column('vector', TSVECTOR, nullable=False),
    )
    Index(
        'vector_index',
        table.c.vector,
        postgresql_using='gin',
    )
    metadata.drop_all()
    metadata.create_all()

    connection = engine.connect()

    logging.debug('Inserting %d rows...', len(rows))
    insert_query = text(
        "INSERT INTO logs (timestamp, message, vector) "
        "VALUES (:timestamp, :message, to_tsvector('english', :message))")
    with Timer() as insert_timer:
        connection.execute(insert_query, rows)
    logging.debug('Inserting took %f seconds', insert_timer.elapsed)

    logging.debug('Running random search queries...')

    if include_hits:
        select_query = text(
            "WITH matches AS ("
            "SELECT message, query "
            "FROM logs, to_tsquery('english', :plain_query) AS query "
            "WHERE vector @@ query "
            "ORDER BY ts_rank(vector, query) DESC "
            ")"
            "SELECT "
            "(SELECT COUNT(*) FROM matches) AS count, "
            "ts_headline('english', message, query) "
            "FROM matches "
            "LIMIT 1")
    else:
        select_query = text(
            "SELECT ts_headline('english', message, query) "
            "FROM ("
            "SELECT message, query "
            "FROM logs, to_tsquery('english', :plain_query) AS query "
            "WHERE vector @@ query "
            "ORDER BY ts_rank(vector, query) DESC "
            "LIMIT 1"
            ") AS subquery")

    with Timer() as query_timer:
        for words in queries:
            plain_query = ' | '.join(words.split())
            result = connection.execute(
                select_query,
                {
                    'plain_query': plain_query
                },
            ).fetchone()

            if include_hits:
                if result:
                    total, highlight = result
                else:
                    total, highlight = 0, None
                logging.debug('%r -> %d hits', plain_query, total)
            else:
                # guard against no match so `highlight` is always bound
                highlight = result[0] if result else None
                logging.debug('%r', plain_query)
            if highlight is not None:
                logging.debug(pformat(highlight))
    logging.debug('Querying took %f seconds', query_timer.elapsed)
    return {'insert': insert_timer, 'query': query_timer}
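
A hedged sketch of how the benchmark above might be invoked (host and data hypothetical):

# Hypothetical invocation of postgresql() above:
rows = [{'timestamp': '2020-01-01 00:00:00', 'message': 'error connecting to host'}]
timers = postgresql('localhost', rows, queries=['error host'], include_hits=True)
print(timers['insert'].elapsed, timers['query'].elapsed)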
Example 32
def generate_database(engine_url, data_path, engine):
    create_database(engine_url)

    # ---------------------
    # Species_table generation
    # ----------------------
    # extract Species information from the list
    species_list_df = pd.read_csv(data_path + "vog.species.list",
                                  sep='\t',
                                  header=0,
                                  names=['SpeciesName', 'TaxonID', 'Phage', 'Source', 'Version']) \
        .assign(Phage=lambda p: p.Phage == 'phage')

    # create a species table in the database
    species_list_df.to_sql(name='Species_profile',
                           con=engine,
                           if_exists='replace',
                           index=False,
                           chunksize=1000)

    with engine.connect() as con:
        con.execute(
            'ALTER TABLE `Species_profile` ADD PRIMARY KEY (`TaxonID`);')
        con.execute(
            'ALTER TABLE Species_profile  MODIFY  SpeciesName char(100) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE Species_profile  MODIFY  TaxonID int(255) NOT NULL; ')
        con.execute(
            'ALTER TABLE Species_profile  MODIFY  Phage bool NOT NULL; ')
        con.execute(
            'ALTER TABLE Species_profile  MODIFY  Source char(100) NOT NULL; ')
        con.execute(
            'ALTER TABLE Species_profile  MODIFY  Version int(255) NOT NULL; ')

    # ToDo add foreign key to connect tax_id in protein_profile and species_profile? create index?

    print('Species_profile table successfully created!')

    # ---------------------
    # VOG_table generation
    # ----------------------

    # read in the data files
    members = pd.read_csv(os.path.join(data_path, 'vog.members.tsv.gz'),
                          compression='gzip',
                          sep='\t',
                          header=0,
                          names=[
                              'VOG_ID', 'ProteinCount', 'SpeciesCount',
                              'FunctionalCategory', 'Proteins'
                          ],
                          usecols=[
                              'VOG_ID', 'ProteinCount', 'SpeciesCount',
                              'FunctionalCategory', 'Proteins'
                          ],
                          index_col='VOG_ID')

    annotations = pd.read_csv(os.path.join(data_path,
                                           'vog.annotations.tsv.gz'),
                              compression='gzip',
                              sep='\t',
                              header=0,
                              names=[
                                  'VOG_ID', 'ProteinCount', 'SpeciesCount',
                                  'FunctionalCategory',
                                  'Consensus_func_description'
                              ],
                              usecols=['VOG_ID', 'Consensus_func_description'],
                              index_col='VOG_ID')

    lca = pd.read_csv(
        os.path.join(data_path, 'vog.lca.tsv.gz'),
        compression='gzip',
        sep='\t',
        header=0,
        names=['VOG_ID', 'GenomesInGroup', 'GenomesTotal', 'Ancestors'],
        index_col='VOG_ID')

    virusonly = pd.read_csv(os.path.join(data_path, 'vog.virusonly.tsv.gz'),
                            compression='gzip',
                            sep='\t',
                            header=0,
                            names=[
                                'VOG_ID', 'StringencyHigh', 'StringencyMedium',
                                'StringencyLow'
                            ],
                            dtype={
                                'StringencyHigh': bool,
                                'StringencyMedium': bool,
                                'StringencyLow': bool
                            },
                            index_col='VOG_ID')

    dfr = members.join(annotations).join(lca).join(virusonly)
    dfr['VirusSpecific'] = np.where((dfr['StringencyHigh']
                                     | dfr['StringencyMedium']
                                     | dfr['StringencyLow']), True, False)

    #create number of phages and non-phages for VOG. also "phages_only" "np_only" or "mixed"
    dfr['NumPhages'] = 0
    dfr['NumNonPhages'] = 0
    dfr['PhageNonphage'] = ''

    species_list_df.set_index("TaxonID", inplace=True)
    for index, row in dfr.iterrows():
        num_nonphage = 0
        num_phage = 0
        p = row['Proteins'].split(",")
        for protein in p:
            species_id = protein.split('.')[0]
            species_id = int(species_id)
            if (species_list_df.loc[species_id])["Phage"]:
                num_phage = num_phage + 1
            else:
                num_nonphage = num_nonphage + 1

        dfr.at[index, 'NumPhages'] = num_phage
        dfr.at[index, 'NumNonPhages'] = num_nonphage

        if (num_phage > 0) and (num_nonphage > 0):
            dfr.at[index, 'PhageNonphage'] = "mixed"
        elif num_phage > 0:
            dfr.at[index, 'PhageNonphage'] = "phages_only"
        else:
            dfr.at[index, 'PhageNonphage'] = "np_only"

    # create a table in the database
    dfr.to_sql(name='VOG_profile',
               con=engine,
               if_exists='replace',
               index=True,
               dtype={
                   'VOG_ID':
                   VARCHAR(
                       dfr.index.get_level_values('VOG_ID').str.len().max()),
                   'Proteins':
                   LONGTEXT
               },
               chunksize=1000)

    with engine.connect() as con:
        con.execute('ALTER TABLE VOG_profile ADD PRIMARY KEY (`VOG_ID`(8)); '
                    )  # add primary key
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  VOG_ID char(30) NOT NULL; '
        )  # convert text to char
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  FunctionalCategory char(30) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  Consensus_func_description char(100) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  ProteinCount int(255) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  SpeciesCount int(255) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  GenomesInGroup int(255) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  GenomesTotal int(255) NOT NULL; '
        )
        con.execute('ALTER TABLE VOG_profile  MODIFY  Ancestors TEXT; ')
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  StringencyHigh bool NOT NULL; ')
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  StringencyMedium bool NOT NULL; '
        )
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  StringencyLow bool NOT NULL; ')
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  VirusSpecific bool NOT NULL; ')
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  NumPhages int(255) NOT NULL; ')
        con.execute(
            'ALTER TABLE VOG_profile  MODIFY  NumNonPhages int(255) NOT NULL; '
        )
        con.execute('ALTER TABLE VOG_profile  MODIFY  PhageNonphage TEXT; ')
        con.execute('ALTER TABLE VOG_profile  MODIFY  Proteins LONGTEXT; ')
        con.execute(
            'CREATE UNIQUE INDEX VOG_profile_index ON VOG_profile (VOG_ID, FunctionalCategory);'
        )  # create index
        # con.execute('CREATE INDEX VOG_profile_index2 ON VOG_profile (Consensus_func_description);')  # create index
        #con.execute('ALTER TABLE VOG_profile  ADD FOREIGN KEY (TaxonID) REFERENCES Species_profile(TaxonID); ')
    # ToDo: add foreign keys to link to proteins, and species lists.
    print('VOG_table successfully created!')

    #---------------------
    # Protein_table generation
    #----------------------

    # extract proteins for each VOG
    protein_list_df = pd.read_csv(data_path + "vog.members.tsv.gz",
                                  compression='gzip',
                                  sep='\t').iloc[:, [0, 4]]

    protein_list_df = protein_list_df.rename(columns={
        "#GroupName": "VOG_ID",
        "ProteinIDs": "ProteinID"
    })

    # assign each protein a vog
    protein_list_df = (protein_list_df["ProteinID"].str.split(",").apply(
        lambda x: pd.Series(x)).stack().reset_index(
            level=1,
            drop=True).to_frame("ProteinID").join(protein_list_df[["VOG_ID"]],
                                                  how="left"))
    protein_list_df.set_index("ProteinID")

    # save the taxon ID in a separate column
    protein_list_df["TaxonID"] = protein_list_df["ProteinID"].str.split(
        ".").str[0]

    # create a protein table in the database
    protein_list_df.to_sql(name='Protein_profile',
                           con=engine,
                           if_exists='replace',
                           index=False,
                           chunksize=1000)

    with engine.connect() as con:
        con.execute(
            'ALTER TABLE Protein_profile  MODIFY  ProteinID char(30) NOT NULL; '
        )
        con.execute(
            'ALTER TABLE Protein_profile  MODIFY  TaxonID int(30) NOT NULL; ')
        con.execute(
            'ALTER TABLE Protein_profile  MODIFY  VOG_ID char(30) NOT NULL; ')
        con.execute(
            'CREATE INDEX VOG_profile_index_by_protein ON Protein_profile (ProteinID);'
        )
        # add foreign key
        con.execute(
            'ALTER TABLE Protein_profile  ADD FOREIGN KEY (TaxonID) REFERENCES Species_profile(TaxonID); '
        )
        con.execute(
            'ALTER TABLE Protein_profile  ADD FOREIGN KEY (VOG_ID) REFERENCES VOG_profile(VOG_ID); '
        )

    print('Protein_profile table successfully created!')

    #---------------------
    # Amino Acid and Nucleotide Sequence Table Generation
    #----------------------

    proteinfile = data_path + "vog.proteins.all.fa"
    genefile = data_path + "vog.genes.all.fa"

    prot = []
    for seq_record in SeqIO.parse(proteinfile, "fasta"):
        prot.append([seq_record.id, str(seq_record.seq)])
    df = pd.DataFrame(prot, columns=['ID', 'AAseq'])
    df.set_index("ID")

    genes = []
    for seq_record in SeqIO.parse(genefile, "fasta"):
        genes.append([seq_record.id, str(seq_record.seq)])
    dfg = pd.DataFrame(genes, columns=['ID', 'NTseq'])
    dfg.set_index('ID')

    # convert dataframes to DB Tables:
    df.to_sql(name='AA_seq',
              con=engine,
              if_exists='replace',
              index=False,
              chunksize=1000)
    dfg.to_sql(name='NT_seq',
               con=engine,
               if_exists='replace',
               index=False,
               chunksize=1000)

    with engine.connect() as con:
        con.execute('ALTER TABLE AA_seq  MODIFY  ID char(30) NOT NULL; ')
        con.execute('ALTER TABLE AA_seq  MODIFY  AASeq LONGTEXT; ')
        con.execute('CREATE INDEX ID ON AA_seq (ID);')

        con.execute('ALTER TABLE NT_seq  MODIFY  ID char(30) NOT NULL; ')
        con.execute('ALTER TABLE NT_seq  MODIFY  NTSeq LONGTEXT; ')
        con.execute('CREATE INDEX ID ON NT_seq (ID);')
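
A hedged sketch of a driver for generate_database() above (URL and paths hypothetical):

# Hypothetical driver for generate_database() above:
from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists

engine_url = 'mysql+pymysql://user:secret@localhost/vog_db'
engine = create_engine(engine_url)
if not database_exists(engine_url):
    generate_database(engine_url, data_path='data/', engine=engine)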
Example 33
def create_db(username, password, host, port, db_name):
    engine = create_engine(f'postgresql://{username}:{password}@{host}:{port}/{db_name}')
    if not database_exists(engine.url):
        create_database(engine.url)
Example 34
def db_init():
    from models.db_model.db import engine, Db
    from models.db_model.model import Base, SysUser, SysMenu, SysRole, SysRoleMenu, SysUserRole
    from sqlalchemy_utils import database_exists, create_database
    # Seed initial data only on a fresh install; the original listing
    # re-checked database_exists() after creating the database, so the
    # seed block below could never run.
    fresh_install = not database_exists(engine.url)
    if fresh_install:
        create_database(engine.url)
    Base.metadata.create_all(engine)
    if fresh_install:
        session = Db.get_session()
        try:
            admin_user = SysUser(
                name='admin',
                password=
                '******',
                cname='管理员',
            )
            session.add(admin_user)
            workspace_menu = SysMenu(name='我的工作台',
                                     menu_path='/workspace',
                                     menu_reg='^\/workspace\/?$',
                                     icon='workspace')
            session.add(workspace_menu)
            project_menu = SysMenu(name='项目',
                                   menu_path='/project',
                                   menu_reg='^\/project\/?',
                                   icon='project')
            session.add(project_menu)
            tool_menu = SysMenu(name='工具',
                                menu_path='/tool',
                                menu_reg='^\/tool\/?',
                                icon='tool')
            session.add(tool_menu)
            manage_menu = SysMenu(name='管理',
                                  menu_path='/manage',
                                  menu_reg='^\/manage\/?$',
                                  icon='manage')
            session.add(manage_menu)
            session.commit()
            user_manage_menu = SysMenu(name='用户管理',
                                       menu_path='/manage/user',
                                       menu_reg='^\/manage\/user\/?$',
                                       parent_id=manage_menu.id)
            session.add(user_manage_menu)
            role_manage_menu = SysMenu(name='角色管理',
                                       menu_path='/manage/role',
                                       menu_reg='^\/manage\/role\/?$',
                                       parent_id=manage_menu.id)
            session.add(role_manage_menu)
            admin_role = SysRole(role_name='管理员', )
            session.add(admin_role)
            session.commit()
            session.add(
                SysUserRole(user_id=admin_user.id, role_id=admin_role.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id, menu_id=workspace_menu.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id, menu_id=project_menu.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id, menu_id=tool_menu.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id, menu_id=manage_menu.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id,
                            menu_id=user_manage_menu.id))
            session.add(
                SysRoleMenu(role_id=admin_role.id,
                            menu_id=role_manage_menu.id))
            session.commit()
        except Exception as e:
            session.rollback()
            logging.error(traceback.format_exc())
Example 35
    def test_create_and_drop(self):
        assert not database_exists(self.url)
        create_database(self.url)
        assert database_exists(self.url)
        drop_database(self.url)
        assert not database_exists(self.url)
Example 36
def create_app(test_config=None):
    """
    The flask application factory. To run the app somewhere else you can:
    ```
    from api import create_app
    app = create_app()
    if __main__ == "__name__":
        app.run()
    """
    app = Flask(__name__)

    CORS(app)  # add CORS

    JWTManager(app)

    # check environment variables to see which config to load
    env = os.environ.get("FLASK_ENV", "dev")
    # for configuration options, look at api/config.py
    if test_config:
        # purposely done so we can inject test configurations
        # this may be used as well if you'd like to pass
        # in a separate configuration although I would recommend
        # adding/changing it in api/config.py instead
        # ignore environment variable config if config was given
        app.config.from_mapping(**test_config)
    else:
        app.config.from_object(
            config[env])  # config dict is from api/config.py

    # logging
    formatter = RequestFormatter(
        "%(asctime)s %(remote_addr)s: requested %(url)s: %(levelname)s in [%(module)s: %(lineno)d]: %(message)s"
    )
    if app.config.get("LOG_FILE"):
        fh = logging.FileHandler(app.config.get("LOG_FILE"))
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        app.logger.addHandler(fh)

    strm = logging.StreamHandler()
    strm.setLevel(logging.DEBUG)
    strm.setFormatter(formatter)

    app.logger.addHandler(strm)
    app.logger.setLevel(logging.DEBUG)

    root = logging.getLogger("core")
    root.addHandler(strm)

    # decide whether to create database
    if env != "prod":
        db_url = app.config["SQLALCHEMY_DATABASE_URI"]
        if not database_exists(db_url):
            create_database(db_url)

    # register sqlalchemy to this app
    from api.db import db

    db.init_app(app)  # initialize Flask SQLALchemy with this flask app
    Migrate(app, db)

    init_jtw_security(app)

    # register error Handler
    app.register_error_handler(Exception, all_exception_handler)

    # import and register blueprints
    from .blueprints import register_blueprints

    # why blueprints http://flask.pocoo.org/docs/1.0/blueprints/
    register_blueprints(app)

    return app
Example 37
from flask import Flask
from acceptme import Bot
from flask_login import LoginManager
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy_utils import create_database, database_exists


bot = Bot
app = Flask(__name__)
app.config['SECRET_KEY'] = 'supersecretkey'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://*****:*****@localhost/instamax'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)

if not database_exists(app.config['SQLALCHEMY_DATABASE_URI']):
    create_database(app.config['SQLALCHEMY_DATABASE_URI'])

login_manager = LoginManager()
login_manager.init_app(app)
login_manager.blueprint_login_views = {
    'core': 'core.login',
    'admin': 'admin.login'
}

from project.core.views import core_blueprint
from project.users.views import users_blueprint
from project.admin.views import admins_blueprint
from project.error_pages.handler import error_pages


app.register_blueprint(error_pages)
Example 38
    def _DB_conn(user, passwd, db):
        conn = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format(
            user, passwd, db))
        if not database_exists(conn.url):
            create_database(conn.url)
        return conn
Example 39
def init_database(request):
    DB_URL = TestingConfig.SQLALCHEMY_DATABASE_URI
    if not database_exists(DB_URL):
        print('Creating database.')
        create_database(DB_URL)
    db.create_all()
    # Fill database with data for testing
    author_1 = Author(name="Niel Gaiman",
                      date_of_birth=datetime.strptime('1976-12-29',
                                                      '%Y-%m-%d'),
                      country="UK")
    author_2 = Author(name="Terry Pratchet",
                      date_of_birth=datetime.strptime('1950-02-15',
                                                      '%Y-%m-%d'),
                      country="UK")
    author_3 = Author(name="George Martin",
                      date_of_birth=datetime.strptime('1960-06-09',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_4 = Author(name="Stephen King",
                      date_of_birth=datetime.strptime('1970-07-21',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_5 = Author(name="Niel Stephenson",
                      date_of_birth=datetime.strptime('1974-05-29',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_6 = Author(name="Paolo Bachigalupi",
                      date_of_birth=datetime.strptime('1976-12-13',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_7 = Author(name="Patrick Rothfuss",
                      date_of_birth=datetime.strptime('1970-12-13',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_8 = Author(name="Isaak Asimov",
                      date_of_birth=datetime.strptime('1940-12-13',
                                                      '%Y-%m-%d'),
                      country="USA")
    author_9 = Author(name="Miguel de Servantes",
                      date_of_birth=datetime.strptime('1630-02-13',
                                                      '%Y-%m-%d'),
                      country="Spain")

    book_1 = Book(title="Good omens",
                  publish_date=datetime.strptime('2008-12-29', '%Y-%m-%d'),
                  author_ids=[author_1, author_2],
                  avg_rating=1,
                  num_rating=10)
    book_2 = Book(title="Storm of swords",
                  publish_date=datetime.strptime('2005-11-29', '%Y-%m-%d'),
                  author_ids=[author_3],
                  avg_rating=5,
                  num_rating=10)
    book_3 = Book(title="Wind-up girl",
                  publish_date=datetime.strptime('2009-07-12', '%Y-%m-%d'),
                  author_ids=[author_6, author_7],
                  avg_rating=5,
                  num_rating=10)
    book_4 = Book(title="Great omens",
                  publish_date=datetime.strptime('2002-12-29', '%Y-%m-%d'),
                  author_ids=[author_1, author_3],
                  avg_rating=3,
                  num_rating=10)
    book_5 = Book(title="Game of thrones",
                  publish_date=datetime.strptime('2003-12-29', '%Y-%m-%d'),
                  author_ids=[author_1, author_3, author_5],
                  avg_rating=4,
                  num_rating=10)
    book_6 = Book(title="Feast for crows",
                  publish_date=datetime.strptime('2015-12-29', '%Y-%m-%d'),
                  author_ids=[author_3],
                  avg_rating=2,
                  num_rating=10)
    book_7 = Book(title="Fever dreams",
                  publish_date=datetime.strptime('2002-12-29', '%Y-%m-%d'),
                  author_ids=[author_3],
                  avg_rating=2,
                  num_rating=10)
    book_8 = Book(title="Song of Lia",
                  publish_date=datetime.strptime('2018-01-29', '%Y-%m-%d'),
                  author_ids=[author_3],
                  avg_rating=1,
                  num_rating=10)
    book_9 = Book(title="Wind-up Girl 2",
                  publish_date=datetime.strptime('2011-03-29', '%Y-%m-%d'),
                  author_ids=[author_7, author_5],
                  avg_rating=3.4,
                  num_rating=10)
    book_10 = Book(title="Name of the wind",
                   publish_date=datetime.strptime('2011-12-11', '%Y-%m-%d'),
                   author_ids=[author_7, author_3],
                   avg_rating=1.3,
                   num_rating=10)
    book_11 = Book(title="Wise man fear",
                   publish_date=datetime.strptime('2018-12-22', '%Y-%m-%d'),
                   author_ids=[author_1, author_7],
                   avg_rating=5,
                   num_rating=10)
    book_12 = Book(title="Don Quihote",
                   publish_date=datetime.strptime('1670-12-02', '%Y-%m-%d'),
                   author_ids=[author_9],
                   avg_rating=1,
                   num_rating=9)
    book_13 = Book(title="Foundation and Empire",
                   publish_date=datetime.strptime('2018-12-29', '%Y-%m-%d'),
                   author_ids=[author_8],
                   avg_rating=1,
                   num_rating=9)

    db.session.add_all([
        author_1, author_2, author_3, author_4, author_5, author_6, author_7,
        author_8, author_9, book_1, book_2, book_3, book_4, book_5, book_6,
        book_7, book_8, book_9, book_10, book_11, book_12, book_13
    ])
    db.session.commit()

    def teardown():
        db.drop_all()

    request.addfinalizer(teardown)
    return db
Example 40
def _create_test_db():
    db_url = os.environ['DATABASE_URL']
    if not sqlalchemy_utils.database_exists(db_url):
        sqlalchemy_utils.create_database(db_url)
Example 41
    def connect(self):  # noqa
        if not database_exists(self.engine.url):
            create_database(self.engine.url)

        self.session = sessionmaker(bind=self.engine)()
        Result.metadata.create_all(self.engine)
Example 42
def auto_migrate(engine: sqlalchemy.engine.Engine, models: list):
    """
    Compares a database with a list of defined orm models and applies the diff. Prints executed SQL statements to stdout.

    Based on `alembic automigrations`_, but doesn't require intermediate migration files.

    Use with care, does not work in many cases.

    Args:
        engine: the database to use
        models: A list of orm models

    Returns:
        True in case of no failures

    .. _alembic automigrations:
        http://alembic.zzzcomputing.com/en/latest/autogenerate.html
    """
    import io
    import sys
    import alembic.runtime.migration
    import alembic.autogenerate
    import alembic.operations
    import sqlalchemy_utils
    from sqlalchemy import MetaData

    try:
        # create database if it does not exist
        if not sqlalchemy_utils.database_exists(engine.url):
            sqlalchemy_utils.create_database(engine.url)
            print(f'Created database {engine.url!r}\n')
    except Exception as e:
        print(f'Could not access or create database {engine.url!r}:\n{e}', file=sys.stderr)
        return False

    # merge all models into a single metadata object
    combined_meta_data = MetaData()
    for model in models:
        model.metadata.tables[model.__tablename__].tometadata(combined_meta_data)

    # create diff between models and current db and translate to ddl
    ddl = []
    with engine.connect() as connection:
        output = io.StringIO()

        diff_context = alembic.runtime.migration.MigrationContext(connection.dialect, connection, opts={})

        autogen_context = alembic.autogenerate.api.AutogenContext(diff_context,
                                                                  opts={'sqlalchemy_module_prefix': 'sqlalchemy.',
                                                                        'alembic_module_prefix': 'executor.'})

        execution_context = alembic.runtime.migration.MigrationContext(connection.dialect, connection,
                                                                       opts={'output_buffer': output, 'as_sql': True})

        # needed for the eval below
        executor = alembic.operations.Operations(execution_context)

        # Step 1: create a diff between the meta data and the data base
        # operations is a list of MigrateOperation instances, e.g. a DropTableOp
        operations = alembic.autogenerate.produce_migrations(diff_context, combined_meta_data).upgrade_ops.ops

        for operation in operations:
            # Step 2: autogenerate a python statement from the operation, e.g. "executor.drop_table('bar')"
            renderer = alembic.autogenerate.renderers.dispatch(operation)
            statements = renderer(autogen_context, operation)
            if not isinstance(statements, list):
                statements = [statements]

            for statement in statements:
                # Step 3: "execute" python statement and get sql from buffer, e.g. "DROP TABLE bar;"
                try:
                    eval(statement)
                except Exception as e:
                    print('statement: ' + statement)
                    raise (e)
                ddl.append(output.getvalue())
                output.truncate(0)
                output.seek(0)

    with engine.begin() as connection:
        for statement in ddl:
            sys.stdout.write('\033[1;32m' + statement + '\033[0;0m')
            connection.execute(statement)

    return True
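
A hedged usage sketch for auto_migrate() above, with a throwaway declarative model (names and URL hypothetical; the declarative_base import shown is the SQLAlchemy 1.4+ location):

# Hypothetical usage of auto_migrate() above:
import sqlalchemy
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Bar(Base):
    __tablename__ = 'bar'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)

engine = sqlalchemy.create_engine('postgresql://user:secret@localhost/autodb')
auto_migrate(engine, [Bar])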
Example 43
from core import app, db  # `app` import assumed; the original listing omits it
from core.manager import MinitManager
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from sqlalchemy.engine.url import make_url
from sqlalchemy_utils import database_exists, create_database

migrate = Migrate(app, db)

manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command('minit', MinitManager)

url = make_url(app.config['SQLALCHEMY_DATABASE_URI'])
if not database_exists(url):
    create_database(url, encoding='utf8mb4')

from modules.models import *


@manager.command
def filldb():
    user0 = User(username="******",
                 password="******",
                 role=roles["customer"])
    user1 = User(username="******", password="******", role=roles["customer"])
    user2 = User(username="******", password="******", role=roles["sales"])
    user3 = User(username="******", password="******", role=roles["engineering"])
    user4 = User(username="******", password="******", role=roles["production"])
    user5 = User(username="******", password="******", role=roles["finance"])
    user6 = User(username="******", password="******", role=roles["test"])
Example 44
app.register_blueprint(screamscape, url_prefix='/api/v1/screamscape')
app.register_blueprint(coastergoals, url_prefix='/api/v1/coastergoals')
app.register_blueprint(articles, url_prefix='/api/v1/coasters/articles')
app.register_blueprint(tripjournals, url_prefix='/api/v1/coasters/journals')
app.register_blueprint(parks, url_prefix='/api/v1/coasters/parks')
app.register_blueprint(jobs, url_prefix='/api/v1/jobs')

migrate = Migrate(app, db)

with app.app_context():
    db.init_app(app)
    if not os.path.isfile(
            os.path.dirname(os.path.realpath(__file__)) +
            "/data/homeserver.db"):
        print("No database found. Creating one")
        create_database(app.config['SQLALCHEMY_DATABASE_URI'],
                        encoding='utf8mb4')
        db.create_all()
        db.session.commit()


@app.teardown_appcontext
def shutdown_session(exception=None):
    db.session.remove()


@app.route("/", defaults={'u_path': ''})
@app.route('/<path:u_path>')
def hello(u_path):
    return render_template('index.html')

Example 45
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

__author__ = 'RemiZOffAlex'
__copyright__ = '(c) RemiZOffAlex'
__license__ = 'MIT'
__email__ = '*****@*****.**'

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils import database_exists, create_database

from .. import app

engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=True)
if not database_exists(engine.url):
    create_database(engine.url)

db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()

from .users import User

Base.metadata.create_all(engine)

__all__ = ['db_session', 'User']
Example 46
from utils.database import get_all_user_db_urls
from views import blueprints
from views.auth import get_user_by_id

JINJA_ENVIRONMENT = jinja2.Environment(loader=jinja2.FileSystemLoader(
    os.path.join(os.path.dirname(__file__), 'build')),
                                       extensions=['jinja2.ext.autoescape'],
                                       autoescape=True)

set_logging(app)
set_config(app)

db.init_app(app)

if not database_exists(app.config['SQLALCHEMY_DATABASE_URI']):
    create_database(app.config['SQLALCHEMY_DATABASE_URI'], encoding='utf8mb4')
    logging.error(f'The application database has just been created, the tables'
                  f' still need to be created (run reate_tables_new_db.sh) '
                  f'for the app to work.')

if not database_exists(app.config['SQLALCHEMY_EXAMPLE_CUSTOMER_DB_URI']):
    create_database(app.config['SQLALCHEMY_EXAMPLE_CUSTOMER_DB_URI'],
                    encoding='utf8mb4')
    logging.error(f'The example database has just been created, the tables '
                  f'still need to be created (run reate_tables_new_db.sh) '
                  f'for the app to work.')

with app.app_context():
    try:
        load_customer_db_config(app)
    except ProgrammingError:
Example 47
def create_connector(json_conf, logger=None):
    """Creator function for the database connection. It necessitates the following information from
    the json_conf dictionary:

    - dbtype (one of sqlite, mysql, postgresql)
    - db (name of the database file, for sqlite, otherwise name of the database)

    If the database is MySQL/PostGreSQL, the method also requires:

    - dbuser
    - dbhost
    - dbpasswd
    - dbport

    These are controlled and added automatically by the json_utils functions.

    :param json_conf: configuration dictionary
    :type json_conf: dict

    :param logger: a logger instance
    :type logger: logging.Logger

    :rtype : MySQLdb.connect | sqlite3.connect | psycopg2.connect

    """

    if logger is None:
        # Create a default null handler
        logger = logging.Logger("null")
        logger.addHandler(logging.NullHandler())

    db_settings = json_conf["db_settings"]

    func = None
    if db_settings["dbtype"] == "sqlite":
        if not database_exists("sqlite:///{}".format(db_settings["db"])):
            logger.warning("No database found, creating a mock one!")
            create_database("sqlite:///{}".format(db_settings["db"]))
        if json_conf["pick"]["run_options"]['shm'] is False:
            logger.debug("Connecting to %s", db_settings["db"])
            func = sqlite3.connect(database=db_settings["db"],
                                   check_same_thread=False)
        else:
            logger.debug("Connecting to %s",
                         json_conf["pick"]["run_options"]["shm_db"])

            func = sqlite3.connect(
                database=json_conf["pick"]["run_options"]["shm_db"],
                check_same_thread=False,
                isolation_level=None)
    elif db_settings["dbtype"] in ("mysql", "postgresql"):
        if db_settings["dbpasswd"] != '':
            passwd = ":{0}".format(db_settings["dbpasswd"])
        else:
            passwd = ''
        url = "{dialect}://{user}{passwd}@{host}:{port}/{db}".format(
            dialect=db_settings["dbtype"],
            host=db_settings["dbhost"],
            user=db_settings["dbuser"],
            passwd=passwd,
            db=db_settings["db"],
            port=db_settings["dbport"])
        if database_exists(url) is False:
            create_database(url)

        if db_settings["dbtype"] == "mysql":
            import MySQLdb
            logger.debug("Connecting to MySQL %s", db_settings["db"])
            func = MySQLdb.connect(host=db_settings["dbhost"],
                                   user=db_settings["dbuser"],
                                   passwd=db_settings["dbpasswd"],
                                   db=db_settings["db"],
                                   port=db_settings["dbport"])
        elif db_settings["dbtype"] == "postgresql":
            import psycopg2
            logger.debug("Connecting to PSQL %s", db_settings["db"])
            func = psycopg2.connect(host=db_settings["dbhost"],
                                    user=db_settings["dbuser"],
                                    password=db_settings["dbpasswd"],
                                    database=db_settings["db"],
                                    port=db_settings["dbport"])
    else:
        raise ValueError("DB type not supported! {0}".format(
            db_settings["dbtype"]))
    return func
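A hypothetical call showing the minimal json_conf shape the function reads on the SQLite branch (the key names follow the docstring above; the path is made up):

json_conf = {
    "db_settings": {"dbtype": "sqlite", "db": "/tmp/example.db"},
    "pick": {"run_options": {"shm": False}},
}
conn = create_connector(json_conf)  # a sqlite3 connection in this case
conn.close()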
Esempio n. 48
0
    def __init__(self, config, db_type=TYPE_MYSQL):
        self.conf = config

        self.user = self.conf["sqlUser"]
        self.passwd = self.conf["sqlPass"]
        self.host = "127.0.0.1"
        self.database = "MoltenIron"
        self.db_type = db_type

        engine = None
        try:
            # Does the database exist?
            engine = self.create_engine()
            c = engine.connect()
            c.close()
        except (OperationalError, InternalError) as e:
            if DEBUG:
                print("Database:__init__: Caught %s" % (e, ))
            # engine.connect will throw sqlalchemy.exc exception
            if isinstance(e, InternalError):
                (num, msg) = e.orig.args
                if DEBUG:
                    print("Database:__init__: (%d,%s)" % (
                        num,
                        msg,
                    ))
                if num != 1049 or msg != "Unknown database 'MoltenIron'":
                    # If it is not the above then reraise it!
                    raise
            # It does not! Create it.
            # CREATE DATABASE MoltenIron;
            sqlalchemy_utils.create_database(engine.url)
            engine = self.create_engine()
            c = engine.connect()
            c.close()
        self.engine = engine

        self.create_metadata()

        self.blob_status = {
            "element_info": [
                # The following are returned from the query call

                # field_name length special_fmt skip
                ("id", 4, int, False),
                ("name", 6, str, False),
                ("ipmi_ip", 16, str, False),
                ("blob", 40, str, False),
                ("status", 8, str, False),
                ("provisioned", 40, str, False),
                # We add timeString
                ("time", 14, float, False),
                ("node_pool", 19, str, False),
            ]
        }
        self.baremetal_status = {
            "element_info": [
                # The following are returned from the query call

                # field_name length special_fmt skip
                ("id", 4, int, False),
                ("name", 6, str, False),
                ("ipmi_ip", 9, str, False),
                ("ipmi_user", 11, str, False),
                ("ipmi_password", 15, str, False),
                ("port_hwaddr", 19, str, False),
                ("cpu_arch", 10, str, False),
                ("cpus", 6, int, False),
                ("ram_mb", 8, int, False),
                ("disk_gb", 9, int, False),
                ("status", 8, str, False),
                ("provisioned", 40, str, False),
                # We add timeString
                ("time", 14, float, False),
                ("node_pool", 19, str, False),
            ]
        }

        # Pass map by reference but update our copy with the new information
        self.baremetal_status = self.setup_status(**self.baremetal_status)
        self.blob_status = self.setup_status(**self.blob_status)
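The error-code inspection above (MySQL error 1049, "Unknown database") predates the sqlalchemy_utils helpers used elsewhere on this page; the same create-if-missing step can be written without parsing driver errors. A sketch, assuming the same engine URL:

from sqlalchemy_utils import database_exists, create_database

def ensure_database(engine_url):
    # create_database raises if the database already exists, so guard it
    if not database_exists(engine_url):
        create_database(engine_url)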
Esempio n. 49
0
import time

from flask import Flask
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy_utils import database_exists, create_database
from common.config import UPLOAD_FOLDER
from common.config import META_DATABASE_ENDPOINT
from common.config import DEFAULT_RUNTIME
from operators.runtime import runtime_client_getter

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['SQLALCHEMY_DATABASE_URI'] = META_DATABASE_ENDPOINT

for i in range(5):
    try:
        if not database_exists(META_DATABASE_ENDPOINT):
            create_database(META_DATABASE_ENDPOINT)
        # break outside the if: an already-existing database should not
        # trigger four pointless retries
        break
    except Exception as e:
        print(e)
        print(
            f"count {i+1}; retry to connect to database {META_DATABASE_ENDPOINT}"
        )
        time.sleep(3)

db = SQLAlchemy(app)  # binding to app here makes a separate init_app redundant
# db.create_all()
CORS(app)

runtime_client = runtime_client_getter(DEFAULT_RUNTIME)
Esempio n. 50
0
def _test_db_fixture_implementation(test_db_uri: str):
    engine = create_engine(test_db_uri)
    create_database(engine.url)
    yield
    drop_database(engine.url)
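Hooked up as an actual pytest fixture, the helper above would look roughly like this (the fixture name and the test_db_uri fixture it depends on are assumptions):

import pytest
from sqlalchemy import create_engine
from sqlalchemy_utils import create_database, drop_database

@pytest.fixture
def test_db(test_db_uri):
    engine = create_engine(test_db_uri)
    create_database(engine.url)  # fresh database before the test runs
    yield engine
    drop_database(engine.url)    # torn down whether the test passes or fails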
Esempio n. 51
0
def create_or_verify_database(url,
                              galaxy_config_file,
                              engine_options={},
                              app=None,
                              map_install_models=False):
    """
    Check that the database is usable, possibly creating it if empty (this is
    the only time we automatically create tables, otherwise we force the
    user to do it using the management script so they can create backups).

    1) Empty database --> initialize with latest version and return
    2) Database older than migration support --> fail and require manual update
    3) Database at state where migrate support introduced --> add version control information but make no changes (might still require manual update)
    4) Database versioned but out of date --> fail with informative message, user must run "sh manage_db.sh upgrade"
    """
    # Create the base database if it doesn't yet exist.
    new_database = not database_exists(url)
    if new_database:
        template = app and getattr(app.config, "database_template", None)
        encoding = app and getattr(app.config, "database_encoding", None)
        create_kwds = {}

        message = "Creating database for URI [%s]" % url
        if template:
            message += " from template [%s]" % template
            create_kwds["template"] = template
        if encoding:
            message += " with encoding [%s]" % encoding
            create_kwds["encoding"] = encoding
        log.info(message)
        create_database(url, **create_kwds)

    # Create engine and metadata
    engine = create_engine(url, **engine_options)

    def migrate():
        try:
            # Declare the database to be under a repository's version control
            db_schema = schema.ControlledSchema.create(engine,
                                                       migrate_repository)
        except Exception:
            # The database is already under version control
            db_schema = schema.ControlledSchema(engine, migrate_repository)
        # Apply all scripts to get to current version
        migrate_to_current_version(engine, db_schema)

    def migrate_from_scratch():
        log.info("Creating new database from scratch, skipping migrations")
        current_version = migrate_repository.version().version
        mapping.init(file_path='/tmp',
                     url=url,
                     map_install_models=map_install_models,
                     create_tables=True)
        schema.ControlledSchema.create(engine,
                                       migrate_repository,
                                       version=current_version)
        db_schema = schema.ControlledSchema(engine, migrate_repository)
        assert db_schema.version == current_version
        migrate()
        if app:
            # skips the tool migration process.
            app.new_installation = True

    meta = MetaData(bind=engine)
    if new_database:
        migrate_from_scratch()
        return
    elif app and getattr(app.config, 'database_auto_migrate', False):
        migrate()
        return

    # Try to load dataset table
    try:
        Table("dataset", meta, autoload=True)
    except NoSuchTableError:
        # No 'dataset' table means a completely uninitialized database.
        log.info("No database, initializing")
        migrate_from_scratch()
        return
    try:
        hda_table = Table("history_dataset_association", meta, autoload=True)
    except NoSuchTableError:
        raise Exception(
            "Your database is older than hg revision 1464:c7acaa1bb88f and will need to be updated manually"
        )
    # There is a 'history_dataset_association' table, so we (hopefully) have
    # version 1 of the database, but without the migrate_version table. This
    # happens if the user has a build from right before migration was added.
    # Verify that this is true; if it is any older they'll have to update
    # manually.
    if 'copied_from_history_dataset_association_id' not in hda_table.c:
        # The 'copied_from_history_dataset_association_id' column was added in
        # rev 1464:c7acaa1bb88f.  This is the oldest revision we currently do
        # automated versioning for, so stop here
        raise Exception(
            "Your database is older than hg revision 1464:c7acaa1bb88f and will need to be updated manually"
        )
    # At revision 1464:c7acaa1bb88f or greater (database version 1), make sure
    # that the db has version information. This is the trickiest case -- we
    # have a database but no version control, and are assuming it is a certain
    # version. If the user has post-version-1 changes this could cause
    # problems.
    try:
        Table("migrate_version", meta, autoload=True)
    except NoSuchTableError:
        # The database exists but is not yet under migrate version control, so init with version 1
        log.info("Adding version control to existing database")
        try:
            Table("metadata_file", meta, autoload=True)
            schema.ControlledSchema.create(engine,
                                           migrate_repository,
                                           version=2)
        except NoSuchTableError:
            schema.ControlledSchema.create(engine,
                                           migrate_repository,
                                           version=1)
    # Verify that the code and the DB are in sync
    db_schema = schema.ControlledSchema(engine, migrate_repository)
    if migrate_repository.versions.latest != db_schema.version:
        config_arg = ''
        if galaxy_config_file and os.path.abspath(
                os.path.join(os.getcwd(), 'config',
                             'galaxy.ini')) != galaxy_config_file:
            config_arg = ' -c %s' % galaxy_config_file.replace(
                os.path.abspath(os.getcwd()), '.')
        expect_msg = "Your database has version '%d' but this code expects version '%d'" % (
            db_schema.version, migrate_repository.versions.latest)
        instructions = ""
        if db_schema.version > migrate_repository.versions.latest:
            instructions = "To downgrade the database schema you have to checkout the Galaxy version that you were running previously. "
            cmd_msg = "sh manage_db.sh%s downgrade %d" % (
                config_arg, migrate_repository.versions.latest)
        else:
            cmd_msg = "sh manage_db.sh%s upgrade" % config_arg
        backup_msg = "Please backup your database and then migrate the database schema by running '%s'." % cmd_msg
        raise Exception("%s. %s%s" % (expect_msg, instructions, backup_msg))
    else:
        log.info("At database version %d" % db_schema.version)
Esempio n. 52
0
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_sqlalchemy import SQLAlchemy

from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists, create_database

from kazoo.client import KazooClient
from kazoo.exceptions import NodeExistsError, ConnectionLossException

import json

import constant  # project-local config module providing the URL parts below

db_engine = create_engine(constant.DATABASE_BASE_URL + constant.DATABASE_NAME)
if not database_exists(db_engine.url):
    create_database(db_engine.url)


def registerUserManagementService(host, port):
    try:
        zk = KazooClient(hosts='zookeeper', read_only=True)
        zk.start()
        path = '/user_management'
        data = json.dumps({'host': host, 'port': port}).encode('utf-8')

        zk.create(path, value=data, ephemeral=True, makepath=True)
        print('user_management service is running on ' + path + ':' +
              str(port))
    except NodeExistsError:
        print('Node already exists in zookeeper')
    except ConnectionLossException:
Esempio n. 53
0
def createdb():
    if not database_exists(URI):
        create_database(URI)
    else:
        print("The database already exists")
    db.create_all()
    return redirect(url_for("main.index"))
Esempio n. 54
0
app.static_path = path.join(path.dirname(path.abspath(__file__)), 'static')

app.config['SQLALCHEMY_DATABASE_URI'] = DB.SQLALCHEMY_DATABASE_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = DB.SQLALCHEMY_TRACK_MODIFICATIONS
app.config['SECRET_KEY'] = DB.SECRET_KEY

Bootstrap(app)

db.init_app(app)
make_searchable(db.metadata)

with app.test_request_context():
    if not database_exists(DB.SQLALCHEMY_DATABASE_URI):
        create_database(DB.SQLALCHEMY_DATABASE_URI)
    from auth.models import User, WorkingTimeRecord, LeaveApplication
    from invoices.models import Products, Customers, Invoices, Basket, Quantities, Suppliers, Orders
    sa.orm.configure_mappers()
    db.create_all()
    db.session.commit()
    if not User.query.filter_by(username='******').first():
        init_admin()


from home.views import home_blueprint
from auth.views import auth_blueprint
from invoices.views import invoices_blueprint
from stock.views import stock_blueprint

Esempio n. 55
0
    def create_database(self):
        """Create the database at ``self.engine.url``."""
        create_database(self.engine.url)
        logging.debug("Database created!")
Esempio n. 56
0
    # Initialize Flask app
    app = Flask(__name__)

    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SQLALCHEMY_DATABASE_URI'] = DB_URL

    with app.app_context():
        db.init_app(app)

        if reset_db:
            if database_exists(DB_URL):
                print('Deleting database.')
                drop_database(DB_URL)
            if not database_exists(DB_URL):
                print('Creating database.')
                create_database(DB_URL)
            db.create_all()
            db.session.commit()
        else:
            db.create_all()
            db.session.commit()

        user_data = [
            ('*****@*****.**', 'Jack Smith', sha256_crypt.hash('password1')),
            ('*****@*****.**', 'Jane Doe', sha256_crypt.hash('password2')),
            ('*****@*****.**', 'Bubble Owner', sha256_crypt.hash('bubbles_pass')),
            ('*****@*****.**', 'Ron LaFlamme', sha256_crypt.hash('moneymoneymoney')),
            ('*****@*****.**', 'Sarah Example', sha256_crypt.hash('imastudent')),
            ('*****@*****.**', 'Coney Owner', sha256_crypt.hash('coneyconey1234')),
        ]
Esempio n. 57
0
def create(uri):
    log.debug(f'Creating database at {uri}')
    create_database(uri)
Esempio n. 58
0
    def recreate_pg(self):
        if database_exists(self.conn):
            drop_database(self.conn)
        create_database(self.conn)
Esempio n. 59
0
def db_engine(base_app):
    test_db_engine = create_engine(base_app.config['SQLALCHEMY_DATABASE_URI'])
    if not database_exists(test_db_engine.url):
        create_database(test_db_engine.url)
    yield test_db_engine
    drop_database(test_db_engine.url)
Esempio n. 60
0
    def start_session(self):
        create_if_missing = self._source.create_if_missing
        is_database_missing = lambda: not database_exists(self._source.url)
        if create_if_missing and is_database_missing():
            create_database(self._source.url)
        self._session = self._SessionClass()
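Finally, stripped of the class plumbing, this lazy create-on-first-session pattern reduces to a few lines (the URL and flag are stand-ins for the _source attributes):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists, create_database

url = "sqlite:///app.db"  # stand-in for self._source.url
create_if_missing = True  # stand-in for self._source.create_if_missing

if create_if_missing and not database_exists(url):
    create_database(url)
session = sessionmaker(bind=create_engine(url))()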