Example 1
def create_db():
    """
    Create the database (if missing) and its schemas.

    return -- The database engine connection from sqlalchemy
    """

    if not database_exists(DB_URI):
        create_database(DB_URI)

    engine = create_engine(DB_URI)
    conn = engine.connect()
    for schema in DB_SCHEMAS:
        if not conn.dialect.has_schema(conn, schema=schema):
            engine.execute(CreateSchema(schema))
    conn.close()

    return engine
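Note: this example, like most on this page, uses the legacy 1.x engine.execute() API, which SQLAlchemy 2.0 removed. A minimal sketch of the same flow under 2.0 (an assumption; sqlalchemy_utils still supplies the database check, and db_uri / db_schemas stand in for the DB_URI and DB_SCHEMAS constants above):

from sqlalchemy import create_engine
from sqlalchemy.schema import CreateSchema
from sqlalchemy_utils import create_database, database_exists

def create_db_2x(db_uri, db_schemas):
    # Create the database itself if it does not exist yet.
    if not database_exists(db_uri):
        create_database(db_uri)
    engine = create_engine(db_uri)
    # DDL now runs on a connection inside an explicit transaction block.
    with engine.begin() as conn:
        for schema in db_schemas:
            # if_not_exists was added to CreateSchema in SQLAlchemy 2.0,
            # replacing the manual has_schema() check.
            conn.execute(CreateSchema(schema, if_not_exists=True))
    return engine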
Example 2
def mssql_setup_and_teardown(request):
    # create database
    with catch_warnings():
        simplefilter("ignore", category=SAWarning)
        master_engine = engine_from(request.config)
        with master_engine.connect() as connection:
        connection = connection.execution_options(isolation_level="AUTOCOMMIT")
            connection.execute(f'CREATE DATABASE {TEST_DB_NAME}')
    # create schema and tables
    engine = engine_from(request.config, database=TEST_DB_NAME)
    with engine.connect() as connection:
        connection.execute(CreateSchema('store'))
        connection.execute(CreateSchema('report'))
        Base.metadata.create_all(engine)
        with database_cwd():
            # create index and procedure
            # use context managers so the SQL files are closed after reading
            with open('index.sql', 'r', encoding='utf-8') as f:
                connection.execute(text(f.read()))
            with open('procedure.sql', 'r', encoding='utf-8') as f:
                connection.execute(text(f.read()))
            # add extended properties with ahjo
            ahjo.update_db_object_properties(engine, ['store', 'report'])
    engine.dispose()
    yield
    # drop database
    with master_engine.connect() as connection:
        connection = connection.execution_options(isolation_level="AUTOCOMMIT")
        result = connection.execute(
            'SELECT session_id FROM sys.dm_exec_sessions WHERE database_id = DB_ID(?)',
            (TEST_DB_NAME, ))
        for row in result.fetchall():
            connection.execute(f'KILL {row.session_id}')
        connection.execute(f'DROP DATABASE {TEST_DB_NAME}')
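CREATE DATABASE cannot run inside a transaction on SQL Server, hence the AUTOCOMMIT switch above. On SQLAlchemy 1.4+, where Connection.execute() no longer accepts plain strings, the same step is usually written with exec_driver_sql(); a minimal sketch reusing the example's names:

# Sketch (SQLAlchemy 1.4+): AUTOCOMMIT keeps CREATE DATABASE out of the
# implicit transaction; exec_driver_sql() sends the raw string to the driver.
with master_engine.connect() as connection:
    connection = connection.execution_options(isolation_level="AUTOCOMMIT")
    connection.exec_driver_sql(f"CREATE DATABASE {TEST_DB_NAME}")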
Example 3
def attach_schema(obj, schema):
    if schema is not None:
        ddl = CreateSchema(schema, quote=True)
        event.listen(
            obj, 'before_create',
            ddl.execute_if(callable_=should_create_schema,
                           dialect='postgresql'))
    return obj
Example 4
def attach_schema(obj, schema):
    if schema is not None:
        ddl = CreateSchema(schema, quote=True)
        event.listen(obj,
                     'before_create',
                     ddl.execute_if(callable_=should_create_schema,
                                    dialect='postgresql'))
    return obj
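Examples 3 and 4 show the same pattern from two projects: execute_if defers the CREATE SCHEMA until the before_create event fires, and only on PostgreSQL. The should_create_schema callable itself is not shown in either example; a plausible sketch of a compatible predicate, assuming the PostgreSQL dialect's has_schema() helper that Example 1 also relies on:

def should_create_schema(ddl, target, connection, **kw):
    # ddl.element holds the schema name passed to CreateSchema; only emit
    # the DDL when that schema is not already present.
    return not connection.dialect.has_schema(connection, ddl.element)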
Example 5
def setup_schema(engine, schema):
    with engine.connect() as conn:
        has_schema = conn.execute(
            text("SELECT schema_name FROM information_schema.schemata "
                 "WHERE schema_name = :name"),
            {"name": schema})

        # Create the schema first if it is missing, so the cascading drop
        # below cannot fail on a nonexistent schema.
        if not has_schema.scalar():
            conn.execute(CreateSchema(schema))

        # Drop and recreate to guarantee an empty schema.
        conn.execute(DropSchema(schema, cascade=True))
        conn.execute(CreateSchema(schema))
        pretty_print(f"Created Schema {schema}", True)
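The create-then-drop dance above only exists to make the unconditional drop safe. Under SQLAlchemy 2.0 (an assumption; the example itself is 1.x-style) the same reset collapses into the if_exists flag, which Example 9 below also uses:

# Sketch: reset a schema to empty on SQLAlchemy 2.0.
with engine.begin() as conn:
    conn.execute(DropSchema(schema, cascade=True, if_exists=True))
    conn.execute(CreateSchema(schema))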
Example 6
    def init_store(self):
        """
        Runs on server with super user privileges
        """
        if self.schema:
            # User
            try:
                self._execute("CREATE USER %s WITH PASSWORD '%s'" % (self.schema,
                    self.password))
            except Exception as e:
                self.session.rollback()
                if "already exists" in repr(e):
                    logger.info("Could not create user '%s', already exists." % self.schema)
                    logger.info("Update password anyway for user '%s'" % self.schema)
                    self._execute("ALTER USER %s WITH PASSWORD '%s'" % (self.schema, self.password))
                else:
                    logger.exception("Could not create user '%s'." % self.schema)

            # Schema
            try:
                self.engine.execute(CreateSchema(self.schema))
                logger.info("Schema created")
            except Exception as e:
                self.session.rollback()
                if "already exists" in repr(e):
                    logger.info("Could not create schema '%s', already exists." % self.schema)
                else:
                    logger.exception("Could not create schema '%s'." % self.schema)
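Interpolating the schema name straight into CREATE USER is injection-prone even in a superuser-only setup script. A sketch of quoting the role name with the dialect's identifier preparer, reusing the example's self.engine and self._execute (the password still has to be spliced in, since this DDL does not accept bind parameters for it):

# Sketch: quote the role name instead of raw %-interpolation.
preparer = self.engine.dialect.identifier_preparer
self._execute("CREATE USER %s WITH PASSWORD '%s'"
              % (preparer.quote(self.schema), self.password))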
Example 7
    def __init__(self, dbconn, table_def=None, DbSchema_overide=None):
        assert isinstance(dbconn, DbConn)
        if table_def is None:
            self.table = CoinTbl
        else:
            self.table = table_def

        if DbSchema_overide is not None:
            # pprint.pprint((self.table.__dict__))
            self.table.DbSchema = DbSchema_overide
            self.table.__table_args__['schema'] = self.table.DbSchema

        self.engine = create_engine(dbconn.get_conn_url())
        try:
            self.engine.execute(CreateSchema(self.table.DbSchema))
            logging.debug("Creating Database Schema: {}".format(
                self.table.DbSchema))
        except Exception:
            # schema already exists; no need to create it
            pass

        # create tables
        assert isinstance(MetaBase,
                          sqlalchemy.ext.declarative.api.DeclarativeMeta)
        MetaBase.metadata.create_all(bind=self.engine)

        # create session
        Session = sessionmaker()
        Session.configure(bind=self.engine)
        self.session = Session()

        # reflecting whole schema
        self.metadata = MetaData()
        self.metadata.reflect(bind=self.engine)
Example 8
    def __init__(
        self,
        db,
        store,
        read=True,
        write=True,
        read_through_write=True,
        delete=True,
        create_db=False,
        schema=None,
        create_schema=True,
        persistent_connections=True,
    ):
        upgrade_db = False
        super(PostgresRepo,
              self).__init__(read=read,
                             write=write,
                             read_through_write=read_through_write,
                             delete=delete)

        if not isinstance(db, str) and schema is not None:
            raise ValueError('You can only provide a schema with a DB url.')

        init_db = False
        if create_db and isinstance(db, str):
            _create_db_if_needed(db)
            init_db = True
            upgrade_db = False

        self._run = None
        if isinstance(db, str):
            if create_db:
                init_db = True

            self._db_engine = _db_engine(db, schema, persistent_connections)
            self._sessionmaker = sqlalchemy.orm.sessionmaker(
                bind=self._db_engine)
        else:
            self._session = db

        if create_schema and schema is not None:
            with self.session() as session:
                q = sa.exists(
                    sa.select([sa.text('schema_name')]).select_from(
                        sa.text('information_schema.schemata')).where(
                            sa.text('schema_name = :schema').bindparams(
                                schema=schema)))
                if not session.query(q).scalar():
                    session.execute(CreateSchema(schema))
                    session.commit()
                    init_db = True
                    upgrade_db = False

        if init_db:
            self._db_init()

        if upgrade_db:
            self._db_upgrade()

        self.blobstore = store
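The hand-written information_schema.schemata query above is one way to test for a schema; the inspection API gives the same answer without raw SQL. A minimal, dialect-agnostic sketch (ensure_schema is a hypothetical helper name, not part of the example):

import sqlalchemy as sa
from sqlalchemy.schema import CreateSchema

def ensure_schema(engine, schema):
    # Inspector.get_schema_names() lists existing schemas on any dialect.
    if schema not in sa.inspect(engine).get_schema_names():
        with engine.begin() as conn:
            conn.execute(CreateSchema(schema))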
Example 9
def temporary_testing_schema(engine_with_types):
    schema = TEST_SCHEMA
    with engine_with_types.begin() as conn:
        conn.execute(CreateSchema(schema))
    yield engine_with_types, schema
    with engine_with_types.begin() as conn:
        conn.execute(DropSchema(schema, cascade=True, if_exists=True))
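Note that the if_exists keyword on DropSchema requires SQLAlchemy 2.0. A sketch of how a test might consume this fixture, assuming pytest and that the fixture yields an (engine, schema) tuple as shown:

def test_schema_exists(temporary_testing_schema):
    from sqlalchemy import inspect
    engine, schema = temporary_testing_schema
    # The schema should be visible while the fixture is live.
    assert schema in inspect(engine).get_schema_names()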
Example 10
def create_schema_with_engine(engine, schema_name):
    """
    Creates a schema via the engine rather than a cursor.
    This is not dialect-dependent; it uses SQLAlchemy and an engine.
    First checks if schema_name is None: if so, it skips everything and returns True.
    Then checks if the schema already exists: if it does, it won't try to recreate it.
    If it does not already exist, it attempts to create it.
    Returns True if the schema exists at the end.
    Returns False if errors were encountered.
    """
    result = False
    try:
        if schema_name is None:
            print("Skipping schema creation, as schema_name set to {}".format(
                schema_name))
            result = True
            return result

        schema_exists = check_if_schema_exists_with_engine(engine, schema_name)
        if schema_exists:
            print(
                "Schema name {} already exists, do not need to create".format(
                    schema_name))
            result = True
        else:
            print("Attempting to create schema: {}".format(schema_name))
            engine.execute(CreateSchema(schema_name))
            schema_exists = check_if_schema_exists_with_engine(
                engine, schema_name)
            result = schema_exists
        return result
    except Exception:
        traceback.print_exc()
        return result
Example 11
def postgres_test_db():
    """
    Fixture to yield a PostgreSQL database to be used in the unit tests.
    The database will only be created once per test session, but the
    etl_records table will be cleared for each test.
    """
    postgres_test_db = testing.postgresql.Postgresql()
    engine = create_engine(postgres_test_db.url())

    from sqlalchemy.schema import CreateSchema
    from sqlalchemy_utils import database_exists, create_database

    if not database_exists(engine.url):
        create_database(engine.url)
    engine.execute(CreateSchema("etl"))
    Base.metadata.create_all(bind=engine, tables=[ETLRecord.__table__])
    engine.execute("""
        CREATE TABLE IF NOT EXISTS mds_raw_data_dump (
            imei TEXT,
            msisdn TEXT,
            event_time TIMESTAMPTZ,
            cell_id TEXT
        );
        """)

    yield postgres_test_db
    postgres_test_db.stop()
Example 12
def app(base_app):
    """Flask application fixture.

    Scope: function

    This fixture offers a Flask application with a database connection already
    set up and all the models created. When finished, it deletes all models.

    .. code-block:: python

        def create_ninja_turtle():
            with app.test_client() as client:
                somedata = 'ninja turtle'
                res = client.post(url_for('api.create_object'),
                                  content_type='application/json',
                                  data=json.dumps(somedata))

                assert res.status_code == 200

    """
    from reana_db.database import Session
    from reana_db.models import Base, Resource

    engine = create_engine(base_app.config["SQLALCHEMY_DATABASE_URI"])
    base_app.session.bind = engine
    with base_app.app_context():
        if not database_exists(engine.url):
            create_database(engine.url)
        if not engine.dialect.has_schema(engine, "__reana"):
            engine.execute(CreateSchema("__reana"))
        Base.metadata.create_all(bind=engine)
        Resource.initialise_default_resources()
        yield base_app
        Session.close()  # close hanging connections
        Base.metadata.drop_all(bind=engine)
Example 13
def create():
    def schema_missing(name):
        # Wrap the raw fragments in text() and bind the schema name as a
        # parameter; the original compared against the literal column SCHEMA.
        query = select([text("schema_name")]).select_from(
            text("information_schema.schemata")).where(
                text("schema_name = :name").bindparams(name=name))
        return not session.query(exists(query)).scalar()

    if schema_missing(SCHEMA):
        engine.execute(CreateSchema(SCHEMA))

    if schema_missing('raw'):
        engine.execute(CreateSchema('raw'))

    Base.metadata.create_all(engine)
Example 14
def main():
    from vdv2pg.parser import Parser

    args = get_argument_parser(__doc__).parse_args()
    engine, metadata = configure(args)
    parser = Parser(metadata, args.schema)

    conn = engine.connect()
    with conn.begin():
        if not args.append:
            try:
                conn.execute(CreateSchema(args.schema))
            except ProgrammingError as e:
                logger.warning("Could not create schema '%s': %s", args.schema,
                               e)
                exit(1)
        for filename in args.input_file:
            try:
                parser.parse(conn, filename)
            except Exception:
                logger.exception("Failed to ingest %s", filename)
                exit(1)

        if args.post_ingest_script:
            try:
                execute_sql_script(conn, args.post_ingest_script, args.schema)
            except Exception:
                logger.exception("Failed to execute %s",
                                 args.post_ingest_script)
                exit(1)
Example 15
async def create_dataset(
    *,
    dataset: str = Depends(dataset_dependency),
    request: DatasetCreateIn,
    is_authorized: bool = Depends(is_admin),
    response: Response,
) -> DatasetResponse:
    """Create or update a dataset."""

    input_data: Dict = request.dict(exclude_none=True, by_alias=True)

    try:
        new_dataset: ORMDataset = await datasets.create_dataset(
            dataset, **input_data)
    except RecordAlreadyExistsError as e:
        raise HTTPException(status_code=400, detail=str(e))

    await db.status(CreateSchema(dataset))
    await db.status(f"GRANT USAGE ON SCHEMA {dataset} TO {READER_USERNAME};")
    await db.status(
        f"ALTER DEFAULT PRIVILEGES IN SCHEMA {dataset} GRANT SELECT ON TABLES TO {READER_USERNAME};"
    )
    response.headers["Location"] = f"/{dataset}"

    return await _dataset_response(dataset, new_dataset)
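The GRANT statements splice the dataset name into the SQL directly; if a dataset name ever needed quoting, a dialect's identifier preparer could wrap it. A hedged sketch using plain SQLAlchemy quoting (db.status() remains the async API used above):

# Sketch: quote the schema identifier before building the GRANT string.
from sqlalchemy.dialects.postgresql import dialect as pg_dialect

quoted = pg_dialect().identifier_preparer.quote(dataset)
await db.status(f"GRANT USAGE ON SCHEMA {quoted} TO {READER_USERNAME};")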
Example 16
def init_database(engine):
    """Initializes a the database."""
    if not database_exists(str(config.SQLALCHEMY_DATABASE_URI)):
        create_database(str(config.SQLALCHEMY_DATABASE_URI))

    schema_name = "dispatch_core"
    if not engine.dialect.has_schema(engine, schema_name):
        with engine.connect() as connection:
            connection.execute(CreateSchema(schema_name))

    tables = get_core_tables()

    Base.metadata.create_all(engine, tables=tables)

    version_schema(script_location=config.ALEMBIC_CORE_REVISION_PATH)
    setup_fulltext_search(engine, tables)

    # set up any required database functions
    session = sessionmaker(bind=engine)
    db_session = session()

    # default organization
    organization = Organization(
        name="default",
        slug="default",
        default=True,
        description="Default dispatch organization.",
    )

    db_session.add(organization)
    db_session.commit()

    init_schema(engine=engine, organization=organization)
Example 17
def create_schema(schema, engine):
    """
    This method creates a Postgres schema.
    """
    if schema not in get_all_schemas(engine):
        with engine.begin() as connection:
            connection.execute(CreateSchema(schema))
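get_all_schemas is a project helper that is not shown here; one plausible implementation on top of the inspection API, as a hedged sketch:

from sqlalchemy import inspect

def get_all_schemas(engine):
    # Return the names of all schemas visible through this engine.
    return inspect(engine).get_schema_names()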
Example 18
    def run(self):
        engine = create_engine(
            'postgresql://%s:%s@%s/%s' %
            (self.user, self.password, self.host, self.database))

        try:
            engine.execute(CreateSchema(parameters().exp_name))
        except Exception:  # should catch psycopg2.ProgrammingError, but that doesn't work
            pass

        pandas_files = [
            pd.read_table(self.input()[name].path,
                          skiprows=2,
                          index_col=0,
                          names=[
                              'Gene', 'Chr', 'Start', 'End', 'Strand',
                              'Length', name
                          ],
                          usecols=['Gene', name],
                          header=None) for name in self.input()
        ]
        count_table = pd.concat(pandas_files, axis=1).sort_index(axis=1)
        count_table.to_csv("%s/%s.csv" % (parameters().exp_dir, self.table))
        count_table.to_sql(self.table,
                           con=engine,
                           schema=parameters().exp_name)

        # Taken from luigi source code, makes marker table and adds entry
        self.output().create_marker_table()
        connection = self.output().connect()
        self.output().touch(connection)
        connection.commit()
        connection.close()
Example 19
    def __init__(self, import_base, revision_date, has_sql_context=False, force_fetch=True):
        self.amplicon_code_names = {}  # mapping from dirname to amplicon ontology
        self._engine = make_engine()
        self._create_extensions()
        self._session = sessionmaker(bind=self._engine)()
        self._import_base = import_base
        self._methodology = 'v1'
        self._analysis_url = ''
        self._revision_date = revision_date
        self._has_sql_context = has_sql_context
        self._force_fetch = force_fetch

        # these are used exclusively for reporting back to CSIRO on the state of the ingest
        self.sample_metadata_incomplete = set()
        self.sample_non_integer = set()
        self.sample_not_in_metadata = set()

        self.otu_invalid = set()

        try:
            self._session.execute(DropSchema(SCHEMA, cascade=True))
        except sqlalchemy.exc.ProgrammingError:
            self._session.invalidate()
        self._session.execute(CreateSchema(SCHEMA))
        self._session.commit()
        Base.metadata.create_all(self._engine)
        self.ontology_init()
Example 20
    def test_handle_events_no_triggers(self, app, db):
        with create_pgevents(app) as pg:

            class Widget(db.Model):
                __tablename__ = "widget"
                id = db.Column(db.Integer, primary_key=True)

            db.session.execute(CreateSchema("private"))
            db.session.commit()

            class Gadget(db.Model):
                __tablename__ = "gadget"
                __table_args__ = {"schema": "private"}
                id = db.Column(db.Integer, primary_key=True)

            create_all(db)

            widget_callback_called = 0

            def widget_callback(event_id, row_id, identifier):
                nonlocal widget_callback_called
                widget_callback_called += 1

            db.session.add(Gadget())
            db.session.commit()

            pg.handle_events()

            assert widget_callback_called == 0
Example 21
def create_schema(engine, schema):
    # wrap the raw fragments in text() and bind the schema name instead of
    # %-formatting it into the WHERE clause
    sql = select([text('schema_name')]).\
          select_from(text('information_schema.schemata')).\
          where(text("schema_name = :name").bindparams(name=schema))
    q = engine.execute(sql)
    if not q.fetchone():
        engine.execute(CreateSchema(schema))
Example 22
def create_schema(schema):
    """
    This method creates a Postgres schema corresponding to the application.
    """
    if not schema_exists(schema):
        with engine.begin() as connection:
            connection.execute(CreateSchema(schema))
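schema_exists is likewise a project helper not shown in the example; a sketch of one plausible shape, reusing the information_schema query the other examples lean on, with a bound parameter rather than string formatting (assumes the same module-level engine):

from sqlalchemy import text

def schema_exists(schema):
    # True if a schema with this name is already present.
    with engine.connect() as conn:
        return conn.execute(
            text("SELECT 1 FROM information_schema.schemata "
                 "WHERE schema_name = :name"),
            {"name": schema},
        ).scalar() is not None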
Example 23
    def test_listen_mixed(self, app, db):
        class Widget(db.Model):
            __tablename__ = "widget"
            id = db.Column(db.Integer, primary_key=True)

        db.session.execute(CreateSchema("private"))
        db.session.commit()

        class Gadget(db.Model):
            __tablename__ = "gadget"
            __table_args__ = {"schema": "private"}
            id = db.Column(db.Integer, primary_key=True)

        create_all(db)

        def widget_callback(row_id, identifier):
            pass

        def gadget_callback(row_id, identifier):
            pass

        with create_pgevents() as pg:
            pg.listen(Widget, {"insert"}, widget_callback)

            assert "public.widget" in pg._triggers
            assert len(pg._triggers["public.widget"]) == 1

            widget_trigger = pg._triggers["public.widget"][0]
            assert not widget_trigger.installed
            assert widget_trigger.target == Widget
            assert widget_trigger.events == {"insert"}
            assert widget_trigger.callback == widget_callback

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn, "widget")
                assert not trigger_installed_

            pg.init_app(app)

            assert widget_trigger.installed

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn, "widget")
                assert trigger_installed_

            pg.listen(Gadget, {"delete"}, gadget_callback)

            assert "private.gadget" in pg._triggers
            assert len(pg._triggers["private.gadget"]) == 1

            trigger = pg._triggers["private.gadget"][0]
            assert trigger.installed
            assert trigger.target == Gadget
            assert trigger.events == {"delete"}
            assert trigger.callback == gadget_callback

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn, "gadget", schema="private")
                assert trigger_installed_
Example 24
    def test_listens_for_mixed(self, app, db):
        with create_pgevents() as pg:

            class Widget(db.Model):
                __tablename__ = 'widget'
                id = db.Column(db.Integer, primary_key=True)

            db.session.execute(CreateSchema('private'))
            db.session.commit()

            class Gadget(db.Model):
                __tablename__ = 'gadget'
                __table_args__ = {'schema': 'private'}
                id = db.Column(db.Integer, primary_key=True)

            create_all(db)

            @pg.listens_for(Widget, {'insert'})
            def widget_callback(row_id, identifier):
                pass

            assert 'public.widget' in pg._triggers
            assert len(pg._triggers['public.widget']) == 1
            trigger = pg._triggers['public.widget'][0]
            assert not trigger.installed
            assert trigger.target == Widget
            assert trigger.events == {'insert'}
            assert trigger.callback == widget_callback

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn, 'widget')
                assert not trigger_installed_

            pg.init_app(app)

            assert trigger.installed

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn, 'widget')
                assert trigger_installed_

            @pg.listens_for(Gadget, {'delete'})
            def gadget_callback(row_id, identifier):
                pass

            assert 'private.gadget' in pg._triggers
            assert len(pg._triggers['private.gadget']) == 1

            trigger = pg._triggers['private.gadget'][0]
            assert trigger.installed
            assert trigger.target == Gadget
            assert trigger.events == {'delete'}
            assert trigger.callback == gadget_callback

            with create_connection(db, raw=True) as conn:
                trigger_installed_ = trigger_installed(conn,
                                                       'gadget',
                                                       schema='private')
                assert trigger_installed_
Example 25
    def _setup_schemas(self):
        """
        Create new schema, identify previous and drop older ones.
        It also verifies if all the data from the previous schema has been
        processed.
        """
        # Schema name for current file
        self.last_modification_date = datetime.utcfromtimestamp(
            os.stat(self.input_refids_filename).st_mtime)
        self.schema_name = self.schema_prefix + self.last_modification_date.strftime(
            "%Y%m%d_%H%M%S")

        # Create schema if needed
        existing_schema_names = Inspector.from_engine(
            self.engine).get_schema_names()
        # use list comprehensions (not filter iterators) so the membership
        # test, len() and sort() below also work under Python 3
        existing_schema_names = [
            name for name in existing_schema_names
            if name.startswith(self.schema_prefix)
        ]
        if self.schema_name not in existing_schema_names:
            self.connection.execute(CreateSchema(self.schema_name))
            filtered_existing_schema_names = existing_schema_names
        else:
            filtered_existing_schema_names = [
                name for name in existing_schema_names
                if name != self.schema_name
            ]

        # Determine previous schema name if any
        if len(filtered_existing_schema_names) > 0:
            filtered_existing_schema_names.sort(reverse=True)
            self.previous_schema_name = filtered_existing_schema_names[0]

            # Verify the data that is going to be imported is newer than the data already imported
            schema_date_fingerprint = int(
                self.schema_name.replace(self.schema_prefix,
                                         "").replace("_", ""))
            previous_schema_date_fingerprint = int(
                self.previous_schema_name.replace(self.schema_prefix,
                                                  "").replace("_", ""))
            if previous_schema_date_fingerprint >= schema_date_fingerprint:
                raise Exception(
                    "The data to be imported has a date fingerprint '{0}' equal or older than the data already in the DB '{1}'"
                    .format(self.schema_name, self.previous_schema_name))

            # Verify if all the data from the previous schema has been processed.
            self._reconstruct_previous_expanded_raw_data()
            missing = self._find_not_processed_records_from_previous_run()
            if missing:
                missing_str = ",\n".join([
                    "citing: '{}', content: '{}'".format(m[0], m[1])
                    for m in missing
                ])
                #self.logger.error("Some previous records were not processed ({} in total) and will be re-processed: {}".format(len(missing), missing_str))
                self.logger.error(
                    "Some previous records were not processed ({} in total) and will be re-processed"
                    .format(len(missing)))

            # Drop old schemas (just keep last 3)
            if len(filtered_existing_schema_names) > 2:
                for old_schema_name in filtered_existing_schema_names[2:]:
                    drop_schema = "drop schema {0} cascade;"
                    self._execute_sql(drop_schema, old_schema_name)
Example 26
def createSchema(name, bind=None):
    try:
        engine = sqla.get_engine(app, bind)
        engine.execute(CreateSchema(name))
    except sqlalchemy.exc.ProgrammingError as e:
        # schema probably already exists... do nothing
        # but just in case it's another error, print it
        print("Error creating schema {}, ignoring: {}".format(name, e))
Example 27
    def create_schema_if_not_exists(self):
        """
        Creates the schema defined in the given instance of
        PandasSpecialEngine if it does not exist.
        """
        if (self.schema is not None and
                not self.engine.dialect.has_schema(self.engine, self.schema)):
            self.engine.execute(CreateSchema(self.schema))
Example 28
def CreateSchemaByName(engine,schema_name,call_back=None):
    """创建schema,schema_name:架构名,call_back:回调函数"""
    obj = CreateSchema(schema_name)
#     print(link)
#     engine = create_engine(link,encoding = 'utf-8')
    engine.execute(obj)
    if call_back:
        call_back()
Example 29
def create_schemas(engine, schemas):
    # Create schemas
    for schema in schemas:
        try:
            engine.execute(CreateSchema(schema))
        except ProgrammingError as error:
            if 'schema "%s" already exists' % schema not in str(error):
                raise
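Matching on the error message text is brittle across server versions and locales. A sketch that matches the driver's error code instead, assuming psycopg2 (42P06 is DUPLICATE_SCHEMA):

from psycopg2 import errorcodes

try:
    engine.execute(CreateSchema(schema))
except ProgrammingError as error:
    # error.orig is the underlying psycopg2 exception.
    if getattr(error.orig, "pgcode", None) != errorcodes.DUPLICATE_SCHEMA:
        raise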
Example 30
    def __init__(self, db, table_def):
        # type: (dbutils.conn, object) -> None
        self.host = db.host
        self.dbschema = 'logging'
        self.database = db.dbname

        self.engine = None  # instance
        #assert isinstance(db, db_utils.DB)

        key = str(table_def.DbSchema + table_def.__tablename__)

        self.table = self.table_dict.get(key, None)
        if self.table is None:
            self.table_dict[key] = table_def
            self.table = self.table_dict[key]

        # call class method to make sure url attribute is set
        if self.engine_dict.get('only1', None) is None:

            #db.connect_SqlAlchemy()
            sql_alchemy_uri_connected = db.sql_alchemy_uri.format(
                userid=db.userid,
                pwd=db.pwd,
                host=db.host,
                port=db.port,
                db=db.dbname)
            self.engine_dict['only1'] = sqlalchemy.create_engine(
                sql_alchemy_uri_connected)
            self.engine = self.engine_dict['only1']

            try:
                self.engine.execute(CreateSchema(self.table.DbSchema))
                logging.debug("Creating Database Schema: {}".format(
                    self.table.DbSchema))
            except Exception:
                # schema already exists; no need to create it
                pass

            # create tables
            ""
            MetaBase.metadata.create_all(bind=self.engine)
        else:

            self.engine = self.engine_dict['only1']
        # create session

        Session = sqlalchemy.orm.sessionmaker()
        Session.configure(bind=self.engine)
        self.session = Session()

        # reflecting whole schema
        self.metadata = MetaData()
        self.metadata.reflect(bind=self.engine)
Example 31
def init_db():
    import dqm.models

    # create database schema
    # see http://stackoverflow.com/questions/13677781/getting-sqlalchemy-to-issue-create-schema-on-create-all
    event.listen(Base.metadata, 'before_create', CreateSchema('dqm'))

    # create database tables
    db.create_all()
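As written, the listener emits CREATE SCHEMA unconditionally, so a second run fails once the schema exists. On PostgreSQL 9.3+ a raw DDL string makes it idempotent; a minimal sketch:

from sqlalchemy import DDL, event

# IF NOT EXISTS keeps repeated create_all() runs from failing on the schema.
event.listen(Base.metadata, 'before_create',
             DDL('CREATE SCHEMA IF NOT EXISTS dqm'))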