Пример #1
0
def test_client(request):
    """Pytest fixture: build a Flask test client against a test schema.

    Yields the test client, then drops the ``test_expense`` schema on
    teardown (best-effort).
    """

    # Setup
    api_prefix = '/api/'
    _urls = {
        'user': api_prefix + 'user',
        'product': api_prefix + 'product'
    }

    # NOTE(review): `resources_path` is not defined in this file's visible
    # scope — presumably a module-level constant; confirm.
    request.cls.resources_path = resources_path
    request.cls.urls = _urls
    os.environ['DB_SCHEMA'] = 'test_expense'
    os.environ['DB_HOST'] = 'postgres'

    app = create_app()
    testing_client = app.test_client()

    yield testing_client

    # Teardown: drop the test schema inside the transaction opened by
    # begin().  The original called engine.execute() while discarding the
    # connection from `with engine.begin():`, so the DDL ran outside that
    # transaction; it also printed "Test Schema Deleted" only on FAILURE.
    engine = get_db_engine()
    try:
        with engine.begin() as conn:
            conn.execute(DropSchema('test_expense', cascade=True))
        print("Test Schema Deleted")
    except Exception:
        print("Test schema could not be dropped (it may not exist)")
Пример #2
0
def drop_schema(schema_name):
    """Drop the given schema (with CASCADE) over a UDL connection.

    :param schema_name: name of the schema to drop.
    """
    with get_udl_connection() as connection:
        drop_ddl = DropSchema(schema_name, cascade=True)
        connection.execute(drop_ddl)
Пример #3
0
async def delete_dataset(
        *,
        dataset: str = Depends(dataset_dependency),
        is_authorized: bool = Depends(is_admin),
) -> DatasetResponse:
    """Delete a dataset.

    By the time users are allowed to delete datasets, there should be no
    versions and assets left. So only thing beside deleting the dataset
    row is to drop the schema in the database.

    Raises HTTP 409 if versions still exist, HTTP 404 if the dataset row
    is not found.
    """

    # Refuse deletion while any version still references the dataset.
    version_rows: List[ORMVersion] = await versions.get_versions(dataset)
    if len(version_rows):
        raise HTTPException(
            status_code=409,
            detail="There are versions registered with the dataset."
            "Delete all related versions prior to deleting a dataset",
        )

    try:
        row: ORMDataset = await datasets.delete_dataset(dataset)
    except RecordNotFoundError as e:
        # Chain the original exception so the underlying cause is kept
        # in the traceback (the original dropped it).
        raise HTTPException(status_code=404, detail=str(e)) from e

    # Delete all dataset related entries
    await db.status(DropSchema(dataset))

    return await _dataset_response(dataset, row)
Пример #4
0
    def __init__(self, import_base, revision_date, has_sql_context=False, force_fetch=True):
        """Create a DB session and rebuild SCHEMA from scratch for an ingest run.

        Drops any existing schema, recreates it, and creates all mapped
        tables before initialising the ontology.
        """
        self.amplicon_code_names = {}  # mapping from dirname to amplicon ontology
        self._engine = make_engine()
        self._create_extensions()
        self._session = sessionmaker(bind=self._engine)()
        self._import_base = import_base
        self._methodology = 'v1'
        self._analysis_url = ''
        self._revision_date = revision_date
        self._has_sql_context = has_sql_context
        self._force_fetch = force_fetch

        # these are used exclusively for reporting back to CSIRO on the state of the ingest
        self.sample_metadata_incomplete = set()
        self.sample_non_integer = set()
        self.sample_not_in_metadata = set()

        self.otu_invalid = set()

        # Drop any previous schema. A ProgrammingError (e.g. the schema does
        # not exist yet) leaves the session in a failed state, so invalidate
        # it before continuing with the create.
        try:
            self._session.execute(DropSchema(SCHEMA, cascade=True))
        except sqlalchemy.exc.ProgrammingError:
            self._session.invalidate()
        self._session.execute(CreateSchema(SCHEMA))
        self._session.commit()
        Base.metadata.create_all(self._engine)
        self.ontology_init()
Пример #5
0
 def drop_repository(self, session, namespace):
     """Drop the namespace's schema and remove its Repository row, if any."""
     ddl = DropSchema(namespace, cascade=True)
     self.registry['engine'].execute(ddl)
     existing = (session.query(Repository)
                 .filter(Repository.namespace == namespace)
                 .first())
     if existing is not None:
         session.delete(existing)
     session.flush()
Пример #6
0
def temporary_testing_schema(engine_with_types):
    """Fixture: create TEST_SCHEMA for the test, drop it afterwards."""
    schema_name = TEST_SCHEMA
    with engine_with_types.begin() as connection:
        connection.execute(CreateSchema(schema_name))
    yield engine_with_types, schema_name
    # Teardown: remove the schema and everything it contains.
    with engine_with_types.begin() as connection:
        connection.execute(DropSchema(schema_name, cascade=True, if_exists=True))
Пример #7
0
def drop_db(database_uri=None):
    """ Convenience method for dropping the related tables in the database """
    engine = get_engine(database_uri)
    schema_ddl = DropSchema(SCHEMA_NAME)

    # Hook the schema drop to run after all tables are dropped, then
    # detach the listener so later drop_all calls are unaffected.
    event.listen(Base.metadata, 'after_drop', schema_ddl)
    Base.metadata.drop_all(engine)
    event.remove(Base.metadata, 'after_drop', schema_ddl)
Пример #8
0
def execute():
    """Sets up a database.

    """
    engine = db_session.sa_engine

    # Recreate the schema layout: drop 'public', then create each schema.
    engine.execute(DropSchema('public'))
    for schema_name in SCHEMAS:
        engine.execute(CreateSchema(schema_name))

    # Create every mapped table.
    METADATA.create_all(engine)
Пример #9
0
def drop_schema(connector, schema_name):
    """
    Drops the entire schema

    @param connector: connection to the database
    @param schema_name: name of the schema to be dropped
    """
    # Nothing to do when the schema is absent.
    if not schema_exists(connector, schema_name):
        return
    # Reflect the schema's tables, drop them, then drop the schema itself.
    connector.set_metadata_by_reflect(schema_name)
    connector.get_metadata().drop_all()
    connector.execute(DropSchema(schema_name, cascade=True))
Пример #10
0
    def drop_schema(self, name):
        """
        Drop a schema.

        :param str name: name of schema.
        """
        try:
            statement = DropSchema(name)
            self.engine.execute(statement)
        except Exception as exc:
            # Annotate the exception with logging context before it propagates.
            exc.logger = self.logger
            exc.message = "Failed to drop schema: {}".format(name)
            raise
Пример #11
0
def del_schemes(op):
    """Drop every game-related schema, in a fixed order."""
    for schema_name in ('gamer', 'elements', 'shop', 'donation', 'farm', 'quest'):
        op.execute(DropSchema(schema_name))
Пример #12
0
def connect_db(schema, database, host, user, passwd, action, port):
    """Set up or tear down a schema (and its tables) on a PostgreSQL database.

    :param action: 'setup' creates the schema and tables; 'teardown' drops
        the tables and then the schema (CASCADE). Any other value is a no-op.
    """
    db_string = 'postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{database}'.format(user=user, passwd=passwd, host=host, port=port, database=database)
    engine = create_engine(db_string)
    db_connection = engine.connect()
    try:
        if action == 'setup':
            db_connection.execute(CreateSchema(schema))
            metadata = generate_table(schema_name=schema, bind=engine)
            metadata.create_all(engine)
        elif action == 'teardown':
            metadata = generate_table(schema_name=schema, bind=engine)
            metadata.drop_all(engine)
            db_connection.execute(DropSchema(schema, cascade=True))
    finally:
        # The original leaked the connection and engine; always release them.
        db_connection.close()
        engine.dispose()
Пример #13
0
def setup_schema(engine, schema):
    """Recreate `schema` empty: ensure it exists, drop it, create it again."""
    with engine.connect() as conn:
        # NOTE(review): the schema name is interpolated directly into SQL —
        # safe only if `schema` comes from trusted config; confirm callers.
        has_schema = conn.execute(
            text(
                f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{schema}';"
            ))

        # Create the schema first if it is missing so that the unconditional
        # drop below cannot fail on a nonexistent schema.
        if not has_schema.scalar():
            conn.execute(CreateSchema(schema))

        # Positional args — presumably quote=None, cascade=True; verify
        # against the SQLAlchemy version in use.
        conn.execute(DropSchema(schema, None, True))
        conn.execute(CreateSchema(schema))
        pretty_print(f"Created Schema {schema}", True)
Пример #14
0
 def __init__(self, import_base):
     """Open a DB session and rebuild SCHEMA from scratch for an import run."""
     self._clear_import_log()
     self._engine = make_engine()
     Session = sessionmaker(bind=self._engine)
     self._create_extensions()
     self._session = Session()
     self._import_base = import_base
     # Drop any previous schema. A ProgrammingError (e.g. the schema does
     # not exist yet) leaves the session in a failed state, so invalidate
     # it before continuing with the create.
     try:
         self._session.execute(DropSchema(SCHEMA, cascade=True))
     except sqlalchemy.exc.ProgrammingError:
         self._session.invalidate()
     self._session.execute(CreateSchema(SCHEMA))
     self._session.commit()
     Base.metadata.create_all(self._engine)
Пример #15
0
def delete_schema(schema, engine, cascade=False, if_exists=False):
    """
    This method deletes a Postgres schema.
    """
    # With if_exists, silently succeed when the schema is already gone.
    if if_exists and schema not in get_all_schemas(engine):
        return

    with engine.begin() as connection:
        try:
            connection.execute(DropSchema(schema, cascade=cascade))
        except InternalError as err:
            # Surface the DB-level "dependent objects" error directly;
            # re-raise anything else unchanged.
            if isinstance(err.orig, DependentObjectsStillExist):
                raise err.orig
            raise err
Пример #16
0
def _delete_data(public_tenant):
    '''
    delete all data from tables.

    Fast path: drop and recreate the whole schema (plus its sequence and
    tables). If that fails for any reason, fall back to deleting rows
    table-by-table in reverse dependency order.
    '''
    with EdMigratePublicConnection(public_tenant) as public_conn:
        metadata = public_conn.get_metadata()
        try:
            public_conn.execute(DropSchema(metadata.schema, cascade=True))
            public_conn.execute(CreateSchema(metadata.schema))
            public_conn.execute('CREATE SEQUENCE "' + metadata.schema + '"."global_rec_seq"')
            metadata.create_all(public_conn.get_engine())
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt. Fallback: row-level deletes.
            for table_name in reversed(TABLE_LIST):
                public_table = public_conn.get_table(table_name)
                public_conn.execute(public_table.delete())
Пример #17
0
def log_out(*, db_session, org_id: int, org_code: str):
    """Remove an organization: evict cached users, delete its rows and schema."""

    # Evict every user of the organization from the Redis cache.
    redis_conn = redis.Redis(connection_pool=redis_pool)
    for user in authService.get_by_org_id(db_session=db_session, org_id=org_id):
        redis_conn.delete(f"user:{user.email}")

    # Delete the organization row and its user links.
    result = delete(db_session=db_session, org_id=org_id)
    authService.delete_by_org_id(db_session=db_session, org_id=org_id)

    # Drop the per-organization schema if present.
    schema_name = f"dispatch_organization_{org_code}"
    if engine.dialect.has_schema(engine, schema_name):
        with engine.connect() as connection:
            connection.execute(DropSchema(schema_name, cascade=True))

    db_session.close()
    return result
Пример #18
0
def test_multi_db_tables(engine, multi_db_engine, client):
    """Tables created on both engines must all appear in the /tables/ API."""
    schema_name = "test_multi_db_tables_schema"
    test_tables = ["test_table_1", "test_table_2"]
    for table_name in test_tables:
        tables.create_mathesar_table(table_name, schema_name, [], engine)
        tables.create_mathesar_table(
            "multi_db_" + table_name, schema_name, [], multi_db_engine
        )

    cache.clear()
    response = client.get('/api/v0/tables/')
    response_tables = [entry['name'] for entry in response.json()['results']]

    assert response.status_code == 200
    expected_tables = test_tables + ["multi_db_" + name for name in test_tables]
    for expected_name in expected_tables:
        assert expected_name in response_tables

    # We have to delete the schema to not break later tests
    with engine.begin() as conn:
        conn.execute(DropSchema(schema_name, cascade=True))
Пример #19
0
def test_multi_db_schema(engine, multi_db_engine, client):
    """Schemas created on both engines must all appear in the /schemas/ API."""
    test_schemas = ["test_schema_1", "test_schema_2"]
    for schema in test_schemas:
        schemas.create_schema(schema, engine)
        schemas.create_schema("multi_db_" + schema, multi_db_engine)

    cache.clear()
    response = client.get('/api/v0/schemas/')
    response_data = response.json()
    response_schemas = [
        s['name'] for s in response_data['results'] if s['name'] != 'public'
    ]

    assert response.status_code == 200
    assert len(response_schemas) == 4

    expected_schemas = test_schemas + ["multi_db_" + s for s in test_schemas]
    assert set(response_schemas) == set(expected_schemas)

    # We have to delete the schemas to not break later tests.
    # Fix: the original only dropped the schemas on `engine`, leaking the
    # "multi_db_" schemas created on `multi_db_engine` between test runs.
    with engine.begin() as conn:
        for schema in test_schemas:
            conn.execute(DropSchema(schema))
    with multi_db_engine.begin() as conn:
        for schema in test_schemas:
            conn.execute(DropSchema("multi_db_" + schema))
Пример #20
0
def clear_schema():
    # NOTE(review): DropSchema(...) only *constructs* a DDL statement object;
    # it is never bound to an engine or executed, so apart from reading the
    # credentials this function is a no-op. It likely needs something like
    # engine.execute(DropSchema(...)) — confirm intent and fix at the caller
    # level, since no engine is available in this scope.
    cred = get_sql_credentials()
    DropSchema(cred["sql_db"])
Пример #21
0
def engine_email_type(temporary_testing_schema):
    """Fixture: install Mathesar types, then drop their schema on teardown."""
    engine, schema = temporary_testing_schema
    install.install_mathesar_on_database(engine)
    yield engine, schema
    # Teardown: remove the Mathesar types schema.
    with engine.begin() as connection:
        connection.execute(DropSchema(base.SCHEMA, cascade=True, if_exists=True))
Пример #22
0
 def tearDown(self):
     """Drop the output schema if an earlier step created it."""
     inspector = reflection.Inspector.from_engine(self.eng)
     existing_schemas = inspector.get_schema_names()
     if self._schema_output in existing_schemas:
         self.eng.execute(DropSchema(self._schema_output, cascade=True))
Пример #23
0
def drop_target_schema(tenant, schema_name):
    """Best-effort drop of `schema_name` on the tenant's target connection."""
    with get_target_connection(tenant) as connector:
        try:
            connector.execute(DropSchema(schema_name, cascade=True))
        except Exception:
            # Best-effort: the schema may not exist. Narrowed from a bare
            # `except:`, which would also swallow SystemExit/KeyboardInterrupt.
            pass
Пример #24
0
    report = db.Column(db.String(50),db.ForeignKey(schema+'report.report'),nullable=False)
    dataviz = db.Column(db.String(50),db.ForeignKey(schema+'dataviz.dataviz'),nullable=False)
    __table_args__ = (
        db.PrimaryKeyConstraint('report', 'dataviz'),
        tableschema
    )
    def __repr__(self):
        """Readable representation keyed on the report name."""
        return f'<Report_composition {self.report}>'
#/MODELS


#db.init_app(app)
# Register schema create/drop DDL hooks when a SCHEMA is configured;
# otherwise fall through and tell the operator how to enable one.
try :
    schema = app.config['SCHEMA']
    # Create the schema before tables are created, drop it after tables drop.
    event.listen(db.metadata, 'before_create', CreateSchema(schema))
    event.listen(db.metadata, 'after_drop', DropSchema(schema))

    #  Uncomment these lines to insert sample data when creating the database
    #  event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/alimentation.sql",schema+".")))
except KeyError :
    # No SCHEMA key in the app config — run without a dedicated schema.
    # Uncomment these lines to insert sample data when creating the database
    '''
    event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/dataid.sql","")))
    event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/report.sql","")))
    event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/dataviz.sql","")))
    event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/report_composition.sql","")))
    event.listen(db.metadata, "after_create", db.DDL(insertdb("Datainit/rawdata.sql","")))
    '''
    print("If you want to add a schema edit config.py with SCHEMA variable")
Пример #25
0
 def drop_schema(self, schema_name):
     """Drop the schema on the bound engine and log the deletion."""
     ddl = DropSchema(schema_name)
     self.engine.execute(ddl)
     self.logger.info(f'DELETE SCHEMA `{schema_name}`')
Пример #26
0
 def _drop_schema(cls, name, cascade=False):
     """Drop the named schema on the class-level database engine."""
     statement = DropSchema(name, cascade=cascade)
     cls.database_engine.execute(statement)
Пример #27
0
    def handle_noargs(self, **options):
        """Purge a TEST-flagged database: drop its app tables, FKs and schemas.

        Python 2 Django management command. Refuses to run unless the
        database's settings set TEST=True; optionally asks for confirmation.
        """
        verbosity = 1 #int(options.get('verbosity'))
        interactive = options.get('interactive')
        show_traceback = options.get('traceback')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

        # Safety gate: only databases explicitly flagged TEST may be purged.
        db = options.get('database')
        orm = ORM.get(db)
        db_info = orm.settings_dict
        is_test_db = db_info.get('TEST', False)
        if not is_test_db:
            print 'Database "%s" cannot be purged because it is not a test ' \
                  'database.\nTo flag this as a test database, set TEST to ' \
                  'True in the database settings.' % db
            sys.exit()

        if interactive:
            confirm = raw_input('\nYou have requested a purge of database ' \
                '"%s" (%s). This will IRREVERSIBLY DESTROY all data ' \
                'currently in the database, and DELETE ALL TABLES AND ' \
                'SCHEMAS. Are you sure you want to do this?\n\n' \
                'Type "yes" to continue, or "no" to cancel: ' \
                % (db, orm.engine.url))
        else:
            confirm = 'yes'

        if confirm == 'yes':
            # get a list of all schemas used by the app
            default_schema = orm.engine.url.database
            app_schemas = set(orm.Base.metadata._schemas)
            app_schemas.add(default_schema)

            # Connect without selecting a database so schemas can be inspected
            # and dropped from outside.
            url = deepcopy(orm.engine.url)
            url.database = None
            engine = create_engine(url)
            inspector = inspect(engine)

            # get a list of existing schemas
            db_schemas = set(inspector.get_schema_names())

            # Only touch schemas that both exist and belong to the app.
            schemas = app_schemas.intersection(db_schemas)

            app_tables = set()
            for table in orm.Base.metadata.tables.values():
                schema = table.schema or default_schema
                app_tables.add('%s.%s' % (schema, table.name))

            metadata = MetaData()
            db_tables = []
            all_fks = []

            # Reflect only the app's tables, collecting their named foreign
            # keys so the constraints can be dropped before the tables.
            for schema in schemas:
                for table_name in inspector.get_table_names(schema):
                    fullname = '%s.%s' % (schema, table_name)
                    if fullname not in app_tables:
                        continue
                    fks = []
                    for fk in inspector.get_foreign_keys(table_name, schema=schema):
                        if not fk['name']:
                            continue
                        fks.append(ForeignKeyConstraint((),(),name=fk['name']))
                    t = Table(table_name, metadata, *fks, schema=schema)
                    db_tables.append(t)
                    all_fks.extend(fks)

            # Drop order matters: constraints, then tables, then schemas.
            session = Session(bind=engine)
            for fkc in all_fks:
                session.execute(DropConstraint(fkc))
            for table in db_tables:
                session.execute(DropTable(table))
            for schema in schemas:
                session.execute(DropSchema(schema))
            session.commit()
            session.bind.dispose()

        else:
            self.stdout.write("Purge cancelled.\n")
Пример #28
0
 def dropSchema(self, schema, cascade=False):
     """Drop `schema` (lower-cased) on the bound engine."""
     ddl = DropSchema(schema.lower(), cascade=cascade)
     self.dbeng.execute(ddl)
Пример #29
0
        print("Please specify --schema option")
        exit(-1)
    __URL = DBDRIVER + "://" + __user + ":" + __passwd + "@" + __host + "/" + __database
    print("DB Driver:" + DBDRIVER)
    print("     User:"******"  Password:"******"      Host:" + __host)
    print("  Database:" + __database)
    print("    Schema:" + __schema)
    print("####################")
    engine = create_engine(__URL, echo=True)
    connection = engine.connect()

    if __action == 'setup':
        connection.execute(CreateSchema(__schema))
        if __metadata == 'edware':
            metadata = generate_ed_metadata(schema_name=__schema, bind=engine)
            connection.execute('CREATE SEQUENCE "' + __schema +
                               '"."global_rec_seq"')
        else:
            metadata = generate_stats_metadata(schema_name=__schema,
                                               bind=engine)
        metadata.create_all(engine)
    elif __action == 'teardown':
        # metadata = generate_ed_metadata(schema_name=__schema, bind=engine)
        # metadata.drop_all(engine)
        try:
            connection.execute(DropSchema(__schema, cascade=True))
        except:
            pass
Пример #30
0
 def teardown_class(cls):
     """Drop the tenant schema created for this test class."""
     with config.db.connect() as connection:
         connection.execute(DropSchema(cls.tenant_schema_name, cascade=True))