Example #1
def get_alembic_config():
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini'))
    config.set_main_option('script_location',
                           'storyboard.db.migration:alembic_migrations')
    return config
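A Config built this way is normally handed straight to alembic.command; a minimal sketch of that pattern (the do_upgrade helper below is illustrative, not part of the storyboard example):

# Sketch: run migrations programmatically with the Config returned above.
# 'head' means "upgrade to the latest revision"; a specific revision id works too.
from alembic import command


def do_upgrade(revision='head'):
    config = get_alembic_config()
    command.upgrade(config, revision)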
Example #2
File: base.py Project: verenceLola/mrm_api
class BaseTestCase(TestCase):
    alembic_configuration = config.Config("./alembic.ini")

    def create_app(self):
        app = create_app('testing')
        self.base_url = 'https://127.0.0.1:5000/mrm'
        self.headers = {'content-type': 'application/json'}
        self.client = Client(schema)
        return app

    @patch('api.room.models.verify_calendar_id')
    def setUp(self, mock_verify_calendar_id):
        app = self.create_app()
        self.app_test = app.test_client()
        with app.app_context():
            Base.metadata.create_all(bind=engine)

            command.stamp(self.alembic_configuration, 'head')
            command.downgrade(self.alembic_configuration, '-1')
            command.upgrade(self.alembic_configuration, 'head')

            admin_user = User(email="*****@*****.**",
                              name="Peter Walugembe",
                              picture="https://www.andela.com/walugembe")
            admin_user.location = "Kampala"
            admin_user.save()
            lagos_admin = User(email="*****@*****.**",
                               location="Lagos",
                               name="Peter Adeoye",
                               picture="https://www.andela.com/adeoye")
            lagos_admin.save()
            global role
            role = Role(role="Admin")
            role.save()
            role_2 = Role(role="Test")
            role_2.save()
            role_3 = Role(role="Super Admin")
            role_3.save()
            admin_user.roles.append(role)
            lagos_admin.roles.append(role)
            tag = Tag(name='Block-B',
                      color='green',
                      description='The description')
            tag.save()

            location = Location(name='Kampala', abbreviation='KLA')
            location.save()
            location_two = Location(name='Nairobi', abbreviation='NBO')
            location_two.save()
            location_three = Location(name='Lagos', abbreviation='LOS')
            location_three.save()
            tag_two = Tag(name='Block-C',
                          color='blue',
                          description='The description')
            tag_two.save()
            room = Room(
                name='Entebbe',
                room_type='meeting',
                capacity=6,
                location_id=location.id,
                structure_id='851ae8b3-48dd-46b5-89bc-ca3f8111ad87',
                calendar_id=
                '*****@*****.**',  # noqa: E501
                image_url=
                "https://www.officelovin.com/wp-content/uploads/2016/10/andela-office-main-1.jpg",  # noqa: E501
                room_labels=["1st Floor", "Wing A"])
            room.save()
            room.room_tags.append(tag)
            room_2 = Room(
                name='Tana',
                room_type='meeting',
                capacity=14,
                location_id=location.id,
                structure_id='851ae8b3-48dd-46b5-89bc-ca3f8111ad87',
                calendar_id=
                '*****@*****.**',  # noqa: E501
                image_url=
                "https://www.officelovin.com/wp-content/uploads/2016/10/andela-office-main-1.jpg",  # noqa: E501
                room_labels=["1st Floor", "Wing B"])
            room_2.save()
            room_2.room_tags.append(tag)
            resource = Resource(name='Markers', quantity=3)
            resource.save()
            device = Devices(last_seen="2018-06-08T11:17:58.785136",
                             date_added="2018-06-08T11:17:58.785136",
                             name="Samsung",
                             location="Kampala",
                             device_type="External Display",
                             room_id=1,
                             state="active")
            device.save()
            question_1 = Question(
                question_type="rate",
                question_title="Rating Feedback",
                question="How will you rate the brightness of the room",
                start_date="20 Nov 2018",
                end_date="28 Nov 2018",
                is_active=True)
            question_1.save()
            question_2 = Question(
                question_type="check",
                question_title="check Feedback",
                question="Is there anything missing in the room",
                check_options=['apple tv', 'whiteboard', 'maker pen'],
                start_date="20 Nov 2018",
                end_date="30 Nov 2018",
                is_active=True)
            event = Events(event_id="test_id5",
                           room_id=1,
                           event_title="Onboarding",
                           start_time="2018-07-11T09:00:00Z",
                           end_time="2018-07-11T09:45:00Z",
                           number_of_participants=4,
                           checked_in=False,
                           cancelled=False)
            event.save()
            question_2.save()
            question_3 = Question(question_type="input",
                                  question_title="input Feedback",
                                  question="Any other suggestion",
                                  start_date="20 Nov 2018",
                                  end_date="28 Nov 2018")
            question_3.save()
            question_4 = Question(question_type="check",
                                  question_title="Missing item",
                                  question="Anything missing in the room?",
                                  check_options=['duster'],
                                  start_date="20 Nov 2018",
                                  end_date="30 Nov 2018",
                                  is_active=True)
            question_4.save()
            response_1 = Response(
                question_id=1,
                room_id=1,
                question_type="rate",
                created_date=datetime.now(),
                response="1",
                resolved=False,
            )
            response_1.save()

            response_2 = Response(
                question_id=question_2.id,
                room_id=room.id,
                question_type="check",
                created_date=datetime.now(),
                response=['marker pen', 'apple tv'],
                resolved=True,
            )
            response_2.save()
            response_2.missing_resources.append(resource)

            response_3 = Response(question_id=question_4.id,
                                  room_id=room_2.id,
                                  question_type="check",
                                  created_date=datetime.now(),
                                  response=['duster'],
                                  resolved=True,
                                  state="archived")
            response_3.save()

            structure = Structure(
                structure_id='b05fc5f2-b4aa-4f48-a8fb-30bdcc3fc968',
                level=1,
                name='Epic tower',
                parent_id="1",
                parent_title="parent_title",
                tag='Building',
                location_id=1,
                position=1,
            )
            structure.save()
            parent_node = OfficeStructure(
                id='C56A4180-65AA-42EC-A945-5FD21DEC0518',
                name='Epic Tower',
                tag='Lagos Building',
                location_id=1)
            parent_node.save()
            child_node = OfficeStructure(
                id='C56A4180-65AA-42EC-A945-5FD21DEC0519',
                name='Gold Coast',
                tag='First Floor',
                parent_id='C56A4180-65AA-42EC-A945-5FD21DEC0518',
                location_id=1)
            child_node.save()
            db_session.commit()
            with open('mrm.err.log', 'a+') as f:
                f.write('[2019-08-06 13:22:32 +0000] [1574] [ERROR] Error /logs\r')
                f.write('Traceback (most recent call last):\r')
                f.write('if pattern.search(line):\r')

    def get_admin_location_id(self):
        payload = jwt.decode(ADMIN_TOKEN, verify=False)
        email = payload['UserInfo']['email']
        user = User.query.filter_by(email=email).first()
        location = Location.query.filter_by(name=user.location).first()
        return location.id

    def tearDown(self):
        app = self.create_app()
        with app.app_context():
            command.stamp(self.alembic_configuration, 'base')
            db_session.remove()
            Base.metadata.drop_all(bind=engine)
Example #3
def get_alembic_config():
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini'))
    config.set_main_option('script_location',
                           'midonet.neutron.db.migration:alembic_migration')
    return config
Example #4
def initialize():
    """ Initialize this module, preparing all of the variables. """

    # region General
    global base_dir

    base_dir = os.environ.get('BASE_DIR', None)

    # endregion

    # region Database
    global database_url, Session

    # Get the database url saved in the environment variable
    database_url = os.environ.get('DATABASE_URL', None)

    # Get the database pool recycle
    pool_recycle = int(os.environ.get('DB_POOL_RECYCLE', 280))

    if database_url is None:
        print('Warning: Unable to find database url!')
        exit(1)

    engine = create_engine(database_url,
                           encoding='utf-8',
                           pool_recycle=pool_recycle,
                           pool_pre_ping=True)
    Session = sessionmaker(bind=engine)

    MIGRATIONS_DIR = os.path.join(base_dir, 'migrations')

    config = aleconf.Config(file_=os.path.join(MIGRATIONS_DIR, 'alembic.ini'))
    config.set_main_option('script_location', MIGRATIONS_DIR)
    config.set_main_option('sqlalchemy.url', database_url)

    # Initialize the migrations directory if it doesn't exist yet
    if not os.path.isdir(MIGRATIONS_DIR):
        alecomm.init(config, MIGRATIONS_DIR)

        env_file = open('%s/env.py' % MIGRATIONS_DIR, 'r+')
        text = env_file.read()
        text = text.replace(
            'target_metadata=target_metadata',
            'target_metadata=target_metadata, compare_type=True')
        text = text.replace(
            'target_metadata = None',
            'import models\ntarget_metadata = models.Base.metadata')
        env_file.seek(0)
        env_file.write(text)
        env_file.close()

    # Makes sure the database is up to date
    alecomm.upgrade(config, 'head')

    # Check for changes in the database
    mc = alemig.MigrationContext.configure(engine.connect())
    diff_list = aleauto.compare_metadata(mc, models.Base.metadata)

    # Update the database
    if diff_list:
        alecomm.revision(config, None, autogenerate=True)
        alecomm.upgrade(config, 'head')

    # endregion

    # region Data Gathering
    global selected_epg, channels_url, shows_url, max_channels_request, show_sessions_validity_days, max_number_retries

    # Get the selected EPG
    selected_epg = os.environ.get('EPG', None)

    # Get urls for the selected EPG
    channels_url = os.environ.get('CHANNELS_URL', None)
    shows_url = os.environ.get('SHOWS_URL', None)

    # Max channels per request
    max_channels_request = os.environ.get('MAX_CHANNELS_REQUEST', '90')

    # Number of days after the date in which the sessions are still kept
    show_sessions_validity_days = os.environ.get('SHOW_SESSIONS_VALIDITY_DAYS',
                                                 7)

    # Maximum number of retries
    max_number_retries = os.environ.get('MAX_NUMBER_RETRIES', 5)

    # endregion

    # region Data Gathering from file
    global same_session_minutes

    # Get the number of minutes to be used for searching for changes in a session
    same_session_minutes = os.environ.get('SAME_SESSION_MINUTES', None)

    if same_session_minutes is None:
        same_session_minutes = 30
    else:
        same_session_minutes = int(same_session_minutes)

    # endregion

    # region Shows Information Services
    global trakt_key, cache_validity_days, omdb_key, tmdb_key, tmdb_max_mb_pages

    # Get the api key for trakt
    trakt_key = os.environ.get('TRAKT_KEY', None)

    if trakt_key is None:
        print('Warning: Unable to find trakt key!')

    # Cache validity, in days
    cache_validity_days = os.environ.get('CACHE_VALIDITY', None)

    if cache_validity_days is None:
        print('Warning: Unable to find CACHE_VALIDITY key!')

        # Set 1 day as the default value
        cache_validity_days = 1

    # Get the api key for omdb
    omdb_key = os.environ.get('OMDB_KEY', None)

    if omdb_key is None:
        print('Warning: Unable to find omdb key!')

    # Get the api key for tmdb
    tmdb_key = os.environ.get('TMDB_KEY', None)

    if tmdb_key is None:
        print('Warning: Unable to find tmdb key!')
        exit(1)

    # Get the number of pages that should be retrieved from TMDB
    tmdb_max_mb_pages = os.environ.get('TMDB_MAX_NB_PAGES', None)

    if tmdb_max_mb_pages is None:
        print('Warning: Unable to find tmdb max number of pages!')

        # Set 2 pages as the default value
        tmdb_max_mb_pages = 2

    # endregion

    # region Information Security
    global bcrypt_rounds, secret_key, REFRESH_TOKEN_VALIDITY_DAYS, ACCESS_TOKEN_VALIDITY_HOURS, \
        VERIFICATION_TOKEN_VALIDITY_DAYS, DELETION_TOKEN_VALIDITY_DAYS, CHANGE_EMAIL_TOKEN_VALIDITY_DAYS, \
        PASSWORD_RECOVERY_TOKEN_VALIDITY_DAYS

    # Get the configuration for the number of rounds used in the bcrypt
    bcrypt_rounds = os.environ.get('BCRYPT_ROUNDS', None)

    if bcrypt_rounds is None:
        bcrypt_rounds = 10

    # Get the secret key used to generate tokens
    secret_key = os.environ.get('SECRET_KEY', None)

    if secret_key is None:
        print('Warning: Unable to find secret key!')
        exit(1)

    # Validity of the different types of token
    REFRESH_TOKEN_VALIDITY_DAYS = 365
    ACCESS_TOKEN_VALIDITY_HOURS = 1
    VERIFICATION_TOKEN_VALIDITY_DAYS = 2
    DELETION_TOKEN_VALIDITY_DAYS = 2
    CHANGE_EMAIL_TOKEN_VALIDITY_DAYS = 2
    PASSWORD_RECOVERY_TOKEN_VALIDITY_DAYS = 2

    # endregion

    # region External Login

    # Get the client id for Google
    global google_client_id

    google_client_id = os.environ.get('GOOGLE_CLIENT_ID', None)

    if google_client_id is None:
        print('Warning: Unable to find google client id!')
        exit(1)

    # endregion

    # region Email
    global email_domain, email_account, email_user, email_password

    email_domain = os.environ.get('EMAIL_DOMAIN', None)
    email_account = os.environ.get('EMAIL_ACCOUNT', None)
    email_user = os.environ.get('EMAIL_USER', None)
    email_password = os.environ.get('EMAIL_PASSWORD', None)

    # endregion

    # region Highlights
    global score_highlight_counter, new_highlight_counter, minimum_number_votes

    score_highlight_counter = os.environ.get('SCORE_HIGHLIGHT_COUNTER', 5)

    new_highlight_counter = os.environ.get('NEW_HIGHLIGHT_COUNTER', 50)

    minimum_number_votes = os.environ.get('MINIMUM_NUMBER_VOTES', 20)

    # endregion

    # region Application
    global application_name, application_link, AVAILABLE_LANGUAGES

    application_name = os.environ.get('APPLICATION_NAME', None)
    application_link = os.environ.get('APPLICATION_LINK', None)

    AVAILABLE_LANGUAGES = [item.value for item in AvailableLanguage]
Example #5
 def test_get_root_versions_dir(self):
     config = alembic_config.Config()
     config.set_main_option('script_location', 'a.b.c:d')
     versions_dir = cli._get_root_versions_dir(config)
     self.assertEqual('/fake/dir/a/a/b/c/d/versions', versions_dir)
Example #6
class BaseWalkMigrationTestCase(object):

    ALEMBIC_CONFIG = alembic_config.Config(
        os.path.join(os.path.dirname(murano.db.migration.__file__),
                     'alembic.ini'))

    ALEMBIC_CONFIG.murano_config = CONF

    def _configure(self, engine):
        """Repo and database configuration

        For each type of repository we should do some configuration steps.
        For migrate_repo we should set our database under version control.
        For alembic we should configure database settings. For this goal we
        should use oslo.config and openstack.common.db.sqlalchemy.session with
        database functionality (reset default settings and session cleanup).
        """
        CONF.set_override('connection', str(engine.url), group='database')

    def _alembic_command(self, alembic_command, engine, *args, **kwargs):
        """Redefines alembic command data return setting

        Most of alembic command return data into output.
        We should redefine this setting for getting info.
        """
        self.ALEMBIC_CONFIG.stdout = buf = io.StringIO()
        CONF.set_override('connection', str(engine.url), group='database')
        getattr(command, alembic_command)(*args, **kwargs)
        res = buf.getvalue().strip()
        LOG.debug('Alembic command `{command}` returns: {result}'.format(
            command=alembic_command, result=res))
        return res

    def _up_and_down_versions(self):
        """Store tuple of versions for successful testing

        Since alembic version has a random algorithm of generation
        (SA-migrate has an ordered autoincrement naming) we should store
        a tuple of versions (version for upgrade and version for downgrade)
        for successful testing of migrations in up>down>up mode.
        """

        env = alembic_script.ScriptDirectory.from_config(self.ALEMBIC_CONFIG)
        versions = []
        for rev in env.walk_revisions():
            versions.append((rev.revision, rev.down_revision or '-1'))

        versions.reverse()
        return versions

    def walk_versions(self, engine=None, snake_walk=False, downgrade=True):
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.

        self._configure(engine)
        up_and_down_versions = self._up_and_down_versions()
        for ver_up, ver_down in up_and_down_versions:
            # upgrade -> downgrade -> upgrade
            self._migrate_up(engine, ver_up, with_data=True)
            if snake_walk:
                downgraded = self._migrate_down(engine,
                                                ver_down,
                                                with_data=True,
                                                next_version=ver_up)
                if downgraded:
                    self._migrate_up(engine, ver_up)

        if downgrade:
            # Now walk it back down to 0 from the latest, testing
            # the downgrade paths.
            up_and_down_versions.reverse()
            for ver_up, ver_down in up_and_down_versions:
                # downgrade -> upgrade -> downgrade
                downgraded = self._migrate_down(engine,
                                                ver_down,
                                                next_version=ver_up)

                if snake_walk and downgraded:
                    self._migrate_up(engine, ver_up)
                    self._migrate_down(engine, ver_down, next_version=ver_up)

    def _get_version_from_db(self, engine):
        """Fetches latest version of migrate repo from database

        For each type of migrate repo, the latest version from db
        will be returned.
        """
        conn = engine.connect()
        try:
            context = migration.MigrationContext.configure(conn)
            version = context.get_current_revision() or '-1'
        finally:
            conn.close()
        return version

    def _migrate(self, engine, version, cmd):
        """Base method for manipulation with migrate repo

        It will upgrade or downgrade the actual database.
        """

        self._alembic_command(cmd, engine, self.ALEMBIC_CONFIG, version)

    def _migrate_down(self,
                      engine,
                      version,
                      with_data=False,
                      next_version=None):
        try:
            self._migrate(engine, version, 'downgrade')
        except NotImplementedError:
            # NOTE(sirp): some migrations, namely release-level
            # migrations, don't support a downgrade.
            return False
        self.assertEqual(version, self._get_version_from_db(engine))

        # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'
        # version). So if we have any downgrade checks, they need to be run for
        # the previous (higher numbered) migration.
        if with_data:
            post_downgrade = getattr(self, "_post_downgrade_%s" % next_version,
                                     None)
            if post_downgrade:
                post_downgrade(engine)

        return True

    def _migrate_up(self, engine, version, with_data=False):
        """Migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        check_version = version
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(self, "_pre_upgrade_%s" % check_version,
                                      None)
                if pre_upgrade:
                    data = pre_upgrade(engine)
            self._migrate(engine, version, 'upgrade')
            self.assertEqual(version, self._get_version_from_db(engine))
            if with_data:
                check = getattr(self, "_check_%s" % check_version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(
                "Failed to migrate to version {ver} on engine {eng}".format(
                    ver=version, eng=engine))
            raise
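The _pre_upgrade_### and _check_### hooks that _migrate_up() looks up live on the concrete test class; a hedged sketch of one such pair for a made-up revision id '015e' (table and column names are illustrative, and the hooks are shown as free functions for brevity):

import sqlalchemy as sa

# Hypothetical hooks for revision '015e': seed a table that already exists
# before the upgrade, then verify afterwards that the migration preserved it.
def _pre_upgrade_015e(self, engine):
    data = [{'id': 'abc123', 'name': 'test-env'}]
    with engine.begin() as conn:
        conn.execute(
            sa.text("INSERT INTO environment (id, name) VALUES (:id, :name)"),
            data)
    return data

def _check_015e(self, engine, data):
    with engine.connect() as conn:
        rows = conn.execute(
            sa.text("SELECT id, name FROM environment")).fetchall()
    self.assertEqual(len(data), len(rows))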
Example #7
    def test_config_set_section_option_percent(self):
        cfg = config.Config()
        cfg.set_section_option("some_section", "foob", "a %% percent")

        eq_(cfg.get_section_option("some_section", "foob"), "a % percent")
Example #8
 def _get_alembic_config(self):
     cfg = config.Config("%s/sqlalchemy/alembic/alembic.ini" %
                         os.path.dirname(__file__))
     cfg.set_main_option('sqlalchemy.url',
                         self.conf.database.connection.replace("%", "%%"))
     return cfg
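Example #7 and Example #8 are two sides of the same quirk: alembic's Config stores options in a ConfigParser, which treats '%' as an interpolation marker, so literal percent signs must be doubled on the way in and come back single on the way out. A small sketch (the URL is made up):

from alembic import config

cfg = config.Config()
raw_url = 'postgresql://user:p%ss@localhost/db'            # hypothetical URL
cfg.set_main_option('sqlalchemy.url', raw_url.replace('%', '%%'))
assert cfg.get_main_option('sqlalchemy.url') == raw_url     # read back unescaped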
Example #9
File: base.py Project: dnuwa/mrm_api
class BaseTestCase(TestCase):
    alembic_configuration = config.Config("./alembic.ini")

    def create_app(self):
        app = create_app('testing')
        self.base_url = 'https://127.0.0.1:5000/mrm'
        self.headers = {'content-type': 'application/json'}
        self.client = Client(schema)
        return app

    def setUp(self):
        app = self.create_app()
        self.app_test = app.test_client()
        with app.app_context():
            Base.metadata.create_all(bind=engine)

            command.stamp(self.alembic_configuration, 'head')
            command.downgrade(self.alembic_configuration, '-1')
            command.upgrade(self.alembic_configuration, 'head')

            admin_user = User(email="*****@*****.**",
                              location="Kampala",
                              name="Peter Walugembe",
                              picture="https://www.andela.com/walugembe")
            admin_user.save()
            lagos_admin = User(email="*****@*****.**",
                               location="Lagos",
                               name="Peter Adeoye",
                               picture="https://www.andela.com/adeoye")
            lagos_admin.save()
            global role
            role = Role(role="Admin")
            role.save()
            admin_user.roles.append(role)
            lagos_admin.roles.append(role)
            tag = Tag(name='Block-B',
                      color='green',
                      description='The description')
            tag.save()
            root_node = OfficeStructure(name='location', tag_id=1)
            root_node.save()
            leaf_node = OfficeStructure(name='wings', parent_id=1)
            leaf_node.save()

            location = Location(name='Kampala',
                                abbreviation='KLA',
                                structure_id=1)
            location.save()
            location_two = Location(name='Nairobi',
                                    abbreviation='NBO',
                                    structure_id=1)
            location_two.save()
            location_three = Location(name='Lagos',
                                      abbreviation='LOS',
                                      structure_id=1)
            location_three.save()
            tag_two = Tag(name='Block-C',
                          color='blue',
                          description='The description')
            tag_two.save()
            room = Room(
                name='Entebbe',
                room_type='meeting',
                capacity=6,
                location_id=location.id,
                calendar_id=
                '*****@*****.**',  # noqa: E501
                image_url=
                "https://www.officelovin.com/wp-content/uploads/2016/10/andela-office-main-1.jpg",  # noqa: E501
                room_labels=["1st Floor", "Wing A"])
            room.save()
            room.room_tags.append(tag)
            resource = Resource(name='Markers', quantity=3)
            resource.save()
            device = Devices(last_seen="2018-06-08T11:17:58.785136",
                             date_added="2018-06-08T11:17:58.785136",
                             name="Samsung",
                             location="Nairobi",
                             device_type="External Display")
            device.save()
            question_1 = Question(
                question_type="rate",
                question_title="Rating Feedback",
                question="How will you rate the brightness of the room",
                start_date="20 Nov 2018",
                end_date="28 Nov 2018",
                is_active=True)
            question_1.save()
            question_2 = Question(
                question_type="check",
                question_title="check Feedback",
                question="Is there anything missing in the room",
                start_date="20 Nov 2018",
                end_date="30 Nov 2018",
                is_active=True)
            event = Events(event_id="test_id5",
                           room_id=1,
                           event_title="Onboarding",
                           start_time="2018-07-11T09:00:00Z",
                           end_time="2018-07-11T09:45:00Z",
                           number_of_participants=4,
                           checked_in=False,
                           cancelled=False)
            event.save()
            question_2.save()
            question_3 = Question(question_type="input",
                                  question_title="input Feedback",
                                  question="Any other suggestion",
                                  start_date="20 Nov 2018",
                                  end_date="28 Nov 2018")
            question_3.save()
            response_1 = Response(
                question_id=1,
                room_id=1,
                rate=2,
                created_date=datetime.now(),
                resolved=False,
            )
            response_1.save()
            response_2 = Response(
                question_id=question_2.id,
                room_id=room.id,
                check=True,
                created_date=datetime.now(),
                resolved=True,
            )
            response_2.save()
            response_2.missing_resources.append(resource)
            structure = Structure(
                structure_id='b05fc5f2-b4aa-4f48-a8fb-30bdcc3fc968',
                level=1,
                name='Epic tower',
                parent_id="1",
                parent_title="parent_title",
                tag='Building',
                location_id=1,
                position=1,
            )
            structure.save()
            db_session.commit()

    def get_admin_location_id(self):
        payload = jwt.decode(ADMIN_TOKEN, verify=False)
        email = payload['UserInfo']['email']
        user = User.query.filter_by(email=email).first()
        location = Location.query.filter_by(name=user.location).first()
        return location.id

    def tearDown(self):
        app = self.create_app()
        with app.app_context():
            command.stamp(self.alembic_configuration, 'base')
            db_session.remove()
            Base.metadata.drop_all(bind=engine)
Example #10
    def test_config_no_file_main_option(self):
        cfg = config.Config()
        cfg.set_main_option("url", "postgresql://foo/bar")

        eq_(cfg.get_main_option("url"), "postgresql://foo/bar")
Example #11
 def __init__(self, databaseUrl):
     self.ctx = config.Config()
     self.ctx.set_main_option(
         "script_location",
         path.join(path.dirname(path.realpath(__file__)), 'updates'))
     self.ctx.set_main_option("sqlalchemy.url", databaseUrl)
Example #12
File: __init__.py Project: tsipa/fuel-web
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import os

from alembic import command as alembic_command
from alembic import config as alembic_config
from alembic import util as alembic_util

from nailgun.db.sqlalchemy import db_str

ALEMBIC_CONFIG = alembic_config.Config(
    os.path.join(os.path.dirname(__file__), 'alembic.ini'))
ALEMBIC_CONFIG.set_main_option('script_location',
                               'nailgun.db.migration:alembic_migrations')
ALEMBIC_CONFIG.set_main_option('sqlalchemy.url', db_str)


def do_alembic_command(cmd, *args, **kwargs):
    try:
        getattr(alembic_command, cmd)(ALEMBIC_CONFIG, *args, **kwargs)
    except alembic_util.CommandError as e:
        alembic_util.err(str(e))


def do_stamp(cmd):
    do_alembic_command(cmd,
                       ALEMBIC_CONFIG.params.revision,
Example #13
def get_alembic_config():
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini'))
    config.set_main_option('script_location', get_script_location(CONF))
    _set_version_locations(config)
    return config
Example #14
ini_file_path = os.path.join(os.path.dirname(__file__), '..', 'alembic.ini')

# This indicates whether we are running with a viable Alembic
# context (necessary to skip run_migrations_online() below
# if sphinx imports this file without a viable Alembic
# context)
have_context = True

try:
    config = context.config
    # Only load Monasca configuration if imported by alembic CLI tool (the
    # monasca_db command will handle this on its own).
    if os.path.basename(sys.argv[0]) == 'alembic':
        monasca_api.config.parse_args(argv=[])
except AttributeError:
    config = alembic_config.Config(ini_file_path)
    have_context = False

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# Model metadata. This is needed for 'autogenerate' support. If you add new
# tables, you will need to add them to the get_all_metadata() method as well.
target_metadata = models.get_all_metadata()

nc = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s",
    "pk": "pk_%(table_name)s"
Example #15
 def __init__(self):
     self.ALEMBIC_CONFIG = alembic_config.Config(
         os.path.join(os.path.dirname(sahara.db.migration.__file__),
                      'alembic.ini')
     )
     self.ALEMBIC_CONFIG.sahara_config = CONF
Example #16
def get_alembic_config():
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini'))
    return config
Example #17
 def upgrade(schema: str) -> None:
     cfg = alembic_config.Config(alembic_ini, ini_section=schema)
     cfg.attributes['connection'] = connection  # pylint: disable=unsupported-assignment-operation
     command.upgrade(cfg, 'head')
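Injecting a live connection through cfg.attributes only has an effect if the project's env.py looks for it; a hedged sketch of the env.py side of this pattern, following the usual alembic cookbook recipe (helper names like _run_migrations are illustrative):

# env.py fragment (sketch). target_metadata stands in for the project's model
# metadata, which a real env.py would import.
from alembic import context
from sqlalchemy import engine_from_config

config = context.config
target_metadata = None  # replaced by the project's model metadata in practice


def _run_migrations(connection):
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    connectable = config.attributes.get('connection', None)
    if connectable is None:
        # invoked from the alembic CLI: build our own engine from the ini file
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix='sqlalchemy.')
        with connectable.connect() as connection:
            _run_migrations(connection)
    else:
        # a Connection was handed in programmatically, as in Example #17
        _run_migrations(connectable)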
Example #18
 def test_get_project_base(self):
     config = alembic_config.Config()
     config.set_main_option('script_location', 'a.b.c:d')
     proj_base = cli._get_project_base(config)
     self.assertEqual('a', proj_base)
Example #19
 def config(self):
     config = alembic_config.Config(
         os.path.join(os.path.dirname(migration.__file__), 'alembic.ini')
     )
     config.connection = self.connection
     return config
Example #20
def _get_alembic_config():
    base_path = os.path.split(os.path.dirname(__file__))[0]
    return alembic_config.Config(os.path.join(base_path, 'alembic.ini'))
Example #21
class BaseWalkMigrationMixin(object):

    ALEMBIC_CONFIG = alembic_config.Config(
        os.path.join(os.path.dirname(db.schema.__file__), "alembic.ini"))

    ALEMBIC_CONFIG.rally_config = CONF

    def _configure(self, engine):
        """Configure database connection.

        For each type of repository we should do some of configure steps.
        For migrate_repo we should set under version control our database.
        For alembic we should configure database settings. For this goal we
        should use oslo.config and openstack.common.db.sqlalchemy.session with
        database functionality (reset default settings and session cleanup).
        """
        CONF.set_override("connection", str(engine.url), group="database")

    def _alembic_command(self, alembic_command, engine, *args, **kwargs):
        """Call alembic command.

        Most of alembic command return data into output.
        We should redefine this setting for getting info.
        """
        self.ALEMBIC_CONFIG.stdout = buf = io.StringIO()
        CONF.set_override("connection", str(engine.url), group="database")
        getattr(command, alembic_command)(*args, **kwargs)
        res = buf.getvalue().strip()
        LOG.debug("Alembic command `{command}` returns: {result}".format(
            command=alembic_command, result=res))
        return res

    def _up_and_down_versions(self):
        """Get revisions versions.

        Since alembic version has a random algorithm of generation
        (SA-migrate has an ordered autoincrement naming) we should store
        a tuple of versions (version for upgrade and version for downgrade)
        for successful testing of migrations.
        """

        env = alembic_script.ScriptDirectory.from_config(self.ALEMBIC_CONFIG)
        versions = []
        for rev in env.walk_revisions():
            if rev.revision == db.schema.INITIAL_REVISION_UUID:
                # NOTE(rpromyshlennikov): we skip initial migration here
                continue
            versions.append((rev.revision, rev.down_revision or "-1"))

        versions.reverse()
        return versions

    def walk_versions(self, engine=None):
        """Walk through versions.

        Determine latest version script from the repo, then
        upgrade from 1 through to the latest, with no data
        in the databases. This just checks that the schema itself
        upgrades successfully.
        """

        self._configure(engine)
        # NOTE(ikhudoshyn): Now DB contains certain schema
        # so we can not execute all migrations starting from
        # init. So we cleanup the DB.
        db.schema.schema_cleanup()
        up_and_down_versions = self._up_and_down_versions()
        for ver_up, ver_down in up_and_down_versions:
            self._migrate_up(engine, ver_up, with_data=True)

    def _get_version_from_db(self, engine):
        """Return latest version for each type of migrate repo from db."""
        conn = engine.connect()
        try:
            context = migration.MigrationContext.configure(conn)
            version = context.get_current_revision() or "-1"
        finally:
            conn.close()
        return version

    def _migrate(self, engine, version, cmd):
        """Base method for manipulation with migrate repo.

        It will upgrade the actual database.
        """

        self._alembic_command(cmd, engine, self.ALEMBIC_CONFIG, version)

    def _migrate_up(self, engine, version, with_data=False):
        """Migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        check_version = version
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(self, "_pre_upgrade_%s" % check_version,
                                      None)
                if pre_upgrade:
                    data = pre_upgrade(engine)
            self._migrate(engine, version, "upgrade")
            self.assertEqual(version, self._get_version_from_db(engine))
            if with_data:
                check = getattr(self, "_check_%s" % check_version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(
                "Failed to migrate to version %(ver)s on engine %(eng)s" % {
                    "ver": version,
                    "eng": engine
                })
            raise
Example #22
def init_config(d):
    conf = config.Config(os.path.join(d, 'alembic.ini'))
    conf.set_main_option('script_location', d)
    conf.config_file_name = os.path.join(d, 'alembic.ini')
    return conf
Example #23
def update_database(args):
    alembic_config = config.Config(args.configuration, ini_section='app:main')
    alembic_config.set_section_option('app:main', 'script_location',
                                      'ess:migrations')
    alembic_config.set_section_option('app:main', 'url', 'hm')
    command.upgrade(alembic_config, DB_VERSION)
Example #24
class BaseWalkMigrationTestCase(object):

    ALEMBIC_CONFIG = alembic_config.Config(
        os.path.join(os.path.dirname(sahara.db.migration.__file__),
                     'alembic.ini'))

    ALEMBIC_CONFIG.sahara_config = CONF

    def _configure(self, engine):
        """For each type of repository we should do some of configure steps.

        For migrate_repo we should set under version control our database.
        For alembic we should configure database settings. For this goal we
        should use oslo_config and openstack.common.db.sqlalchemy.session with
        database functionality (reset default settings and session cleanup).
        """
        CONF.set_override('connection',
                          str(engine.url),
                          group='database',
                          enforce_type=True)
        sa.cleanup()

    def _alembic_command(self, alembic_command, engine, *args, **kwargs):
        """Most of alembic command return data into output.

        We should redefine this setting for getting info.
        """
        self.ALEMBIC_CONFIG.stdout = buf = io.StringIO()
        CONF.set_override('connection',
                          str(engine.url),
                          group='database',
                          enforce_type=True)
        sa.cleanup()
        getattr(command, alembic_command)(*args, **kwargs)
        res = buf.getvalue().strip()
        LOG.debug('Alembic command {command} returns: {result}'.format(
            command=alembic_command, result=res))
        sa.cleanup()
        return res

    def _get_versions(self):
        """Stores a list of versions.

        Since alembic version has a random algorithm of generation
        (SA-migrate has an ordered autoincrement naming) we should store
        a list of versions (version for upgrade)
        for successful testing of migrations in up mode.
        """

        env = alembic_script.ScriptDirectory.from_config(self.ALEMBIC_CONFIG)
        versions = []
        for rev in env.walk_revisions():
            versions.append(rev.revision)

        versions.reverse()
        return versions

    def walk_versions(self, engine=None):
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.

        self._configure(engine)
        versions = self._get_versions()
        for ver in versions:
            self._migrate_up(engine, ver, with_data=True)

    def _get_version_from_db(self, engine):
        """Returns latest version from db for each type of migrate repo."""

        conn = engine.connect()
        try:
            context = migration.MigrationContext.configure(conn)
            version = context.get_current_revision() or '-1'
        finally:
            conn.close()
        return version

    def _migrate(self, engine, version, cmd):
        """Base method for manipulation with migrate repo.

        It will upgrade or downgrade the actual database.
        """

        self._alembic_command(cmd, engine, self.ALEMBIC_CONFIG, version)

    def _migrate_up(self, engine, version, with_data=False):
        """migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        check_version = version
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(self, "_pre_upgrade_%s" % check_version,
                                      None)
                if pre_upgrade:
                    data = pre_upgrade(engine)
            self._migrate(engine, version, 'upgrade')
            self.assertEqual(version, self._get_version_from_db(engine))
            if with_data:
                check = getattr(self, "_check_%s" % check_version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error("Failed to migrate to version {version} on engine "
                      "{engine}".format(version=version, engine=engine))
            raise
Example #25
def downgrade_database(args):
    alembic_config = config.Config(args.configuration, ini_section='app:main')
    alembic_config.set_section_option('app:main', 'script_location',
                                      'ess:migrations')
    alembic_config.set_section_option('app:main', 'url', 'hm')
    command.downgrade(alembic_config, '-1')
Example #26
def _alembic_config():
    path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    config = alembic_config.Config(path)
    return config
Example #27
from art17.app import create_app
from art17 import models


TEST_CONFIG = {
    'SERVER_NAME': 'localhost',
    'SECRET_KEY': 'test',
    'ASSETS_DEBUG': True,
    'EEA_LDAP_SERVER': 'test_ldap_server',
    'EEA_PASSWORD_RESET': '',
}


alembic_cfg_path = path(__file__).dirname() / '..' / 'alembic.ini'
alembic_cfg = config.Config(alembic_cfg_path.abspath())


def create_generic_fixtures():
    models.db.drop_all()
    models.db.create_all()
    models.db.session.execute(
        "insert into roles (name, description) "
        "values ('admin', 'Administrator')"
    )
    models.db.session.execute(
        "insert into roles (name, description) "
        "values ('etc', 'European topic center')"
    )
    models.db.session.execute(
        "insert into roles (name, description) "