Example #1
    def prepare_database_template(self):

        # --- Check template database cache ---
        current_mhash = checksumdir.dirhash('./scripts/migrations')
        cache_connector = db_connector(self.template_cache_name)
        try:
            latest_mhash = self.read_migrations_hash(cache_connector)
            if latest_mhash == current_mhash:
                logger.info("Using template database cache.")
                # Cache is still valid, copy it to the template database
                # NOTE: In theory, 'read_migrations_hash:create_database'
                # should form one transaction. If another execution interrupts
                # us at this point, we will be lost ... 😨
                self.create_database(self.template_name,
                                     self.template_cache_name)
                return
        except psycopg2.OperationalError:
            pass  # Database does not exist
        except psycopg2.errors.UndefinedObject:
            pass  # No hash specified for historical reasons

        # --- Cache invalidation, recreating template database ---
        logger.info("Recreating template database ...")
        _perform_query(f'CREATE DATABASE {self.template_name}')
        connector = db_connector(self.template_name)
        # Apply migrations
        sp.run('./scripts/migrations/migrate.sh',
               check=True,
               env=dict(os.environ, POSTGRES_DB=self.template_name))
        # Seal template
        self.store_migrations_hash(connector, current_mhash)
        connector.execute(f'''
            UPDATE pg_database SET datistemplate = TRUE
            WHERE datname = '{self.template_name}'
        ''')
        logger.info("Template database was recreated.")

        # --- Update cache database ---
        # NOTE: In theory, this block should form one transaction. If another
        # execution interrupts us at this point, we will be lost ... 😨
        self.drop_database(self.template_cache_name)
        self.create_database(self.template_cache_name, self.template_name)
        # Copy the hash manually (not done automatically by Postgres)
        self.store_migrations_hash(cache_connector, current_mhash)
        logger.info("Template database was stored into cache.")
Example #2
    def __init__(self, *args, **kwargs):

        super().__init__(*args, **kwargs)

        # Set db connection parameters using env vars
        self.db_connector = _utils.db_connector()
        self.host = self.db_connector.host
        self.database = self.db_connector.database
        self.user = self.db_connector.user
        self.password = self.db_connector.password

        # lazy properties
        self._columns = None
        self._primary_constraint_name = None
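A hedged sketch of how the lazy attributes initialized above might be materialized on first access; the information_schema query, the self.table attribute, and the connector's query() method are illustrative assumptions:

    @property
    def columns(self):
        # Compute the column list once, then serve the cached value.
        if self._columns is None:
            self._columns = [row[0] for row in self.db_connector.query(f'''
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = '{self.table}'
            ''')]
        return self._columns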
Example #3
    def setup_database(self):
        """
        Provide throw-away database during the execution of the test case.

        The database instance is created from the template which was prepared
        in DatabaseTestSuite.prepare_database_template().
        """
        # Generate "unique" database name
        outer_db = os.getenv('POSTGRES_DB')
        os.environ['POSTGRES_DB'] = self.str_id
        self.addCleanup(os.environ.update, POSTGRES_DB=outer_db)
        # Create database
        _perform_query(f'''
                CREATE DATABASE {os.environ['POSTGRES_DB']}
                TEMPLATE {os.environ['POSTGRES_DB_TEMPLATE']}
            ''')
        # Instantiate connector
        self.db_connector = db_connector()

        # Register cleanup
        self.addCleanup(_perform_query,
                        f'DROP DATABASE {self.db_connector.database}')
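A hedged usage sketch for the fixture above, assuming setup_database() runs from the suite's setUp(), that the connector exposes execute() as in Example #1, and that fb_post has a post_id column (an illustrative assumption):

class ExamplePostsTest(DatabaseTestSuite):

    def test_clone_is_writable(self):
        # The throw-away clone already contains all migrated tables, so the
        # test can write to them without further setup; the database is
        # dropped again by the registered cleanup.
        self.db_connector.execute(
            "INSERT INTO fb_post (post_id) VALUES ('1')")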
Example #4
#!/usr/bin/env python3
"""
Truncate Facebook posts (#213).

We want to do this because in the past, we have collected a number of old
posts that are not necessarily authored by the museum itself. To avoid wrong
is_from_museum attributions, we drop all posts; those that are indeed by the
museum will be fetched again automatically (see #184).
"""

import logging
import os
import subprocess as sp
import sys

from _utils import db_connector, logger

CONNECTOR = db_connector()

logging.basicConfig(level=logging.INFO)

REFERENCING_TABLES = ['fb_post_comment', 'fb_post_performance']

# Are there any existing data to preserve?
if not any(
        CONNECTOR.exists(f'SELECT * FROM {table}')
        for table in REFERENCING_TABLES):
    # Nothing to preserve, get into the fast lane
    logger.info("Truncating fb_post in the fast line")
    CONNECTOR.execute('''
        TRUNCATE TABLE fb_post CASCADE
    ''')
    sys.exit(0)
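For context: the CASCADE above is required because a plain TRUNCATE fb_post would fail while fb_post_comment and fb_post_performance still hold foreign keys into fb_post; CASCADE truncates those referencing tables as well. An equivalent, more explicit variant of the fast lane, naming the tables instead of relying on CASCADE to find them:

    CONNECTOR.execute('''
        TRUNCATE TABLE fb_post, fb_post_comment, fb_post_performance
    ''')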
Example #5
    def __init__(self, *args, **kwargs):

        super().__init__(*args, **kwargs)
        self.db_connector = _utils.db_connector()
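Several of the examples obtain their connection via _utils.db_connector(). The real implementation is not part of this excerpt; what follows is a hypothetical minimal stand-in, assuming the factory reads the same POSTGRES_* environment variables used in Examples #3 and #4 (the POSTGRES_HOST/USER/PASSWORD names are assumptions):

import os

import psycopg2

class DbConnector:
    """Hypothetical sketch, not the project's actual implementation."""

    def __init__(self, database=None):
        self.host = os.environ['POSTGRES_HOST']
        self.database = database or os.environ['POSTGRES_DB']
        self.user = os.environ['POSTGRES_USER']
        self.password = os.environ['POSTGRES_PASSWORD']

    def execute(self, query):
        # One short-lived autocommit connection per statement, so that DDL
        # such as CREATE/ALTER DATABASE works outside a transaction block.
        conn = psycopg2.connect(
            host=self.host, dbname=self.database,
            user=self.user, password=self.password)
        try:
            conn.autocommit = True
            with conn.cursor() as cur:
                cur.execute(query)
        finally:
            conn.close()

def db_connector(database=None):
    return DbConnector(database)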