Example #1
def build_topic_mute_checker(
    cursor: CursorWrapper, user_profile: UserProfile
) -> Callable[[int, str], bool]:
    """
    This function is similar to the function of the same name
    in zerver/lib/topic_mutes.py, but it works without the ORM,
    so that we can use it in migrations.
    """
    query = SQL(
        """
        SELECT
            recipient_id,
            topic_name
        FROM
            zerver_usertopic
        WHERE
            user_profile_id = %s
    """
    )
    cursor.execute(query, [user_profile.id])
    rows = cursor.fetchall()

    tups = {(recipient_id, topic_name.lower()) for (recipient_id, topic_name) in rows}

    def is_muted(recipient_id: int, topic: str) -> bool:
        return (recipient_id, topic.lower()) in tups

    return is_muted
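
A minimal usage sketch for the checker above; the helper name, recipient id, and topic string are illustrative, not taken from the original file. connection.cursor() returns a CursorWrapper, so it can be passed straight in:

from django.db import connection


def example_mute_check(user_profile: UserProfile) -> bool:
    with connection.cursor() as cursor:
        is_muted = build_topic_mute_checker(cursor, user_profile)
        # Topic matching is case-insensitive, mirroring the .lower() above.
        return is_muted(17, "Lunch Plans")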
Example #2
    def _quote_values(self, connection: Connection, cursor: Cursor,
                      values: list[Any]) -> list[str]:
        """ Quote a list of values for safe inclusion in another SQL statement. """

        # make a statement SELECT quote(%s), quote(%s), ... for each of the values
        sql = "SELECT {}".format(", ".join(["quote(%s)"] * len(values)))
        cursor.execute(sql, values)
        # fetchone() returns a tuple; convert to match the list[str] annotation
        return list(cursor.fetchone())
Example #3
def migrate_data(cursor: CursorWrapper):
    cursor.execute("""
        insert into video_progress (registration_id, category_id)
        select r.id, c.id
        from registrations as r
        inner join course co on co.id = r.course_id
        inner join categories c on c.id = co.category_id;
        """)
Example #4
def update_unread_flags(cursor: CursorWrapper,
                        user_message_ids: List[int]) -> None:
    query = SQL("""
        UPDATE zerver_usermessage
        SET flags = flags | 1
        WHERE id IN %(user_message_ids)s
    """)

    cursor.execute(query, {"user_message_ids": tuple(user_message_ids)})
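
A hedged usage sketch for update_unread_flags; the wrapper function, guard, and connection import are additions for illustration. psycopg2 adapts the Python tuple into a parenthesized list for the IN clause, but an empty tuple would render as IN (), which is invalid SQL:

from typing import List

from django.db import connection


def mark_as_read(user_message_ids: List[int]) -> None:
    if not user_message_ids:
        return  # avoid generating "IN ()", which PostgreSQL rejects
    with connection.cursor() as cursor:
        update_unread_flags(cursor, user_message_ids)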
Example #5
    def remove(self, using: CursorWrapper):
        chunk_size = self._find_ideal_chunk_size()
        using.execute(
            '''
            DELETE FROM `performance_datum`
            WHERE push_timestamp <= %s
            LIMIT %s
            ''',
            [self._max_timestamp, chunk_size],
        )
Example #6
def copy_from_temp_table(cursor: CursorWrapper):
    cursor.execute('''
        INSERT INTO backend_datapoint(plant_id, datetime_generated, energy_expected,
        energy_observed, irradiation_expected, irradiation_observed)
        SELECT td.plant_id, td.datetime_generated, td.energy_expected, td.energy_observed,
        td.irradiation_expected, td.irradiation_observed
        FROM temp_datapoint td
        ON CONFLICT(plant_id, datetime_generated) DO UPDATE SET
            energy_expected = EXCLUDED.energy_expected, energy_observed = EXCLUDED.energy_observed,
            irradiation_expected = EXCLUDED.irradiation_expected, irradiation_observed = EXCLUDED.irradiation_observed
        ''')
Example #7
from contextlib import contextmanager

@contextmanager
def create_destroy_datapoint_table(cursor: CursorWrapper):
    """Context manager for creating and dropping temp tables"""
    cursor.execute('''
        DROP TABLE IF EXISTS temp_datapoint;

        CREATE TEMPORARY TABLE temp_datapoint AS
        SELECT * FROM backend_datapoint LIMIT 0;
        ''')
    try:
        yield
    finally:
        cursor.execute('''
            DROP TABLE IF EXISTS temp_datapoint;
            ''')
Example #8
def populate_temp_table(cursor: CursorWrapper,
                        datapoints: List[DatapointModel]):
    def generate_rows_from_datapoints():
        for datapoint in datapoints:
            yield (str(datapoint.plant_id), datapoint.datetime_generated,
                   datapoint.energy_expected, datapoint.energy_observed,
                   datapoint.irradiation_expected,
                   datapoint.irradiation_observed)

    tsv_file = create_tsv_file(generate_rows_from_datapoints())
    cursor.copy_from(tsv_file,
                     'temp_datapoint',
                     columns=('plant_id', 'datetime_generated',
                              'energy_expected', 'energy_observed',
                              'irradiation_expected', 'irradiation_observed'))
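
A sketch of how the temp-table helpers in Examples #6-#8 could be combined; the wrapper function, transaction handling, and typing imports are assumptions, not part of the original snippets:

from typing import List

from django.db import connection, transaction


def upsert_datapoints(datapoints: List[DatapointModel]) -> None:
    with transaction.atomic(), connection.cursor() as cursor:
        with create_destroy_datapoint_table(cursor):
            populate_temp_table(cursor, datapoints)  # bulk-load rows via COPY
            copy_from_temp_table(cursor)             # upsert into backend_datapoint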
Example #9
    def remove(self, using: CursorWrapper):
        chunk_size = self._find_ideal_chunk_size()

        using.execute(
            '''
                DELETE FROM `performance_datum`
                WHERE (repository_id NOT IN %s) AND push_timestamp <= %s
                LIMIT %s
            ''',
            [
                tuple(self.relevant_repositories),
                self._max_timestamp,
                chunk_size,
            ],
        )
Example #10
    def remove(self, using: CursorWrapper):
        chunk_size = self._find_ideal_chunk_size()

        # Django's queryset API doesn't support MySQL's
        # DELETE statements with LIMIT constructs,
        # even though this database is capable of doing that.
        #
        # If this support is ever added to Django, replace
        # the raw SQL below with equivalent queryset calls.
        using.execute(
            '''
            DELETE FROM `performance_datum`
            WHERE push_timestamp <= %s
            LIMIT %s
        ''',
            [self._max_timestamp, chunk_size],
        )
Example #11
    def exists(self, connection: Connection, cursor: Cursor) -> bool:
        """ Checks if this view exists """

        exists_sql, exists_params = self._exists_sql(connection)
        the_view = self._execute(connection, cursor, exists_sql, exists_params)

        # check if the value returned is not None
        return cursor.fetchone() is not None
Example #12
    def __attempt_remove(self, using: CursorWrapper):
        # Django's queryset API doesn't support MySQL's
        # DELETE statements with LIMIT constructs,
        # even though this database is capable of doing that.
        #
        # If this support is ever added to Django, replace
        # the raw SQL below with equivalent queryset calls.
        using.execute(
            '''
                DELETE FROM `performance_datum`
                WHERE repository_id = %s AND signature_id = %s AND push_timestamp <= %s
                LIMIT %s
            ''',
            [
                self.target_signature.repository_id,
                self.target_signature.id,
                self._max_timestamp,
                self._chunk_size,
            ],
        )
Example #13
        def ensure_connection(self):
            if self.connection is not None:
                try:
                    with CursorWrapper(self.create_cursor(), self) as cursor:
                        cursor.execute('SELECT 1;')
                    return
                except Exception:
                    pass

            with self.wrap_database_errors:
                self.connect()
Example #14
def prepare_env():
    # Update the default settings environment variable based on current mode.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
    # Hide DeprecationWarnings when running in production.  Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings

    if not settings.DEBUG:  # pragma: no cover
        warnings.simplefilter('ignore', DeprecationWarning)
    # Monkeypatch Django find_commands to also work with .pyc files.
    import django.core.management

    django.core.management.find_commands = find_commands

    # Monkeypatch Oauth2 toolkit settings class to check for settings
    # in django.conf settings each time, not just once during import
    import oauth2_provider.settings

    oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute

    # Use the AWX_TEST_DATABASE_* environment variables to specify the test
    # database settings to use when management command is run as an external
    # program via unit tests.
    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST',
                'PORT'):  # pragma: no cover
        if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
            settings.DATABASES['default'][opt] = os.environ[
                'AWX_TEST_DATABASE_%s' % opt]
    # Disable capturing all SQL queries in memory when in DEBUG mode.
    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
        from django.db.backends.base.base import BaseDatabaseWrapper
        from django.db.backends.utils import CursorWrapper

        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(
            cursor, self)

    # Use the default devserver addr/port defined in settings for runserver.
    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
    from django.core.management.commands import runserver as core_runserver

    original_handle = core_runserver.Command.handle

    def handle(self, *args, **options):
        if not options.get('addrport'):
            options['addrport'] = '%s:%d' % (default_addr, int(default_port))
        elif options.get('addrport').isdigit():
            options['addrport'] = '%s:%d' % (default_addr,
                                             int(options['addrport']))
        return original_handle(self, *args, **options)

    core_runserver.Command.handle = handle
Example #15
    def _reconfigureLogging(self):
        # Reconfigure the logging based on the debug mode of Django.
        from django.conf import settings
        if settings.DEBUG:
            # In debug mode, force logging to debug mode.
            logger.set_verbosity(3)

            # When not in the developer environment, patch Django not to
            # use the debug cursor; otherwise Django will keep every SQL
            # query made in memory.
            from provisioningserver.config import is_dev_environment
            if not is_dev_environment():
                from django.db.backends.base import base
                from django.db.backends.utils import CursorWrapper
                base.BaseDatabaseWrapper.make_debug_cursor = (
                    lambda self, cursor: CursorWrapper(cursor, self))
Example #16
def do_batch_update(
    cursor: CursorWrapper,
    table: str,
    assignments: List[Composable],
    batch_size: int = 10000,
    sleep: float = 0.1,
) -> None:  # nocoverage
    # The string substitution below is complicated by our need to
    # support multiple PostgreSQL versions.
    stmt = SQL("""
        UPDATE {}
        SET {}
        WHERE id >= %s AND id < %s
    """).format(
        Identifier(table),
        SQL(", ").join(assignments),
    )

    cursor.execute(
        SQL("SELECT MIN(id), MAX(id) FROM {}").format(Identifier(table)))
    (min_id, max_id) = cursor.fetchone()
    if min_id is None:
        return

    print(f"\n    Range of rows to update: [{min_id}, {max_id}]")
    while min_id <= max_id:
        lower = min_id
        upper = min_id + batch_size
        print(f"    Updating range [{lower},{upper})")
        cursor.execute(stmt, [lower, upper])

        min_id = upper
        time.sleep(sleep)

        # Once we've finished, check if any new rows were inserted to the table
        if min_id > max_id:
            cursor.execute(
                SQL("SELECT MAX(id) FROM {}").format(Identifier(table)))
            (max_id, ) = cursor.fetchone()

    print("    Finishing...", end="")
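
A hedged usage sketch for do_batch_update; the column name, literal value, and table are made up for illustration. The assignments are psycopg2 sql Composables, matching the List[Composable] signature above:

from django.db import connection
from psycopg2.sql import SQL, Identifier, Literal

assignments = [
    SQL("{} = {}").format(Identifier("flags"), Literal(0)),
]
with connection.cursor() as cursor:
    do_batch_update(cursor, "zerver_usermessage", assignments, batch_size=5000)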
Example #17
def prepare_env():
    # Hide DeprecationWarnings when running in production.  Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings
    if not settings.DEBUG:
        warnings.simplefilter('ignore', DeprecationWarning)
    # Monkeypatch Django find_commands to also work with .pyc files.
    import django.core.management
    django.core.management.find_commands = find_commands
    # Fixup sys.modules reference to django.utils.six to allow jsonfield to
    # work when using Django 1.4.
    import django.utils
    try:
        import django.utils.six
    except ImportError:
        import six
        sys.modules['django.utils.six'] = sys.modules['six']
        django.utils.six = sys.modules['django.utils.six']
        from django.utils import six # noqa
    # Disable capturing all SQL queries in memory when in DEBUG mode.
    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
        from django.db.backends.base.base import BaseDatabaseWrapper as b
        from django.db.backends.utils import CursorWrapper
        b.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)

    # Use the default devserver addr/port defined in settings for runserver.
    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
    from django.core.management.commands import runserver as core_runserver
    original_handle = core_runserver.Command.handle

    def handle(self, *args, **options):
        if not options.get('addrport'):
            options['addrport'] = '%s:%d' % (default_addr, int(default_port))
        elif options.get('addrport').isdigit():
            options['addrport'] = '%s:%d' % (
                default_addr,
                int(options['addrport'])
            )
        return original_handle(self, *args, **options)

    core_runserver.Command.handle = handle
Example #18
    def _execute(self,
                 connection: Connection,
                 cursor: Cursor,
                 sql: str,
                 args: list[Any],
                 force_manual_escape: bool = False) -> None:
        if force_manual_escape:
            # in a manual escape situation, use the quote() SQL function to escape each parameter
            escape_args = self._quote_values(connection, cursor, args)

            # split the query on the placeholders and splice in the escaped values
            split_query = sql.split("%s")
            substituted_query_parts = [
                query_part + escaped_arg
                for query_part, escaped_arg in zip(split_query[:-1], escape_args)
            ]
            sql = "".join(substituted_query_parts) + split_query[-1]

            # the values have been substituted in, so no arguments are needed any more
            args = []

        # execute using the vanilla cursor
        return cursor.execute(sql, args)
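
The splicing step above can be illustrated without a database; the quoted strings below stand in for what _quote_values would return:

sql = "SELECT * FROM t WHERE a = %s AND b = %s"
escaped = ["'foo'", "42"]
parts = sql.split("%s")
spliced = "".join(p + v for p, v in zip(parts[:-1], escaped)) + parts[-1]
assert spliced == "SELECT * FROM t WHERE a = 'foo' AND b = 42"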
Example #19
import re
import pprint
import logging
import pytz

from django.conf import settings

# Set up logging
logger = logging.getLogger('loaders')
logger.setLevel(logging.ERROR)

# When settings.DEBUG is True Django will fill up a hash with stats on every insert done to the database.
# "Monkey patch" the CursorWrapper to prevent this.  Otherwise we can't load large amounts of data.
# See http://stackoverflow.com/questions/7768027/turn-off-sql-logging-while-keeping-settings-debug
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper

if settings.DEBUG:
    BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(
        cursor, self)


class ClosestTimeNotFoundException(Exception):
    pass


class SingleActivityNotFound(Exception):
    pass


def get_closest_instantpoint(aName, tv, dbAlias):
    '''
        Start with a tolerance of 1 second and double it until we get a non-zero count,
        get the values and find the closest one by finding the one with minimum absolute difference.
        '''
Example #20
File: settings.py  Project: huzekang/hue
    def disable_database_logging():
        from django.db.backends.base.base import BaseDatabaseWrapper
        from django.db.backends.utils import CursorWrapper

        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(
            cursor, self)
Example #21
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
    "Return all rows from a cursor as a list of dicts."
    desc = cursor.description
    return [
        dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()
    ]
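
A short usage sketch for dictfetchall; the query and column filter are hypothetical:

from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("SELECT id, email FROM auth_user WHERE is_active = %s", [True])
    rows = dictfetchall(cursor)
    # rows looks like [{'id': 1, 'email': 'alice@example.com'}, ...]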