コード例 #1
0
ファイル: collectors.py プロジェクト: fsspnso54/databaser
    async def _prepare_content_type_tables(self):
        """
        Build the mapping from destination table name to source content_type_id.

        Fetches content-type rows from both databases, joins them on the
        (app_label, model) pair, and fills ``self.content_type_table`` with
        ``table_name -> content_type_id``.
        """
        logger.info("prepare content type tables")

        # (app_label, model) -> table_name, taken from the destination DB.
        content_type_table_list = await self._dst_database.fetch_raw_sql(
            SQLRepository.get_content_type_table_sql()
        )

        content_type_table_dict = {
            (app_label, model): table_name
            for table_name, app_label, model in content_type_table_list
        }

        # (app_label, model) -> content_type_id, taken from the source DB.
        content_type_list = await self._src_database.fetch_raw_sql(
            SQLRepository.get_content_type_sql()
        )

        content_type_dict = {
            (app_label, model): content_type_id
            for content_type_id, app_label, model in content_type_list
        }

        # Join the two mappings on (app_label, model). Iterating .items()
        # avoids a second dict lookup per key; .get() avoids a hard KeyError
        # when a content type exists in the destination but not in the source
        # — such entries are skipped with a warning instead of crashing.
        for key, table_name in content_type_table_dict.items():
            content_type_id = content_type_dict.get(key)

            if content_type_id is None:
                logger.warning(
                    f"content type {key} not found in source database"
                )
                continue

            self.content_type_table[table_name] = content_type_id
コード例 #2
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def set_max_sequence(self, dst_pool: Pool):
        """
        Advance the table's serial sequence well past the transferred rows.
        """
        async with dst_pool.acquire() as connection:
            try:
                serial_sequence_sql = SQLRepository.get_serial_sequence_sql(
                    table_name=self.name,
                    pk_column_name=self.primary_key.name,
                )
            except AttributeError:
                # Table lacks the expected primary-key attribute; skip it.
                logger.warning(
                    f'AttributeError --- {self.name} --- set_max_sequence')
                return

            sequence_row = await connection.fetchrow(serial_sequence_sql)

            # No serial sequence attached to this table — nothing to update.
            if not (sequence_row and sequence_row[0]):
                return

            sequence_name = sequence_row[0]

            # Leave generous headroom above the current max primary key.
            new_sequence_value = self.max_pk + 100000

            set_sequence_sql = SQLRepository.get_set_sequence_value_sql(
                seq_name=sequence_name,
                seq_val=new_sequence_value,
            )

            await connection.execute(set_sequence_sql)
コード例 #3
0
    async def enable(self):
        """
        Enable FDW access from the destination to the source database.

        Creates the FDW extension, a foreign server pointing at the source
        database, a user mapping, a temporary source schema, and imports the
        destination's table set as foreign tables.
        """
        create_fdw_extension_sql = SQLRepository.get_create_fdw_extension_sql()

        create_server_sql = SQLRepository.get_create_server_sql(
            src_host=self._src_database.db_connection_parameters.host,
            src_port=self._src_database.db_connection_parameters.port,
            src_dbname=self._src_database.db_connection_parameters.dbname,
        )

        create_user_mapping_sql = SQLRepository.get_create_user_mapping_sql(
            dst_user=self._dst_database.db_connection_parameters.user,
            src_user=self._src_database.db_connection_parameters.user,
            src_password=self._src_database.db_connection_parameters.password,
        )

        create_temp_src_schema_sql = (
            SQLRepository.get_create_temp_src_schema_sql(
                dst_user=self._dst_database.db_connection_parameters.user,
            )
        )

        import_foreign_schema_sql = SQLRepository.get_import_foreign_schema_sql(
            src_schema=self._src_database.db_connection_parameters.schema,
            tables=self._dst_database.table_names,
        )

        async with self._dst_pool.acquire() as connection:
            # Each statement depends on the previous one, so they run
            # sequentially via plain awaits. The previous
            # `await asyncio.wait([connection.execute(...)])` form is
            # deprecated for bare coroutines since Python 3.8 (an error in
            # 3.11+) and silently swallowed any database error.
            await connection.execute(create_fdw_extension_sql)
            await connection.execute(create_server_sql)
            await connection.execute(create_user_mapping_sql)
            await connection.execute(create_temp_src_schema_sql)
            await connection.execute(import_foreign_schema_sql)
コード例 #4
0
    async def disable(self):
        """
        Tear down the FDW setup: drop the temp schema, user mapping, and
        FDW extension from the destination database.
        """
        drop_temp_src_schema_sql = SQLRepository.get_drop_temp_src_schema_sql()
        drop_user_mapping_sql = SQLRepository.get_drop_user_mapping_sql(
            dst_user=self._dst_database.db_connection_parameters.user,
        )
        drop_fdw_extension_sql = SQLRepository.get_drop_fdw_extension_sql()

        async with self._dst_pool.acquire() as connection:
            # Sequential plain awaits: `asyncio.wait([coroutine])` is
            # deprecated since Python 3.8 (an error in 3.11+) and hid any
            # database error raised by the drop statements.
            await connection.execute(drop_temp_src_schema_sql)
            await connection.execute(drop_user_mapping_sql)
            await connection.execute(drop_fdw_extension_sql)
コード例 #5
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def truncate_tables(self):
        """
        Truncating tables
        """
        if not settings.IS_TRUNCATE_TABLES:
            return

        logger.info('start truncating tables..')

        if settings.TABLES_TRUNCATE_INCLUDED:
            # An explicit include list wins over the default table set.
            table_names = settings.TABLES_TRUNCATE_INCLUDED
        else:
            table_names = tuple(
                table_name
                for table_name in self.table_names
                if table_name not in settings.TABLES_WITH_GENERIC_FOREIGN_KEY
            )

        if settings.TABLES_TRUNCATE_EXCLUDED:
            table_names = tuple(
                table_name
                for table_name in table_names
                if table_name not in settings.TABLES_TRUNCATE_EXCLUDED
            )

        truncate_table_queries = SQLRepository.get_truncate_table_queries(
            table_names=table_names,
        )

        for query in truncate_table_queries:
            await self.execute_raw_sql(query)

        logger.info('truncating tables finished.')
コード例 #6
0
ファイル: transporters.py プロジェクト: fsspnso54/databaser
    async def _transfer_chunk_table_data(
        self,
        table: DBTable,
        need_import_ids_chunk: List[Union[int, str]],
    ):
        """
        Transfer one chunk of table records to the destination database.

        :param table: table whose rows are being transferred
        :param need_import_ids_chunk: primary keys of the rows in this chunk
        :raises PostgresError: wraps asyncpg column/constraint/syntax errors
            with the table name and the failing SQL attached
        """
        transfer_sql = SQLRepository.get_transfer_records_sql(
            table=table,
            connection_params_str=self._src_database.connection_str,
            primary_key_ids=need_import_ids_chunk,
        )

        logger.info(f'transfer chunk table data - {table.name}')

        transferred_ids = None
        async with self._dst_database.connection_pool.acquire() as connection:
            try:
                transferred_ids = await connection.fetch(transfer_sql)
            except (
                    UndefinedColumnError,
                    NotNullViolationError,
                    PostgresSyntaxError,
            ) as e:
                # Chain the original asyncpg error so its traceback survives
                # (the previous code raised without `from e`, losing it).
                raise PostgresError(
                    f'{str(e)}, table - {table.name}, '
                    f'sql - {transfer_sql} --- _transfer_chunk_table_data'
                ) from e

        if transferred_ids:
            # fetch() returns Record rows; the first column is the PK.
            table.transferred_pks.update(row[0] for row in transferred_ids)
コード例 #7
0
ファイル: managers.py プロジェクト: fsspnso54/databaser
    async def _set_table_counters(self, table_name: str):
        """
        Filling table max pk and count of records
        """
        async with self._src_database.connection_pool.acquire() as connection:
            table = self._dst_database.tables[table_name]

            try:
                count_table_records_sql = (
                    SQLRepository.get_count_table_records(
                        primary_key=table.primary_key,
                    )
                )
            except AttributeError as e:
                logger.warning(f'{str(e)} --- _set_table_counters {"-"*10} - '
                               f"{table.name}")
                # Bare `raise` re-raises the caught exception, preserving its
                # message and traceback (the old `raise AttributeError`
                # replaced it with an empty, context-free exception).
                raise
            # NOTE: the former `except UndefinedFunctionError: raise
            # UndefinedFunctionError` clause re-raised a *new* argument-less
            # exception; simply letting it propagate is equivalent for
            # callers and keeps the original details.

            res = await connection.fetchrow(count_table_records_sql)

            # NOTE(review): a table whose count or max pk is 0 is skipped by
            # this truthiness check — presumably intentional; confirm.
            if res and res[0] and res[1]:
                logger.debug(f"table {table_name} with full count {res[0]}, "
                             f"max pk - {res[1]}")

                table.full_count = int(res[0])

                # Non-integer PKs (e.g. uuid/varchar) get a synthetic max pk
                # comfortably above the record count.
                table.max_pk = (int(res[1]) if isinstance(res[1], int) else
                                table.full_count + 100000)
コード例 #8
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def enable_triggers(self):
        """
        Enable database triggers
        """
        sql = SQLRepository.get_enable_triggers_sql()
        await self.execute_raw_sql(sql)

        logger.info('triggers enabled.')
コード例 #9
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def disable_triggers(self):
        """
        Disable database triggers
        """
        sql = SQLRepository.get_disable_triggers_sql()
        await self.execute_raw_sql(sql)

        logger.info('trigger disabled.')
コード例 #10
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def prepare_table_names(self):
        """
        Preparing database table names list
        """
        tables_sql = SQLRepository.get_select_tables_names_list_sql(  # noqa
            excluded_tables=settings.EXCLUDED_TABLES,
        )

        async with self._connection_pool.acquire() as connection:
            records = await connection.fetch(query=tables_sql)

        # Each record holds the table name in its first column.
        self.table_names = [record[0] for record in records]
コード例 #11
0
ファイル: db_entities.py プロジェクト: fsspnso54/databaser
    async def _prepare_chunk_tables(
        self,
        chunk_table_names: Iterable[str],
    ):
        """
        Preparing tables of chunk table names
        """
        columns_sql = SQLRepository.get_table_columns_sql(
            table_names=make_str_from_iterable(
                iterable=chunk_table_names,
                with_quotes=True,
                quote='\'',
            ),
        )

        async with self._connection_pool.acquire() as connection:
            column_records = await connection.fetch(query=columns_sql)

        # One append_column coroutine per fetched column row.
        append_coroutines = []
        for record in column_records:
            (
                table_name,
                column_name,
                data_type,
                ordinal_position,
                constraint_table_name,
                constraint_type,
            ) = record

            append_coroutines.append(
                self.tables[table_name].append_column(
                    column_name=column_name,
                    data_type=data_type,
                    ordinal_position=ordinal_position,
                    constraint_table=self.tables.get(constraint_table_name),
                    constraint_type=constraint_type,
                )
            )

        if append_coroutines:
            await asyncio.gather(*append_coroutines)

        self.clear_cache()