Example #1
0
    def test_import_postgres_db(self):
        """Round-trip the postgres DB through dump/import and verify the
        user table is restored, both single- and multi-threaded."""
        # create a user and confirm it is the only one
        db_user.create(1, 'test_user')
        self.assertEqual(db_user.get_user_count(), 1)

        # dump the database, then wipe it clean
        private_dump, public_dump = db_dump.dump_postgres_db(self.tempdir)
        self.reset_db()
        self.assertEqual(db_user.get_user_count(), 0)

        # importing the dump should bring the user back
        db_dump.import_postgres_dump(private_dump, public_dump)
        self.assertEqual(db_user.get_user_count(), 1)

        # wipe again and repeat the import with extra decompression threads
        self.reset_db()
        self.assertEqual(db_user.get_user_count(), 0)

        db_dump.import_postgres_dump(private_dump, public_dump, threads=2)
        self.assertEqual(db_user.get_user_count(), 1)
Example #2
0
    def test_dump_recording_feedback(self):
        """Dump and re-import the DB and check that a recording feedback
        row survives the round trip, with one and with two import threads."""
        with self.app.app_context():
            # create a user and confirm it is the only one
            one_id = db_user.create(1, 'test_user')
            self.assertEqual(db_user.get_user_count(), 1)

            # insert a feedback record for that user
            feedback = Feedback(
                user_id=one_id,
                recording_msid="d23f4719-9212-49f0-ad08-ddbfbfc50d6f",
                score=1)
            db_feedback.insert(feedback)

            def check_feedback_restored():
                # fetch the (single) feedback row and compare it field by
                # field against the record we originally inserted
                rows = db_feedback.get_feedback_for_user(user_id=one_id,
                                                         limit=1,
                                                         offset=0)
                self.assertEqual(len(rows), 1)
                self.assertEqual(rows[0].user_id, feedback.user_id)
                self.assertEqual(rows[0].recording_msid,
                                 feedback.recording_msid)
                self.assertEqual(rows[0].score, feedback.score)

            # dump the database, then wipe it clean
            private_dump, private_ts_dump, public_dump, public_ts_dump = db_dump.dump_postgres_db(
                self.tempdir)
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)
            self.assertEqual(
                db_feedback.get_feedback_count_for_user(user_id=one_id), 0)

            # import the dump and check both the user and the feedback
            # records came back
            db_dump.import_postgres_dump(private_dump, None, public_dump, None)
            self.assertEqual(db_user.get_user_count(), 1)
            check_feedback_restored()

            # wipe again and repeat the import with extra decompression threads
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)

            db_dump.import_postgres_dump(private_dump,
                                         None,
                                         public_dump,
                                         None,
                                         threads=2)
            self.assertEqual(db_user.get_user_count(), 1)
            check_feedback_restored()
Example #3
0
    def test_import_postgres_db(self):
        """Round-trip the postgres DB through dump/import and verify the
        user table is restored, both single- and multi-threaded."""
        with self.app.app_context():
            # create a user and confirm it is the only one
            one_id = db_user.create(1, 'test_user')
            self.assertEqual(db_user.get_user_count(), 1)

            # dump the database, then wipe it clean
            private_dump, private_ts_dump, public_dump, public_ts_dump = db_dump.dump_postgres_db(
                self.tempdir)
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)

            # importing the dump should bring the user back
            db_dump.import_postgres_dump(private_dump, None, public_dump, None)
            self.assertEqual(db_user.get_user_count(), 1)

            # wipe again and repeat the import with extra decompression threads
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)

            db_dump.import_postgres_dump(private_dump,
                                         None,
                                         public_dump,
                                         None,
                                         threads=2)
            self.assertEqual(db_user.get_user_count(), 1)

            # ids handed out after the import must keep increasing
            two_id = db_user.create(2, 'vnskprk')
            self.assertGreater(two_id, one_id)
    def test_import_postgres_db(self):
        """Dump, reset and re-import the DB; the user table must be
        restored with both one and two decompression threads."""
        # create a user and confirm it is the only one
        db_user.create('test_user')
        self.assertEqual(db_user.get_user_count(), 1)

        # dump the database, then wipe it clean
        location = db_dump.dump_postgres_db(self.tempdir)
        self.reset_db()
        self.assertEqual(db_user.get_user_count(), 0)

        # importing the dump should bring the user back
        db_dump.import_postgres_dump(location)
        self.assertEqual(db_user.get_user_count(), 1)

        # wipe again and repeat the import with two threads
        self.reset_db()
        self.assertEqual(db_user.get_user_count(), 0)

        db_dump.import_postgres_dump(location, threads=2)
        self.assertEqual(db_user.get_user_count(), 1)
    def test_import_postgres_db(self):
        """Round-trip the postgres DB through dump/import and verify the
        user table is restored, both single- and multi-threaded."""
        with self.app.app_context():
            # create a user and confirm it is the only one
            one_id = db_user.create(1, 'test_user')
            self.assertEqual(db_user.get_user_count(), 1)

            # dump the database, then wipe it clean
            private_dump, public_dump = db_dump.dump_postgres_db(self.tempdir)
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)

            # importing the dump should bring the user back
            db_dump.import_postgres_dump(private_dump, public_dump)
            self.assertEqual(db_user.get_user_count(), 1)

            # wipe again and repeat the import with extra decompression threads
            self.reset_db()
            self.assertEqual(db_user.get_user_count(), 0)

            db_dump.import_postgres_dump(private_dump, public_dump, threads=2)
            self.assertEqual(db_user.get_user_count(), 1)

            # ids handed out after the import must keep increasing
            two_id = db_user.create(2, 'vnskprk')
            self.assertGreater(two_id, one_id)
Example #6
0
def import_dump(private_archive, public_archive, listen_archive, threads):
    """ Import a ListenBrainz dump into the database.

        Note: The private db dump is imported first, then the public db dump.
            When no private dump is supplied, sanitized versions of the user
            table from the public dump are imported instead so that foreign
            key constraints are satisfied.

        The listen dump is imported afterwards.

        Args:
            private_archive (str): the path to the ListenBrainz private dump to be imported
            public_archive (str): the path to the ListenBrainz public dump to be imported
            listen_archive (str): the path to the ListenBrainz listen dump archive to be imported
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # bail out early when no archive path at all was given
    if not any((private_archive, public_archive, listen_archive)):
        print('You need to enter a path to the archive(s) to import!')
        sys.exit(1)

    flask_app = create_app()
    with flask_app.app_context():
        db_dump.import_postgres_dump(private_archive, public_archive, threads)

        from listenbrainz.webserver.timescale_connection import _ts as ls
        try:
            ls.import_listens_dump(listen_archive, threads)
        except psycopg2.OperationalError as e:
            current_app.logger.critical(
                'OperationalError while trying to import data: %s', str(e), exc_info=True)
            raise
        except IOError as e:
            current_app.logger.critical(
                'IOError while trying to import data: %s', str(e), exc_info=True)
            raise
        except Exception as e:
            current_app.logger.critical(
                'Unexpected error while importing data: %s', str(e), exc_info=True)
            raise
Example #7
0
def import_dump(private_archive, public_archive, listen_archive, threads):
    """ Import a ListenBrainz dump into the database.

        Note: The private db dump is imported first, then the public db dump.
            When no private dump is supplied, sanitized versions of the user
            table from the public dump are imported instead so that foreign
            key constraints are satisfied.

        The listen dump is imported afterwards.

        Args:
            private_archive (str): the path to the ListenBrainz private dump to be imported
            public_archive (str): the path to the ListenBrainz public dump to be imported
            listen_archive (str): the path to the ListenBrainz listen dump archive to be imported
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # bail out early when no archive path at all was given
    if not any((private_archive, public_archive, listen_archive)):
        print('You need to enter a path to the archive(s) to import!')
        sys.exit(1)

    flask_app = create_app()
    with flask_app.app_context():
        db_dump.import_postgres_dump(private_archive, public_archive, threads)

        # collect the redis/influx settings out of the app config
        influx_settings = {
            key: current_app.config[key]
            for key in ('REDIS_HOST', 'REDIS_PORT', 'REDIS_NAMESPACE',
                        'INFLUX_HOST', 'INFLUX_PORT', 'INFLUX_DB_NAME')
        }
        ls = init_influx_connection(current_app.logger, influx_settings)

        try:
            ls.import_listens_dump(listen_archive, threads)
        except IOError as e:
            current_app.logger.critical(
                'IOError while trying to import data into Influx: %s', str(e), exc_info=True)
            raise
        except InfluxDBClientError as e:
            current_app.logger.critical(
                'Error while sending data to Influx: %s', str(e), exc_info=True)
            raise
        except InfluxDBServerError as e:
            current_app.logger.critical(
                'InfluxDB Server Error while importing data: %s', str(e), exc_info=True)
            raise
        except Exception as e:
            current_app.logger.critical(
                'Unexpected error while importing data: %s', str(e), exc_info=True)
            raise
def import_db(location, threads=None):
    """ Import a ListenBrainz PostgreSQL dump into the PostgreSQL database.

        Note: The private dump is imported first, then the statistics dump.
            When no private dump is present, sanitized versions of the user
            table from the statistics dump are imported instead so that
            foreign key constraints are satisfied.

        Args:
            location (str): path to the directory which contains the private and the stats dump
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # connect first, then hand the whole import off to db_dump
    database_uri = config.SQLALCHEMY_DATABASE_URI
    db.init_db_connection(database_uri)
    db_dump.import_postgres_dump(location, threads)
def import_dump(private_archive, public_archive, listen_archive, threads):
    """ Import a ListenBrainz dump into the database.

        Note: The private db dump is imported first, then the public db dump.
            When no private dump is supplied, sanitized versions of the user
            table from the public dump are imported instead so that foreign
            key constraints are satisfied.

        The listen dump is imported afterwards.

        Args:
            private_archive (str): the path to the ListenBrainz private dump to be imported
            public_archive (str): the path to the ListenBrainz public dump to be imported
            listen_archive (str): the path to the ListenBrainz listen dump archive to be imported
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # bail out early when no archive path at all was given
    if not (private_archive or public_archive or listen_archive):
        print('You need to enter a path to the archive(s) to import!')
        sys.exit(1)

    application = create_app()
    with application.app_context():
        db_dump.import_postgres_dump(private_archive, public_archive, threads)

        # redis/influx settings the listenstore connection needs
        connection_config = {
            'REDIS_HOST': current_app.config['REDIS_HOST'],
            'REDIS_PORT': current_app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': current_app.config['INFLUX_HOST'],
            'INFLUX_PORT': current_app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
        }
        ls = init_influx_connection(current_app.logger, connection_config)

        try:
            ls.import_listens_dump(listen_archive, threads)
        except IOError as e:
            current_app.logger.critical(
                'IOError while trying to import data into Influx: %s',
                str(e), exc_info=True)
            raise
        except InfluxDBClientError as e:
            current_app.logger.critical(
                'Error while sending data to Influx: %s',
                str(e), exc_info=True)
            raise
        except InfluxDBServerError as e:
            current_app.logger.critical(
                'InfluxDB Server Error while importing data: %s',
                str(e), exc_info=True)
            raise
        except Exception as e:
            current_app.logger.critical(
                'Unexpected error while importing data: %s',
                str(e), exc_info=True)
            raise