コード例 #1
0
    def setUp(self):
        """Create the Influx listen store, a fixture user, and seed listens."""
        super(TestAPICompatUserClass, self).setUp()
        self.log = logging.getLogger(__name__)
        # The listen store needs both Influx (listen data) and Redis
        # (listen counts) connection settings.
        self.logstore = init_influx_connection(
            self.log, {
                'INFLUX_HOST': config.INFLUX_HOST,
                'INFLUX_PORT': config.INFLUX_PORT,
                'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
                'REDIS_HOST': config.REDIS_HOST,
                'REDIS_PORT': config.REDIS_PORT,
            })

        # Create a user
        uid = db_user.create("test_api_compat_user")
        self.assertIsNotNone(db_user.get(uid))
        # Re-read the full row so the User wrapper gets the id, created,
        # musicbrainz_id and auth_token columns.  "user" must be quoted:
        # it is a reserved word in PostgreSQL.
        with db.engine.connect() as connection:
            result = connection.execute(
                text("""
                SELECT *
                  FROM "user"
                 WHERE id = :id
            """), {
                    "id": uid,
                })
            row = result.fetchone()
            self.user = User(row['id'], row['created'], row['musicbrainz_id'],
                             row['auth_token'])

        # Insert some listens
        date = datetime(2015, 9, 3, 0, 0, 0)
        self.log.info("Inserting test data...")
        test_data = generate_data(date, 100, self.user.name)
        self.logstore.insert(test_data)
        self.log.info("Test data inserted")
コード例 #2
0
def create(location, threads):
    """ Create a ListenBrainz data dump which includes a private dump, a statistics dump
        and a dump of the actual listens from InfluxDB

        Args:
            location (str): path to the directory where the dump should be made
            threads (int): the number of threads to be used while compression
    """
    app = create_app()
    with app.app_context():
        # The listen store needs both Influx (listen data) and Redis
        # (listen counts) connection settings.
        ls = init_influx_connection(current_app.logger,  {
            'REDIS_HOST': current_app.config['REDIS_HOST'],
            'REDIS_PORT': current_app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': current_app.config['INFLUX_HOST'],
            'INFLUX_PORT': current_app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
        })
        time_now = datetime.today()
        # Timestamp in the directory name keeps successive dumps apart.
        dump_path = os.path.join(location, 'listenbrainz-dump-{time}'.format(time=time_now.strftime('%Y%m%d-%H%M%S')))
        create_path(dump_path)
        db_dump.dump_postgres_db(dump_path, time_now, threads)
        ls.dump_listens(dump_path, time_now, threads)
        try:
            write_hashes(dump_path)
        except IOError as e:
            current_app.logger.error('Unable to create hash files! Error: %s', str(e), exc_info=True)
            return
        # Lazy %-style logger args instead of eager string interpolation,
        # matching the error call above.
        current_app.logger.info('Dumps created and hashes written at %s', dump_path)
コード例 #3
0
    def setUp(self):
        """Set up server + database fixtures, an Influx DB, and two users."""
        ServerTestCase.setUp(self)
        DatabaseTestCase.setUp(self)

        self.log = logging.getLogger(__name__)
        self.influx = InfluxDBClient(
            host=current_app.config['INFLUX_HOST'],
            port=current_app.config['INFLUX_PORT'],
            database=current_app.config['INFLUX_DB_NAME'],
        )

        # Make sure the test database exists before the listen store uses it.
        self.influx.query('''create database %s''' % current_app.config['INFLUX_DB_NAME'])

        self.logstore = init_influx_connection(self.log, {
            'REDIS_HOST': current_app.config['REDIS_HOST'],
            'REDIS_PORT': current_app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': current_app.config['INFLUX_HOST'],
            'INFLUX_PORT': current_app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
        })

        user = db_user.get_or_create(1, 'iliekcomputers')
        self.user = User.from_dbrow(user)

        # Name contains a backslash — presumably to exercise escaping in
        # queries/measurement names; confirm against the tests that use it.
        weirduser = db_user.get_or_create(2, 'weird\\user name')
        self.weirduser = User.from_dbrow(weirduser)
コード例 #4
0
def create(location, threads):
    """ Create a ListenBrainz data dump which includes a private dump, a statistics dump
        and a dump of the actual listens from InfluxDB

        Args:
            location (str): path to the directory where the dump should be made
            threads (int): the number of threads to be used while compression
    """
    db.init_db_connection(config.SQLALCHEMY_DATABASE_URI)
    # The listen store needs both Influx (listen data) and Redis
    # (listen counts) connection settings.
    ls = init_influx_connection(log,  {
        'REDIS_HOST': config.REDIS_HOST,
        'REDIS_PORT': config.REDIS_PORT,
        'REDIS_NAMESPACE': config.REDIS_NAMESPACE,
        'INFLUX_HOST': config.INFLUX_HOST,
        'INFLUX_PORT': config.INFLUX_PORT,
        'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
    })
    time_now = datetime.today()
    # Timestamp in the directory name keeps successive dumps apart.
    dump_path = os.path.join(location, 'listenbrainz-dump-{time}'.format(time=time_now.strftime('%Y%m%d-%H%M%S')))
    create_path(dump_path)
    db_dump.dump_postgres_db(dump_path, time_now, threads)
    ls.dump_listens(dump_path, time_now, threads)
    try:
        write_hashes(dump_path)
    except IOError as e:
        # Include the traceback so hash-write failures can be diagnosed.
        log.error('Unable to create hash files! Error: %s', str(e), exc_info=True)
        return
    # Lazy %-style logger args instead of eager string interpolation.
    log.info('Dumps created and hashes written at %s', dump_path)
コード例 #5
0
    def setUp(self):
        """Set up server + database fixtures, an Influx DB, and two users."""
        ServerTestCase.setUp(self)
        DatabaseTestCase.setUp(self)

        self.log = logging.getLogger(__name__)
        self.influx = InfluxDBClient(
            host=current_app.config['INFLUX_HOST'],
            port=current_app.config['INFLUX_PORT'],
            database=current_app.config['INFLUX_DB_NAME'],
        )

        # Make sure the test database exists before the listen store uses it.
        self.influx.query('''create database %s''' %
                          current_app.config['INFLUX_DB_NAME'])

        self.logstore = init_influx_connection(
            self.log, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })

        # GDPR agreement is recorded so views that require it don't redirect.
        # NOTE(review): inferred from the call name — confirm against the view
        # logic.
        user = db_user.get_or_create(1, 'iliekcomputers')
        db_user.agree_to_gdpr(user['musicbrainz_id'])
        self.user = User.from_dbrow(user)

        # Name contains a backslash — presumably to exercise escaping; confirm.
        weirduser = db_user.get_or_create(2, 'weird\\user name')
        self.weirduser = User.from_dbrow(weirduser)
コード例 #6
0
def create_spark_dump(location, threads):
    """Dump listens from InfluxDB in the Spark format and write hash files.

    Args:
        location (str): directory under which the dump directory is created
        threads (int): number of threads to use while compressing
    """
    app = create_app()
    with app.app_context():
        connection_settings = {
            'REDIS_HOST': current_app.config['REDIS_HOST'],
            'REDIS_PORT': current_app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': current_app.config['INFLUX_HOST'],
            'INFLUX_PORT': current_app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
        }
        listen_store = init_influx_connection(current_app.logger, connection_settings)

        now = datetime.today()
        dump_name = 'listenbrainz-spark-dump-{time}'.format(
            time=now.strftime('%Y%m%d-%H%M%S'))
        dump_path = os.path.join(location, dump_name)
        create_path(dump_path)

        listen_store.dump_listens(dump_path, now, threads, spark_format=True)

        try:
            write_hashes(dump_path)
        except IOError as e:
            current_app.logger.error('Unable to create hash files! Error: %s',
                                     str(e),
                                     exc_info=True)
            return
        current_app.logger.info('Dump created and hash written at %s',
                                dump_path)
コード例 #7
0
    def setUp(self):
        """Initialise the listen store and create the user the tests act on."""
        super(TestAPICompatUserClass, self).setUp()
        self.log = logging.getLogger(__name__)
        connection_settings = {
            'INFLUX_HOST': config.INFLUX_HOST,
            'INFLUX_PORT': config.INFLUX_PORT,
            'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
            'REDIS_HOST': config.REDIS_HOST,
            'REDIS_PORT': config.REDIS_PORT,
            'REDIS_NAMESPACE': config.REDIS_NAMESPACE,
        }
        self.logstore = init_influx_connection(self.log, connection_settings)

        # Create a fixture user and make sure it really exists.
        user_id = db_user.create(1, "test_api_compat_user")
        self.assertIsNotNone(db_user.get(user_id))

        # Read the row back so the User wrapper carries all its columns.
        query = text("""
                SELECT *
                  FROM "user"
                 WHERE id = :id
            """)
        with db.engine.connect() as connection:
            row = connection.execute(query, {"id": user_id}).fetchone()
            self.user = User(row['id'], row['created'], row['musicbrainz_id'], row['auth_token'])
コード例 #8
0
    def find_users(self):
        """Return the musicbrainz_ids of all users matching self.condition.

        Retries the user-list query up to CONNECTION_RETRY_COUNT times on
        DatabaseError before giving up.
        """
        with self.app.app_context():
            self.ls = init_influx_connection(
                current_app.logger, {
                    'REDIS_HOST': current_app.config['REDIS_HOST'],
                    'REDIS_PORT': current_app.config['REDIS_PORT'],
                    'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                    'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                    'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                    'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
                })

            # Bounded retry loop: the for/else branch runs only when every
            # attempt failed (i.e. we never hit the break).
            for _ in range(CONNECTION_RETRY_COUNT):
                try:
                    users = db_user.get_all_users()
                    break
                except DatabaseError as e:
                    current_app.logger.error(
                        'Error while getting users list: %s',
                        str(e),
                        exc_info=True)
                    time.sleep(1)
            else:
                current_app.logger.critical(
                    "Cannot connect to PostgreSQL, exiting...")
                raise DatabaseError("Cannot connect to PostgreSQL, exiting")

            # Keep only the users that satisfy the caller-provided predicate.
            return [
                user['musicbrainz_id'] for user in users
                if self.condition(user['musicbrainz_id'])
            ]
コード例 #9
0
def create(location, threads):
    """ Create a ListenBrainz data dump which includes a private dump, a statistics dump
        and a dump of the actual listens from InfluxDB

        Args:
            location (str): path to the directory where the dump should be made
            threads (int): the number of threads to be used while compression
    """
    app = create_app()
    with app.app_context():
        # The listen store needs both Influx (listen data) and Redis
        # (listen counts) connection settings.
        ls = init_influx_connection(
            current_app.logger, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })
        time_now = datetime.today()
        # Timestamp in the directory name keeps successive dumps apart.
        dump_path = os.path.join(
            location, 'listenbrainz-dump-{time}'.format(
                time=time_now.strftime('%Y%m%d-%H%M%S')))
        create_path(dump_path)
        db_dump.dump_postgres_db(dump_path, time_now, threads)
        ls.dump_listens(dump_path, time_now, threads)
        try:
            write_hashes(dump_path)
        except IOError as e:
            current_app.logger.error('Unable to create hash files! Error: %s',
                                     str(e),
                                     exc_info=True)
            return
        # Lazy %-style logger args instead of eager string interpolation,
        # matching the error call above.
        current_app.logger.info('Dumps created and hashes written at %s',
                                dump_path)
コード例 #10
0
    def setUp(self):
        """Create the Influx listen store and the fixture user for the tests."""
        super(TestAPICompatUserClass, self).setUp()
        self.log = logging.getLogger(__name__)
        # The listen store needs both Influx (listen data) and Redis
        # (listen counts) connection settings.
        self.logstore = init_influx_connection(
            self.log, {
                'INFLUX_HOST': config.INFLUX_HOST,
                'INFLUX_PORT': config.INFLUX_PORT,
                'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
                'REDIS_HOST': config.REDIS_HOST,
                'REDIS_PORT': config.REDIS_PORT,
                'REDIS_NAMESPACE': config.REDIS_NAMESPACE,
            })

        # Create a user
        uid = db_user.create("test_api_compat_user")
        self.assertIsNotNone(db_user.get(uid))
        # Re-read the full row so the User wrapper gets the id, created,
        # musicbrainz_id and auth_token columns.  "user" must be quoted:
        # it is a reserved word in PostgreSQL.
        with db.engine.connect() as connection:
            result = connection.execute(
                text("""
                SELECT *
                  FROM "user"
                 WHERE id = :id
            """), {
                    "id": uid,
                })
            row = result.fetchone()
            self.user = User(row['id'], row['created'], row['musicbrainz_id'],
                             row['auth_token'])
コード例 #11
0
def import_dump(private_archive, public_archive, listen_archive, threads):
    """ Import a ListenBrainz dump into the database.

        Note: This method tries to import the private db dump first, followed by the public db
            dump. However, in absence of a private dump, it imports sanitized versions of the
            user table in the public dump in order to satisfy foreign key constraints.

        Then it imports the listen dump.

        Args:
            private_archive (str): the path to the ListenBrainz private dump to be imported
            public_archive (str): the path to the ListenBrainz public dump to be imported
            listen_archive (str): the path to the ListenBrainz listen dump archive to be imported
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # Nothing to do if no archive was supplied at all.
    if not private_archive and not public_archive and not listen_archive:
        print('You need to enter a path to the archive(s) to import!')
        sys.exit(1)

    app = create_app()
    with app.app_context():
        # Postgres first: listens reference users, so the user table must
        # exist before the listen import runs.
        db_dump.import_postgres_dump(private_archive, public_archive, threads)

        ls = init_influx_connection(
            current_app.logger, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })

        # Each failure mode gets its own message for diagnosis, but all
        # re-raise: a partial import must not look like success.
        try:
            ls.import_listens_dump(listen_archive, threads)
        except IOError as e:
            current_app.logger.critical(
                'IOError while trying to import data into Influx: %s',
                str(e),
                exc_info=True)
            raise
        except InfluxDBClientError as e:
            current_app.logger.critical(
                'Error while sending data to Influx: %s',
                str(e),
                exc_info=True)
            raise
        except InfluxDBServerError as e:
            current_app.logger.critical(
                'InfluxDB Server Error while importing data: %s',
                str(e),
                exc_info=True)
            raise
        except Exception as e:
            current_app.logger.critical(
                'Unexpected error while importing data: %s',
                str(e),
                exc_info=True)
            raise
コード例 #12
0
def create_incremental(location, threads, dump_id):
    """ Create an incremental listen dump covering the window between the
        previous dump entry and the given (or newly created) dump entry.

        Args:
            location (str): path to the directory where the dump should be made
            threads (int): the number of threads to be used while compression
            dump_id (int): the ID of the dump entry to use; a new entry is
                created when None
    """
    app = create_app()
    with app.app_context():
        ls = init_influx_connection(
            current_app.logger, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })

        if dump_id is None:
            # No ID given: record a new dump entry timestamped "now".
            end_time = datetime.now()
            dump_id = db_dump.add_dump_entry(int(end_time.strftime('%s')))
        else:
            dump_entry = db_dump.get_dump_entry(dump_id)
            if dump_entry is None:
                current_app.logger.error("No dump with ID %d found, exiting!",
                                         dump_id)
                sys.exit(-1)
            end_time = dump_entry['created']

        prev_dump_entry = db_dump.get_dump_entry(dump_id - 1)
        if prev_dump_entry is None:  # incremental dumps must have a previous dump in the series
            current_app.logger.error(
                "Invalid dump ID %d, could not find previous dump", dump_id)
            sys.exit(-1)
        start_time = prev_dump_entry['created']
        current_app.logger.info("Dumping data from %s to %s", start_time,
                                end_time)
        dump_path = os.path.join(
            location, 'listenbrainz-dump-{dump_id}-{time}-incremental'.format(
                dump_id=dump_id, time=end_time.strftime('%Y%m%d-%H%M%S')))
        create_path(dump_path)
        # Dump the same window twice: normal format and Spark format.
        ls.dump_listens(dump_path,
                        dump_id=dump_id,
                        start_time=start_time,
                        end_time=end_time,
                        threads=threads,
                        spark_format=False)
        ls.dump_listens(dump_path,
                        dump_id=dump_id,
                        start_time=start_time,
                        end_time=end_time,
                        threads=threads,
                        spark_format=True)
        try:
            write_hashes(dump_path)
        except IOError as e:
            current_app.logger.error('Unable to create hash files! Error: %s',
                                     str(e),
                                     exc_info=True)
            return
        # Lazy %-style logger args instead of eager string interpolation,
        # matching every other logging call in this function.
        current_app.logger.info('Dumps created and hashes written at %s',
                                dump_path)
コード例 #13
0
def create_influx(app):
    """Return an Influx listen store wired to *app*'s configured backends."""
    from listenbrainz.webserver.influx_connection import init_influx_connection
    # Dict comprehension preserves this key order (insertion-ordered dicts).
    wanted = ('INFLUX_HOST', 'INFLUX_PORT', 'INFLUX_DB_NAME',
              'REDIS_HOST', 'REDIS_PORT')
    return init_influx_connection(app.logger,
                                  {key: app.config[key] for key in wanted})
コード例 #14
0
 def setUp(self):
     """Create the Influx listen store and the fixture user for the tests."""
     super(TestInfluxListenStore, self).setUp()
     self.log = logging.getLogger(__name__)
     self.logstore = init_influx_connection(
         self.log, {
             'REDIS_HOST': config.REDIS_HOST,
             'REDIS_PORT': config.REDIS_PORT,
             'INFLUX_HOST': config.INFLUX_HOST,
             'INFLUX_PORT': config.INFLUX_PORT,
             'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
         })
     self.testuser_id = db_user.create("test")
     # Fetch the user once and reuse the row; the previous version queried
     # the database twice and left a debugging print() behind.
     user = db_user.get(self.testuser_id)
     self.testuser_name = user['musicbrainz_id']
コード例 #15
0
    def setUp(self):
        """Reset Influx to a clean state and create the fixture user."""
        super(TestInfluxListenStore, self).setUp()
        self.log = logging.getLogger(__name__)

        # Listen-count assertions assume an empty database at the start.
        self.reset_influx_db()

        connection_settings = {
            'REDIS_HOST': config.REDIS_HOST,
            'REDIS_PORT': config.REDIS_PORT,
            'INFLUX_HOST': config.INFLUX_HOST,
            'INFLUX_PORT': config.INFLUX_PORT,
            'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
        }
        self.logstore = init_influx_connection(self.log, connection_settings)

        self.testuser_id = db_user.create("test")
        self.testuser_name = db_user.get(self.testuser_id)['musicbrainz_id']
コード例 #16
0
 def setUp(self):
     """Create a CLI runner, a temp dir, the listen store, and a fixture user."""
     super().setUp()
     app = create_app()
     self.tempdir = tempfile.mkdtemp()
     # NOTE(review): nothing here removes tempdir — presumably tearDown
     # cleans it up; confirm.
     self.runner = CliRunner()
     self.listenstore = init_influx_connection(
         logging.getLogger(__name__), {
             'REDIS_HOST': app.config['REDIS_HOST'],
             'REDIS_PORT': app.config['REDIS_PORT'],
             'REDIS_NAMESPACE': app.config['REDIS_NAMESPACE'],
             'INFLUX_HOST': app.config['INFLUX_HOST'],
             'INFLUX_PORT': app.config['INFLUX_PORT'],
             'INFLUX_DB_NAME': app.config['INFLUX_DB_NAME'],
         })
     self.user_id = db_user.create(1, 'iliekcomputers')
     self.user_name = db_user.get(self.user_id)['musicbrainz_id']
コード例 #17
0
def import_dump(private_archive, public_archive, listen_archive, threads):
    """ Import a ListenBrainz dump into the database.

        Note: This method tries to import the private db dump first, followed by the public db
            dump. However, in absence of a private dump, it imports sanitized versions of the
            user table in the public dump in order to satisfy foreign key constraints.

        Then it imports the listen dump.

        Args:
            private_archive (str): the path to the ListenBrainz private dump to be imported
            public_archive (str): the path to the ListenBrainz public dump to be imported
            listen_archive (str): the path to the ListenBrainz listen dump archive to be imported
            threads (int): the number of threads to use during decompression, defaults to 1
    """
    # Nothing to do if no archive was supplied at all.
    if not private_archive and not public_archive and not listen_archive:
        print('You need to enter a path to the archive(s) to import!')
        sys.exit(1)

    app = create_app()
    with app.app_context():
        # Postgres first: listens reference users, so the user table must
        # exist before the listen import runs.
        db_dump.import_postgres_dump(private_archive, public_archive, threads)

        ls = init_influx_connection(current_app.logger,  {
            'REDIS_HOST': current_app.config['REDIS_HOST'],
            'REDIS_PORT': current_app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': current_app.config['INFLUX_HOST'],
            'INFLUX_PORT': current_app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
        })

        # Each failure mode gets its own message for diagnosis, but all
        # re-raise: a partial import must not look like success.
        try:
            ls.import_listens_dump(listen_archive, threads)
        except IOError as e:
            current_app.logger.critical('IOError while trying to import data into Influx: %s', str(e), exc_info=True)
            raise
        except InfluxDBClientError as e:
            current_app.logger.critical('Error while sending data to Influx: %s', str(e), exc_info=True)
            raise
        except InfluxDBServerError as e:
            current_app.logger.critical('InfluxDB Server Error while importing data: %s', str(e), exc_info=True)
            raise
        except Exception as e:
            current_app.logger.critical('Unexpected error while importing data: %s', str(e), exc_info=True)
            raise
コード例 #18
0
    def setUp(self):
        """Reset Influx to a clean state and create the fixture user."""
        super(TestInfluxListenStore, self).setUp()
        self.log = logging.getLogger(__name__)

        # Listen-count assertions assume an empty database at the start.
        self.reset_influx_db()

        connection_settings = {
            'REDIS_HOST': config.REDIS_HOST,
            'REDIS_PORT': config.REDIS_PORT,
            'REDIS_NAMESPACE': config.REDIS_NAMESPACE,
            'INFLUX_HOST': config.INFLUX_HOST,
            'INFLUX_PORT': config.INFLUX_PORT,
            'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
        }
        self.logstore = init_influx_connection(self.log, connection_settings)

        self.testuser_id = db_user.create(1, "test")
        self.testuser_name = db_user.get(self.testuser_id)['musicbrainz_id']
コード例 #19
0
    def start(self):
        """Rewrite this user's listens via a temporary measurement.

        Copies the user's listens into a fresh measurement with the filter
        applied, drops the original measurement, copies the filtered data
        back, then removes the temporary measurement.
        """
        with self.app.app_context():
            current_app.logger.info("Connecting to Influx...")
            self.ls = init_influx_connection(
                current_app.logger, {
                    'REDIS_HOST': current_app.config['REDIS_HOST'],
                    'REDIS_PORT': current_app.config['REDIS_PORT'],
                    'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                    'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                    'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                    'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
                })
            current_app.logger.info("Done!")

            # Random name avoids clashing with any existing measurement.
            new_measurement_name = str(uuid.uuid4())
            current_app.logger.info("Temporary destination measurement: %s",
                                    new_measurement_name)

            # Step 1: filtered copy out of the user's measurement.
            current_app.logger.info(
                "Copying listens from %s to temporary measurement...",
                self.user_name)
            self.copy_measurement(src=self.user_name,
                                  dest=new_measurement_name,
                                  apply_filter=True)
            current_app.logger.info("Done!")

            # Step 2: drop the original so the copy-back starts clean.
            current_app.logger.info("Removing user measurement...")
            self.ls.delete(self.user_name)
            current_app.logger.info("Done!")

            # Step 3: copy the already-filtered data back unmodified.
            current_app.logger.info(
                "Copying listens back from temporary measurement to %s...",
                self.user_name)
            self.copy_measurement(src=new_measurement_name,
                                  dest=self.user_name,
                                  apply_filter=False)
            current_app.logger.info("Done!")

            # Step 4: clean up the scratch measurement.
            current_app.logger.info("Removing temporary measurement...")
            self.ls.delete(new_measurement_name)
            current_app.logger.info("Done!")
コード例 #20
0
def import_listens(location=None, threads=None):
    """Import a ListenBrainz listen dump archive into InfluxDB.

    Args:
        location (str): path to the listenbrainz listen .tar.xz archive
        threads (int): number of threads to use while decompressing,
            defaults to 1
    """
    if not location:
        print('No location given!')
        sys.exit(1)

    connection_settings = {
        'REDIS_HOST': config.REDIS_HOST,
        'REDIS_PORT': config.REDIS_PORT,
        'INFLUX_HOST': config.INFLUX_HOST,
        'INFLUX_PORT': config.INFLUX_PORT,
        'INFLUX_DB_NAME': config.INFLUX_DB_NAME,
    }
    listen_store = init_influx_connection(log, connection_settings)

    # Log each failure mode with its own message, then re-raise so the
    # caller sees the error.
    try:
        listen_store.import_listens_dump(location, threads)
    except IOError as e:
        log.error('IOError while trying to import data into Influx: %s',
                  str(e))
        raise
    except InfluxDBClientError as e:
        log.error('Error while sending data to Influx: %s', str(e))
        raise
    except InfluxDBServerError as e:
        log.error('InfluxDB Server Error while importing data: %s', str(e))
        raise
    except Exception as e:
        log.error('Unexpected error while importing data: %s', str(e))
        raise
コード例 #21
0
    def setUp(self):
        """Set up server + database fixtures, an Influx DB, and two users."""
        ServerTestCase.setUp(self)
        DatabaseTestCase.setUp(self)
        # Name with a backslash — presumably to exercise escaping; confirm.
        self.user = db_user.get_or_create('iliekcomputers')
        self.weirduser = db_user.get_or_create('weird\\user name')

        self.log = logging.getLogger(__name__)
        self.influx = InfluxDBClient(
            host=current_app.config['INFLUX_HOST'],
            port=current_app.config['INFLUX_PORT'],
            database=current_app.config['INFLUX_DB_NAME'],
        )

        # Make sure the test database exists before the listen store uses it.
        self.influx.query('''create database %s''' %
                          current_app.config['INFLUX_DB_NAME'])

        self.logstore = init_influx_connection(
            self.log, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })
コード例 #22
0
from listenbrainz.webserver import create_app
from listenbrainz.webserver.views.api_tools import publish_data_to_queue
from listenbrainz.listenstore import InfluxListenStore
from listenbrainz.webserver.influx_connection import init_influx_connection
from werkzeug.exceptions import NotFound

import listenbrainz.db.user as db_user
import logging
import sqlalchemy

# Module-level app and listen store shared by the functions below.
app = create_app()
# NOTE(review): the `logging` module itself is passed as the logger; its
# module-level info()/error() functions make this work, but a named logger
# (logging.getLogger(__name__)) would be conventional — confirm intent.
influx = init_influx_connection(
    logging, {
        'REDIS_HOST': app.config['REDIS_HOST'],
        'REDIS_PORT': app.config['REDIS_PORT'],
        'REDIS_NAMESPACE': app.config['REDIS_NAMESPACE'],
        'INFLUX_HOST': app.config['INFLUX_HOST'],
        'INFLUX_PORT': app.config['INFLUX_PORT'],
        'INFLUX_DB_NAME': app.config['INFLUX_DB_NAME'],
    })


def update_row_ids_for_exceptions():
    with musicbrainz_db.engine.connect() as mb_connection:
        with db.engine.connect() as connection:
            # 2106 - Fée Deuspi
            result = mb_connection.execute(
                sqlalchemy.text("""
                SELECT id
                  FROM editor
                 WHERE name = 'Fée Deuspi'
コード例 #23
0
def create_full(location, threads, dump_id, last_dump_id):
    """ Create a ListenBrainz data dump which includes a private dump, a statistics dump
        and a dump of the actual listens from InfluxDB

        Args:
            location (str): path to the directory where the dump should be made
            threads (int): the number of threads to be used while compression
            dump_id (int): the ID of the ListenBrainz data dump
            last_dump_id (bool): flag indicating whether to create a full dump from the last entry in the dump table
    """
    app = create_app()
    with app.app_context():
        ls = init_influx_connection(
            current_app.logger, {
                'REDIS_HOST': current_app.config['REDIS_HOST'],
                'REDIS_PORT': current_app.config['REDIS_PORT'],
                'REDIS_NAMESPACE': current_app.config['REDIS_NAMESPACE'],
                'INFLUX_HOST': current_app.config['INFLUX_HOST'],
                'INFLUX_PORT': current_app.config['INFLUX_PORT'],
                'INFLUX_DB_NAME': current_app.config['INFLUX_DB_NAME'],
            })

        if last_dump_id:
            # Reuse the most recent dump entry instead of creating one.
            all_dumps = db_dump.get_dump_entries()
            if len(all_dumps) == 0:
                current_app.logger.error(
                    "Cannot create full dump with last dump's ID, no dump exists!"
                )
                sys.exit(-1)
            dump_id = all_dumps[0]['id']

        if dump_id is None:
            # No ID given: record a new dump entry timestamped "now".
            end_time = datetime.now()
            dump_id = db_dump.add_dump_entry(int(end_time.strftime('%s')))
        else:
            dump_entry = db_dump.get_dump_entry(dump_id)
            if dump_entry is None:
                current_app.logger.error("No dump with ID %d found", dump_id)
                sys.exit(-1)
            end_time = dump_entry['created']

        dump_path = os.path.join(
            location, 'listenbrainz-dump-{dump_id}-{time}-full'.format(
                dump_id=dump_id, time=end_time.strftime('%Y%m%d-%H%M%S')))
        create_path(dump_path)
        db_dump.dump_postgres_db(dump_path, end_time, threads)
        # Dump listens twice: normal format and Spark format.
        ls.dump_listens(dump_path,
                        dump_id=dump_id,
                        end_time=end_time,
                        threads=threads,
                        spark_format=False)
        ls.dump_listens(dump_path,
                        dump_id=dump_id,
                        end_time=end_time,
                        threads=threads,
                        spark_format=True)
        try:
            write_hashes(dump_path)
        except IOError as e:
            current_app.logger.error('Unable to create hash files! Error: %s',
                                     str(e),
                                     exc_info=True)
            return
        # Lazy %-style logger args instead of eager string interpolation,
        # matching every other logging call in this function.
        current_app.logger.info('Dumps created and hashes written at %s',
                                dump_path)
コード例 #24
0
from brainzutils import musicbrainz_db
from listenbrainz.webserver import create_app
from listenbrainz.webserver.views.api_tools import publish_data_to_queue
from listenbrainz.listenstore import InfluxListenStore
from listenbrainz.webserver.influx_connection import init_influx_connection
from werkzeug.exceptions import NotFound

import listenbrainz.db.user as db_user
import logging
import sqlalchemy

# Module-level app and listen store shared by the functions below.
app = create_app()
# NOTE(review): the `logging` module itself is passed as the logger; its
# module-level info()/error() functions make this work, but a named logger
# (logging.getLogger(__name__)) would be conventional — confirm intent.
influx = init_influx_connection(logging, {
            'REDIS_HOST': app.config['REDIS_HOST'],
            'REDIS_PORT': app.config['REDIS_PORT'],
            'REDIS_NAMESPACE': app.config['REDIS_NAMESPACE'],
            'INFLUX_HOST': app.config['INFLUX_HOST'],
            'INFLUX_PORT': app.config['INFLUX_PORT'],
            'INFLUX_DB_NAME': app.config['INFLUX_DB_NAME'],
        })


def update_row_ids_for_exceptions():
    with musicbrainz_db.engine.connect() as mb_connection:
        with db.engine.connect() as connection:
            # 2106 - Fée Deuspi
            result = mb_connection.execute(sqlalchemy.text("""
                SELECT id
                  FROM editor
                 WHERE name = 'Fée Deuspi'
                """))