def __init__(self, log, conf):
    """Set up the brainzutils cache for this Redis-backed listen store.

    :param log: logger handed to the base listen store
    :param conf: mapping with REDIS_HOST / REDIS_PORT / REDIS_NAMESPACE keys
    """
    super(RedisListenStore, self).__init__(log)
    # Initialize brainzutils cache
    redis_host = conf['REDIS_HOST']
    redis_port = conf['REDIS_PORT']
    redis_namespace = conf['REDIS_NAMESPACE']
    init_cache(host=redis_host, port=redis_port, namespace=redis_namespace)
    # This is used in tests. Leave for cleanup in LB-879
    self.redis = cache._r
def __init__(self, conf, logger):
    """Connect to timescale and the brainzutils cache, and pick the dump temp dir.

    :param conf: mapping with SQLALCHEMY_TIMESCALE_URI, REDIS_* keys and
                 optionally LISTEN_DUMP_TEMP_DIR_ROOT
    :param logger: logger handed to the base listen store
    """
    super(TimescaleListenStore, self).__init__(logger)

    timescale.init_db_connection(conf['SQLALCHEMY_TIMESCALE_URI'])

    # Initialize brainzutils cache
    self.ns = conf['REDIS_NAMESPACE']
    init_cache(host=conf['REDIS_HOST'], port=conf['REDIS_PORT'], namespace=conf['REDIS_NAMESPACE'])

    # Do NOT use conf.get(key, tempfile.mkdtemp()): the default argument is
    # evaluated eagerly, which created (and leaked) a fresh temp directory on
    # every init even when the config already supplied a root.
    if 'LISTEN_DUMP_TEMP_DIR_ROOT' in conf:
        self.dump_temp_dir_root = conf['LISTEN_DUMP_TEMP_DIR_ROOT']
    else:
        self.dump_temp_dir_root = tempfile.mkdtemp()
def __init__(self, conf, logger):
    """Open the InfluxDB client and set up the brainzutils cache.

    :param conf: mapping with INFLUX_* and REDIS_* configuration keys
    :param logger: logger handed to the base listen store
    """
    super(InfluxListenStore, self).__init__(logger)
    influx_host = conf['INFLUX_HOST']
    influx_port = conf['INFLUX_PORT']
    influx_db = conf['INFLUX_DB_NAME']
    self.influx = InfluxDBClient(host=influx_host, port=influx_port, database=influx_db)
    # Initialize brainzutils cache
    init_cache(host=conf['REDIS_HOST'], port=conf['REDIS_PORT'], namespace=conf['REDIS_NAMESPACE'])
def setUp(self):
    """Prepare the cache, a redis connection and a test user for each test."""
    super(RedisListenStoreTestCase, self).setUp()
    # TODO: Ideally this would use a config from a flask app, but this test case doesn't create an app
    init_cache(config.REDIS_HOST, config.REDIS_PORT, config.REDIS_NAMESPACE)
    test_logger = logging.getLogger()
    self.log = test_logger
    self._redis = init_redis_connection(test_logger)
    self.testuser = db_user.get_or_create(1, "test")
def setUp(self):
    """Create a GDPR-agreed test user and connect the brainzutils cache."""
    super().setUp()
    self.user = db_user.get_or_create(1, 'iliekcomputers')
    db_user.agree_to_gdpr(self.user['musicbrainz_id'])
    # Initialize brainzutils cache
    app_conf = current_app.config
    init_cache(host=app_conf['REDIS_HOST'],
               port=app_conf['REDIS_PORT'],
               namespace=app_conf['REDIS_NAMESPACE'])
    self.redis = cache._r
def __init__(self, conf, logger):
    """Open the InfluxDB client, set up the cache, and pick the dump temp dir.

    :param conf: mapping with INFLUX_*, REDIS_* keys and optionally
                 LISTEN_DUMP_TEMP_DIR_ROOT
    :param logger: logger handed to the base listen store
    """
    super(InfluxListenStore, self).__init__(logger)

    self.influx = InfluxDBClient(host=conf['INFLUX_HOST'], port=conf['INFLUX_PORT'], database=conf['INFLUX_DB_NAME'])

    # Initialize brainzutils cache
    init_cache(host=conf['REDIS_HOST'], port=conf['REDIS_PORT'], namespace=conf['REDIS_NAMESPACE'])

    # Do NOT use conf.get(key, tempfile.mkdtemp()): the default argument is
    # evaluated eagerly, which created (and leaked) a fresh temp directory on
    # every init even when the config already supplied a root.
    if 'LISTEN_DUMP_TEMP_DIR_ROOT' in conf:
        self.dump_temp_dir_root = conf['LISTEN_DUMP_TEMP_DIR_ROOT']
    else:
        self.dump_temp_dir_root = tempfile.mkdtemp()
def __init__(self, app):
    """Initialize worker state, the cache and metrics, then start loading legacy listens.

    :param app: flask app whose config supplies the REDIS_* connection settings
    """
    threading.Thread.__init__(self)
    self.app = app
    self.done = False
    self.queue = PriorityQueue()
    self.last_processed = 0
    # Bookkeeping for the legacy-listen backfill
    self.unmatched_listens_complete_time = 0
    self.legacy_load_thread = None
    self.legacy_next_run = 0
    self.legacy_listens_index_date = 0
    self.num_legacy_listens_loaded = 0
    app_conf = app.config
    init_cache(host=app_conf['REDIS_HOST'],
               port=app_conf['REDIS_PORT'],
               namespace=app_conf['REDIS_NAMESPACE'])
    metrics.init("listenbrainz")
    self.load_legacy_listens()
def _fetch_last_created_ts():
    """Return the created timestamp of the newest listen from the last four weeks."""
    query = "SELECT max(created) FROM listen WHERE created > :date"
    try:
        with timescale.engine.connect() as connection:
            result = connection.execute(sqlalchemy.text(query),
                                        date=datetime.now() - timedelta(weeks=4))
            return result.fetchone()[0]
    except psycopg2.OperationalError as e:
        # Lazy %-args: the old '"literal." % str(e)' had no placeholder and
        # raised TypeError, masking the real DB error.
        logger.error("Cannot query ts to fetch latest listen: %s", str(e), exc_info=True)
        raise


def _fetch_user_list():
    """Return the list of all musicbrainz_ids from the main database."""
    query = 'SELECT musicbrainz_id FROM "user"'
    try:
        with db.engine.connect() as connection:
            result = connection.execute(sqlalchemy.text(query))
            return [row[0] for row in result]
    except psycopg2.OperationalError as e:
        logger.error("Cannot query db to fetch user list: %s", str(e), exc_info=True)
        raise


def _tabulate_listens(last_created_ts):
    """Scan the whole listen table up to last_created_ts.

    :returns: (listen_counts, user_timestamps) where listen_counts maps
        user_name -> number of listens and user_timestamps maps
        user_name -> [min_listened_at, max_listened_at]
    """
    listen_counts = defaultdict(int)
    user_timestamps = {}
    query = "SELECT listened_at, user_name FROM listen where created <= :ts"
    try:
        with timescale.engine.connect() as connection:
            result = connection.execute(sqlalchemy.text(query), ts=last_created_ts)
            for row in result:
                ts = row[0]
                user_name = row[1]
                if user_name not in user_timestamps:
                    user_timestamps[user_name] = [ts, ts]
                else:
                    if ts > user_timestamps[user_name][1]:
                        user_timestamps[user_name][1] = ts
                    if ts < user_timestamps[user_name][0]:
                        user_timestamps[user_name][0] = ts
                listen_counts[user_name] += 1
    except psycopg2.OperationalError as e:
        # Fixed message: this scan reads the listen table, not the user list.
        logger.error("Cannot query timescale to fetch listens: %s", str(e), exc_info=True)
        raise
    return listen_counts, user_timestamps


def recalculate_all_user_data():
    """Rebuild the cached listen counts and min/max timestamps for every user.

    Resets the cache entries of all users to zero, scans the listen table up
    to the latest created timestamp, then writes the tabulated counts and
    timestamp ranges back to the cache.

    :raises psycopg2.OperationalError: if either database cannot be queried
    """
    timescale.init_db_connection(config.SQLALCHEMY_TIMESCALE_URI)
    db.init_db_connection(config.SQLALCHEMY_DATABASE_URI)
    init_cache(host=config.REDIS_HOST, port=config.REDIS_PORT, namespace=config.REDIS_NAMESPACE)

    # Find the created timestamp of the last listen
    last_created_ts = _fetch_last_created_ts()
    logger.info("Last created timestamp: " + str(last_created_ts))

    # Select a list of users
    user_list = _fetch_user_list()
    logger.info("Fetched %d users. Setting empty cache entries." % len(user_list))

    # Reset the timestamps and listen counts to 0 for all users.
    # (The original issued the listen-count set twice; once is enough.)
    for user_name in user_list:
        cache.set(REDIS_USER_LISTEN_COUNT + user_name, 0, expirein=0, encode=False)
        cache.set(REDIS_USER_TIMESTAMPS + user_name, "0,0", expirein=0)

    # Tabulate all of the listen counts/timestamps for all users
    logger.info("Scan the whole listen table...")
    listen_counts, user_timestamps = _tabulate_listens(last_created_ts)

    logger.info("Setting updated cache entries.")
    # Set the timestamps and listen counts for all users
    for user_name in user_list:
        try:
            # listen_counts is a defaultdict, so this increments by 0 for
            # users with no listens; KeyError kept as a defensive no-op.
            cache.increment(REDIS_USER_LISTEN_COUNT + user_name, amount=listen_counts[user_name])
        except KeyError:
            pass

        try:
            # Merge with any range another writer may have stored meanwhile;
            # "0,0" (just reset above) is falsy-per-component and is ignored.
            tss = cache.get(REDIS_USER_TIMESTAMPS + user_name)
            (min_ts, max_ts) = tss.split(",")
            min_ts = int(min_ts)
            max_ts = int(max_ts)
            if min_ts and min_ts < user_timestamps[user_name][0]:
                user_timestamps[user_name][0] = min_ts
            if max_ts and max_ts > user_timestamps[user_name][1]:
                user_timestamps[user_name][1] = max_ts
            cache.set(
                REDIS_USER_TIMESTAMPS + user_name,
                "%d,%d" % (user_timestamps[user_name][0], user_timestamps[user_name][1]),
                expirein=0)
        except KeyError:
            pass