def test_incremental_dump(self):
    """Dump only listens created inside a time window, re-import the dump
    into a fresh database, and verify exactly those listens survive.

    Two batches of 5 listens are inserted with created timestamps on either
    side of the dump window [base+6, base+10]; fetching afterwards should
    return only the 4 listens whose created timestamp falls in the window.
    """
    base = 1500000000
    # 5 listens with created ts base+1..base+5 (outside the dump window)
    listens = generate_data(1, self.testuser_name, base - 4, 5, base + 1)
    self._insert_with_created(listens)
    # 5 listens with created ts base+6..base+10 (inside the dump window)
    listens = generate_data(1, self.testuser_name, base + 1, 5, base + 6)
    self._insert_with_created(listens)

    temp_dir = tempfile.mkdtemp()
    try:
        # NOTE(review): datetime.utcfromtimestamp is deprecated since
        # Python 3.12; kept because dump_listens appears to expect naive
        # UTC datetimes — confirm before switching to aware datetimes.
        dump_location = self.dumpstore.dump_listens(
            location=temp_dir,
            dump_id=1,
            start_time=datetime.utcfromtimestamp(base + 6),
            end_time=datetime.utcfromtimestamp(base + 10),
        )
        self.assertTrue(os.path.isfile(dump_location))
        self.reset_timescale_db()
        self.logstore.import_listens_dump(dump_location)
        recalculate_all_user_data()

        listens, min_ts, max_ts = self.logstore.fetch_listens(
            user=self.testuser, to_ts=base + 11)
        # Listens come back newest first; only the 4 in-window listens
        # (created ts base+6..base+9, i.e. listened_at base+2..base+5)
        # should have made it into the incremental dump.
        self.assertEqual(len(listens), 4)
        self.assertEqual(listens[0].ts_since_epoch, base + 5)
        self.assertEqual(listens[1].ts_since_epoch, base + 4)
        self.assertEqual(listens[2].ts_since_epoch, base + 3)
        self.assertEqual(listens[3].ts_since_epoch, base + 2)
    finally:
        # Fix: cleanup previously ran only after all assertions passed,
        # leaking the temp directory whenever the test failed.
        shutil.rmtree(temp_dir)
def test_submit_listen(self):
    """Handshake with the AudioScrobbler-compat endpoint, submit one valid
    listen, and verify it lands in the listenstore."""
    ts = int(time.time())
    scrobbler_token = _get_audioscrobbler_auth_token(self.user['auth_token'], ts)
    resp = self.handshake(self.user['musicbrainz_id'], scrobbler_token, ts)
    self.assert200(resp)

    # Handshake reply is plain text: line 0 is the status, line 1 the session id.
    lines = resp.data.decode('utf-8').split('\n')
    self.assertEqual(lines[0], 'OK')
    session_id = lines[1]

    payload = {
        's': session_id,
        'a[0]': 'Kishore Kumar',
        't[0]': 'Saamne Ye Kaun Aya',
        'o[0]': 'P',
        'l[0]': 300,
        'b[0]': 'Jawani Diwani',
        'i[0]': int(time.time()),
    }
    resp = self.client.post(url_for('api_compat_old.submit_listens'), data=payload)
    self.assert200(resp)
    self.assertEqual(resp.data.decode('utf-8'), 'OK\n')

    # Give the timescale writer a moment to persist the listen.
    time.sleep(1)
    recalculate_all_user_data()
    listens, _, _ = self.ls.fetch_listens(self.user, to_ts=int(time.time()))
    self.assertEqual(len(listens), 1)
def test_time_range_full_dumps(self):
    """Dump listens up to a cutoff timestamp and verify that importing the
    dump into a fresh database restores only listens at or before the cutoff.
    """
    base = 1500000000
    # listens with listened_at base+1..base+5 (at or before the cutoff)
    listens = generate_data(1, self.testuser_name, base + 1, 5)
    self.logstore.insert(listens)
    # listens with listened_at base+6..base+10 (beyond the cutoff)
    listens = generate_data(1, self.testuser_name, base + 6, 5)
    self.logstore.insert(listens)

    temp_dir = tempfile.mkdtemp()
    try:
        dump_location = self.dumpstore.dump_listens(
            location=temp_dir,
            dump_id=1,
            end_time=datetime.utcfromtimestamp(base + 5),
        )
        self.assertTrue(os.path.isfile(dump_location))
        self.reset_timescale_db()
        self.logstore.import_listens_dump(dump_location)
        recalculate_all_user_data()

        listens, min_ts, max_ts = self.logstore.fetch_listens(
            user=self.testuser, to_ts=base + 11)
        # Only the first batch survives the cutoff; newest first.
        self.assertEqual(len(listens), 5)
        self.assertEqual(listens[0].ts_since_epoch, base + 5)
        self.assertEqual(listens[1].ts_since_epoch, base + 4)
        self.assertEqual(listens[2].ts_since_epoch, base + 3)
        self.assertEqual(listens[3].ts_since_epoch, base + 2)
        self.assertEqual(listens[4].ts_since_epoch, base + 1)
    finally:
        # Fix: the original never removed the temp directory at all,
        # leaking it on every run (the sibling dump tests do clean up).
        shutil.rmtree(temp_dir)
def test_record_listen(self):
    """Submit a valid track.scrobble request through the Last.fm compat API
    and check that the listen is recorded in the timescale listenstore."""
    token = Token.generate(self.lfm_user.api_key)
    token.approve(self.lfm_user.name)
    session = Session.create(token)

    ts = int(time.time())
    params = {
        'method': 'track.scrobble',
        'api_key': self.lfm_user.api_key,
        'sk': session.sid,
        'artist[0]': 'Kishore Kumar',
        'track[0]': 'Saamne Ye Kaun Aya',
        'album[0]': 'Jawani Diwani',
        'duration[0]': 300,
        'timestamp[0]': ts,
    }
    resp = self.client.post(url_for('api_compat.api_methods'), data=params)
    self.assert200(resp)
    self.assertEqual(resp.headers["Content-type"], "application/xml; charset=utf-8")

    parsed = xmltodict.parse(resp.data)
    self.assertEqual(parsed['lfm']['@status'], 'ok')
    self.assertEqual(parsed['lfm']['scrobbles']['@accepted'], '1')

    # Check if listen reached the timescale listenstore
    time.sleep(1)
    recalculate_all_user_data()
    listens, _, _ = self.ls.fetch_listens(self.lb_user, from_ts=ts - 1)
    self.assertEqual(len(listens), 1)
def _create_test_data(self, user_name, user_id, test_data_file_name=None, recalculate=True):
    """Insert canned listens for the given user into the listenstore.

    When *recalculate* is True the per-user listen metadata is refreshed
    afterwards. Returns the number of listens that were inserted.
    """
    listens = create_test_data_for_timescalelistenstore(
        user_name, user_id, test_data_file_name)
    self.logstore.insert(listens)
    if recalculate:
        recalculate_all_user_data()
    return len(listens)
def send_listen(self, user, filename):
    """POST the JSON payload stored in *filename* to the submit-listen
    endpoint as *user* and return the response."""
    with open(self.path_to_data_file(filename)) as f:
        payload = json.load(f)
    auth = {'Authorization': 'Token {}'.format(user['auth_token'])}
    response = self.client.post(
        url_for('api_v1.submit_listen'),
        data=json.dumps(payload),
        headers=auth,
        content_type='application/json',
    )
    # sleep to allow timescale-writer to do its thing
    time.sleep(1)
    recalculate_all_user_data()
    return response
def send_listens(self):
    """Submit the canned user-export listens payload for ``self.user`` and
    return the API response."""
    with open(self.path_to_data_file('user_export_test.json')) as f:
        payload = json.load(f)
    auth = {'Authorization': 'Token {}'.format(self.user['auth_token'])}
    response = self.client.post(
        url_for('api_v1.submit_listen'),
        data=json.dumps(payload),
        headers=auth,
        content_type='application/json',
    )
    # give the timescale writer time to pick the listens up
    time.sleep(1)
    recalculate_all_user_data()
    return response
def test_delete_listens_update_metadata(self):
    """Verify that deleting listens keeps per-user listen metadata
    (min/max listened_at and count) correct, including the edge cases
    where the deleted listen is the user's oldest or newest one, and
    where a listen is deleted before the metadata-update script has
    ever accounted for it.
    """
    user_1 = db_user.get_or_create(1, "user_1")
    user_2 = db_user.get_or_create(2, "user_2")
    recalculate_all_user_data()

    # 5 listens each, timestamps 1400000000..1400000200 (50s apart).
    self._create_test_data(user_1)
    self._create_test_data(user_2)
    update_user_listen_data()

    # Baseline metadata for both users after the update script ran.
    metadata_1 = self._get_count_and_timestamp(user_1)
    self.assertEqual(metadata_1["min_listened_at"], 1400000000)
    self.assertEqual(metadata_1["max_listened_at"], 1400000200)
    self.assertEqual(metadata_1["count"], 5)

    metadata_2 = self._get_count_and_timestamp(user_2)
    self.assertEqual(metadata_2["min_listened_at"], 1400000000)
    self.assertEqual(metadata_2["max_listened_at"], 1400000200)
    self.assertEqual(metadata_2["count"], 5)

    # to test the case when the update script has not run since delete, so
    # metadata in listen_user_metadata does not account for this listen yet
    # and deleting it should not affect the stored metadata either.
    self._create_test_data(user_1, "timescale_listenstore_test_listens_2.json")
    self.logstore.delete_listen(1400000500, user_1["id"], "4269ddbc-9241-46da-935d-4fa9e0f7f371")

    # test min_listened_at is updated if that listen is deleted for a user
    self.logstore.delete_listen(1400000000, user_1["id"], "4269ddbc-9241-46da-935d-4fa9e0f7f371")
    # test max_listened_at is updated if that listen is deleted for a user
    self.logstore.delete_listen(1400000200, user_1["id"], "4269ddbc-9241-46da-935d-4fa9e0f7f371")

    # test normal listen delete updates correctly
    self.logstore.delete_listen(1400000100, user_2["id"], "4269ddbc-9241-46da-935d-4fa9e0f7f371")

    delete_listens()

    # user_1 lost its oldest (1400000000) and newest (1400000200) listens;
    # the 1400000500 delete happened before the update script saw it, so
    # the count drops by exactly 2 from the baseline of 5.
    metadata_1 = self._get_count_and_timestamp(user_1)
    self.assertEqual(metadata_1["min_listened_at"], 1400000050)
    self.assertEqual(metadata_1["max_listened_at"], 1400000150)
    self.assertEqual(metadata_1["count"], 3)

    # user_2 lost one middle listen: bounds unchanged, count down by one.
    metadata_2 = self._get_count_and_timestamp(user_2)
    self.assertEqual(metadata_2["min_listened_at"], 1400000000)
    self.assertEqual(metadata_2["max_listened_at"], 1400000200)
    self.assertEqual(metadata_2["count"], 4)
def send_data(self, payload, user=None, recalculate=False):
    """Submit *payload* to api.submit_listen and return the response.

    Pass ``recalculate=True`` only when the test will fetch the listens
    back afterwards; it costs a one-second wait.
    """
    if not user:
        user = self.user
    auth = {'Authorization': 'Token {}'.format(user['auth_token'])}
    resp = self.client.post(
        url_for('api_v1.submit_listen'),
        data=json.dumps(payload),
        headers=auth,
        content_type='application/json',
    )
    if recalculate:
        # Many callers submit invalid payloads or never fetch listens;
        # skipping the sleep in those cases keeps the suite fast.
        time.sleep(1)  # wait for listens to be picked up by timescale writer
        recalculate_all_user_data()
    return resp
def test_spotify_recently_played_submitted(self, mock_recently_played, mock_currently_playing):
    """Feed a canned Spotify recently-played API response through the
    importer and verify the resulting listen matches the expected payload,
    after normalizing fields that vary between runs.
    """
    # Mock the Spotify API: one recently-played track, nothing currently playing.
    with open(
            self.path_to_data_file(
                'spotify_recently_played_submitted.json')) as f:
        mock_recently_played.return_value = json.load(f)
    mock_currently_playing.return_value = None

    result = spotify_read_listens.process_all_spotify_users()
    # presumably (users processed, failures) — verify against the importer's return contract
    self.assertEqual(result, (1, 0))

    # Give the timescale writer time to persist the imported listen.
    time.sleep(1)
    recalculate_all_user_data()

    with open(
            self.path_to_data_file(
                'spotify_recently_played_expected.json')) as f:
        expected_data = json.load(f)

    url = url_for('api_v1.get_listens', user_name=self.user['musicbrainz_id'])
    # Poll the listens endpoint until the single listen appears.
    r = self.wait_for_query_to_have_items(url, 1)
    self.assert200(r)

    payload = r.json['payload']
    self.assertEqual(payload['count'], 1)
    self.assertEqual(payload['latest_listen_ts'], 1635138793)

    actual_listen = payload['listens'][0]
    expected_listen = expected_data['payload']['listens'][0]
    # some fields vary from run to run, set those to our expected values before testing equality
    actual_listen['inserted_at'] = expected_listen['inserted_at']
    actual_listen['recording_msid'] = expected_listen['recording_msid']
    actual_listen['track_metadata']['additional_info']['recording_msid'] = \
        expected_listen['track_metadata']['additional_info']['recording_msid']
    actual_listen['track_metadata']['additional_info']['release_msid'] = \
        expected_listen['track_metadata']['additional_info']['release_msid']
    actual_listen['track_metadata']['additional_info']['artist_msid'] = \
        expected_listen['track_metadata']['additional_info']['artist_msid']
    self.assertEqual(expected_listen, actual_listen)
def test_import_listens(self):
    """Round-trip test: dump all listens, wipe the database, import the
    dump, and verify all 5 listens come back newest-first."""
    self._create_test_data(self.testuser_name, self.testuser_id)

    temp_dir = tempfile.mkdtemp()
    try:
        dump_location = self.dumpstore.dump_listens(
            location=temp_dir,
            dump_id=1,
            end_time=datetime.now(),
        )
        self.assertTrue(os.path.isfile(dump_location))
        self.reset_timescale_db()
        self.logstore.import_listens_dump(dump_location)
        recalculate_all_user_data()

        listens, min_ts, max_ts = self.logstore.fetch_listens(
            user=self.testuser, to_ts=1400000300)
        # Canned test data: 5 listens 50s apart starting at 1400000000,
        # returned in reverse-chronological order.
        self.assertEqual(len(listens), 5)
        self.assertEqual(listens[0].ts_since_epoch, 1400000200)
        self.assertEqual(listens[1].ts_since_epoch, 1400000150)
        self.assertEqual(listens[2].ts_since_epoch, 1400000100)
        self.assertEqual(listens[3].ts_since_epoch, 1400000050)
        self.assertEqual(listens[4].ts_since_epoch, 1400000000)
    finally:
        # Fix: cleanup previously ran only after all assertions passed,
        # leaking the temp directory whenever the test failed.
        shutil.rmtree(temp_dir)
def test_dump_and_import_listens_escaped(self):
    """Round-trip dump/import with a username containing characters that
    need escaping (backslashes, comma, slash, quote, newline), verifying
    listens for both the escaped user and a normal user survive intact."""
    user = db_user.get_or_create(3, 'i have a\\weird\\user, na/me"\n')
    self._create_test_data(user['musicbrainz_id'], user['id'])
    self._create_test_data(self.testuser_name, self.testuser_id)

    def assert_user_has_test_listens(target_user):
        # Canned test data: 5 listens 50s apart from 1400000000, newest first.
        listens, min_ts, max_ts = self.logstore.fetch_listens(
            user=target_user, to_ts=1400000300)
        self.assertEqual(len(listens), 5)
        self.assertEqual(listens[0].ts_since_epoch, 1400000200)
        self.assertEqual(listens[1].ts_since_epoch, 1400000150)
        self.assertEqual(listens[2].ts_since_epoch, 1400000100)
        self.assertEqual(listens[3].ts_since_epoch, 1400000050)
        self.assertEqual(listens[4].ts_since_epoch, 1400000000)

    temp_dir = tempfile.mkdtemp()
    try:
        dump_location = self.dumpstore.dump_listens(
            location=temp_dir,
            dump_id=1,
            end_time=datetime.now(),
        )
        self.assertTrue(os.path.isfile(dump_location))
        self.reset_timescale_db()
        self.logstore.import_listens_dump(dump_location)
        recalculate_all_user_data()

        assert_user_has_test_listens(user)
        assert_user_has_test_listens(self.testuser)
    finally:
        # Fix: cleanup previously ran only after all assertions passed,
        # leaking the temp directory whenever the test failed.
        shutil.rmtree(temp_dir)
def test_complete_workflow_json(self):
    """ Integration test for complete workflow to submit a listen using Last.fm compat api """
    # Step 1: request an API token.
    data = {
        'method': 'auth.gettoken',
        'api_key': self.lfm_user.api_key,
        'format': 'json',
    }
    r = self.client.post(url_for('api_compat.api_methods'), data=data)
    self.assert200(r)
    token = r.json['token']

    # login as user
    # Step 2: mark the Flask session as logged in so the token can be approved.
    with self.client.session_transaction() as session:
        session['_user_id'] = self.lb_user['login_id']
        session['_fresh'] = True
    r = self.client.post(
        url_for('api_compat.api_auth_approve'),
        data=f"token={token}",
        headers={'Content-Type': 'application/x-www-form-urlencoded'})
    self.assert200(r)

    # Step 3: exchange the approved token for a session key.
    data = {
        'method': 'auth.getsession',
        'api_key': self.lfm_user.api_key,
        'token': token,
        'format': 'json'
    }
    r = self.client.post(url_for('api_compat.api_methods'), data=data)
    self.assert200(r)
    sk = r.json['session']['key']

    # Step 4: scrobble one track using the session key.
    data = {
        'method': 'track.scrobble',
        'api_key': self.lfm_user.api_key,
        'sk': sk,
        'format': 'json',
        'artist[0]': 'Kishore Kumar',
        'track[0]': 'Saamne Ye Kaun Aya',
        'album[0]': 'Jawani Diwani',
        'duration[0]': 300,
        'timestamp[0]': int(time.time()),
    }
    r = self.client.post(url_for('api_compat.api_methods'), data=data)
    self.assert200(r)

    # The scrobble acknowledgement echoes the submitted track back.
    expected = {
        "scrobbles": {
            "scrobble": {
                "track": {
                    "#text": data["track[0]"],
                    "corrected": "0"
                },
                "artist": {
                    "#text": data["artist[0]"],
                    "corrected": "0"
                },
                "album": {
                    "#text": data["album[0]"],
                    "corrected": "0"
                },
                "albumArtist": {
                    "#text": data["artist[0]"],
                    "corrected": "0"
                },
                "timestamp": str(data["timestamp[0]"]),
                "ignoredMessage": {
                    "code": "0"
                }
            },
            "accepted": "1",
            "ignored": "0"
        }
    }
    self.assertEqual(expected, r.json)

    # Check if listen reached the timescale listenstore
    time.sleep(1)
    recalculate_all_user_data()
    listens, _, _ = self.ls.fetch_listens(self.lb_user, from_ts=data["timestamp[0]"] - 1)
    self.assertEqual(len(listens), 1)