Code example #1
def vote(mbid, spotify_uri, user_id):
    """Submit report about incorrect Spotify mapping.

    Returns:
        Returns two values. First one is a boolean that indicates whether the submission has been successful.
        The second is an exception in case errors occur. If there are no errors, this value is None.
    """
    try:
        if _base_url is None or _key is None:
            raise ValueError(
                "Missing MBSPOTIFY_BASE_URI or MBSPOTIFY_ACCESS_KEY.")
        session = requests.Session()
        session.mount(_base_url, HTTPAdapter(max_retries=2))
        resp = session.post(_base_url + 'mapping/vote',
                            params={'key': _key},
                            headers={'Content-Type': 'application/json'},
                            data=json.dumps({
                                'mbid': str(mbid),
                                'spotify_uri': str(spotify_uri),
                                'user': str(user_id)
                            }))
        cache.delete(mbid, _CACHE_NAMESPACE)
        return resp.status_code == 200, None
    except (RequestException, ValueError) as e:
        return False, e
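The two-value return lets a caller distinguish a rejected submission from a transport or configuration error. A minimal calling sketch, assuming `mbid`, `spotify_uri` and `user_id` come from the surrounding view code (the logger name is illustrative):

import logging

log = logging.getLogger(__name__)

# Hypothetical caller of the vote() function above; mbid, spotify_uri and
# user_id are assumed to be defined by the caller.
success, error = vote(mbid, spotify_uri, user_id)
if error is not None:
    # vote() caught a RequestException or ValueError and returned it.
    log.warning("Could not submit Spotify mapping vote: %s", error)
elif not success:
    # The mbspotify service responded with a non-200 status code.
    log.warning("Spotify mapping vote was not accepted")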
Code example #2
    def delete_listen(self, listened_at: int, user_name: str,
                      recording_msid: str):
        """ Delete a particular listen for user with specified MusicBrainz ID.
        Args:
            listened_at: The timestamp of the listen
            user_name: the username of the user
            recording_msid: the MessyBrainz ID of the recording
        Raises: TimescaleListenStoreException if unable to delete the listen
        """

        args = {
            'listened_at': listened_at,
            'user_name': user_name,
            'recording_msid': recording_msid
        }
        query = """DELETE FROM listen
                    WHERE listened_at = :listened_at
                      AND user_name = :user_name
                      AND data -> 'track_metadata' -> 'additional_info' ->> 'recording_msid' = :recording_msid """

        try:
            with timescale.engine.connect() as connection:
                connection.execute(sqlalchemy.text(query), args)

            user_key = "{}{}".format(
                self.ns + REDIS_TIMESCALE_USER_LISTEN_COUNT, user_name)
            cache.delete(user_key)
        except psycopg2.OperationalError as e:
            self.log.error("Cannot delete listen for user: %s" % str(e))
            raise TimescaleListenStoreException
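A minimal calling sketch for the method above, assuming a `listen_store` instance of this listen store class; the timestamp, username and MSID values are placeholders, not real data:

# Hypothetical usage of the delete_listen() method shown above.
try:
    listen_store.delete_listen(
        listened_at=1400000050,                                     # placeholder timestamp
        user_name="example_user",                                   # placeholder username
        recording_msid="c7a41965-9f1e-456c-8b1d-27c0f0dde280",      # placeholder MSID
    )
except TimescaleListenStoreException:
    # The underlying database error was already logged inside delete_listen().
    pass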
Code example #3
    def insert(self, listens):
        """ Insert a batch of listens.
        """

        submit = []
        user_names = {}
        for listen in listens:
            user_names[listen.user_name] = 1
            submit.append(listen.to_influx(quote(listen.user_name)))

        if not self.influx.write_points(submit, time_precision='s'):
            self.log.error(
                "Cannot write data to influx. (write_points returned False), data=%s",
                json.dumps(submit, indent=3))

        # If we reach this point, we were able to write the listens to the InfluxListenStore.
        # So update the listen counts of the users cached in brainzutils cache.
        for data in submit:
            user_key = "{}{}".format(REDIS_INFLUX_USER_LISTEN_COUNT,
                                     data['fields']['user_name'])

            cached_count = cache.get(user_key, decode=False)
            if cached_count:
                cache.increment(user_key)

        # Invalidate cached data for user
        for user_name in user_names.keys():
            cache.delete(REDIS_USER_TIMESTAMPS % user_name)

        if len(listens):
            # Enter a measurement to count items inserted
            submit = [{
                'measurement': TEMP_COUNT_MEASUREMENT,
                'tags': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                },
                'fields': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                }
            }]
            try:
                if not self.influx.write_points(submit):
                    self.log.error(
                        "Cannot write listen cound to influx. (write_points returned False)"
                    )
            except (InfluxDBServerError, InfluxDBClientError,
                    ValueError) as err:
                self.log.error("Cannot write data to influx: %s, data: %s",
                               str(err),
                               json.dumps(submit, indent=3),
                               exc_info=True)
                raise
Code example #4
File: mbspotify.py Project: rsh7/critiquebrainz
def vote(mbid, spotify_uri, user_id):
    """Submit report about incorrect Spotify mapping."""
    if _base_url is None or _key is None:
        return

    # TODO(roman): Catch errors during voting.
    requests.post(_base_url + 'mapping/vote?key=' + _key, headers={'Content-Type': 'application/json'},
                  data=json.dumps({
                      'mbid': str(mbid),
                      'user': str(user_id),
                      'spotify_uri': str(spotify_uri),
                  }))
    cache.delete(mbid, _CACHE_NAMESPACE)
Code example #5
    def test_delete_with_namespace(self):
        key = "testing"
        namespace = "spaaaaaaace"
        self.assertTrue(cache.set(key, u"Пример", namespace=namespace))
        self.assertEqual(cache.get(key, namespace=namespace), u"Пример")
        self.assertEqual(cache.delete(key, namespace=namespace), 1)
        self.assertIsNone(cache.get(key, namespace=namespace))
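The namespace argument keeps identically named keys isolated from each other, and `cache.delete` returns the number of keys it removed. A small sketch of that isolation, assuming the cache connection has already been initialised elsewhere and using illustrative namespace names:

from brainzutils import cache

# Illustrative only; assumes the cache has been initialised elsewhere.
cache.set("testing", u"first", namespace="reviews")
cache.set("testing", u"second", namespace="listens")

deleted = cache.delete("testing", namespace="reviews")   # one key removed
assert deleted == 1

# The key with the same name in the other namespace is untouched.
assert cache.get("testing", namespace="listens") == u"second"
assert cache.get("testing", namespace="reviews") is None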
Code example #6
    def insert(self, listens):
        """
            Insert a batch of listens. Returns a list of (listened_at, track_name, user_name) that indicates
            which rows were inserted into the DB. If the row is not listed in the return values, it was a duplicate.
        """

        submit = []
        user_names = {}
        for listen in listens:
            user_names[listen.user_name] = 1
            submit.append(listen.to_timescale())

        query = """INSERT INTO listen (listened_at, track_name, user_name, data)
                        VALUES %s
                   ON CONFLICT (listened_at, track_name, user_name)
                    DO NOTHING
                     RETURNING listened_at, track_name, user_name"""

        inserted_rows = []
        conn = timescale.engine.raw_connection()
        with conn.cursor() as curs:
            try:
                execute_values(curs, query, submit, template=None)
                while True:
                    result = curs.fetchone()
                    if not result:
                        break
                    inserted_rows.append((result[0], result[1], result[2]))
            except UntranslatableCharacter:
                conn.rollback()
                return

        conn.commit()

        # Update the listen counts of the users cached in the brainzutils cache.
        for _, _, user_name in inserted_rows:
            user_key = "{}{}".format(
                self.ns + REDIS_TIMESCALE_USER_LISTEN_COUNT, user_name)
            cached_count = cache.get(user_key, decode=False)
            if cached_count:
                cache.increment(user_key)

        # Invalidate cached data for user
        for user_name in user_names:
            cache.delete(self.ns + REDIS_USER_TIMESTAMPS % user_name)

        return inserted_rows
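A sketch of how a caller might use the return value, assuming a `listen_store` instance and a list of Listen objects prepared elsewhere (the logger name is illustrative):

import logging

log = logging.getLogger(__name__)

# Hypothetical caller of the insert() method shown above; `listens` is assumed
# to be a list of Listen objects built by the caller.
inserted_rows = listen_store.insert(listens)
if inserted_rows is None:
    # insert() rolled back the batch after an UntranslatableCharacter error.
    log.error("Listen batch was rolled back")
else:
    duplicates = len(listens) - len(inserted_rows)
    log.info("Inserted %d listens, skipped %d duplicates", len(inserted_rows), duplicates)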
Code example #7
    def test_get_total_listen_count(self):
        total_count = self.logstore.get_total_listen_count()
        self.assertEqual(total_count, 0)

        count_user_1 = self._create_test_data(self.testuser["musicbrainz_id"],
                                              self.testuser["id"])
        uid = random.randint(2000, 1 << 31)
        testuser2 = db_user.get_or_create(uid, f"user_{uid}")
        count_user_2 = self._create_test_data(testuser2["musicbrainz_id"],
                                              testuser2["id"])

        cache.delete(REDIS_TOTAL_LISTEN_COUNT)
        add_missing_to_listen_users_metadata()
        update_user_listen_data()

        total_count = self.logstore.get_total_listen_count()
        self.assertEqual(total_count, count_user_1 + count_user_2)
Code example #8
    def test_delete_single_listen(self):
        uid = random.randint(2000, 1 << 31)
        testuser = db_user.get_or_create(uid, "user_%d" % uid)
        testuser_name = testuser['musicbrainz_id']
        self._create_test_data(testuser_name, testuser["id"])

        listens, min_ts, max_ts = self.logstore.fetch_listens(user=testuser,
                                                              to_ts=1400000300)
        self.assertEqual(len(listens), 5)
        self.assertEqual(listens[0].ts_since_epoch, 1400000200)
        self.assertEqual(listens[1].ts_since_epoch, 1400000150)
        self.assertEqual(listens[2].ts_since_epoch, 1400000100)
        self.assertEqual(listens[3].ts_since_epoch, 1400000050)
        self.assertEqual(listens[4].ts_since_epoch, 1400000000)

        self.logstore.delete_listen(1400000050, testuser["id"],
                                    "c7a41965-9f1e-456c-8b1d-27c0f0dde280")

        pending = self._get_pending_deletes()
        self.assertEqual(len(pending), 1)
        self.assertEqual(pending[0]["listened_at"], 1400000050)
        self.assertEqual(pending[0]["user_id"], testuser["id"])
        self.assertEqual(str(pending[0]["recording_msid"]),
                         "c7a41965-9f1e-456c-8b1d-27c0f0dde280")

        delete_listens_and_update_user_listen_data()

        # clear cache entry so that count is fetched from db again
        cache.delete(REDIS_USER_LISTEN_COUNT + str(testuser["id"]))

        listens, min_ts, max_ts = self.logstore.fetch_listens(user=testuser,
                                                              to_ts=1400000300)
        self.assertEqual(len(listens), 4)
        self.assertEqual(listens[0].ts_since_epoch, 1400000200)
        self.assertEqual(listens[1].ts_since_epoch, 1400000150)
        self.assertEqual(listens[2].ts_since_epoch, 1400000100)
        self.assertEqual(listens[3].ts_since_epoch, 1400000000)

        self.assertEqual(
            self.logstore.get_listen_count_for_user(testuser["id"]), 4)
        min_ts, max_ts = self.logstore.get_timestamps_for_user(testuser["id"])
        self.assertEqual(min_ts, 1400000000)
        self.assertEqual(max_ts, 1400000200)
Code example #9
    def insert(self, listens):
        """ Insert a batch of listens.
        """

        submit = []
        user_names = {}
        for listen in listens:
            user_names[listen.user_name] = 1
            submit.append(listen.to_influx(quote(listen.user_name)))

        if not self.influx.write_points(submit, time_precision='s'):
            self.log.error("Cannot write data to influx. (write_points returned False), data=%s", json.dumps(submit, indent=3))

        # If we reach this point, we were able to write the listens to the InfluxListenStore.
        # So update the listen counts of the users cached in brainzutils cache.
        for data in submit:
            user_key = "{}{}".format(REDIS_INFLUX_USER_LISTEN_COUNT, data['fields']['user_name'])

            cached_count = cache.get(user_key, decode=False)
            if cached_count:
                cache.increment(user_key)

        # Invalidate cached data for user
        for user_name in user_names.keys():
            cache.delete(REDIS_USER_TIMESTAMPS % user_name)

        if len(listens):
            # Enter a measurement to count items inserted
            submit = [{
                'measurement': TEMP_COUNT_MEASUREMENT,
                'tags': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                },
                'fields': {
                    COUNT_MEASUREMENT_NAME: len(listens)
                }
            }]
            try:
                if not self.influx.write_points(submit):
                    self.log.error("Cannot write listen cound to influx. (write_points returned False)")
            except (InfluxDBServerError, InfluxDBClientError, ValueError) as err:
                self.log.error("Cannot write data to influx: %s, data: %s", str(err), json.dumps(submit, indent=3), exc_info=True)
                raise
Code example #10
def vote(mbid, spotify_uri, user_id):
    """Submit report about incorrect Spotify mapping.

    Returns:
        Returns two values. First one is a boolean that indicates whether the submission has been successful.
        The second is an exception in case errors occur. If there are no errors, this value is None.
    """
    try:
        if _base_url is None or _key is None:
            raise ValueError("Missing MBSPOTIFY_BASE_URI or MBSPOTIFY_ACCESS_KEY.")
        session = requests.Session()
        session.mount(_base_url, HTTPAdapter(max_retries=2))
        resp = session.post(_base_url + 'mapping/vote',
                            params={'key': _key},
                            headers={'Content-Type': 'application/json'},
                            data=json.dumps({'mbid': str(mbid), 'spotify_uri': str(spotify_uri), 'user': str(user_id)}))
        cache.delete(mbid, _CACHE_NAMESPACE)
        return resp.status_code == 200, None
    except (RequestException, ValueError) as e:
        return False, e
Code example #11
File: mbspotify.py Project: rsh7/critiquebrainz
def add_mapping(mbid, spotify_uri, user_id):
    """Submit new Spotify mapping.

    Returns:
        Returns two values. First one is a boolean that indicates whether the submission has been successful.
        The second is an exception in case errors occur. If there are no errors, this value is None.
    """
    if _base_url is None or _key is None:
        return False, None

    try:
        session = requests.Session()
        session.mount(_base_url, HTTPAdapter(max_retries=2))
        resp = session.post(_base_url + 'mapping/add?key=' + _key,
                            headers={'Content-Type': 'application/json'},
                            data=json.dumps({'mbid': str(mbid), 'spotify_uri': spotify_uri, 'user': str(user_id)}))
        cache.delete(mbid, _CACHE_NAMESPACE)
        return resp.status_code == 200, None
    except RequestException as e:
        return False, e
Code example #12
    def test_delete(self):
        key = "testing"
        self.assertTrue(cache.set(key, u"Пример"))
        self.assertEqual(cache.get(key), u"Пример")
        self.assertEqual(cache.delete(key), 1)
        self.assertIsNone(cache.get(key))
Code example #13
    def test_delete_listens(self):
        """
        Test delete listens for a user
        """
        # test GET requests to the delete-listens view first
        self.temporary_login(self.user['login_id'])
        resp = self.client.get(url_for('profile.delete_listens'))
        self.assert200(resp)

        # send three listens for the user
        resp = self.send_listens()
        self.assert200(resp)

        # set the latest_import ts to a non-default value, so that we can check it was
        # reset later
        val = int(time.time())
        resp = self.client.post(
            url_for('api_v1.latest_import'),
            data=json.dumps({'ts': val}),
            headers={
                'Authorization': 'Token {}'.format(self.user['auth_token'])
            },
            content_type='application/json',
        )
        self.assert200(resp)
        resp = self.client.get(
            url_for('api_v1.latest_import',
                    user_name=self.user['musicbrainz_id']))
        self.assert200(resp)
        self.assertEqual(resp.json['latest_import'], val)

        self.assertNotEqual(
            self.redis.ttl(
                cache._prep_key(REDIS_USER_LISTEN_COUNT +
                                str(self.user['id']))), 0)

        # check that listens have been successfully submitted
        resp = self.client.get(
            url_for('api_v1.get_listen_count',
                    user_name=self.user['musicbrainz_id']))
        self.assert200(resp)
        self.assertEqual(json.loads(resp.data)['payload']['count'], 3)

        # now delete all the listens we just sent
        # we do a GET request first to put the CSRF token in the Flask global context
        # so that we can use it in the POST request in the next step
        self.client.get(url_for('profile.delete_listens'))
        resp = self.client.post(url_for('profile.delete_listens'),
                                data={'csrf_token': g.csrf_token})
        self.assertRedirects(
            resp, url_for('user.profile',
                          user_name=self.user['musicbrainz_id']))

        # listen counts are cached for 5 minutes, so delete the key, otherwise the cached value would be returned
        cache.delete(REDIS_USER_LISTEN_COUNT + str(self.user['id']))

        # check that listens have been successfully deleted
        resp = self.client.get(
            url_for('api_v1.get_listen_count',
                    user_name=self.user['musicbrainz_id']))
        self.assert200(resp)
        self.assertEqual(json.loads(resp.data)['payload']['count'], 0)

        # check that the latest_import timestamp has been reset too
        resp = self.client.get(
            url_for('api_v1.latest_import',
                    user_name=self.user['musicbrainz_id']))
        self.assert200(resp)
        self.assertEqual(resp.json['latest_import'], 0)