def get_recent_tracks(user, **kwargs):
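    # Fetch a user's recent scrobbles via the Last.fm user.getRecentTracks API;
    # the recovering parser tolerates occasional malformed XML in the response.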
    return retry(lambda: objectify.fromstring(
        urllib2.urlopen("http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(dict(method="user.getrecenttracks",
                                                                                     api_key=app.config["LAST_FM_API_KEY"],
                                                                                     user=user.username,
                                                                                     **kwargs))).read(),
        objectify.makeparser(encoding="utf-8", recover=True)
    ), logger=logger)
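
# For every artist with more than 250 scrobbles by user 6 or 11, store pairwise
# similarities from the Last.fm artist.getSimilar API, one ArtistSimilarity row
# per unordered artist pair that is not in the database yet.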
def download_artist_similarities():
    from lxml import objectify
    import operator
    from sqlalchemy.sql import func
    import urllib
    import urllib2
    from themyutils.misc import retry

    from bs4 import BeautifulSoup
    import requests
    artists = set(map(operator.itemgetter(0),
                      db.session.query(Artist.name).\
                                 filter(Artist.id.in_(db.session.query(UserArtist.artist_id).\
                                                                 filter(UserArtist.user_id.in_([6, 11]),
                                                                        UserArtist.scrobbles > 250)))))
    for artist in artists:
        artist_1 = db.session.query(Artist).filter(Artist.name == artist).first()
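        # Disabled alternative: scrape the artist's "+similar" page on last.fm
        # with BeautifulSoup instead of calling the API.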
        """
        artist_1 = db.session.query(Artist).filter(Artist.name == artist).first()
        url = b"http://www.last.fm/ru/music/%s" % urllib.quote_plus(artist.encode("utf-8"), b"").replace(b"%2B", b"%252B")
        for i, li in enumerate(BeautifulSoup(requests.get(url + b"/+similar").text).select("li.large-grid-wrapper")):
            name = li.select("a.link-block-target")[0].text.strip()
            artist_2 = db.session.query(Artist).filter(Artist.name == name).first()
            if artist_2:
                a1, a2 = tuple(sorted([artist_1, artist_2], key=lambda artist: artist.name.lower()))
                if db.session.query(func.count(ArtistSimilarity.id)).\
                              filter(ArtistSimilarity.artist_1 == a1,
                                     ArtistSimilarity.artist_2 == a2).\
                              scalar() == 0:
                    similarity = ArtistSimilarity()
                    similarity.artist_1 = a1
                    similarity.artist_2 = a2
                    similarity.match = 1 - i / 100.0
                    print(artist_1.name, artist_2.name, similarity.match)
                    db.session.add(similarity)
                    db.session.commit()
        """

        #"""
        xml = retry(lambda: objectify.fromstring(
                        urllib2.urlopen("http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(dict(method="artist.getSimilar",
                                                                                                     api_key=app.config["LAST_FM_API_KEY"],
                                                                                                     artist=artist.encode("utf-8")))).read(),
                        objectify.makeparser(encoding="utf-8", recover=True)
                    ), max_tries=5, exceptions=((urllib2.HTTPError, lambda e: e.code not in [400]),))
        for match in xml.similarartists.iter("artist"):
            artist_2 = db.session.query(Artist).filter(Artist.name == unicode(match.name)).first()
            if artist_2:
                a1, a2 = tuple(sorted([artist_1, artist_2], key=lambda artist: artist.name.lower()))
                if db.session.query(func.count(ArtistSimilarity.id)).\
                              filter(ArtistSimilarity.artist_1 == a1,
                                     ArtistSimilarity.artist_2 == a2).\
                              scalar() == 0:
                    similarity = ArtistSimilarity()
                    similarity.artist_1 = a1
                    similarity.artist_2 = a2
                    similarity.match = float(match.match)
                    db.session.add(similarity)
                    db.session.commit()
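
# Import past concerts for artists with at least 100 scrobbles by user 6 or 11,
# paging through artist.getPastEvents until an already-imported event for the
# artist is reached.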
def update_events():
    artists = set([user_artist.artist.name
                   for user_artist in db.session.query(UserArtist).\
                                                 filter(UserArtist.scrobbles >= 100,
                                                        UserArtist.user_id.in_([6, 11]))])
    try:
        for artist in artists:
            db_artist = db.session.query(Artist).filter(Artist.name == artist).one()

            page = 1
            pages = -1
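            # pages stays -1 until the first response reveals the total page count.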
            while pages == -1 or page <= pages:
                logger.debug("Opening %s's page %d of %d", artist, page, pages)
                xml = retry(lambda: objectify.fromstring(
                    urllib2.urlopen("http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(dict(method="artist.getPastEvents",
                                                                                                 api_key=app.config["LAST_FM_API_KEY"],
                                                                                                 artist=artist.encode("utf-8"),
                                                                                                 page=page))).read(),
                    objectify.makeparser(encoding="utf-8", recover=True)
                ), max_tries=5, exceptions=((urllib2.HTTPError, lambda e: e.code not in [400]),), logger=logger)

                if pages == -1:
                    pages = int(xml.events.get("totalPages"))

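                # An event already linked to this artist means older pages were
                # imported on a previous run, so paging can stop.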
                found = False
                for event in xml.events.iter("event"):
                    if not hasattr(event, "venue"):
                        continue

                    db_event = db.session.query(Event).get(int(event.id))
                    if db_event:
                        if db_artist in db_event.artists:
                            found = True
                            break
                        else:
                            db_event.artists.append(db_artist)
                    else:
                        db_event = Event()
                        db_event.id = int(event.id)
                        db_event.title = unicode(event.title)
                        db_event.datetime = dateutil.parser.parse(unicode(event.startDate))
                        db_event.url = unicode(event.url)
                        db_event.city = unicode(event.venue.location.city)
                        db_event.country = unicode(event.venue.location.country)
                        db_event.artists.append(db_artist)
                        db.session.add(db_event)

                if found:
                    break

                page = page + 1

            db.session.commit()
    except urllib2.HTTPError:
        # Give up on the remaining artists after a persistent API failure;
        # whatever has already been committed is kept.
        pass
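
# Weekly rebuild of per-user artist play counts (library.getArtists) for users
# whose scrobbles are not downloaded individually.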
def update_user_artists():
    for u in db.session.query(User).\
                        filter(User.download_scrobbles == False,
                               (User.last_library_update == None) |\
                                   (User.last_library_update <= datetime.now() - timedelta(days=7))):
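        # A dedicated scoped session per user, so each rebuild commits on its own.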
        session = db.create_scoped_session()

        user = session.query(User).get(u.id)

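        # Rebuild from scratch: drop the user's existing UserArtist rows first.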
        session.query(UserArtist).\
                filter(UserArtist.user == user).\
                delete()

        page = 1
        pages = -1
        while pages == -1 or page <= pages:
            logger.debug("Opening %s's page %d of %d", user.username, page, pages)
            xml = retry(lambda: objectify.fromstring(
                urllib2.urlopen("http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(dict(method="library.getArtists",
                                                                                             api_key=app.config["LAST_FM_API_KEY"],
                                                                                             user=user.username,
                                                                                             limit=200,
                                                                                             page=page))).read(),
                objectify.makeparser(encoding="utf-8", recover=True)
            ), logger=logger)

            if pages == -1:
                pages = int(xml.artists.get("totalPages"))

            for artist in xml.artists.iter("artist"):
                user_artist = UserArtist()
                user_artist.user = user
                user_artist.artist = unicode(artist.name)
                user_artist.scrobbles = int(artist.playcount)
                session.add(user_artist)

                user.last_library_update = datetime.now()

            page = page + 1

        session.commit()