def similar(self):
    """tracks similar to this track"""
    params = Track._check_params(
        {'method': 'track.getSimilar'},
        self.artist.name,
        self.name,
        self.mbid
    )
    node = self._api._fetch_data(params).find('similartracks')

    def build(el):
        # Assemble one Track (with its artist and match stats) from a
        # <track> element of the similartracks response.
        images = dict([(img.get('size'), img.text) for img in el.findall('image')])
        track_artist = Artist(
            self._api,
            subject = self,
            name = el.findtext('artist/name'),
            mbid = el.findtext('artist/mbid'),
            url = el.findtext('artist/url')
        )
        return Track(
            self._api,
            subject = self,
            name = el.findtext('name'),
            artist = track_artist,
            mbid = el.findtext('mbid'),
            stats = Stats(
                subject = el.findtext('name'),
                match = safe_float(el.findtext('match'))
            ),
            streamable = (el.findtext('streamable') == '1'),
            full_track = (el.find('streamable').attrib['fulltrack'] == '1'),
            image = images,
        )

    return [build(t) for t in node.findall('track')]
def compare(api, type1, type2, value1, value2, limit = None):
    """Run a tasteometer.compare request and wrap the result."""
    params = {'method': 'tasteometer.compare'}
    params['type1'] = type1
    params['type2'] = type2
    params['value1'] = value1
    params['value2'] = value2
    if limit is not None:
        params['limit'] = limit
    result = api._fetch_data(params).find('comparison/result')
    matched_artists = []
    for node in result.findall('artists/artist'):
        icons = dict([(i.get('size'), i.text) for i in node.findall('image')])
        matched_artists.append(
            Artist(
                api,
                name = node.findtext('name'),
                url = node.findtext('url'),
                image = icons,
            )
        )
    return Tasteometer(
        score = safe_float(result.findtext('score')),
        matches = safe_int(result.find('artists').attrib['matches']),
        artists = matched_artists
    )
def get_similar(self, limit = None):
    """
    Get the artists similar to this artist.

    @param limit: the number of artists returned (optional)
    @type limit:  L{int}

    @return: artists similar to this artist
    @rtype:  L{list} of L{Artist}
    """
    params = self._default_params({'method': 'artist.getSimilar'})
    if limit is not None:
        params['limit'] = limit
    node = self._api._fetch_data(params).find('similarartists')
    found = []
    for a in node.findall('artist'):
        found.append(
            Artist(
                self._api,
                subject = self,
                name = a.findtext('name'),
                mbid = a.findtext('mbid'),
                stats = Stats(
                    subject = a.findtext('name'),
                    match = safe_float(a.findtext('match')),
                ),
                url = 'http://' + a.findtext('url'),
                image = {'large': a.findtext('image')}
            )
        )
    # Cache the result, but hand callers their own copy of the list.
    self._similar = found
    return self._similar[:]
def get_similar(self, limit=None):
    """
    Get the artists similar to this artist.

    @param limit: the number of artists returned (optional)
    @type limit: L{int}

    @return: artists similar to this artist
    @rtype: L{list} of L{Artist}
    """
    params = self._default_params({'method': 'artist.getSimilar'})
    if limit is not None:
        params.update({'limit': limit})
    data = self._api._fetch_data(params).find('similarartists')

    def to_artist(node):
        # Build one Artist (with its match stats) from an <artist> element.
        return Artist(
            self._api,
            subject=self,
            name=node.findtext('name'),
            mbid=node.findtext('mbid'),
            stats=Stats(
                subject=node.findtext('name'),
                match=safe_float(node.findtext('match')),
            ),
            url='http://' + node.findtext('url'),
            image={'large': node.findtext('image')})

    self._similar = [to_artist(a) for a in data.findall('artist')]
    # Return a copy so callers cannot mutate the cached list.
    return self._similar[:]
def _read_url_data(self, opener, url, data = None):
    """
    Fetch C{url} through C{opener}, throttling successive requests.

    @param opener: a urllib-style opener whose C{open(url, data)} returns
                   a readable response object
    @param url:    the URL to fetch
    @param data:   optional request body passed straight to C{opener.open}

    @return: the raw response body

    Calls are serialized on the module-level C{_lock} and spaced at least
    C{Api.FETCH_INTERVAL} seconds apart.
    """
    with _lock:
        now = datetime.utcnow().replace(tzinfo = UTC)
        delta = now - self._last_fetch_time
        # Include the days component of the timedelta: the previous
        # "seconds + microseconds" formula wrapped around every 24 hours
        # (timedelta.seconds is always < 86400), so after a long idle
        # period the elapsed time could appear smaller than
        # FETCH_INTERVAL and trigger a needless sleep.
        elapsed = (delta.days * 86400
                   + delta.seconds
                   + safe_float(delta.microseconds) / 1000000)
        if elapsed < Api.FETCH_INTERVAL:
            time.sleep(Api.FETCH_INTERVAL - elapsed)
        url_data = opener.open(url, data).read()
        self._last_fetch_time = datetime.utcnow().replace(tzinfo = UTC)
        return url_data
def _search_yield_func(api, venue):
    """
    Build a Venue object from one <venue> element of a search response.

    @param api:   the API instance threaded into the nested objects
    @param venue: the <venue> XML element to convert

    @return: the corresponding Venue
    """
    latitude = venue.findtext("location/{%s}point/{%s}lat" % ((Location.XMLNS,) * 2))
    longitude = venue.findtext("location/{%s}point/{%s}long" % ((Location.XMLNS,) * 2))
    # NOTE(review): assumes findtext returns a string here (element present);
    # a missing <point> would make .strip() raise — confirm against the feed.
    return Venue(
        api,
        id=safe_int(venue.findtext("id")),
        name=venue.findtext("name"),
        location=Location(
            api,
            city=venue.findtext("location/city"),
            country=Country(api, name=venue.findtext("location/country")),
            street=venue.findtext("location/street"),
            postal_code=venue.findtext("location/postalcode"),
            # Real conditional expression instead of "cond and val or None":
            # the old idiom collapsed a legitimate coordinate of 0.0
            # (falsy) to None.
            latitude=safe_float(latitude) if latitude.strip() != "" else None,
            longitude=safe_float(longitude) if longitude.strip() != "" else None,
        ),
        url=venue.findtext("url"),
    )
def _search_yield_func(api, venue):
    """
    Convert one <venue> element of a venue.search response into a Venue.

    @param api:   the API instance passed to the nested objects
    @param venue: the <venue> XML element

    @return: the corresponding Venue
    """
    latitude = venue.findtext('location/{%s}point/{%s}lat' % ((Location.XMLNS,)*2))
    longitude = venue.findtext('location/{%s}point/{%s}long' % ((Location.XMLNS,)*2))
    return Venue(
        api,
        id = safe_int(venue.findtext('id')),
        name = venue.findtext('name'),
        location = Location(
            api,
            city = venue.findtext('location/city'),
            country = Country(
                api,
                name = venue.findtext('location/country')
            ),
            street = venue.findtext('location/street'),
            postal_code = venue.findtext('location/postalcode'),
            # "cond and val or None" returned None for a valid 0.0
            # coordinate (0.0 is falsy); a conditional expression keeps it.
            latitude = safe_float(latitude) if latitude.strip() != '' else None,
            longitude = safe_float(longitude) if longitude.strip() != '' else None,
        ),
        url = venue.findtext('url')
    )
def get_neighbours(self, limit = None):
    """Return this user's neighbours as reported by user.getNeighbours."""
    params = self._default_params({'method': 'user.getNeighbours'})
    if limit is not None:
        params['limit'] = limit
    data = self._api._fetch_data(params).find('neighbours')
    neighbours = []
    for node in data.findall('user'):
        # An absent <match> stays None; otherwise it is parsed as a float.
        match_text = node.findtext('match')
        neighbours.append(
            User(
                self._api,
                subject = self,
                name = node.findtext('name'),
                real_name = node.findtext('realname'),
                image = {'medium': node.findtext('image')},
                url = node.findtext('url'),
                stats = Stats(
                    subject = node.findtext('name'),
                    match = match_text and safe_float(match_text),
                ),
            )
        )
    return neighbours
def get_neighbours(self, limit=None):
    """Fetch the user.getNeighbours list for this user."""
    params = self._default_params({'method': 'user.getNeighbours'})
    if limit is not None:
        params.update({'limit': limit})
    data = self._api._fetch_data(params).find('neighbours')

    def as_user(u):
        # Map one <user> element to a User carrying its match score.
        return User(
            self._api,
            subject=self,
            name=u.findtext('name'),
            real_name=u.findtext('realname'),
            image={'medium': u.findtext('image')},
            url=u.findtext('url'),
            stats=Stats(
                subject=u.findtext('name'),
                match=u.findtext('match') and safe_float(u.findtext('match')),
            ),
        )

    return [as_user(u) for u in data.findall('user')]
def compare(api, type1, type2, value1, value2, limit=None):
    """Issue a tasteometer.compare call and wrap the response."""
    params = {
        'method': 'tasteometer.compare',
        'type1': type1,
        'type2': type2,
        'value1': value1,
        'value2': value2,
    }
    if limit is not None:
        params['limit'] = limit
    result = api._fetch_data(params).find('comparison/result')
    score = safe_float(result.findtext('score'))
    match_count = safe_int(result.find('artists').attrib['matches'])
    shared = [
        Artist(
            api,
            name=node.findtext('name'),
            url=node.findtext('url'),
            image=dict([(i.get('size'), i.text) for i in node.findall('image')]),
        )
        for node in result.findall('artists/artist')
    ]
    return Tasteometer(score=score, matches=match_count, artists=shared)
def create_from_data(api, subject, start, end):
    """
    Build a WeeklyTagChart for ``subject`` over [start, end].

    Derives per-tag weights from the subject's weekly artist chart using a
    TF-IDF-style computation over each artist's top tags, then normalizes
    the weights to a fixed total playcount of 1000.

    NOTE(review): Python 2 code — relies on ``xrange`` and on
    ``dict.items()`` returning a list that is sorted in place.
    """
    # ``w`` is only used as the ``subject`` of the produced Tag objects.
    w = WeeklyChart(
        subject = subject,
        start = start,
        end = end,
    )
    # Consider at most this many top tags per artist.
    max_tag_count = 3
    global_top_tags = api.get_global_top_tags()
    from collections import defaultdict
    wac = subject.get_weekly_artist_chart(start, end)
    # all_tags: how many charted artists carry each (global) tag.
    all_tags = defaultdict(lambda:0)
    # tag_weights: accumulated weight per tag name across artists.
    tag_weights = defaultdict(lambda:0)
    total_playcount = 0
    artist_count = 0
    # Pass 1: count tag occurrences; stop once 75% of the chart's playcount
    # is covered (or the remaining artists are insignificant), but always
    # analyze at least 10 artists.
    for artist in wac.artists:
        artist_count += 1
        total_playcount += artist.stats.playcount
        tag_count = 0
        for tag in artist.top_tags:
            # Ignore tags that are not in the global top-tag list.
            if tag not in global_top_tags:
                continue
            if tag_count >= max_tag_count:
                break
            all_tags[tag] += 1
            tag_count += 1
        artist_pp = artist.stats.playcount/safe_float(wac.stats.playcount)
        cumulative_pp = total_playcount/safe_float(wac.stats.playcount)
        if (cumulative_pp > 0.75 or artist_pp < 0.01) and artist_count > 10:
            break
    # Pass 2: TF-IDF-style weighting over the artists analyzed above.
    for artist in wac.artists[:artist_count]:
        artist_pp = artist.stats.playcount/safe_float(wac.stats.playcount)
        tf = 1/safe_float(max_tag_count)
        tag_count = 0
        weighted_tfidfs = {}
        for tag in artist.top_tags:
            if tag not in global_top_tags:
                continue
            if tag_count >= max_tag_count:
                break
            # df: fraction of analyzed artists that carry this tag.
            df = all_tags[tag]/safe_float(artist_count)
            tfidf = tf/df
            # Earlier tags (higher ranked for the artist) get larger weight.
            weighted_tfidf = safe_float(max_tag_count - tag_count)*tfidf
            weighted_tfidfs[tag.name] = weighted_tfidf
            tag_count += 1
        sum_weighted_tfidfs = sum(weighted_tfidfs.values())
        # Distribute this artist's playcount share across its tags,
        # proportionally to the normalized TF-IDF weights.
        for tag in weighted_tfidfs:
            tag_weights[tag] += weighted_tfidfs[tag]/sum_weighted_tfidfs*artist_pp
        artist_pp = artist.stats.playcount/safe_float(wac.stats.playcount)
    tag_weights_sum = sum(tag_weights.values())
    # Rank tags by descending weight (Python 2: items() is a sortable list).
    tag_weights = tag_weights.items()
    tag_weights.sort(key=lambda x:x[1], reverse=True)
    for i in xrange(len(tag_weights)):
        tag, weight = tag_weights[i]
        tag_weights[i] = (tag, weight, i+1)
    # Scale weights so the chart's counts sum to (about) 1000 plays.
    wtc = WeeklyTagChart(
        subject = subject,
        start = wac.start,
        end = wac.end,
        stats = Stats(
            subject = subject,
            playcount = 1000
        ),
        tags = [
            Tag(
                api,
                subject = w,
                name = tag,
                stats = Stats(
                    subject = tag,
                    rank = rank,
                    count = safe_int(round(1000*weight/tag_weights_sum)),
                )
            )
            for (tag, weight, rank) in tag_weights
        ]
    )
    # Percentage of the chart's playcount that was actually analyzed.
    wtc._artist_spectrum_analyzed = 100*total_playcount/safe_float(wac.stats.playcount)
    return wtc
def create_from_data(api, data):
    """
    Create the Event object from the provided XML element.

    @param api:    an instance of L{Api}
    @type api:     L{Api}
    @param data:   XML element
    @type data:    C{xml.etree.ElementTree.Element}

    @return:       an Event object corresponding to the provided XML element
    @rtype:        L{Event}

    @note: Use the L{Api.get_event} method instead of using this method directly.
    """
    start_date = None
    # The feed carries the start in one of three layouts: a separate
    # startTime element, a startDate with an embedded time, or a bare date.
    if data.findtext('startTime') is not None:
        start_date = datetime(*(
            time.strptime(
                "%s %s" % (
                    data.findtext('startDate').strip(),
                    data.findtext('startTime').strip()
                ),
                '%a, %d %b %Y %H:%M'
            )[0:6])
        ).replace(tzinfo = UTC)
    else:
        try:
            start_date = datetime(*(
                time.strptime(
                    data.findtext('startDate').strip(),
                    '%a, %d %b %Y %H:%M:%S'
                )[0:6])
            ).replace(tzinfo = UTC)
        except ValueError:
            try:
                start_date = datetime(*(
                    time.strptime(
                        data.findtext('startDate').strip(),
                        '%a, %d %b %Y'
                    )[0:6])
                ).replace(tzinfo = UTC)
            except ValueError:
                # Unparseable date: leave start_date as None.
                pass

    latitude = data.findtext('venue/location/{%s}point/{%s}lat' % ((Location.XMLNS,)*2))
    longitude = data.findtext('venue/location/{%s}point/{%s}long' % ((Location.XMLNS,)*2))

    return Event(
        api,
        id = safe_int(data.findtext('id')),
        title = data.findtext('title'),
        artists = [Artist(api, name = a.text) for a in data.findall('artists/artist')],
        headliner = Artist(api, name = data.findtext('artists/headliner')),
        venue = Venue(
            api,
            # The venue id is the trailing path segment of its URL.
            id = safe_int(data.findtext('venue/url').split('/')[-1]),
            name = data.findtext('venue/name'),
            location = Location(
                api,
                city = data.findtext('venue/location/city'),
                country = Country(
                    api,
                    name = data.findtext('venue/location/country')
                ),
                street = data.findtext('venue/location/street'),
                postal_code = data.findtext('venue/location/postalcode'),
                # Conditional expression instead of "cond and val or None":
                # the old idiom mapped a valid 0.0 coordinate (falsy) to None.
                latitude = safe_float(latitude) if latitude.strip() != '' else None,
                longitude = safe_float(longitude) if longitude.strip() != '' else None,
                #timezone = data.findtext('venue/location/timezone')
            ),
            url = data.findtext('venue/url')
        ),
        start_date = start_date,
        description = data.findtext('description'),
        image = dict([(i.get('size'), i.text) for i in data.findall('image')]),
        url = data.findtext('url'),
        stats = Stats(
            subject = safe_int(data.findtext('id')),
            attendance = safe_int(data.findtext('attendance')),
            reviews = safe_int(data.findtext('reviews')),
        ),
        tag = data.findtext('tag')
    )
def create_from_data(api, subject, start, end):
    """
    Construct a WeeklyTagChart for ``subject`` covering [start, end].

    Weights each tag with a TF-IDF-like score computed from the subject's
    weekly artist chart, then rescales so tag counts total roughly 1000.

    NOTE(review): Python 2 only — uses ``xrange`` and sorts the list
    returned by ``dict.items()`` in place.
    """
    # Placeholder chart used solely as the ``subject`` of each Tag below.
    w = WeeklyChart(
        subject=subject,
        start=start,
        end=end,
    )
    # Upper bound on top tags considered per artist.
    max_tag_count = 3
    global_top_tags = api.get_global_top_tags()
    from collections import defaultdict
    wac = subject.get_weekly_artist_chart(start, end)
    # Occurrence count of each globally-known tag among charted artists.
    all_tags = defaultdict(lambda: 0)
    # Accumulated per-tag-name weight across all analyzed artists.
    tag_weights = defaultdict(lambda: 0)
    total_playcount = 0
    artist_count = 0
    # First sweep: tally tag occurrences. Stops early once >75% of the
    # chart playcount is covered or artists become insignificant (<1%),
    # never before 10 artists have been seen.
    for artist in wac.artists:
        artist_count += 1
        total_playcount += artist.stats.playcount
        tag_count = 0
        for tag in artist.top_tags:
            # Skip tags outside the global top-tag set.
            if tag not in global_top_tags:
                continue
            if tag_count >= max_tag_count:
                break
            all_tags[tag] += 1
            tag_count += 1
        artist_pp = artist.stats.playcount / safe_float(
            wac.stats.playcount)
        cumulative_pp = total_playcount / safe_float(wac.stats.playcount)
        if (cumulative_pp > 0.75 or artist_pp < 0.01) and artist_count > 10:
            break
    # Second sweep: compute rank-weighted TF-IDF scores per artist and
    # fold them into the global tag weights, scaled by the artist's
    # share of the chart playcount.
    for artist in wac.artists[:artist_count]:
        artist_pp = artist.stats.playcount / safe_float(
            wac.stats.playcount)
        tf = 1 / safe_float(max_tag_count)
        tag_count = 0
        weighted_tfidfs = {}
        for tag in artist.top_tags:
            if tag not in global_top_tags:
                continue
            if tag_count >= max_tag_count:
                break
            # df: share of analyzed artists carrying this tag.
            df = all_tags[tag] / safe_float(artist_count)
            tfidf = tf / df
            # Higher-ranked tags of an artist receive a larger multiplier.
            weighted_tfidf = safe_float(max_tag_count - tag_count) * tfidf
            weighted_tfidfs[tag.name] = weighted_tfidf
            tag_count += 1
        sum_weighted_tfidfs = sum(weighted_tfidfs.values())
        for tag in weighted_tfidfs:
            tag_weights[tag] += weighted_tfidfs[
                tag] / sum_weighted_tfidfs * artist_pp
        artist_pp = artist.stats.playcount / safe_float(
            wac.stats.playcount)
    tag_weights_sum = sum(tag_weights.values())
    # Order tags by descending weight and attach 1-based ranks.
    tag_weights = tag_weights.items()
    tag_weights.sort(key=lambda x: x[1], reverse=True)
    for i in xrange(len(tag_weights)):
        tag, weight = tag_weights[i]
        tag_weights[i] = (tag, weight, i + 1)
    # Normalize counts so the produced chart sums to ~1000 plays.
    wtc = WeeklyTagChart(
        subject=subject,
        start=wac.start,
        end=wac.end,
        stats=Stats(subject=subject, playcount=1000),
        tags=[
            Tag(api,
                subject=w,
                name=tag,
                stats=Stats(
                    subject=tag,
                    rank=rank,
                    count=safe_int(round(1000 * weight / tag_weights_sum)),
                )) for (tag, weight, rank) in tag_weights
        ])
    # Fraction (percent) of the chart playcount that was analyzed.
    wtc._artist_spectrum_analyzed = 100 * total_playcount / safe_float(
        wac.stats.playcount)
    return wtc
def create_from_data(api, data):
    """
    Create the Event object from the provided XML element.

    @param api: an instance of L{Api}
    @type api: L{Api}
    @param data: XML element
    @type data: C{xml.etree.ElementTree.Element}

    @return: an Event object corresponding to the provided XML element
    @rtype: L{Event}

    @note: Use the L{Api.get_event} method instead of using this method directly.
    """
    start_date = None
    # Three observed date layouts: split date/time elements, a date with
    # an embedded time, or a bare date. Fall through in that order.
    if data.findtext('startTime') is not None:
        start_date = datetime(*(time.strptime(
            "%s %s" % (data.findtext('startDate').strip(),
                       data.findtext('startTime').strip()),
            '%a, %d %b %Y %H:%M')[0:6])).replace(tzinfo=UTC)
    else:
        try:
            start_date = datetime(*(time.strptime(
                data.findtext('startDate').strip(),
                '%a, %d %b %Y %H:%M:%S')[0:6])).replace(tzinfo=UTC)
        except ValueError:
            try:
                start_date = datetime(*(time.strptime(
                    data.findtext('startDate').strip(), '%a, %d %b %Y')
                    [0:6])).replace(tzinfo=UTC)
            except ValueError:
                # No parseable date at all: start_date stays None.
                pass
    latitude = data.findtext('venue/location/{%s}point/{%s}lat' %
                             ((Location.XMLNS, ) * 2))
    longitude = data.findtext('venue/location/{%s}point/{%s}long' %
                              ((Location.XMLNS, ) * 2))
    return Event(
        api,
        id=safe_int(data.findtext('id')),
        title=data.findtext('title'),
        artists=[
            Artist(api, name=a.text)
            for a in data.findall('artists/artist')
        ],
        headliner=Artist(api, name=data.findtext('artists/headliner')),
        venue=Venue(
            api,
            # The venue id is the last path component of its URL.
            id=safe_int(data.findtext('venue/url').split('/')[-1]),
            name=data.findtext('venue/name'),
            location=Location(
                api,
                city=data.findtext('venue/location/city'),
                country=Country(
                    api, name=data.findtext('venue/location/country')),
                street=data.findtext('venue/location/street'),
                postal_code=data.findtext('venue/location/postalcode'),
                # "cond and val or None" turned a legitimate 0.0
                # coordinate into None; use a conditional expression.
                latitude=safe_float(latitude)
                if latitude.strip() != '' else None,
                longitude=safe_float(longitude)
                if longitude.strip() != '' else None,
                #timezone = data.findtext('venue/location/timezone')
            ),
            url=data.findtext('venue/url')),
        start_date=start_date,
        description=data.findtext('description'),
        image=dict([(i.get('size'), i.text)
                    for i in data.findall('image')]),
        url=data.findtext('url'),
        stats=Stats(
            subject=safe_int(data.findtext('id')),
            attendance=safe_int(data.findtext('attendance')),
            reviews=safe_int(data.findtext('reviews')),
        ),
        tag=data.findtext('tag'))