def save(self, user):
    f = self.cleaned_data['file']
    # debug messages are pushed to the hard-coded 'test' group
    g = Group.objects.get(name='test')

    if settings.DEBUG:
        send_debug('Calculating full hash', g)
    # MD5 of the whole upload, streamed chunk by chunk
    full_md5 = hashlib.md5()
    for chunk in f.chunks():
        full_md5.update(chunk)
    full_hash = full_md5.hexdigest()

    if settings.DEBUG:
        send_debug('Done, calculating part hashes', g)
    # partial hashes: 100 bytes from the start, middle and end of the file
    f.seek(0)
    begin_hash = hashlib.md5(f.read(100)).hexdigest()
    f.seek((len(f) / 2) - 50)
    middle_hash = hashlib.md5(f.read(100)).hexdigest()
    f.seek(len(f) - 100)
    end_hash = hashlib.md5(f.read(100)).hexdigest()

    try:
        # an identical file was already uploaded; reuse its location
        tl = TrackLocation.objects.get(hash=full_hash, size=len(f))
        track = tl.track
    except TrackLocation.DoesNotExist:
        if settings.DEBUG:
            send_debug('uploading to s3', g)
        try:
            url = upload_to_s3(f, full_hash)
        except Exception as e:
            if settings.DEBUG:
                send_debug('exception: %s' % e, g)
            raise  # without a URL there is nothing to save

        if settings.DEBUG:
            send_debug('done, getting length', g)
        # track length in seconds, via pymad
        f.seek(0)
        mf = mad.MadFile(f)
        length = mf.total_time() / 1000

        tl = TrackLocation(url=url, size=len(f), begin_hash=begin_hash,
                           middle_hash=middle_hash, end_hash=end_hash,
                           hash=full_hash)

        # ID3 metadata from the temporary file on disk
        artist_name, album_name, track_name, year, hash = \
            get_track_data_from_file(f.temporary_file_path())

        if artist_name != '':
            artist = Artist.objects.get_or_create(name=artist_name)[0]
        else:
            artist = None

        if album_name != '':
            album, created = Album.objects.get_or_create(artist=artist,
                                                         name=album_name)
            if created:
                # best-effort cover art lookup from Last.fm; failures are ignored
                try:
                    r = urllib2.urlopen(
                        'http://ws.audioscrobbler.com/2.0/?method=album.getinfo'
                        '&api_key=72553de16666cad1c8f7e319292e9123'
                        '&artist=%s&album=%s' % (artist_name.replace(' ', '%20'),
                                                 album_name.replace(' ', '%20')))
                    bs = BeautifulSoup(r.read())
                    album.image = bs.album.image.nextSibling.nextSibling.nextSibling.nextSibling.contents[0]
                    album.save()
                except:
                    pass
        else:
            album = None

        track = Track.objects.get_or_create(title=track_name, album=album,
                                            year=year, artist=artist,
                                            length=length)[0]
        tl.track = track
        tl.save()
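
# --- Usage sketch (illustrative, not from the source) -----------------------
# A minimal Django view that wires the form's save(user) into an upload
# endpoint. The form class name UploadForm, its import path, the redirect
# target and the template name are assumptions; only the save(user) signature
# comes from the code above.
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext

from forms import UploadForm  # hypothetical location of the form above


def upload_track(request):
    if request.method == 'POST':
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            # hashes the file, uploads to S3 and creates Track/TrackLocation
            form.save(request.user)
            return HttpResponseRedirect('/')
    else:
        form = UploadForm()
    return render_to_response('upload.html', {'form': form},
                              context_instance=RequestContext(request))
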
def enqueue_track(self, track, user):
    r = _get_redis()
    # enqueue track to user queue
    r.rpush('%s_%s_queue' % (self.id, user.id), track.id)
    # increment total count
    count = r.incr('%s_queued' % self.id)
    # send debug message if in debug mode
    if settings.DEBUG:
        comet_utils.send_debug("%s enqueued %s" % (user, track), self)
    # add user to group's queue set
    if r.zscore('%s_users' % self.id, user.id) is None:
        r.zadd('%s_users' % self.id, user.id, 0)
        r.zadd('%s_users_next' % self.id, user.id, 1)
    # send queue update if next song not pushed
    if self.check_for_next_track()[0] is None:
        comet_utils.send_queue_update(count, self)
    # return queue count
    return r.get('%s_queued' % self.id)
def check_for_next_track(self):
    r = _get_redis()
    current_lock = '%s_current_lock' % self.id
    # don't continue if current song is playing or no queued tracks
    if r.ttl('%s_current' % self.id) > -1:
        comet_utils.send_debug("returning null because %s_current's ttl = %s"
                               % (self.id, r.ttl('%s_current' % self.id)), self)
        return None, None, None
    if r.exists(current_lock):
        comet_utils.send_debug("returning null because of lock", self)
        return None, None, None
    # note: the exists/set pair above is not atomic; SETNX would close the race
    r.set(current_lock, 1)
    try:
        # next track!
        results = self.next_track(r)
    finally:
        r.delete(current_lock)
    return results
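
# --- Usage sketch (illustrative, not from the source) -----------------------
# How the two queue methods fit together. The Track/User lookups below and the
# meaning of the three values returned by check_for_next_track() are
# assumptions; only the method names and signatures come from the code above.
group = Group.objects.get(name='test')
track = Track.objects.get(pk=1)            # illustrative lookup
user = User.objects.get(username='alice')  # illustrative lookup

# pushes track.id onto the <group.id>_<user.id>_queue list in Redis
queued = group.enqueue_track(track, user)

result = group.check_for_next_track()
if result[0] is None:
    # a track is still playing (<group.id>_current has a TTL) or another
    # worker holds the lock; nothing to start yet
    pass
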