def run():
    """Import last week's top r/listentothis posts as queued tracks.

    Each post title that parses into track metadata gets a recency-weighted
    popularity score and is merged into the database as a QueuedTrack.
    """
    client = praw.Reddit(user_agent='song_finder')
    subreddits = ['listentothis']  # fixed typo: was 'subbreddits'
    subreddit = client.get_subreddit(subreddits[0])
    top_posts = subreddit.get_top_from_week()
    for post in top_posts:
        track_dict = parse_post_title(post.title)
        if not track_dict:
            # Title didn't match the expected "Artist - Title" pattern.
            continue
        date_posted = datetime.fromtimestamp(post.created_utc)
        # BUG FIX: total_seconds() returns seconds, but this variable is used
        # as hours (the hypem importer divides by 60/60 for the same metric).
        # Without the conversion the score was deflated ~3600x.
        hours_delta = (datetime.now() - date_posted).total_seconds() / 60 / 60
        # Score = upvotes per hour, scaled by 100 to compare across sources.
        source_score = int(post.score) / hours_delta * 100.0
        track_dict.update({
            'genres': post.link_flair_text,
            'source_score': source_score,
            'source': 'reddit - listentothis'})
        queued_track = QueuedTrack(**track_dict)
        # merge() upserts, so re-running doesn't create duplicate queue rows.
        session.merge(queued_track)
    session.commit()
def run():
    """Import last week's top r/listentothis posts as queued tracks.

    Each post title that parses into track metadata gets a recency-weighted
    popularity score and is merged into the database as a QueuedTrack.
    """
    client = praw.Reddit(user_agent='song_finder')
    subreddits = ['listentothis']  # fixed typo: was 'subbreddits'
    subreddit = client.get_subreddit(subreddits[0])
    top_posts = subreddit.get_top_from_week()
    for post in top_posts:
        track_dict = parse_post_title(post.title)
        if not track_dict:
            # Title didn't match the expected "Artist - Title" pattern.
            continue
        date_posted = datetime.fromtimestamp(post.created_utc)
        # BUG FIX: total_seconds() returns seconds, but this variable is used
        # as hours (the hypem importer divides by 60/60 for the same metric).
        # Without the conversion the score was deflated ~3600x.
        hours_delta = (datetime.now() - date_posted).total_seconds() / 60 / 60
        # Score = upvotes per hour, scaled by 100 to compare across sources.
        source_score = int(post.score) / hours_delta * 100.0
        track_dict.update({
            'genres': post.link_flair_text,
            'source_score': source_score,
            'source': 'reddit - listentothis'
        })
        queued_track = QueuedTrack(**track_dict)
        # merge() upserts, so re-running doesn't create duplicate queue rows.
        session.merge(queued_track)
    session.commit()
def run():
    """Import last week's popular HypeMachine tracks as queued tracks."""
    LOGGER.info('Getting music from HypeMachine...')
    results = hypem.get_popular(filter='lastweek', page=1)
    LOGGER.info('Found {} tracks, merging to database...'.format(
        len(results.data)))
    # FIX: the original `except: raise` was a no-op (bare re-raise);
    # try/finally expresses the real intent: always commit what merged.
    try:
        for track in results.data:
            date_posted = datetime.fromtimestamp(track.data['dateposted'])
            # TODO: this is unpredictable because there are random postid's
            # returned with different loved and dateposted values
            hours_delta = (
                datetime.now() - date_posted).total_seconds() / 60 / 60
            # Popularity normalised to "loves per hour since posting".
            source_score = int(track.data['loved_count'] / hours_delta)
            hypem_row = QueuedTrack(title=track.data['title'],
                                    artist=track.data['artist'],
                                    year=date_posted.year,
                                    source='hypemachine',
                                    source_score=source_score,
                                    duration=track.data['time'])
            session.merge(hypem_row)
    finally:
        # Commit whatever merged cleanly, even if a track raised mid-loop.
        session.commit()
    LOGGER.info('Merge completed.')
def run():
    """Import last week's popular HypeMachine tracks as queued tracks."""
    LOGGER.info('Getting music from HypeMachine...')
    results = hypem.get_popular(filter='lastweek', page=1)
    LOGGER.info('Found {} tracks, merging to database...'.format(
        len(results.data)))
    # FIX: the original `except: raise` was a no-op (bare re-raise);
    # try/finally expresses the real intent: always commit what merged.
    try:
        for track in results.data:
            date_posted = datetime.fromtimestamp(track.data['dateposted'])
            # TODO: this is unpredictable because there are random postid's
            # returned with different loved and dateposted values
            hours_delta = (
                datetime.now() - date_posted).total_seconds() / 60 / 60
            # Popularity normalised to "loves per hour since posting".
            source_score = int(track.data['loved_count'] / hours_delta)
            hypem_row = QueuedTrack(
                title=track.data['title'],
                artist=track.data['artist'],
                year=date_posted.year,
                source='hypemachine',
                source_score=source_score,
                duration=track.data['time'])
            session.merge(hypem_row)
    finally:
        # Commit whatever merged cleanly, even if a track raised mid-loop.
        session.commit()
    LOGGER.info('Merge completed.')
def update_db_for_itunes_data(itunes_data, db_field):
    """Copy one iTunes-derived value onto each matching SavedTrack row.

    :param itunes_data: list of dicts, each holding 'file_path' plus exactly
        one other key whose value should be stored (assumed shape — TODO
        confirm against the caller).
    :param db_field: name of the SavedTrack attribute to set.
    """
    if not itunes_data:
        # FIX: the original raised IndexError on itunes_data[0] when empty.
        return
    db_tracks = session.query(SavedTrack).filter(
        SavedTrack.path.in_([i['file_path'] for i in itunes_data])).all()
    # The single non-path key names the value being copied over.
    itunes_key = next(key for key in itunes_data[0] if key != 'file_path')
    # Build a path -> value index once instead of rescanning itunes_data for
    # every db row (was O(n*m)). setdefault keeps the original
    # first-occurrence-wins behavior if a path appears twice.
    value_by_path = {}
    for entry in itunes_data:
        value_by_path.setdefault(entry['file_path'], entry[itunes_key])
    for db_track in db_tracks:
        setattr(db_track, db_field, value_by_path[db_track.path])
        session.add(db_track)
    session.commit()
def save(self):
    """Flush pending changes: ID3 tags to disk first, then the DB model."""
    self.sync()
    tags = self.easyID3
    # Only touch the filesystem when the tag data actually changed.
    if tags.is_modified:
        LOGGER.info('Saving file changes...')
        tags.save()
    model = self.model
    # Likewise, only merge/commit when the ORM object is dirty.
    if session.is_modified(model):
        LOGGER.info('Committing model changes...')
        session.merge(model)
        session.commit()
def _already_saved(queued_track):
    """Return True if a SavedTrack with the same artist/title already exists."""
    return session.query(SavedTrack).filter(
        SavedTrack.artist == queued_track.artist,
        SavedTrack.title == queued_track.title).count() > 0


def _download(queued_track, track_save_name, options):
    """Download the queued track's audio into HOLD_DIRECTORY as mp3.

    Returns True on success, False when youtube-dl reports a download error.
    """
    LOGGER.info('Downloading track: {}'.format(track_save_name))
    options['outtmpl'] = u'{}/{}.%(ext)s'.format(HOLD_DIRECTORY,
                                                 track_save_name)
    ydl = youtube_dl.YoutubeDL(options)
    download_link = build_download_link(queued_track.youtube_video_id)
    try:
        ydl.download([download_link])
    except youtube_dl.utils.DownloadError as e:
        # FIX: exceptions have no .message attribute on Python 3;
        # str(e) works on both Python 2 and 3.
        LOGGER.warning('youtube-dl encountered an error: {}'.format(str(e)))
        return False
    return True


def _commit_saved_track(queued_track, holding_track_path, final_track_path):
    """Create the SavedTrack row (md5 + fingerprint) and drop the queue row."""
    saved_track = SavedTrack()
    saved_track.update_from_dict(queued_track.as_dict())
    saved_track.path = final_track_path
    saved_track.md5 = calculate_md5(holding_track_path)
    fingerprint_duration = fingerprint_file(holding_track_path, 30)
    saved_track.fingerprint = fingerprint_duration[1]
    saved_track.duration = fingerprint_duration[0]
    session.merge(saved_track)
    session.delete(queued_track)
    session.commit()


def run():
    """Download every queued track as an mp3 and promote it to SavedTrack.

    Decomposed per the original TODO: duplicate check, download, and DB
    promotion live in private helpers; this entry point orchestrates them.
    """
    LOGGER.info('Running music downloader...')
    tracks_to_download = get_tracks_to_download()
    if not tracks_to_download:
        LOGGER.info('No queued tracks found in database')
        return
    LOGGER.info('Found {} tracks from database to download...'.format(
        len(tracks_to_download)))
    options = {
        'format': 'bestaudio/best',
        'postprocessors': [{
            'key': 'FFmpegExtractAudio',
            'preferredcodec': 'mp3',
            'preferredquality': '192',
        }],
        'quiet': False}
    for queued_track in tracks_to_download:
        if _already_saved(queued_track):
            LOGGER.info('Track already exists as Saved Track, deleting '
                        'Queued track and skipping download.')
            session.delete(queued_track)
            session.commit()
            continue
        track_save_name = u'{} - {}'.format(queued_track.artist,
                                            queued_track.title)
        final_track_path = TRACK_DIRECTORY + track_save_name + '.mp3'
        holding_track_path = HOLD_DIRECTORY + track_save_name + '.mp3'
        if not _download(queued_track, track_save_name, options):
            continue
        _commit_saved_track(queued_track, holding_track_path,
                            final_track_path)
        # Move out of the holding area only after the DB commit succeeded.
        os.rename(holding_track_path, final_track_path)
        LOGGER.info('Complete. Downloaded track data committed to database.')
def run():
    """Look up a YouTube video id for every queued track missing one."""
    tracks_to_search_for = session.query(QueuedTrack).filter(
        QueuedTrack.youtube_video_id.is_(None)).all()
    tracks_to_search_for = [track.as_dict() for track in tracks_to_search_for]
    # FIX: the original `except: raise` was a no-op (bare re-raise);
    # try/finally expresses the real intent: always commit what merged.
    try:
        for track in tracks_to_search_for:
            query = track['artist'] + ' ' + track['title']
            params['q'] = query
            # NOTE(review): verify=False disables TLS certificate checking;
            # confirm this is genuinely required for this endpoint.
            resp = requests.get(url=base_url, params=params, verify=False)
            json_response = resp.json()
            if json_response['items']:
                # FIX: reuse the already-parsed body instead of calling
                # resp.json() a second time.
                track.update(get_best_result(json_response, query))
                # similarity_score is a ranking aid, not a model column.
                track.pop('similarity_score')
                session.merge(QueuedTrack(**track))
    finally:
        # Persist whatever succeeded even if a request failed mid-loop.
        session.commit()