def run() -> bool:
    """Fetch recent listening history and return the locally stored history frame.

    NOTE(review): the declared return type is ``bool`` but a DataFrame is
    returned — annotation kept for interface stability; confirm with callers.
    """
    # BUG FIX: log message previously read "funciton".
    logging.info('Running song-history run() function.')
    token = get_token()
    # Fetched for its side effects / future use; result currently unused here.
    data = get_recents(token=token)
    csv_df, latest = download_to_df()
    print(csv_df.tail())  # debug output, kept intentionally
    print(latest)
    return csv_df
def get_current(token=None) -> dict:
    """Return the user's current Spotify playback state as a dict.

    Acquires a token when none is supplied; returns {} on any non-200
    response.
    """
    if not token:
        token = get_token()
    endpoint = "https://api.spotify.com/v1/me/player"  # current-playback endpoint
    auth_header = {'Authorization': 'Bearer ' + token}
    resp = r.get(url=endpoint, headers=auth_header)
    return resp.json() if resp.status_code == 200 else {}
def get_recents(token=None) -> dict:
    """Return the user's 50 most recently played tracks as a dict.

    Acquires a token when none is supplied; returns {} on any non-200
    response. The limit of 50 overrides the API default of 20.
    """
    if not token:
        token = get_token()
    endpoint = "https://api.spotify.com/v1/me/player/recently-played"
    auth_header = {'Authorization': 'Bearer ' + token}
    query = {'limit': 50}  # API default is 20; request the maximum instead
    resp = r.get(url=endpoint, headers=auth_header, params=query)
    return resp.json() if resp.status_code == 200 else {}
def update_history_db(model):
    """Sync newly played tracks into the given model's table.

    Assumes a Django-style model (uses ``model._meta.get_fields()``) —
    TODO confirm. Currently only fetches the data and prints the model's
    field names; the actual insert logic is still unimplemented.
    """
    # BUG FIX: log message was copy-pasted from run() (and misspelled).
    logging.info('Running song-history update_history_db() function.')
    token = get_token()
    data = get_recents(token=token)
    csv_df, latest = download_to_df()
    csv_df.rename(columns={'name': 'track'}, inplace=True)
    # TODO: find the largest datetime already in the db, keep only entries
    # with later datetimes, and insert those rows in a loop.
    for f in model._meta.get_fields():
        print(f.name)
def n_track_analyses_generator(track_ids):
    """Yield one Spotify audio-analysis object per id in ``track_ids``.

    Retries after a Retry-After header or a 503; on 404 the id is removed
    from ``track_ids`` (the list is mutated) and ``{'track_not_found': id}``
    is yielded instead. Any other non-200 status raises.
    """
    token = get_token()
    headers = {
        'Authorization': f"{token['token_type']} {token['access_token']}"
    }
    idx = 0
    start_time = time()
    while idx < len(track_ids):
        track_id = track_ids[idx]
        response = get(_get_track_analysis_url(track_id), headers=headers)
        if response.status_code == 429:
            print(f'we were rate limited after {idx + 1} requests')
        if response.status_code == 200:
            result = response.json()
            result['track']['id'] = track_id
            yield result
            idx += 1
        elif 'Retry-After' in response.headers:
            # BUG FIX: previously tested the *request* headers dict, which
            # never contains Retry-After, so this retry path never ran.
            retry_after = response.headers['Retry-After']
            print(
                f"Got status code {response.status_code} with a Retry-After header. Retrying after {retry_after}"
            )
            process_retry(retry_after)
        elif response.status_code == 503:
            print(
                f"Got status code {response.status_code} without a Retry-After header. Retrying after {3}"
            )
            process_retry(3)
        elif response.status_code == 404:
            print(f"received 404 for track {track_id}.")
            # Removing at idx shifts the next id into place, so idx is not
            # advanced here.
            track_ids.remove(track_id)
            result = {'track_not_found': track_id}
            yield result
        else:
            print(response)
            print(response.json())
            raise Exception("Unexpected non-200 status code")
    total_time = time() - start_time
    # NOTE: len(track_ids) here reflects any 404 removals above.
    print(
        f'fetched {len(track_ids)} track analysis objects in {total_time:.3f} seconds'
    )
def authorize_callback():
    """OAuth callback: swap the authorization code for tokens, persist them
    as cookies, and redirect the browser to the front-end.

    NOTE(review): ``expires=time() * 2`` is an arbitrary far-future epoch —
    confirm intent. A second identical definition of this function exists
    elsewhere in this file.
    """
    data = auth.get_token(request.query.code)
    access = data['access_token']
    response.set_cookie('access_token', access,
                        max_age=data['expires_in'],
                        domain='localhost', path='/')
    response.set_cookie('refresh_token', data['refresh_token'],
                        expires=time() * 2,
                        domain='localhost', path='/')
    profile = spotify.me(access)
    encoded_user = base64.b64encode(profile[u'id'].encode('utf-8'))
    response.set_cookie('username', encoded_user,
                        expires=time() * 2,
                        domain='localhost', path='/')
    redirect('http://localhost:8000/#/modes')
def authorize_callback():
    """Handle the OAuth redirect: fetch tokens for the query code, set the
    access/refresh/username cookies, then send the user to the app.

    NOTE(review): duplicate of an identical function defined earlier in this
    file — one copy should eventually be removed.
    """
    data = auth.get_token(request.query.code)
    token_value = data['access_token']
    cookie_base = {'domain': 'localhost', 'path': '/'}
    response.set_cookie('access_token', token_value,
                        max_age=data['expires_in'], **cookie_base)
    response.set_cookie('refresh_token', data['refresh_token'],
                        expires=time() * 2, **cookie_base)
    user_data = spotify.me(token_value)
    username = base64.b64encode(user_data[u'id'].encode('utf-8'))
    response.set_cookie('username', username,
                        expires=time() * 2, **cookie_base)
    redirect('http://localhost:8000/#/modes')
def search(query_str='', types='track'):
    """Query the Spotify search endpoint and flatten the hits.

    ``types`` is a comma-separated list of result types; only track-shaped
    items (with artists/album/external_urls) are handled. Returns a list of
    dicts with artists, track, artwork, url and id.
    """
    token = get_token()
    payload = r.get(
        url="https://api.spotify.com/v1/search",
        headers={'Authorization': 'Bearer ' + token},
        params={'q': query_str, 'type': types},
    ).json()
    hits = []
    for kind in types.split(','):
        # result sets are keyed by the pluralised type name, e.g. 'tracks'
        for item in payload[kind + 's']['items']:
            hits.append({
                'artists': [a['name'] for a in item['artists']],
                'track': item['name'],
                'artwork': item['album']['images'][0]['url'],
                'url': item['external_urls']['spotify'],
                'id': item['id'],
            })
    return hits
def n_track_features(track_ids):
    """Fetch Spotify audio-features for a batch of track ids in one call.

    Returns [] for an empty input. Retries when the response carries a
    Retry-After header; raises on any other unexpected status.
    """
    if len(track_ids) == 0:
        return []
    token = get_token()
    headers = {
        'Authorization': f"{token['token_type']} {token['access_token']}"
    }
    while True:
        response = get(_get_track_features_url(), headers=headers,
                       params={'ids': ','.join(track_ids)})
        if response.status_code == 429:
            print('we were rate limited')
        if response.status_code == 200:
            return response.json()['audio_features']
        elif 'Retry-After' in response.headers:
            # BUG FIX: previously tested the *request* headers dict, which
            # never contains Retry-After, so every retryable status fell
            # through to the raise below.
            retry_after = response.headers['Retry-After']
            print(
                f"Got status code {response.status_code} with a Retry-After header. Retrying after {retry_after}"
            )
            process_retry(retry_after)
        else:
            print(response)
            print(response.reason)
            raise Exception("Unexpected non-200 status code")
def get_durations(ids='', token=None, store=True) -> pd.DataFrame:
    """API call with built-in cache reader; returns a DataFrame indexed by
    track id with a ``duration`` column (milliseconds).

    Ids already present in the pickle cache are reused; only missing ids are
    fetched (in batches of 50) and the cache is rewritten when new ids were
    added. NOTE(review): the ``store`` parameter is currently unused.
    """
    pth = os.path.join(dir_path, 'store', 'duration_df.pkl')
    durations = pd.DataFrame({'id': ids, 'duration': 0})
    durations.set_index('id', inplace=True)
    print('Looking for cached durations')
    try:
        local_durations = pd.read_pickle(pth)
        # BUG FIX: merge() was called with on='id' AND left_index/right_index,
        # which pandas rejects — the merge always raised and every run fell
        # into the except branch, defeating the cache. Index-on-index merge
        # produces duration_x/duration_y, handled below.
        durations = durations.merge(local_durations,
                                    left_index=True, right_index=True)
        durations.drop(columns='duration_x', inplace=True)
        durations.rename(columns={'duration_y': 'duration'}, inplace=True)
    except Exception as e:
        print('\n\n\n EXCEPTION CODE HERE \n', e)
        print('Nothing stored locally. Calling API...')
    durations.fillna(0, inplace=True)
    # Anything with duration < 1 is unknown and must be fetched.
    ids = durations.index[durations.duration < 1]
    if len(ids) > 0:
        print(f'{len(ids)} new ids to check')
        if not token:
            token = get_token()
        batches = (len(ids) // 50) + 1  # API accepts at most 50 ids per call
        print(f'Will be executing {batches} API call(s)')
        URL = "https://api.spotify.com/v1/tracks"  # api-endpoint for tracks
        HEAD = {'Authorization': 'Bearer ' + token}  # provide auth. credentials
        for i in range(batches):
            print('Batch', i)
            if i == (batches - 1):
                batch_ids = ids[50 * i:]  # last (possibly short) slice
            else:
                batch_ids = ids[50 * i:50 * (i + 1)]
            print(','.join(batch_ids))
            try:
                b_ids = ','.join(batch_ids)
            except Exception:
                # TODO: make sure new songs are being batched correctly
                print('Fix song_history line 162')
                break
            PARAMS = {'ids': b_ids}
            tracks = None
            cnt = 0
            # Poll until the response actually contains 'tracks'; give up
            # after 20 bad responses.
            while tracks is None:
                data = r.get(url=URL, headers=HEAD, params=PARAMS).json()
                try:
                    tracks = data['tracks']
                except Exception:
                    # BUG FIX: this print's string literal was broken across
                    # a physical line (syntax error) in the original.
                    print('Bad response. Trying again...')
                    cnt += 1
                    if cnt > 20:
                        return {}
                    time.sleep(2)
            batch_dur = [track['duration_ms'] for track in data['tracks']]
            print(batch_dur)
            # BUG FIX: chained assignment (durations.duration[batch_ids] = ...)
            # can silently write to a copy; .loc writes in place.
            durations.loc[batch_ids, 'duration'] = batch_dur
        # The cache is only rewritten when new ids were fetched.
        print(f'Storing at {pth}')
        print(durations.head(3))
        durations.to_pickle(pth)
        print('Success!')
    return durations
def main():
    """Authorize against the API, submit every collected image for detection,
    then poll until each is processed and log timing statistics.

    Raises on any non-'ok' response from the token or detect endpoints.
    NOTE(review): the summary log divides by len(image_files) — an empty
    image set would raise ZeroDivisionError; confirm whether that can occur.
    """
    cfg = Config.fromfile(CONFIG)
    logger.setup(cfg.log_output, name='information_log')
    log(f'Initial data storage: {CONFIG}')
    auth_data = cfg.authorization_data.params
    api = cfg.api_data.params
    add_url = cfg.ADDITION_URL.params
    headers = cfg.HEADERS.params

    token_response = auth.get_token(auth_data, api['authorization'],
                                    headers, add_url['get_token'])
    if token_response['status'] == 'ok':
        log('Successfully got token')
    else:
        # BUG FIX: token_response is a dict (indexed elsewhere), so the old
        # token_response.text["status"] would raise AttributeError here.
        msg = f"Bad response: response status is {token_response['status']}"
        log(msg)
        raise Exception(msg)
    token = token_response['account']['token']['value']

    session_ids, get_responses, total_time = [], [], []
    image_files = prcimg.collect_images(cfg.EXTENSIONS)
    for image_name in image_files:
        detect_response = prcimg.send_image(image_name, api['detect'], headers,
                                            add_url['send_image'] + token)
        if detect_response['status'] == 'ok':
            log('Image has been successfully sent')
            session_ids.append(detect_response['sessionId'])
        else:
            # BUG FIX: the original reported token_response's status here
            # (copy-paste); the failing object is detect_response.
            msg = f"Bad response: response status is {detect_response['status']}"
            log(msg)
            raise Exception(msg)

    for idx in range(len(image_files)):
        while True:
            t.sleep(cfg.ASK_FOR_PROCESS)  # poll interval from config
            get_response = auth.detect_result(
                api['detect'] + add_url['send_image'] + token +
                add_url['get_response'] + session_ids[idx],
                headers)
            if get_response['session']['processed'] == 1:
                # Image finished — record it and move to the next session.
                get_responses.append(get_response)
                total_time.append(get_response['session']['detectionTime'])
                break

    log('All images have been successfully processed')
    log(f'Total number of images is {len(image_files)}')
    log(f'Total time of detection is {sum(total_time)}')
    log(f'Mean time of detection on each image is {sum(total_time)/len(image_files)}'
        )
from flask import Flask
import spotify_requests
import authorize
import scraper
import json
import logging
import os
from flask_cors import CORS

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)
CORS(app)
# NOTE(review): token is fetched once at import time — it will go stale for
# a long-running server; confirm refresh strategy.
token = authorize.get_token()


def get_samples(tracks_dict, token):
    """Scrape sample relationships for the given tracks, resolve each sampled
    track to a Spotify URL, and log the enriched result.

    NOTE(review): the enriched list is only logged, never returned — confirm
    whether callers expect a return value.
    """
    URL = scraper.search_for_track(tracks_dict)
    found_samples = scraper.find_samples(URL)
    if found_samples:
        result = json.loads(found_samples['results'])
        for res in result:
            resolved_url = spotify_requests.find_track_by_name(
                token=token,
                artist=res.get("track_artist"),
                track=res.get("track_name"))
            res['track_url'] = resolved_url
        logger.info(f'Found the following samples: \n {result}')