def run():
    """Poll the player state forever, batching database inserts.

    Aims for REQ_PER_SECOND requests per second by adapting the sleep
    between polls to the rolling average request latency. Credentials
    are refreshed every REFRESH_CRED_SECS seconds, states are flushed
    in batches of BATCH_SIZE_INSERT, and a status line is logged every
    LOG_AFTER iterations. Never returns.
    """
    # NOTE(review): despite the name, delay_ms holds SECONDS (it is passed
    # straight to time.sleep) — name kept for continuity.
    delay_ms = 100  # We update this to aim for REQ_PER_SECOND per second
    request_times = []  # Rolling window of recent request latencies (seconds)
    states = []  # Batch insert to save time
    batch_count = 0
    last_credential_time = time.time()
    creds = get_credentials()
    # Used for logging purposes
    batch_start_time = time.time()
    log_count = 0
    while True:
        batch_count += 1
        log_count += 1
        current_time = time.time()
        if current_time - last_credential_time > REFRESH_CRED_SECS:
            # REFRESH: tokens expire, so re-fetch periodically
            creds = get_credentials()
            last_credential_time = current_time
        state = get_player_state(creds)
        states.append(state)
        req_time = time.time() - current_time
        request_times.append(req_time)
        # FIX: the original used list(reversed(request_times))[1:11], which
        # reversed the window AND dropped the newest sample. Keep the last
        # 10 samples in chronological order instead.
        if len(request_times) > 10:
            request_times = request_times[-10:]  # Use last 10 times
        # Calculate delay so that avg_request_time + delay ~= 1/REQ_PER_SECOND
        avg_time = sum(request_times) / len(request_times)
        delay_ms = (1 - REQ_PER_SECOND * avg_time) / REQ_PER_SECOND
        if delay_ms < 0:
            delay_ms = 0
        time.sleep(delay_ms)
        if batch_count > BATCH_SIZE_INSERT:
            store_player_states(states)
            batch_count = 0
            states = []
        if log_count == LOG_AFTER:
            log_count = 0
            end_b = time.time()
            dt = end_b - batch_start_time
            rate = LOG_AFTER / dt
            logging.debug(
                "[STATUS] start={}, end={}s, dt = {}, sleep_time={}, avg_request_time={}, rate={}"
                .format(batch_start_time, end_b, dt, delay_ms, avg_time, rate))
            batch_start_time = end_b
def add_track_ids(db: DbStore, export_path=EXPORT_PATH):
    """Resolve GDPR-export plays to Spotify tracks and insert them.

    Loads plays from `export_path`, drops plays shorter than 30s, resolves
    each (track, artist) pair via get_track_play (memoised in `cache`),
    stores every resolved play through `db.add_play`, and finally dumps
    all resolved plays to import.json. Unresolvable items are collected
    in `failures` and printed at the end.
    """
    data = parse_file(export_path)
    # FIX: message typo ("fro" -> "from")
    print("Loaded {} plays from GDPR. Removing short plays (<30s)...".format(len(data)))
    data = remove_short_plays(data)
    print("Now got {} plays. Removing tracks already in database...".format(len(data)))
    # FIXME
    # data = remove_recent(data, db.get_first_record())
    print("Got {} plays to insert into database".format(len(data)))
    creds = get_credentials()
    count = len(data)
    failures = []
    plays = []
    cache = {}
    for i, item in enumerate(data):
        main_artist = item["artistName"].split(",")[0]
        cache_key = "{}:{}".format(item["trackName"], item["artistName"])
        print("[{}%] ".format(int(100 * i / count)), end="")
        if cache_key in cache:
            print("Cache hit! ", end="")
            play = cache[cache_key]
        else:
            result = get_track_play(db, creds, item["trackName"], main_artist)
            # FIX: the original subscripted the lookup result before the
            # `if play:` check, so a failed lookup crashed instead of being
            # recorded in `failures`.
            if result:
                # shift into format for play
                play = {
                    "track": {
                        "duration_ms": None,
                        "popularity": None,
                        "name": result["track_name"],
                        "id": result["track_id"],
                        "album": {
                            "name": result["album_name"],
                            "id": result["album_id"]
                        },
                        "artists": [
                            {
                                "name": result["main_artist_name"],
                                "id": result["main_artist_id"],
                            }
                        ]
                    }
                }
            else:
                play = None
        if play:
            # strip().replace(" ", "T") + "Z"
            play["played_at"] = item["time"].isoformat()
            print("got {} by {} with id {}".format(item["trackName"], item["artistName"], play))
            cache[cache_key] = play
            db.add_play(play)
            # FIX: `plays` was never populated, so import.json was always "[]"
            plays.append(play)
        else:
            failures.append(item)
            print("FAIL FAIL FAIL")
    print(failures)
    with open("import.json", "w+", encoding="utf-8") as f:
        f.write(json.dumps(plays, default=json_serial))
def main():
    """Create a playlist of the tracks released in the week 2018-08-18 to 2018-08-25."""
    week_start = datetime.datetime(2018, 8, 18)
    week_end = datetime.datetime(2018, 8, 25)
    track_ids = new_songs(week_start, week_end)
    credentials = spotify.get_credentials()
    playlist = spotify.create_playlist(
        "benbanerjeerichards", "New tracks last week", False, False, None, credentials)
    spotify.add_to_playlist("benbanerjeerichards", playlist["id"], track_ids, credentials)
def main():
    """Create or update a playlist from track ids read on stdin.

    Without --id a new playlist is created (name taken from the optional
    positional argument, default "Untitled"). With --id the stdin tracks
    are merged into the existing playlist; --overwrite discards its
    current contents first, and --dedup drops duplicate tracks keeping
    the first occurrence of each (so ordering is preserved).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--id")
    parser.add_argument(
        "--overwrite",
        action="store_true",
        help="Clear any items in the existing playlist before adding new songs"
    )
    parser.add_argument("--dedup", action="store_true", help="Remove duplicates in playlist")
    # FIX: the original read sys.argv[1] directly, but argparse rejects
    # unknown positionals, so a playlist name could never actually be passed
    # (and a flag such as --dedup could have been mistaken for the name).
    # Declare it as a proper optional positional instead.
    parser.add_argument("name", nargs="?", default="Untitled",
                        help="Name for a newly created playlist")
    args = parser.parse_args()
    creds = get_credentials()
    username = get_profile(creds)["id"]
    # One track id per stdin line; strip line endings
    ids = [line.rstrip("\r\n") for line in sys.stdin]
    if not args.id:
        p_id = create_playlist(username, args.name, False, False, "", creds)["id"]
        add_to_playlist(username, p_id, ids, creds)
    else:
        # Start from the existing contents unless we are overwriting
        tracks = [] if args.overwrite else get_songs_in_playlist(
            username, args.id, creds)
        tracks = tracks + ids
        if args.dedup:
            # We want to delete items later on in the playlist and retain
            # order; dict.fromkeys keeps first occurrences in insertion order.
            tracks = list(dict.fromkeys(tracks))
        # Now write new tracks
        # Overwrite playlist
        add_to_playlist(username, args.id, tracks, creds, replace=True)
def run_sounds_good():
    """Tidy the "sounds good" playlist.

    Tracks that have since been saved to the library are removed from the
    playlist. Unsaved tracks older than MOVE_AFTER are moved to the archive
    playlist (MOVE_TO_PLAYLIST); newer unsaved tracks are left in place.
    """
    creds = spotify.get_credentials()
    sounds_good_tracks = spotify.get_playlist_basic(USER_ID, PLAYLIST_ID, creds)
    saved_songs = spotify.get_saved_tracks(creds)
    logging.info("Got {} tracks from sounds good, {} tracks from saved songs"
                 .format(len(sounds_good_tracks), len(saved_songs)))
    saved_ids = get_track_ids(saved_songs)
    saved, unsaved = split_saved_unsaved(sounds_good_tracks, saved_ids)
    logging.info("Sounds good: saved = {}, unsaved = {}".format(len(saved), len(unsaved)))
    archive_ids = []
    removal_ids = []
    # Anything already saved leaves the playlist immediately
    for entry in saved:
        logging.info("Track {}({}) is saved, removing from sounds good"
                     .format(entry["track"]["name"], entry["track"]["id"]))
        removal_ids.append(entry["track"]["id"])
    now = datetime.datetime.now(datetime.timezone.utc)
    for entry in unsaved:
        track_id = entry["track"]["id"]
        age = now - dateutil.parser.parse(entry["added_at"])
        if age < MOVE_AFTER:
            # Too recent — give the user more time to decide
            logging.info("Track {}({}) is unsaved, stays in playlist due to Delta = {} < {}"
                         .format(entry["track"]["name"], track_id, age, MOVE_AFTER))
            continue
        logging.info("Track {}({}) is unsaved, removed from playlist due to Delta = {} >= {}"
                     .format(entry["track"]["name"], track_id, age, MOVE_AFTER))
        archive_ids.append(track_id)
        removal_ids.append(track_id)
    logging.info("Adding to archive playlist: {}".format(",".join(archive_ids)))
    spotify.add_to_playlist(USER_ID, MOVE_TO_PLAYLIST, archive_ids, creds)
    logging.info("Removing from sounds good: {}".format(",".join(removal_ids)))
    spotify.remove_from_playlist(USER_ID, PLAYLIST_ID, removal_ids, creds)
def playing():
    """Summarise the user's current playback as a flat dict.

    Returns {} when nothing is playing or the session is private.
    NOTE(review): assumes playback["item"] is a populated track dict
    whenever playback itself is non-empty — confirm against the API.
    """
    creds = spotify.get_credentials()
    playback = spotify.get_current_playback(creds)
    if playback in (None, {}):
        return {}
    if playback["device"]["is_private_session"]:
        return {}
    track = playback["item"]
    main_artist = track["artists"][0]
    progress = playback["progress_ms"]
    duration = track["duration_ms"]
    return {
        "song": track["name"],
        "song_link": track["href"],
        "artist": main_artist["name"],
        "artist_link": main_artist["href"],
        "progress_ms": progress,
        "duration_ms": duration,
        "progress": format_ms_as_str(progress),
        "duration": format_ms_as_str(duration),
    }
def load(path):
    """Load a JSON export of plays from `path` into the spotify DB.

    Each item with a "trackId" is resolved to full track data via a single
    get_tracks batch call, then inserted as {"played_at", "track"} states.
    Items without a "trackId" are skipped.

    Raises KeyError if get_tracks does not return data for every id.
    """
    spotify = util.get_spotify_db()
    # FIX: the original pre-assigned data = None and initialised `ids`
    # twice; both were dead code.
    with open(path) as f:
        data = json.loads(f.read())
    creds = get_credentials()
    # Unique track ids (a set comprehension replaces the append-then-set loop;
    # also uses the idiomatic `in` test rather than `not "trackId" in track`)
    ids = list({track["trackId"] for track in data if "trackId" in track})
    full_tracks = get_tracks(ids, creds)
    track_by_id = {f_track["id"]: f_track for f_track in full_tracks}
    print("GOT {} tracks".format(len(full_tracks)))
    states = [
        {
            "played_at": track["time"],
            "track": track_by_id[track["trackId"]]
        }
        for track in data
        if "trackId" in track
    ]
    spotify.tracks.insert_many(states)
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import pymongo
import util
import datetime
import spotify
from dateutil.parser import parse

# Plot the cumulative number of saved tracks over time, bucketed per day.
tracks = spotify.get_saved_tracks(spotify.get_credentials())

# tracks[-1] is assumed to be the oldest saved track — TODO confirm API order.
date = parse(tracks[-1]["added_at"])
date = date.replace(hour=0, minute=0, second=0, microsecond=0)
totals = [0]  # totals[i] = number of tracks added on day i
for track in reversed(tracks):
    added = parse(track["added_at"])
    # FIX: the original compared `added` against *midnight* of the current
    # bucket's day, so the first track of every day spilled into a fresh
    # bucket (appending a spurious 0 and then a separate 1). Advance the
    # day bucket only when the track falls on a later day, then count the
    # track in the current bucket.
    while added - date >= datetime.timedelta(days=1):
        date += datetime.timedelta(days=1)
        totals.append(0)
        print(date)
    totals[-1] += 1
print(totals)
print(len(tracks))
plt.plot(np.cumsum(totals))
plt.show()  # FIX: the figure was never displayed