def export_diff(database_path, diff_path):
    db = jsondb.Database(database_path)
    # tracks that already have a valid tidal_id assigned
    saved_tracks = [track for _, track in db.data().items()
                    if track.get("tidal_id") and track.get("tidal_id") != -1]
    log.warning(f'saved tracks from file {database_path}: {len(saved_tracks)}')

    tidal_tracks = []
    for track in tqdm(tidal.user.favorites.tracks(), unit=" tracks"):
        tidal_tracks.append(format_tidal_track(track))
    log.warning(f'loaded tracks from Tidal library: {len(tidal_tracks)}')

    saved_tracks_tids = set(t["tidal_id"] for t in saved_tracks)
    tidal_tracks_tids = set(t["tidal_id"] for t in tidal_tracks)
    diff_tids = tidal_tracks_tids - saved_tracks_tids
    log.warning(f'there are {len(diff_tids)} more tracks in the Tidal library than inside {database_path}')

    diff_tracks = [t for t in tidal_tracks if t["tidal_id"] in diff_tids]
    log.warning('found tracks:')
    for dt in diff_tracks:
        print(echo_track(dt))

    if os.path.isfile(diff_path):
        log.warning(f'file {diff_path} exists, going to remove it')
        os.remove(diff_path)
    diff_db = jsondb.Database(diff_path)
    diff_db.data(dictionary={i: t for i, t in enumerate(diff_tracks)})
    log.warning(f'diff db "{diff_path}" saved with {len(diff_tracks)} tracks inside!')

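# `format_tidal_track` and `echo_track` are used throughout but not defined in
# this section. A minimal sketch of what they are assumed to look like, based
# on how their results are consumed above (a dict with tidal_id/name/artists/
# album keys, and a one-line human-readable string). The attribute names on
# the tidalapi track object are assumptions:
def format_tidal_track(track):
    # flatten a tidalapi track object into a plain dict for jsondb storage
    return {
        "tidal_id": track.id,
        "name": track.name,
        "artists": [a.name for a in track.artists],
        "album": track.album.name,
    }

def echo_track(track):
    # one-line summary of a stored track dict, for logs and prompts
    return f'{", ".join(track.get("artists", []))} - {track.get("name", "")} ({track.get("album", "")})'
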
def import_library(database_path, confirm=False, retry_count=5, sleep_timeout=5):
    db = jsondb.Database(database_path)
    tracks = db.data()

    tracks_to_skip = {}
    tracks_to_import = {}
    for idx, track in tracks.items():
        if not track.get("tidal_id") or track.get("tidal_id") == -1:
            tracks_to_skip[idx] = track
        else:
            tracks_to_import[idx] = track
    log.warning(f'you are about to import\t{len(tracks_to_import)} tracks')
    log.warning(f'you are about to skip\t{len(tracks_to_skip)} tracks')

    if not confirm:
        log.warning("supply flag --confirm true to execute import")
        exit(1)

    for idx, track in tqdm(tracks_to_import.items(), unit=" tracks"):
        local_retry_count = retry_count
        while local_retry_count >= 0:
            try:
                local_retry_count -= 1
                tidal.user.favorites.add_track(track["tidal_id"])
                track["exported_to_tidal"] = True
                break
            except Exception:
                log.exception(f'error during favorites.add_track -> {echo_track(track)}')
                time.sleep(sleep_timeout)
                track["exported_to_tidal"] = False
        db.data(key=idx, value=track)

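# Example invocation, assuming a database file produced by the Spotify export
# step at the end of this section (the file name is illustrative):
#
#     import_library('export-file.txt', confirm=True)
#
# Tracks that still fail after all retries are saved back with
# exported_to_tidal = False, so they can be inspected afterwards.
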
def clean_by_file(file_db, confirm=False):
    db = jsondb.Database(file_db)
    tracks_to_delete = [track for _, track in db.data().items()
                        if track.get("tidal_id") and track.get("tidal_id") != -1]
    log.warning(f'are you sure you want to remove {len(tracks_to_delete)} tracks from your favorites on Tidal?')
    if not confirm:
        log.warning('pass --confirm flag')
        exit(1)
    log.warning('RUNNING TIDAL FAV CLEANUP')
    for track in tqdm(tracks_to_delete, unit=" tracks"):
        tidal.user.favorites.remove_track(track["tidal_id"])
    log.warning("COMPLETED")
    exit(0)

def first_processing(database_path, find_only_by_track_name=False, process_not_founds=False, ignore_artists_match=False):
    sample_size = 2000
    db = jsondb.Database(database_path)
    # pick only tracks that have not been processed yet
    if not process_not_founds:
        exported_tracks = {i: x for i, x in db.data().items() if "tidal_id" not in x}
    else:
        # also re-process tracks that were processed but not found
        exported_tracks = {i: x for i, x in db.data().items()
                           if "tidal_id" not in x or not x.get("tidal_id")}
    log.info(f'loaded: {len(exported_tracks)} unprocessed tracks')

    for saved_track_idx, saved_track in tqdm(exported_tracks.items(), unit=" track"):
        if saved_track.get("tidal_id") and not process_not_founds:
            continue
        log.info(f"looking for track:\t{echo_track(saved_track)}")
        if find_only_by_track_name:
            search_result = tidal.search("track", f"{saved_track['name']}")
        else:
            search_result = tidal.search("track", f"{saved_track['artists'][-1]} - {saved_track['name']}")
        found_tracks = [format_tidal_track(x) for x in search_result.tracks]

        tidal_id = None
        for found_track in found_tracks:
            log.debug(f"found result: {found_track}")
            if track_equal(found_track, saved_track, ignore_artists=ignore_artists_match):
                log.info(f"found equal track:\t{echo_track(found_track)}")
                tidal_id = found_track["tidal_id"]
                break
        # manual selection of ambiguous matches is handled separately in manual_processing

        # store the found tidal_id (None when nothing matched) so the track
        # can be skipped in later processings
        saved_track["tidal_id"] = tidal_id
        db.data(key=saved_track_idx, value=saved_track)
        log.info(f'track {saved_track["name"]} updated with tidal_id = {saved_track["tidal_id"]}')

        sample_size -= 1
        if sample_size < 0:
            break

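# `track_equal` is referenced above but not defined in this section. A minimal
# sketch of the comparison it is assumed to perform (case-insensitive name
# match, optionally ignoring artists), matching the ignore_artists keyword
# used above; the overlap heuristic is an assumption:
def track_equal(found, saved, ignore_artists=False):
    if found["name"].lower() != saved["name"].lower():
        return False
    if ignore_artists:
        return True
    found_artists = {a.lower() for a in found.get("artists", [])}
    saved_artists = {a.lower() for a in saved.get("artists", [])}
    # consider the tracks equal when the artist sets overlap
    return bool(found_artists & saved_artists)
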
def manual_processing(database_path):
    db = jsondb.Database(database_path)
    # pick only tracks that ended up without a tidal_id
    exported_tracks = {i: x for i, x in db.data().items() if not x.get("tidal_id")}
    log.info(f'loaded: {len(exported_tracks)} "not-found" tracks')

    for saved_track_idx, saved_track in tqdm(exported_tracks.items(), unit=" track"):
        log.info(f"looking for track:\t{echo_track(saved_track)}")
        search_result = tidal.search("track", f"{saved_track['name']}")
        found_tracks = {i: format_tidal_track(x) for i, x in enumerate(search_result.tracks)}

        tidal_id = None
        if len(found_tracks) == 0:
            log.warning(f"NOTHING FOUND FOR {echo_track(saved_track)}")
        elif len(found_tracks) == 1:
            # single result, treat it as an exact match
            tidal_id = found_tracks[0]["tidal_id"]
        else:
            # manual selection
            for i, t in found_tracks.items():
                print(f'{i}:\t{t["name"]:40}{", ".join(t.get("artists", [])):40}{t["album"]:40}')
            while tidal_id is None:
                try:
                    selected_idx = int(input('ENTER number to pick from the list above (-1 to ignore): '))
                except ValueError:
                    continue
                if selected_idx == -1:
                    tidal_id = -1
                else:
                    manually_selected_track = found_tracks.get(selected_idx)
                    if manually_selected_track:
                        tidal_id = manually_selected_track["tidal_id"]
                        print(f'{echo_track(manually_selected_track)} with tidal_id = {tidal_id}')

        # update the record with the picked id
        if tidal_id is not None:
            saved_track["tidal_id"] = tidal_id
            db.data(key=saved_track_idx, value=saved_track)
            log.info(f'track {saved_track["name"]} updated with tidal_id = {saved_track["tidal_id"]}')

def setup_database(self):
    db_config = next(
        (x for x in self.config.databases if x.server == self.hostname),
        None)
    assert db_config is not None, 'Missing database config!'
    db_root = resolve_path(db_config.path)
    self.db = dict()

    def get_taken_ts(o):
        metadata = o.get('metadata')
        if metadata is None:
            return None
        return metadata.get('taken_ts')

    def get_taken_ts_tuple(o):
        t = get_taken_ts(o)
        if t is None:
            return None
        return tuple(int(x) for x in t[:10].split('-')) + (t[11:],), None

    def get_taken_date_tuple(o):
        t = get_taken_ts(o)
        if t is None:
            return None
        return tuple(int(x) for x in t[:10].split('-')), None

    def get_taken_date(o, get_value):
        t = get_taken_ts(o)
        if t is None:
            return None
        return t[:10], get_value(o)

    def get_source(o):
        metadata = o.get('metadata')
        if metadata is None:
            return None
        source = metadata.get('source')
        original_filename = metadata.get('original_filename')
        if not all([source, original_filename]):
            return None
        return (source, original_filename), None

    def sum_per(field, values):
        result = {}
        for value in values:
            v = value.get(field)
            if v in result:
                result[v] += 1
            else:
                result[v] = 1
        result['total'] = len(values)
        return result

    def each_tag(value):
        for subvalue in value.get('tags', []):
            yield (subvalue, None)

    def each_tag_with_taken_ts(value):
        for subvalue in value.get('tags', []):
            yield ((subvalue, get_taken_ts(value)), None)

    def each_file_reference(value):
        for file in value.get('files', []):
            yield file.get('reference'), None

    entry = jsondb.Database(os.path.join(db_root, 'entry'))
    entry.define('by_taken_ts', get_taken_ts_tuple)
    entry.define(
        'state_by_date',
        lambda o: get_taken_date(o, lambda oo: {'state': oo['state']}),
        lambda keys, values, rereduce: sum_per('state', values))
    entry.define('by_date', get_taken_date_tuple)
    entry.define('by_state', lambda o: (o['state'], None))
    entry.define('by_state_and_taken_ts',
                 lambda o: ((o['state'], get_taken_ts(o)), None))
    entry.define('by_source', get_source)
    entry.define(
        'by_tag',
        each_tag,
        lambda keys, values, rereduce: len(values),
    )
    entry.define('by_tag_and_taken_ts', each_tag_with_taken_ts)
    entry.define('by_file_reference', each_file_reference)
    self.db['entry'] = entry

    file = jsondb.Database(os.path.join(db_root, 'file'))
    file.define('by_reference', lambda o: (o['reference'], None))
    self.db['file'] = file

    date = jsondb.Database(os.path.join(db_root, 'date'))
    date.define('by_date', lambda o: (o['_id'], None))
    self.db['date'] = date

    job = jsondb.Database(os.path.join(db_root, 'job'))
    job.define(
        'by_state',
        lambda o: ((o['state'], o['release'], o['priority']), None),
    )
    job.define(
        'by_updated',
        lambda o: (10000000000 - int(o['updated']), None),
    )
    job.define(
        'stats',
        lambda o: (None, {'state': o['state']}),
        lambda keys, values, rereduce: sum_per('state', values),
    )
    self.db['job'] = job

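# The map functions above follow a CouchDB-like contract: given a document,
# they return (or yield) a (key, value) pair, and `define` registers them as
# a named view, optionally with a (keys, values, rereduce) reduce function.
# For example, `get_taken_ts_tuple` turns a document like the one below
# (sample data made up for illustration) into a sortable key:
#
#     doc = {'metadata': {'taken_ts': '2019-07-04 12:34:56'}}
#     # key   -> (2019, 7, 4, '12:34:56')
#     # value -> None
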
def print_non_founds(database_path):
    db = jsondb.Database(database_path)
    exported_tracks = {i: x for i, x in db.data().items()
                       if not x.get("tidal_id") or x.get("tidal_id") == -1}
    exported_tracks_sorted = OrderedDict(
        sorted(exported_tracks.items(),
               key=lambda t: f'{", ".join(t[1].get("artists", []))} - {t[1].get("name", "")}'))
    log.warning(f'loaded: {len(exported_tracks_sorted)} "not-found" tracks')
    for i, t in exported_tracks_sorted.items():
        print(f'{t["name"]:70}{", ".join(t.get("artists", [])):60}{t["album"]:40}')

        offset=current_track)
    if len(results['items']) <= 0:
        break
    current_track += len(results['items'])
    progressbar.update(len(results['items']))
    for idx, item in enumerate(results['items']):
        track = item['track']
        final_track = {
            "added_at": item["added_at"],
            "artists": [x['name'] for x in track['artists']],
            "name": track['name'],
            "album": track["album"]["name"],
            "spotify_id": track["id"],
        }
        final_tracks.append(final_track)

print(f"total found: {len(final_tracks)}")
final_tracks.reverse()

if args.save is not None:
    if os.path.isfile(args.save):
        os.remove(args.save)
    db = jsondb.Database(args.save)
    dict_db = {idx: item for idx, item in enumerate(final_tracks)}
    db.data(dictionary=dict_db)
    print(f'file {args.save} saved with {len(final_tracks)} tracks')
else:
    print(
        'try starting the script with the --save flag, eg: python spotify-export.py --save export-file.txt'
    )
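
# The saved file is a plain jsondb database keyed by insertion order, so it
# can be read back the same way the import side does (file name illustrative):
#
#     db = jsondb.Database('export-file.txt')
#     for idx, track in db.data().items():
#         print(track["name"], track["spotify_id"])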