def prepare_and_upload(
    path,
    group_id,
    metadata,
    track_data,
    hybrid,
    lossy_master,
    spectral_urls,
    lossy_comment,
):
    """Wrapper function for all the data compiling and processing."""
    if not group_id:
        cover_url = upload_cover(path)
        data = compile_data(
            path, metadata, track_data, hybrid, cover_url, spectral_urls, lossy_comment
        )
    else:
        data = compile_data_for_group(
            path, group_id, metadata, track_data, hybrid, spectral_urls, lossy_comment
        )

    torrent_path, files = compile_files(path, metadata)

    click.secho("Uploading torrent...", fg="yellow")
    try:
        torrent_id, group_id = loop.run_until_complete(RED_API.upload(data, files))
        shutil.move(
            torrent_path,
            os.path.join(config.DOTTORRENTS_DIR, f"{os.path.basename(path)}.torrent"),
        )
        return torrent_id, group_id
    except RequestError as e:
        click.secho(str(e), fg="red", bold=True)
        exit()

def prepare_and_upload(
    gazelle_site,
    path,
    group_id,
    metadata,
    cover_url,
    track_data,
    hybrid,
    lossy_master,
    spectral_urls,
    lossy_comment,
    request_id,
):
    """Wrapper function for all the data compiling and processing."""
    if not group_id:
        if not cover_url:
            cover_url = upload_cover(path)
        data = compile_data_new_group(
            path,
            metadata,
            track_data,
            hybrid,
            cover_url,
            spectral_urls,
            lossy_comment,
            request_id,
        )
    else:
        data = compile_data_existing_group(
            path,
            group_id,
            metadata,
            track_data,
            hybrid,
            spectral_urls,
            lossy_comment,
            request_id,
        )

    torrent_path, torrent_file = generate_torrent(gazelle_site, path)
    files = compile_files(path, torrent_file, metadata)

    click.secho("Uploading torrent...", fg="yellow")
    try:
        torrent_id = loop.run_until_complete(gazelle_site.upload(data, files))
        shutil.move(
            torrent_path,
            os.path.join(
                gazelle_site.dot_torrents_dir,
                f"{os.path.basename(path)} - {gazelle_site.site_string}.torrent",
            ),
        )
        return torrent_id
    except RequestError as e:
        click.secho(str(e), fg="red", bold=True)
        exit()

def upload(
    gazelle_site,
    path,
    group_id,
    source,
    lossy,
    spectrals,
    encoding,
    existing=None,
    overwrite_meta=False,
    recompress=False,
    source_url=None,
    searchstrs=None,
    request_id=None,
    spectrals_after=False,
):
    """Upload an album folder to a Gazelle site.

    Offer the choice to upload to another tracker after completion.
    """
    path = os.path.abspath(path)
    if not source:
        source = _prompt_source()
    audio_info = gather_audio_info(path)
    hybrid = check_hybrid(audio_info)
    standardize_tags(path)
    tags = gather_tags(path)
    rls_data = construct_rls_data(
        tags,
        audio_info,
        source,
        encoding,
        existing=existing,
        overwrite=overwrite_meta,
        prompt_encoding=True,
    )

    try:
        if rls_data["encoding"] == "24bit Lossless" and click.confirm(
            click.style(
                "24bit detected. Do you want to check whether it might be upconverted?",
                fg="magenta",
            ),
            default=True,
        ):
            upload_upconvert_test(path)

        if group_id is None:
            searchstrs = generate_dupe_check_searchstrs(
                rls_data["artists"], rls_data["title"], rls_data["catno"]
            )
            group_id = check_existing_group(gazelle_site, searchstrs)
        if spectrals_after:
            # We tell the uploader not to worry about it being lossy until later.
            lossy_master = False
        else:
            lossy_master, spectral_ids = check_spectrals(
                path, audio_info, lossy, spectrals
            )
        metadata = get_metadata(path, tags, rls_data)
        download_cover_if_nonexistent(path, metadata["cover"])
        path, metadata, tags, audio_info = edit_metadata(
            path, tags, metadata, source, rls_data, recompress
        )
        if not group_id:
            group_id = recheck_dupe(gazelle_site, searchstrs, metadata)
            click.echo()
        track_data = concat_track_data(tags, audio_info)
    except click.Abort:
        return click.secho("\nAborting upload...", fg="red")
    except AbortAndDeleteFolder:
        shutil.rmtree(path)
        return click.secho("\nDeleted folder, aborting upload...", fg="red")

    lossy_comment = None
    if spectrals_after:
        spectral_urls = None
    else:
        if lossy_master:
            lossy_comment = generate_lossy_approval_comment(
                source_url, list(track_data.keys())
            )
            click.echo()
        spectrals_path = os.path.join(path, "Spectrals")
        spectral_urls = handle_spectrals_upload_and_deletion(
            spectrals_path, spectral_ids
        )

    if config.LAST_MINUTE_DUPE_CHECK:
        last_min_dupe_check(gazelle_site, searchstrs)

    if not group_id:
        # This prevents the cover being uploaded more than once for multiple sites.
        cover_url = upload_cover(path)
    else:
        cover_url = None

    # Shallow copy to avoid errors on multiple uploads in one session.
    remaining_gazelle_sites = list(salmon.trackers.tracker_list)
    tracker = gazelle_site.site_code
    while True:
        # Loop until we don't want to upload to any more sites.
        if not tracker:
            if spectrals_after:
                # Here we are checking the spectrals after uploading to the first site
                # if they were not done before.
                lossy_master, lossy_comment, spectral_urls = post_upload_spectral_check(
                    gazelle_site, path, torrent_id, None, track_data, source, source_url
                )
                spectrals_after = False
", fg="magenta", nl=False) tracker = salmon.trackers.choose_tracker(remaining_gazelle_sites) gazelle_site = salmon.trackers.get_class(tracker)() click.secho(f"Uploading to {gazelle_site.base_url}", fg="cyan") searchstrs = generate_dupe_check_searchstrs( rls_data["artists"], rls_data["title"], rls_data["catno"]) group_id = check_existing_group(gazelle_site, searchstrs, metadata) remaining_gazelle_sites.remove(tracker) if not request_id and config.CHECK_REQUESTS: request_id = check_requests(gazelle_site, searchstrs) torrent_id = prepare_and_upload( gazelle_site, path, group_id, metadata, cover_url, track_data, hybrid, lossy_master, spectral_urls, lossy_comment, request_id, ) if lossy_master: report_lossy_master( gazelle_site, torrent_id, spectral_urls, track_data, source, lossy_comment, source_url=source_url, ) url = "{}/torrents.php?torrentid={}".format(gazelle_site.base_url, torrent_id) click.secho( f"\nSuccessfully uploaded {url} ({os.path.basename(path)}).", fg="green", bold=True, ) if config.COPY_UPLOADED_URL_TO_CLIPBOARD: pyperclip.copy(url) tracker = None request_id = None if not remaining_gazelle_sites or not config.MULTI_TRACKER_UPLOAD: return click.secho(f"\nDone uploading this release.", fg="green")