Example #1
def _select_choice(choices, rls_data):
    """
    Allow the user to select a metadata choice. Then, if the metadata came from a scraper,
    run the scrape(s) and return combined metadata.
    """
    while True:
        if choices:
            res = click.prompt(
                click.style(
                    "\nWhich metadata results would you like to use? Other "
                    "options: paste URLs, [m]anual, [a]bort",
                    fg="magenta",
                    bold=True,
                ),
                type=click.STRING,
            )
        else:
            res = click.prompt(
                click.style(
                    "\nNo metadata results were found. Options: paste URLs, "
                    "[m]anual, [a]bort",
                    fg="magenta",
                    bold=True,
                ),
                type=click.STRING,
            )

        if res.lower().startswith("m"):
            return _get_manual_metadata(rls_data)
        elif res.lower().startswith("a"):
            raise click.Abort

        # Each whitespace-separated token is either a pasted URL or the number
        # of one of the listed metadata choices.
        sources, tasks = [], []
        for r in res.split():
            if r.lower().startswith("http"):
                for name, source in METASOURCES.items():
                    if source.Scraper.regex.match(r.strip()):
                        sources.append(name)
                        tasks.append(source.Scraper().scrape_release(
                            r.strip()))
                        break
            elif r.strip().isdigit() and int(r) in choices:
                # choices maps each option number to a (source name, release id) pair.
                scraper = METASOURCES[choices[int(r)][0]].Scraper()
                sources.append(choices[int(r)][0])
                tasks.append(
                    handle_scrape_errors(
                        scraper.scrape_release_from_id(choices[int(r)][1])))
        if not tasks:
            # Nothing valid was entered; prompt again.
            continue

        # The loop argument to asyncio.gather was removed in Python 3.10; the loop
        # driving run_until_complete is used automatically.
        metadatas = loop.run_until_complete(asyncio.gather(*tasks))
        # Merge each successful scrape's (source name, metadata) pair into the base data.
        meta = combine_metadatas(
            *[(s, m) for s, m in zip(sources, metadatas) if m], base=rls_data
        )
        meta = clean_metadata(meta)
        meta["artists"], meta["tracks"] = generate_artists(meta["tracks"])
        return meta
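
Example #1 leans on interfaces that the snippet itself does not define: each METASOURCES entry is expected to expose a Scraper class with a class-level regex plus scrape_release and scrape_release_from_id coroutines, and choices is expected to map each numeric option to a (source name, release id) pair. A minimal sketch of a conforming scraper, with a purely hypothetical URL pattern and return payload:

import re


class Scraper:
    # Matched against pasted URLs to decide whether this source handles them.
    regex = re.compile(r"https?://(www\.)?example\.com/release/(?P<id>\d+)")

    async def scrape_release(self, url):
        # Pull the release id out of the URL, then defer to the id-based scrape.
        return await self.scrape_release_from_id(self.regex.match(url)["id"])

    async def scrape_release_from_id(self, rls_id):
        # Placeholder payload; a real scraper would fetch and parse the data here.
        return {"title": f"Release {rls_id}", "tracks": {}}
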
Example #2
def generate_source_links(metadata_urls):
    """Generate BBCode-style links for each URL that a metadata source recognises."""
    links = []
    for url in metadata_urls:
        for name, source in METASOURCES.items():
            # The first source whose regex matches the URL claims it.
            if source.Scraper.regex.match(url):
                if config.ICONS_IN_DESCRIPTIONS:
                    links.append(
                        f"[pad=0|3][url={url}][img=18]{SOURCE_ICONS[name]}[/img] "
                        f"{name}[/url][/pad]")
                else:
                    links.append(f"[url={url}]{name}[/url]")
                break
    if config.ICONS_IN_DESCRIPTIONS:
        return " ".join(links)
    return " | ".join(links)
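
A rough usage sketch for Example #2, assuming config.ICONS_IN_DESCRIPTIONS is falsy and that a registered source hypothetically named "ExampleSource" matches the first URL; URLs that match no source are silently skipped:

urls = [
    "https://example.com/release/123",       # assumed to match ExampleSource's regex
    "https://unknown.example.org/whatever",   # matches no source, so it is dropped
]
# With icons disabled, links are joined with " | ", producing something like:
# [url=https://example.com/release/123]ExampleSource[/url]
print(generate_source_links(urls))
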
Example #3
def get_source_from_link(url):
    """Return the name of the metadata source whose regex matches `url`, or None."""
    for name, source in METASOURCES.items():
        if source.Scraper.regex.match(url):
            return name
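
Example #3 falls through and implicitly returns None when no source's regex matches, so callers should treat None as an unrecognised URL. A small illustrative call (the URL and source name are assumptions):

source = get_source_from_link("https://example.com/release/123")
if source is None:
    print("URL does not belong to any configured metadata source")
else:
    print(f"Matched metadata source: {source}")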