Example #1
 def __init__(self, urls, size, format, *, thumbnail_url, source, source_quality, rank=None,
              check_metadata=CoverImageMetadata.NONE):
   """
   Args:
     urls: Cover image file URL, or a tuple of URLs of images to be joined
     size: Cover size as a (width, height) tuple
     format: Cover image format as a CoverImageFormat enum, or None if unknown
     thumbnail_url: Cover thumbnail image file URL, or None if not available
     source: Cover source object that produced this result
     source_quality: Quality of the cover's source as a CoverSourceQuality enum value
     rank: Integer ranking of the cover among the other results from the same source, or None if not available
     check_metadata: If != CoverImageMetadata.NONE, a hint that the format and/or size parameters are unreliable and must be double-checked
   """
   # Normalize a single URL string into a 1-tuple so that self.urls is always a tuple
   if isinstance(urls, str):
     self.urls = (urls,)
   else:
     self.urls = urls
   self.size = size
   assert (format is None) or (format in CoverImageFormat)
   self.format = format
   self.thumbnail_url = thumbnail_url
   self.thumbnail_sig = None
   self.source = source
   assert source_quality in CoverSourceQuality
   self.source_quality = source_quality
   self.rank = rank
   assert (format is not None) or ((check_metadata & CoverImageMetadata.FORMAT) != 0)
   assert (size is not None) or ((check_metadata & CoverImageMetadata.SIZE) != 0)
   self.check_metadata = check_metadata
   self.reliable_metadata = True
   self.is_similar_to_reference = False
   self.is_only_reference = False
   if not hasattr(__class__, "image_cache"):
     cache_filepath = os.path.join(appdirs.user_cache_dir(appname="sacad",
                                                          appauthor=False),
                                   "sacad-cache.sqlite")
     os.makedirs(os.path.dirname(cache_filepath), exist_ok=True)
     __class__.image_cache = web_cache.WebCache(cache_filepath,
                                                "cover_image_data",
                                                caching_strategy=web_cache.CachingStrategy.LRU,
                                                expiration=60 * 60 * 24 * 365)  # 1 year
     __class__.metadata_cache = web_cache.WebCache(cache_filepath,
                                                   "cover_metadata",
                                                   caching_strategy=web_cache.CachingStrategy.LRU,
                                                   expiration=60 * 60 * 24 * 365)  # 1 year
     for cache, cache_name in zip((__class__.image_cache, __class__.metadata_cache),
                                  ("cover_image_data", "cover_metadata")):
       purged_count = cache.purge()
       logging.getLogger("Cache").debug("%u obsolete entries have been removed from cache '%s'" % (purged_count,
                                                                                                   cache_name))
       row_count = len(cache)
       logging.getLogger("Cache").debug("Cache '%s' contains %u entries" % (cache_name, row_count))
Example #2
    def __init__(self,
                 target_size,
                 size_tolerance_prct,
                 *,
                 min_delay_between_accesses=0,
                 jitter_range_ms=None,
                 rate_limited_domains=None,
                 allow_cookies=False):
        self.target_size = target_size
        self.size_tolerance_prct = size_tolerance_prct
        self.logger = logging.getLogger(self.__class__.__name__)

        ua_cache_dir = os.path.join(
            appdirs.user_cache_dir(appname="sacad", appauthor=False),
            "fake_useragent")
        os.makedirs(ua_cache_dir, exist_ok=True)
        self.ua = fake_useragent.UserAgent(
            path=os.path.join(ua_cache_dir, "ua.json"))

        if not hasattr(__class__, "api_cache"):
            db_filepath = os.path.join(
                appdirs.user_cache_dir(appname="sacad", appauthor=False),
                "sacad-cache.sqlite")
            os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
            day_s = 60 * 60 * 24
            __class__.api_cache = web_cache.WebCache(
                db_filepath,
                "cover_source_api_data",
                caching_strategy=web_cache.CachingStrategy.FIFO,
                expiration=random.randint(day_s * 7, day_s * 14),  # 1-2 weeks
                compression=web_cache.Compression.DEFLATE)
            __class__.probe_cache = web_cache.WebCache(
                db_filepath,
                "cover_source_probe_data",
                caching_strategy=web_cache.CachingStrategy.FIFO,
                expiration=day_s * 30 * 6)  # 6 months
            logging.getLogger("Cache").debug(
                f"Total size of file {db_filepath}: {__class__.api_cache.getDatabaseFileSize()}"
            )
            for cache, cache_name in zip(
                (__class__.api_cache, __class__.probe_cache),
                ("cover_source_api_data", "cover_source_probe_data")):
                purged_count = cache.purge()
                logging.getLogger("Cache").debug(
                    f"{purged_count} obsolete entries have been removed from cache {cache_name}"
                )
                row_count = len(cache)
                logging.getLogger("Cache").debug(
                    f"Cache {cache_name} contains {row_count} entries")
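Both this example and the next guard cache creation with hasattr(__class__, "api_cache"), so the SQLite-backed caches are opened once per process, on the first instantiation, and then shared by every instance. A stripped-down sketch of that lazy class-level initialization, with illustrative names:

class Source:
    def __init__(self):
        # __class__ refers to the enclosing Source class; the attribute is
        # created once, on the first instantiation, then shared by all instances.
        if not hasattr(__class__, "shared_cache"):
            __class__.shared_cache = {}  # stands in for web_cache.WebCache(...)

a, b = Source(), Source()
assert a.shared_cache is b.shared_cache  # the very same object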
Example #3
File: base.py  Project: gvc14/sacad
 def __init__(self,
              target_size,
              size_tolerance_prct,
              min_delay_between_accesses=2 / 3,
              allow_cookies=False):
     self.target_size = target_size
     self.size_tolerance_prct = size_tolerance_prct
     self.logger = logging.getLogger(self.__class__.__name__)
     self.http = http_helpers.Http(
         allow_session_cookies=allow_cookies,
         min_delay_between_accesses=min_delay_between_accesses,
         logger=self.logger)
     if not hasattr(__class__, "api_cache"):
         db_filepath = os.path.join(
             appdirs.user_cache_dir(appname="sacad", appauthor=False),
             "sacad-cache.sqlite")
         os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
         day_s = 60 * 60 * 24
         __class__.api_cache = web_cache.WebCache(
             db_filepath,
             "cover_source_api_data",
             caching_strategy=web_cache.CachingStrategy.FIFO,
             expiration=random.randint(day_s * 7, day_s * 14),  # 1-2 weeks
             compression=web_cache.Compression.DEFLATE)
         __class__.probe_cache = web_cache.WebCache(
             db_filepath,
             "cover_source_probe_data",
             caching_strategy=web_cache.CachingStrategy.FIFO,
             expiration=day_s * 30 * 6)  # 6 months
         logging.getLogger("Cache").debug(
             "Total size of file '%s': %s" %
             (db_filepath, __class__.api_cache.getDatabaseFileSize()))
         for cache, cache_name in zip(
             (__class__.api_cache, __class__.probe_cache),
             ("cover_source_api_data", "cover_source_probe_data")):
             purged_count = cache.purge()
             logging.getLogger("Cache").debug(
                 "%u obsolete entries have been removed from cache '%s'" %
                 (purged_count, cache_name))
             row_count = len(cache)
             logging.getLogger("Cache").debug(
                 "Cache '%s' contains %u entries" % (cache_name, row_count))
Example #4
def cl_main():
    # parse args
    arg_parser = argparse.ArgumentParser(
        description="AMG Player v%s. %s" % (__version__, __doc__),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument("-c",
                            "--count",
                            type=int,
                            default=20,
                            dest="count",
                            help="Amount of recent reviews to fetch")
    arg_parser.add_argument("-m",
                            "--mode",
                            choices=tuple(m.name.lower() for m in PlayerMode),
                            default=PlayerMode.MANUAL.name.lower(),
                            dest="mode",
                            help="""Playing mode.
                                  "manual" let you select tracks to play one by one.
                                  "radio" let you select the first one, and then plays all tracks by chronological
                                  order.
                                  "discover" automatically plays all tracks by chronological order from the first non
                                  played one.""")
    arg_parser.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        default=False,
        dest="interactive",
        help="Before playing each track, ask for user confirmation, and allow opening the review URL.")
    arg_parser.add_argument("-v",
                            "--verbosity",
                            choices=("warning", "normal", "debug"),
                            default="normal",
                            dest="verbosity",
                            help="Level of logging output")
    args = arg_parser.parse_args()
    args.mode = PlayerMode[args.mode.upper()]

    # setup logger
    logger = logging.getLogger()
    logging_level = {
        "warning": logging.WARNING,
        "normal": logging.INFO,
        "debug": logging.DEBUG
    }
    logger.setLevel(logging_level[args.verbosity])
    logging.getLogger("requests").setLevel(logging.ERROR)
    logging.getLogger("urllib3").setLevel(logging.ERROR)
    logging.getLogger("PIL").setLevel(logging.ERROR)
    logging_formatter = colored_logging.ColoredFormatter(fmt="%(message)s")
    logging_handler = logging.StreamHandler()
    logging_handler.setFormatter(logging_formatter)
    logger.addHandler(logging_handler)

    # locale (for date display)
    locale.setlocale(locale.LC_ALL, "")

    # warn if missing tools
    if not HAS_FFMPEG:
        logging.getLogger().warning(
            "FFmpeg is not installed, some features won't be available")

    # get reviews
    known_reviews = KnownReviews()
    reviews = list(itertools.islice(get_reviews(), args.count))

    # http cache
    cache_dir = appdirs.user_cache_dir("amg-player")
    os.makedirs(cache_dir, exist_ok=True)
    cache_filepath = os.path.join(cache_dir, "http_cache.db")
    http_cache = web_cache.WebCache(
        cache_filepath,
        "reviews",
        caching_strategy=web_cache.CachingStrategy.FIFO,
        expiration=60 * 60 * 24 * 30 * 3,  # 3 months
        compression=web_cache.Compression.DEFLATE)
    purged_count = http_cache.purge()
    row_count = len(http_cache)
    logging.getLogger().debug("HTTP Cache contains %u entries (%u removed)" %
                              (row_count, purged_count))

    # initial menu
    if args.mode in (PlayerMode.MANUAL, PlayerMode.RADIO):
        menu_ret = menu.AmgMenu.setupAndShow(args.mode, reviews, known_reviews,
                                             http_cache)

    to_play = None
    track_loop = True
    while track_loop:
        if args.mode in (PlayerMode.MANUAL, PlayerMode.RADIO):
            if menu_ret is None:
                break
            else:
                selected_idx, action = menu_ret

        if args.mode is PlayerMode.MANUAL:
            # fully interactive mode
            review = reviews[selected_idx]
        elif args.mode is PlayerMode.RADIO:
            # select first track interactively, then auto play
            if to_play is None:
                review = reviews[selected_idx]
                to_play = reviews[0:reviews.index(review) + 1]
                to_play.reverse()
                to_play = iter(to_play)
        elif args.mode is PlayerMode.DISCOVER:
            # auto play all non played tracks
            if to_play is None:
                to_play = filter(lambda x: not known_reviews.isKnownUrl(x.url),
                                 reversed(reviews))
        if args.mode in (PlayerMode.RADIO, PlayerMode.DISCOVER):
            try:
                review = next(to_play)
            except StopIteration:
                break

        # fetch review & play
        review_page = fetch_page(review.url, http_cache=http_cache)
        track_urls, audio_only = get_embedded_track(review_page, http_cache)
        if track_urls is None:
            logging.getLogger().warning("Unable to extract embedded track")
        else:
            print("-" * (shutil.get_terminal_size()[0] - 1))
            print("Artist: %s\n"
                  "Album: %s\n"
                  "Review URL: %s\n"
                  "Published: %s\n"
                  "Tags: %s" % (review.artist, review.album, review.url,
                                review.date_published.strftime("%x %H:%M"),
                                ", ".join(review.tags)))
            if args.interactive:
                input_loop = True
                while input_loop:
                    c = None
                    while c not in frozenset("pdrsq"):
                        c = input(
                            "[P]lay / [D]ownload / Go to [R]eview / [S]kip to next track / Exit [Q] ? "
                        ).lower()
                    if c == "p":
                        known_reviews.setLastPlayed(review.url)
                        play(review, track_urls, merge_with_picture=audio_only)
                        input_loop = False
                    elif c == "d":
                        download_audio(review, track_urls)
                        input_loop = False
                    elif c == "r":
                        webbrowser.open_new_tab(review.url)
                    elif c == "s":
                        input_loop = False
                    elif c == "q":
                        input_loop = False
                        track_loop = False
            else:
                known_reviews.setLastPlayed(review.url)
                if ((args.mode in (PlayerMode.MANUAL, PlayerMode.RADIO)) and
                    (action is menu.AmgMenu.UserAction.DOWNLOAD_AUDIO)):
                    download_audio(review, track_urls)
                else:
                    play(review, track_urls, merge_with_picture=audio_only)

        if track_loop and (args.mode is PlayerMode.MANUAL):
            # update menu and display it
            menu_ret = menu.AmgMenu.setupAndShow(args.mode,
                                                 reviews,
                                                 known_reviews,
                                                 http_cache,
                                                 selected_idx=selected_idx)
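The --mode handling above round-trips through the PlayerMode enum: the argparse choices are generated from the member names, and the parsed string is mapped back to a member via PlayerMode[args.mode.upper()]. A self-contained sketch of that round trip; the member values are assumptions, since the enum definition is not part of the excerpt:

import enum

class PlayerMode(enum.Enum):
    # Members inferred from their use above; the values are placeholders.
    MANUAL = enum.auto()
    RADIO = enum.auto()
    DISCOVER = enum.auto()

choices = tuple(m.name.lower() for m in PlayerMode)  # ('manual', 'radio', 'discover')
mode = PlayerMode["radio".upper()]                   # Enum lookup by member name
assert mode is PlayerMode.RADIO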
Example #5
#!/usr/local/bin/python3.8
import flask
import requests

import web_cache

app = flask.Flask(__name__)
app.config['DEBUG'] = True

BASE_URL = 'http://api.tvmaze.com'
CACHE = web_cache.WebCache()


def response_or_cache(url):
    '''Return the cached response for a URL, or fetch and cache it if absent.

    Args:
        url (str): URL pointing to a TVmaze API endpoint.

    Returns:
        flask.Response: JSON response that is new, cached, or an error.
    '''
    if url in CACHE:
        return CACHE[url]
    else:
        response = requests.get(url)
        if response.ok:
            CACHE[url] = flask.jsonify(response.json())
            return CACHE[url]
        else:
            # Propagate an error payload instead of implicitly returning None
            return flask.jsonify(success=False)
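No route is shown in this excerpt; a hypothetical one wiring response_or_cache to the TVmaze shows endpoint could look like the following (the /shows/<int:show_id> path and handler name are assumptions):

@app.route('/shows/<int:show_id>')
def get_show(show_id):
    # Hypothetical route: proxies a single TVmaze show, served from CACHE
    # whenever the URL has been requested before.
    return response_or_cache(f'{BASE_URL}/shows/{show_id}')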