Example no. 1
    def setUp(self) -> None:
        """Build the scraper under test against an in-memory metadata DB,
        a canned RSS feed response, and a mocked getTorrents API result."""
        super().setUp()
        # One shared in-memory connection; the "pool" simply hands it back
        # wrapped in a no-op context manager.
        self.metadata_conn = sqlite3.Connection(":memory:", isolation_level=None)
        self.metadata_pool = lambda: nullcontext(self.metadata_conn)

        self.scraper = scrape.MetadataTipScraper(
            api=self.api,
            metadata_pool=self.metadata_pool,
            user_access=site.UserAccess(auth=TEST_AUTH),
        )

        # Serve the bundled feed fixture for feeds.php requests.
        feed_content = importlib_resources.read_binary(
            "btn_cache.tests", "test_feed.xml"
        )
        # Match on the path only; the full query string isn't needed.
        self.requests_mocker.get(
            "https://broadcasthe.net/feeds.php",
            content=feed_content,
            headers={"Content-Type": "application/xml"},
        )

        # Five entries whose TorrentID and Time are both "1".."5".
        self.torrents: Dict[str, api_types.TorrentEntry] = {}
        for torrent_id in map(str, range(1, 6)):
            entry = TEST_ENTRY.copy()
            entry["TorrentID"] = torrent_id
            entry["Time"] = torrent_id
            self.torrents[torrent_id] = entry
        self.api_mock = self.mock_api_request(
            "getTorrents",
            [self.key, {}, 2 ** 31, 0],
            api_types.GetTorrentsResult(results="5", torrents=self.torrents),
        )
 def test_get_torrent(self, mock: requests_mock.Mocker) -> None:
     """get_torrent(456) must hit the exact download URL and relay the body."""
     download_url = (
         "https://broadcasthe.net/torrents.php?action=download&id=456"
         "&torrent_pass=dummy_passkey"
     )
     mock.get(download_url, complete_qs=True, text="torrent")
     response = site.UserAccess(auth=AUTH).get_torrent(456)
     self.assertEqual(response.status_code, 200)
     self.assertEqual(response.text, "torrent")
Example no. 3
    def setUp(self) -> None:
        """Create an in-memory metadata database and the scraper under test."""
        super().setUp()
        # A single shared connection stands in for a real connection pool.
        self.metadata_conn = sqlite3.Connection(":memory:", isolation_level=None)
        self.metadata_pool = lambda: nullcontext(self.metadata_conn)

        user_access = site.UserAccess(auth=TEST_AUTH)
        self.scraper = scrape.MetadataTipScraper(
            api=self.api,
            metadata_pool=self.metadata_pool,
            user_access=user_access,
        )
 def test_get_feed(self, mock: requests_mock.Mocker) -> None:
     """get_feed() must request the fully-authenticated feed URL and relay it."""
     feed_url = (
         "https://broadcasthe.net/feeds.php?feed=torrents_all&user=123"
         "&auth=dummy_auth&authkey=dummy_authkey"
         "&passkey=dummy_passkey"
     )
     mock.get(feed_url, complete_qs=True, text="response")
     response = site.UserAccess(auth=AUTH).get_feed("torrents_all")
     self.assertEqual(response.status_code, 200)
     self.assertEqual(response.text, "response")
def main() -> None:
    """Entry point for the scraper daemon.

    Parses command-line arguments, wires up storage, rate limiters, and the
    enabled scraper daemons, then runs each daemon on its own executor until
    one of them exits or a SIGINT/SIGTERM arrives.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument("--verbose", "-v", action="count")
    parser.add_argument("--path", type=pathlib.Path, required=True)
    parser.add_argument(
        "--disable",
        action="append",
        choices=("metadata", "metadata_tip", "snatchlist"),
    )

    parser.add_argument("--api_max_calls", type=int, default=150)
    parser.add_argument("--api_period", type=int, default=3600)
    parser.add_argument("--web_request_rate", type=float, default=0.2)
    parser.add_argument("--web_request_burst", type=float, default=10)

    parser.add_argument("--snatchlist_period", type=float, default=3600)

    parser.add_argument("--parent", type=int)

    args = parser.parse_args()

    level = logging.DEBUG if args.verbose else logging.INFO

    logging.basicConfig(
        stream=sys.stdout,
        level=level,
        format="%(asctime)s %(levelname)s %(threadName)s "
        "%(filename)s:%(lineno)d %(message)s",
    )

    storage = storage_lib.Storage(args.path)

    session = requests.Session()

    rate_limiter = ratelimit.RateLimiter(rate=args.web_request_rate,
                                         burst=args.web_request_burst)
    api_rate_limiter = ratelimit.APIRateLimiter(max_calls=args.api_max_calls,
                                                period=args.api_period)

    auth = storage.get_user_auth()

    user_access = site.UserAccess(auth=auth,
                                  session=session,
                                  rate_limiter=rate_limiter)
    if auth.api_key is None:
        raise ValueError("api_key is required")
    api = api_lib.RateLimitedAPI(auth.api_key,
                                 rate_limiter=api_rate_limiter,
                                 session=session)

    def metadata_factory() -> sqlite3.Connection:
        """Open the metadata DB with pragmas tuned for concurrent scraping."""
        conn = sqlite3.Connection(storage.metadata_db_path,
                                  isolation_level=None)
        cur = conn.cursor()
        cur.execute("pragma busy_timeout = 5000")
        # Metadata updates use temp tables with small data sizes
        cur.execute("pragma temp_store = MEMORY")
        cur.execute("pragma trusted_schema = OFF")
        cur.execute("pragma journal_mode = WAL")
        cur.execute(f"pragma mmap_size = {2**32}")
        cur.execute("pragma synchronous = NORMAL")
        return conn

    metadata_pool = dbver.null_pool(metadata_factory)

    def user_factory() -> sqlite3.Connection:
        """Open the user (snatchlist) DB with a smaller mmap window."""
        conn = sqlite3.Connection(storage.user_db_path, isolation_level=None)
        cur = conn.cursor()
        cur.execute("pragma busy_timeout = 5000")
        cur.execute("pragma trusted_schema = OFF")
        cur.execute("pragma journal_mode = WAL")
        cur.execute(f"pragma mmap_size = {2**28}")
        cur.execute("pragma synchronous = NORMAL")
        return conn

    user_pool = dbver.null_pool(user_factory)

    # BUG FIX: was annotated Set[list]; --disable values are strings.
    disable: Set[str] = set(args.disable) if args.disable else set()

    daemons: Dict[str, daemon_lib.Daemon] = {}
    if "metadata" not in disable:
        daemons["metadata_scraper"] = scrape.MetadataScraper(
            api=api, metadata_pool=metadata_pool)
    if "metadata_tip" not in disable:
        daemons["metadata_tip_scraper"] = scrape.MetadataTipScraper(
            api=api, user_access=user_access, metadata_pool=metadata_pool)
    if "snatchlist" not in disable:
        daemons["snatchlist_scraper"] = scrape.SnatchlistScraper(
            api=api, user_pool=user_pool, period=args.snatchlist_period)

    if args.parent:
        daemons["parent_checker"] = ParentChecker(args.parent)

    # BUG FIX: a ThreadPoolExecutor(max_workers=8) used to be created here and
    # then shadowed (and leaked) by the per-daemon executors below; it is gone.
    executors: List[concurrent.futures.ThreadPoolExecutor] = []

    def signal_handler(signum: int, _: Any) -> None:
        _LOG.info("terminating due to signal %d", signum)
        for daemon in daemons.values():
            daemon.terminate()

    try:
        # Set signal handlers within the try-finally, so we'll be sure to unset
        # them if we get a signal while setting them
        signal.signal(signal.SIGINT, signal_handler)
        signal.signal(signal.SIGTERM, signal_handler)

        futures: List[concurrent.futures.Future] = []
        for name, daemon in daemons.items():
            # One executor per daemon so its thread carries a useful name.
            executor = concurrent.futures.ThreadPoolExecutor(
                thread_name_prefix=name)
            executors.append(executor)
            futures.append(executor.submit(daemon.run))

        # Wait for any daemon to die or be terminated
        concurrent.futures.wait(futures,
                                return_when=concurrent.futures.FIRST_COMPLETED)

        # Ensure all daemons are terminated; all are killed if one dies
        for daemon in daemons.values():
            daemon.terminate()
        # Re-raise any exceptions
        for future in futures:
            future.result()
    finally:
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        # BUG FIX: executors were never shut down; release their threads.
        for executor in executors:
            executor.shutdown(wait=False)
async def get_access() -> btn_site.UserAccess:
    """Assemble a UserAccess from the stored auth and shared HTTP session."""
    # Await in the same order as before: auth first, then the session.
    auth = await get_auth()
    session = await get_requests_session()
    return btn_site.UserAccess(auth=auth, session=session)
 def test_empty_auth(self, _: requests_mock.Mocker) -> None:
     """With no credentials, both accessors must refuse with ValueError."""
     access = site.UserAccess(auth=site.UserAuth())
     for attempt in (lambda: access.get_feed("dummy_feed"),
                     lambda: access.get_torrent(456)):
         with self.assertRaises(ValueError):
             attempt()