def on_start(self):
    """Initialise YouTube access when the actor starts: apply the proxy
    setting, build the shared request headers, and select an API
    implementation — the official API when a key verifies, the
    BeautifulSoup-4 scraper otherwise."""
    formatted_proxy = httpclient.format_proxy(self.config["proxy"])
    youtube.Video.proxy = formatted_proxy
    request_headers = {
        "user-agent": httpclient.format_user_agent(self.user_agent),
        "Cookie": "PREF=hl=en;",
        "Accept-Language": "en;q=0.8",
    }
    if youtube.api_enabled is True:
        if youtube_api.youtube_api_key is not None:
            youtube.Entry.api = youtube_api.API(
                formatted_proxy, request_headers
            )
            if youtube.Entry.search(q="test") is not None:
                logger.info("YouTube API key verified")
            else:
                logger.error(
                    "Failed to verify YouTube API key, disabling API"
                )
                youtube.api_enabled = False
        else:
            logger.error("No YouTube API key provided, disabling API")
            youtube.api_enabled = False
    if youtube.api_enabled is False:
        # Fall back to the bs4-based scraping API.
        logger.info("using bs4API")
        youtube.Entry.api = youtube_bs4api.bs4API(
            formatted_proxy, request_headers
        )
def on_start(self):
    """Set up YouTube access on actor start: apply the proxy, build the
    request headers, and pick an API implementation (the official API if
    the key verifies, otherwise the scraping API)."""
    formatted_proxy = httpclient.format_proxy(self.config['proxy'])
    youtube.Video.proxy = formatted_proxy
    request_headers = {
        'user-agent': httpclient.format_user_agent(self.user_agent),
        'Cookie': 'PREF=hl=en;',
        'Accept-Language': 'en;q=0.8',
    }
    if youtube.api_enabled is True:
        if youtube.API.youtube_api_key is not None:
            youtube.Entry.api = youtube.API(formatted_proxy, request_headers)
            if youtube.Entry.search(q='test') is not None:
                logger.info('YouTube API key verified')
            else:
                logger.error(
                    'Failed to verify YouTube API key, disabling API')
                youtube.api_enabled = False
        else:
            logger.error('No YouTube API key provided, disabling API')
            youtube.api_enabled = False
    if youtube.api_enabled is False:
        # Fall back to the scraping-based API implementation.
        logger.info('Using scrAPI')
        youtube.Entry.api = youtube.scrAPI(formatted_proxy, request_headers)
def __init__(self, config, audio):
    """Set up the RNZ backend: library provider, an on-disk HTTP cache,
    and a requests session with proxy and user-agent from the config."""
    super(RNZBackend, self).__init__()
    self.library = RNZLibraryProvider(backend=self)
    self.uri_schemes = ['rnz']
    # Cache HTTP responses in SQLite for five minutes.
    cache_path = os.path.expanduser(config['rnz']['http_cache'])
    logging.info("http_cache: %s", cache_path)
    requests_cache.install_cache(cache_path, backend='sqlite',
                                 expire_after=300)
    self.session = requests.Session()
    proxy_config = config['proxy']
    if proxy_config is not None:
        formatted = httpclient.format_proxy(proxy_config)
        self.session.proxies.update({'http': formatted, 'https': formatted})
    full_user_agent = httpclient.format_user_agent(
        "%s/%s" % (mopidy_rnz.Extension.dist_name, mopidy_rnz.__version__))
    logging.debug('user_agent: %s', full_user_agent)
    self.session.headers.update({'user-agent': full_user_agent})
def __init__(self, config):
    """Store proxy settings, a formatted user-agent string, and the
    Bandcamp identity/collection options from the Mopidy config."""
    bandcamp_config = config["bandcamp"]
    self.proxy = httpclient.format_proxy(config["proxy"])
    self.ua_str = httpclient.format_user_agent(
        f"{mopidy_bandcamp.Extension.dist_name}/{mopidy_bandcamp.__version__}"
    )
    self.identity = bandcamp_config["identity"]
    self.collection_items = bandcamp_config["collection_items"]
    # Not known at construction time; starts unset.
    self.fan_id = None
def _get_session(self, proxy_config):
    """Return a requests.Session configured with the Mopidy proxy and a
    Beets-extension user-agent header."""
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent('/'.join(
        (mopidy_beets.BeetsExtension.dist_name, mopidy_beets.__version__)))
    session.headers.update({'user-agent': agent})
    return session
def _get_session(self, proxy_config):
    """Build and return a requests.Session carrying the configured proxy
    and this extension's "dist/version" user-agent."""
    agent = httpclient.format_user_agent('/'.join(
        (mopidy_beets.BeetsExtension.dist_name, mopidy_beets.__version__)))
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session = requests.Session()
    session.headers.update({'user-agent': agent})
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    return session
def get_requests_session(proxy_config, user_agent, token):
    """Build a requests.Session with proxy settings and user-agent applied.

    Note: ``token`` is accepted for interface compatibility but is not
    used inside this helper.
    """
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({"http": formatted_proxy,
                            "https": formatted_proxy})
    session.headers.update(
        {"user-agent": httpclient.format_user_agent(user_agent)})
    return session
def get_requests_session(proxy_config, user_agent):
    """Return a requests.Session with Mopidy proxy settings and the given
    user-agent applied to every request."""
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    session.headers.update(
        {'user-agent': httpclient.format_user_agent(user_agent)})
    return session
def get_requests_session(proxy_config):
    """Return a requests.Session carrying the configured proxy and this
    extension's "dist/version" user-agent header."""
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent(
        '%s/%s' % (Extension.dist_name, __version__))
    session.headers.update({'user-agent': agent})
    return session
def get_requests_session(self):
    """Return a requests.Session using the extension's stored proxy
    config and a "dist/version" user-agent."""
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(self.__config['proxy'])
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent(
        '%s/%s' % (self.__dist_name, self.__version))
    session.headers.update({'user-agent': agent})
    return session
def get_requests_session(proxy_config):
    """Return a requests.Session with proxy settings and this extension's
    "dist/version" user-agent applied."""
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({"http": formatted_proxy,
                            "https": formatted_proxy})
    agent = httpclient.format_user_agent(
        f"{Extension.dist_name}/{__version__}")
    session.headers.update({"user-agent": agent})
    return session
def get_requests_session(cls, config):
    """Build a requests.Session honoring Mopidy's proxy config, with a
    "dist/version" user-agent for this extension."""
    # Local import: requests is only needed when this helper is called.
    import requests
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(config['proxy'])
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent(
        '%s/%s' % (cls.dist_name, cls.version))
    session.headers.update({'user-agent': agent})
    return session
def get_requests_session(proxy_config, user_agent):
    """Return a requests.Session with the RadioBrowser proxy settings and
    user-agent header applied."""
    logger.debug('RadioBrowser: Start backend.get_requests_session')
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    session.headers.update(
        {'user-agent': httpclient.format_user_agent(user_agent)})
    return session
def __init__(self, proxy_config=None, user_agent=None):
    """Create the SomaFM HTTP client: a requests session with optional
    proxy settings and the given user-agent."""
    super(SomaFMClient, self).__init__()
    self.session = requests.Session()
    if proxy_config is not None:
        formatted = httpclient.format_proxy(proxy_config)
        self.session.proxies.update({'http': formatted, 'https': formatted})
    self.session.headers.update(
        {'user-agent': httpclient.format_user_agent(user_agent)})
def __init__(self, audio, backend, proxy_config=None, user_agent=None):
    """Initialise the provider and its HTTP session, applying proxy
    settings only when a proxy config is supplied."""
    super().__init__(audio=audio, backend=backend)
    self.session = requests.Session()
    if proxy_config is not None:
        formatted = httpclient.format_proxy(proxy_config)
        self.session.proxies.update({"http": formatted, "https": formatted})
    self.session.headers.update(
        {"user-agent": httpclient.format_user_agent(user_agent)})
def __init__(self, proxy_config=None, user_agent=None):
    """Create the radio.net HTTP client session with optional proxy,
    user-agent, and a no-cache header on every request."""
    super(RadioNetClient, self).__init__()
    self.session = requests.Session()
    if proxy_config is not None:
        formatted = httpclient.format_proxy(proxy_config)
        self.session.proxies.update({'http': formatted, 'https': formatted})
    self.session.headers.update(
        {'user-agent': httpclient.format_user_agent(user_agent)})
    self.session.headers.update({'cache-control': 'no-cache'})
def get_requests_session(proxy_config, user_agent, token, public=False):
    """Create a requests.Session with proxy settings applied; unless
    ``public`` is true, also attach the user-agent and an OAuth
    Authorization header built from ``token``."""
    formatted_agent = httpclient.format_user_agent(user_agent)
    session = requests.Session()
    formatted_proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({"http": formatted_proxy,
                            "https": formatted_proxy})
    if not public:
        session.headers.update({"user-agent": formatted_agent})
        session.headers.update({"Authorization": f"OAuth {token}"})
    return session
def _get_session(self):
    """Return a requests.Session with the stored client cert, the Mopidy
    proxy, and the Emby extension user-agent configured."""
    session = requests.Session()
    session.cert = self.cert
    formatted_proxy = httpclient.format_proxy(self.proxy)
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent('/'.join(
        (mopidy_emby.Extension.dist_name, mopidy_emby.__version__)))
    session.headers.update({'user-agent': agent})
    return session
def get_url_opener(cls, config):
    """Return a urllib2 opener honoring Mopidy's proxy config and sending
    a "dist/version" User-agent header."""
    # Local import: urllib2 is only needed when this helper is called.
    import urllib2
    proxy = httpclient.format_proxy(config['proxy'])
    handlers = []
    if proxy:
        handlers.append(
            urllib2.ProxyHandler({'http': proxy, 'https': proxy}))
    opener = urllib2.build_opener(*handlers)
    agent = httpclient.format_user_agent(
        '%s/%s' % (cls.dist_name, cls.version))
    opener.addheaders = [('User-agent', agent)]
    return opener
def __init__(self, headers, cert=None, proxy=None):
    """Create the Jellyfin HTTP session with the given headers, optional
    client cert, and Mopidy proxy settings."""
    self.headers = headers
    http_proxy = httpclient.format_proxy(proxy)
    agent = httpclient.format_user_agent('/'.join(
        (mopidy_jellyfin.Extension.dist_name, mopidy_jellyfin.__version__)))
    session = requests.Session()
    session.cert = cert
    session.proxies.update({'http': http_proxy, 'https': http_proxy})
    session.headers.update(self.headers)
    session.headers.update({'user-agent': agent})
    self.session = session
def get_url_opener(cls, config):
    """Build a urllib2 opener that uses the configured proxy (when set)
    and advertises this extension's "dist/version" User-agent."""
    # Local import: urllib2 is only needed when this helper is called.
    import urllib2
    formatted_proxy = httpclient.format_proxy(config['proxy'])
    if formatted_proxy:
        proxy_handler = urllib2.ProxyHandler(
            {'http': formatted_proxy, 'https': formatted_proxy})
        opener = urllib2.build_opener(proxy_handler)
    else:
        opener = urllib2.build_opener()
    agent = httpclient.format_user_agent(
        '%s/%s' % (cls.dist_name, cls.version))
    opener.addheaders = [('User-agent', agent)]
    return opener
def make_session(config):
    """Return a SessionWithUrlBase rooted at the Funkwhale API v1 URL,
    with the Mopidy proxy and extension user-agent applied."""
    funkwhale_config = config['funkwhale']
    api_root = urlparse.urljoin(funkwhale_config['host'], '/api/v1/')
    session = SessionWithUrlBase(url_base=api_root)
    formatted_proxy = httpclient.format_proxy(config['proxy'])
    session.proxies.update({'http': formatted_proxy,
                            'https': formatted_proxy})
    agent = httpclient.format_user_agent('%s/%s' % (
        mopidy_funkwhale.Extension.dist_name, mopidy_funkwhale.__version__))
    session.headers.update({'user-agent': agent})
    return session
def __init__(
    self,
    hostname: str,
    port: int,
    proxy_config: Dict[str, str],
    user_agent: str,
):
    """Store connection settings and build the HTTP session and cache
    location for this client."""
    self.cache_location: Path = self._init_cache_location()
    self.hostname: str = hostname
    self.port: int = port
    formatted_proxy = httpclient.format_proxy(proxy_config)
    formatted_agent = httpclient.format_user_agent(user_agent)
    self.http_session: requests.Session = self._init_http_session(
        proxy=formatted_proxy,
        user_agent=formatted_agent,
    )
def __init__(self, config, audio):
    """Wire up the Internet Archive backend: configured client, response
    cache, library provider, and playback provider."""
    super().__init__()
    ext_config = config[Extension.ext_name]
    client = InternetArchiveClient(
        ext_config["base_url"],
        retries=ext_config["retries"],
        timeout=ext_config["timeout"],
    )
    self.client = client
    client.useragent = httpclient.format_user_agent(
        f"{Extension.dist_name}/{Extension.version}"
    )
    formatted_proxy = httpclient.format_proxy(config["proxy"])
    client.proxies.update({"http": formatted_proxy,
                           "https": formatted_proxy})
    client.cache = _cache(**ext_config)
    self.library = InternetArchiveLibraryProvider(ext_config, self)
    self.playback = InternetArchivePlaybackProvider(audio, self)
def __init__(self, config, audio):
    """Set up the Internet Archive backend: client with retry/timeout
    config, user-agent, proxy, cache, and the providers."""
    super(InternetArchiveBackend, self).__init__()
    ext_config = config[Extension.ext_name]
    client = InternetArchiveClient(
        ext_config['base_url'],
        retries=ext_config['retries'],
        timeout=ext_config['timeout'])
    self.client = client
    client.useragent = httpclient.format_user_agent(
        '%s/%s' % (Extension.dist_name, Extension.version))
    formatted_proxy = httpclient.format_proxy(config['proxy'])
    client.proxies.update({'http': formatted_proxy,
                           'https': formatted_proxy})
    client.cache = _cache(**ext_config)
    self.library = InternetArchiveLibraryProvider(ext_config, self)
    self.playback = InternetArchivePlaybackProvider(audio, self)
def _get_data(self, a_id):
    """Fetch the cover-art image for ``a_id`` from the Subsonic server.

    Returns the ``requests`` response object on success; raises
    ``tornado.web.HTTPError`` when the request to Subsonic fails.
    """
    # Technique from
    # https://gist.github.com/methane/2185380#gistcomment-1301483
    proxies = dict(http=self.proxy_formatted, https=self.proxy_formatted)
    useragent = httpclient.format_user_agent('{name}/{ver}'.format(
        name=mopidy_subidy.SubidyExtension.dist_name,
        ver=mopidy_subidy.__version__))
    censored_url = self.subsonic_api.get_censored_coverart_image_uri(a_id)
    # Lazy %-style logging args instead of eager string interpolation,
    # so the message is only formatted when debug logging is enabled.
    logger.debug("Loading cover art from subsonic with url: '%s'",
                 censored_url)
    url = self.subsonic_api.get_coverart_image_uri(a_id)
    try:
        return requests.get(url,
                            headers={'user-agent': useragent},
                            proxies=proxies)
    except Exception:
        # Broad catch is deliberate: any connection/HTTP failure is
        # surfaced to the web layer as an HTTPError (implicit exception
        # chaining preserves the original traceback).
        logger.warning(
            'Connecting to subsonic failed when loading cover art image.')
        raise tornado.web.HTTPError()
def test_format_user_agent(name, expected):
    """The formatted user-agent string must match the expected pattern."""
    formatted = httpclient.format_user_agent(name)
    assert re.match(expected, formatted)
from mopidy import httpclient
import mock
import pytest
import vcr
import youtube_dl
from mopidy_youtube import Extension, backend, youtube
from mopidy_youtube.apis import youtube_scrapi

# No proxy in the test environment; the commented expression shows how
# production code would derive it from Mopidy's config.
proxy = None  # httpclient.format_proxy(config['proxy'])
youtube.Video.proxy = proxy

# Request headers shared by the tests below.
user_agent = "{}/{}".format(Extension.dist_name, Extension.version)
headers = {
    "user-agent": httpclient.format_user_agent(user_agent),
    "Cookie": "PREF=hl=en;",
    "Accept-Language": "en;q=0.8",
}


@pytest.yield_fixture
def youtube_dl_mock():
    # Patch the youtube_dl module used by mopidy_youtube.youtube so tests
    # never touch the real downloader; the patch is undone after the test.
    patcher = mock.patch.object(youtube, "youtube_dl", spec=youtube_dl)
    yield patcher.start()
    patcher.stop()


@pytest.fixture
def youtube_dl_mock_with_video(youtube_dl_mock):
    # NOTE(review): this fixture appears truncated in the visible source;
    # it starts from the mocked YoutubeDL instance — confirm the rest of
    # its body against the full file.
    video_mock = youtube_dl_mock.YoutubeDL.return_value
def get_user_agent():
    """Return this extension's "dist/version" user-agent string."""
    agent = '%s/%s' % (Extension.dist_name, Extension.version)
    return format_user_agent(agent)