def _urljoin(base, url): """ Join relative URLs to base URLs like urllib.parse.urljoin but support arbitrary URIs (esp. 'http+unix://'). """ parsed = urlparse(base) scheme = parsed.scheme return urlparse(urljoin(parsed._replace(scheme="http").geturl(), url))._replace(scheme=scheme).geturl()
def _urljoin(base, url): """ Join relative URLs to base URLs like urllib.parse.urljoin but support arbitrary URIs (esp. 'http+unix://'). """ parsed = urlparse(base) scheme = parsed.scheme return urlparse(urljoin(parsed._replace(scheme='http').geturl(), url))._replace(scheme=scheme).geturl()
def _fetch(self, url_suffix, body, log_exceptions=True, attempts=3):
    """
    POST *body* to the scheduler at ``self._url`` + *url_suffix*, retrying
    on connection failure.

    :param url_suffix: path joined onto the scheduler base URL.
    :param body: request payload handed to ``self._fetcher.fetch``.
    :param log_exceptions: if True, log each failed connection attempt.
    :param attempts: maximum number of tries before giving up.
    :return: whatever ``self._fetcher.fetch`` returns.
    :raises RPCError: when all *attempts* tries fail; the last underlying
        exception is passed along to the error.
    """
    # Use the scheme-agnostic _urljoin helper rather than plain
    # urllib.parse.urljoin: for unknown schemes such as 'http+unix://',
    # urljoin does not resolve the relative URL against the base at all.
    full_url = _urljoin(self._url, url_suffix)
    last_exception = None
    attempt = 0
    while attempt < attempts:
        attempt += 1
        if last_exception:
            logger.info("Retrying...")
            self._wait()  # wait for a bit and retry
        try:
            response = self._fetcher.fetch(full_url, body, self._connect_timeout)
            break
        except FetcherException as e:
            last_exception = e.original_exc
            if log_exceptions:
                logger.exception("Failed connecting to remote scheduler %r", self._url)
            continue
    else:
        # while-else: only reached when the loop exhausted all attempts
        # without a successful break.
        raise RPCError(
            "Errors (%d attempts) when connecting to remote scheduler %r" % (attempts, self._url),
            last_exception
        )
    return response