Example #1
0
    def test_length(self):
        """Values within the de facto length limit are stored; oversized
        values are rejected and leave the existing cookie untouched."""
        cookie_jar = CookieJar()
        policy = DeFactoCookiePolicy(cookie_jar=cookie_jar)
        cookie_jar.set_policy(policy)

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(
            [
                'Set-Cookie: k={0}'.format('a' * 400)
            ],
            'http://example.com/'
        )

        cookie_jar.extract_cookies(response, request)

        # A 400-byte value is within the limit, so 'k' is stored.
        self.assertTrue(cookie_jar._cookies['example.com']['/'].get('k'))

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(
            [
                'Set-Cookie: k={0}'.format('a' * 5000)
            ],
            'http://example.com/'
        )

        cookie_jar.extract_cookies(response, request)

        # The oversized value must be rejected, so 'k' keeps its previous
        # 400-byte value.  (The original assertion checked the never-set
        # key 'k2', which was vacuously false regardless of the policy.)
        self.assertEqual(
            'a' * 400,
            cookie_jar._cookies['example.com']['/']['k'].value
        )
Example #2
0
    def test_domain_limit(self):
        """The limits policy caps cookies per domain at 50, but updating a
        cookie that is already stored still succeeds."""
        jar = CookieJar()
        jar.set_policy(CookieLimitsPolicy(cookie_jar=jar))

        request = urllib.request.Request('http://example.com/')

        for index in range(55):
            name = 'k{0}'.format(index)
            response = FakeResponse(['Set-Cookie: {0}=a'.format(name)],
                                    'http://example.com/')

            jar.extract_cookies(response, request)

            stored = jar._cookies['example.com']['/'].get(name)

            # The first 50 cookies fit within the limit; the rest are
            # dropped by the policy.
            if index < 50:
                self.assertTrue(stored)
            else:
                self.assertFalse(stored)

        # Overwriting an already-stored cookie is not blocked by the limit.
        response = FakeResponse(['Set-Cookie: k3=b'], 'http://example.com/')

        jar.extract_cookies(response, request)
        self.assertEqual('b',
                         jar._cookies['example.com']['/']['k3'].value)
Example #3
0
    def test_empty_value(self):
        """A Set-Cookie header with a name but no value is still accepted."""
        cookie_jar = CookieJar()
        policy = DeFactoCookiePolicy(cookie_jar=cookie_jar)
        cookie_jar.set_policy(policy)

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(['Set-Cookie: k'], 'http://example.com/')

        cookie_jar.extract_cookies(response, request)

        # The domain entry exists even though 'k' carried no value.
        # (A leftover debug print of the jar internals was removed.)
        self.assertTrue(cookie_jar._cookies.get('example.com'))
Example #4
0
    def test_empty_value(self):
        """A Set-Cookie header with a name but no value is still accepted."""
        cookie_jar = CookieJar()
        policy = DeFactoCookiePolicy(cookie_jar=cookie_jar)
        cookie_jar.set_policy(policy)

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(
            [
                'Set-Cookie: k'
            ],
            'http://example.com/'
        )

        cookie_jar.extract_cookies(response, request)

        # The domain entry exists even though 'k' carried no value.
        # (A leftover debug print of the jar internals was removed.)
        self.assertTrue(cookie_jar._cookies.get('example.com'))
Example #5
0
    def test_ascii(self):
        """A cookie whose value contains non-ASCII text is rejected."""
        # Differences with FakeResponse:
        # On Python 3, MIME encoded-word syntax is used
        # On Python 2, U backslash syntax is used but it's not decoded back.
        cookie_jar = CookieJar()
        policy = DeFactoCookiePolicy(cookie_jar=cookie_jar)
        cookie_jar.set_policy(policy)

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(
            [
                # NOTE(review): the U+FFFD below looks like an encoding
                # artifact of whatever non-ASCII byte the test originally
                # used; any non-ASCII character exercises the same
                # rejection path, so the literal is left unchanged.
                'Set-Cookie: k=�'
            ],
            'http://example.com/'
        )

        cookie_jar.extract_cookies(response, request)

        # The cookie must be rejected outright, so nothing is stored for
        # the domain.  (A leftover debug print was removed.)
        self.assertFalse(cookie_jar._cookies.get('example.com'))
Example #6
0
    def test_domain_limit(self):
        """The de facto policy stores at most 50 cookies per domain, while
        updates to a cookie that is already stored still succeed."""
        jar = CookieJar()
        jar.set_policy(DeFactoCookiePolicy(cookie_jar=jar))

        request = urllib.request.Request('http://example.com/')

        for index in range(55):
            name = 'k{0}'.format(index)
            response = FakeResponse(
                ['Set-Cookie: {0}=a'.format(name)],
                'http://example.com/'
            )

            jar.extract_cookies(response, request)

            stored = jar._cookies['example.com']['/'].get(name)

            # The first 50 cookies fit within the limit; the rest are
            # dropped by the policy.
            if index < 50:
                self.assertTrue(stored)
            else:
                self.assertFalse(stored)

        # Overwriting an already-stored cookie is not blocked by the limit.
        response = FakeResponse(
            ['Set-Cookie: k3=b'],
            'http://example.com/'
        )

        jar.extract_cookies(response, request)
        self.assertEqual(
            'b',
            jar._cookies['example.com']['/']['k3'].value
        )
Example #7
0
    def test_length(self):
        """Values within the length limit are stored; oversized values are
        rejected and leave the existing cookie untouched."""
        cookie_jar = CookieJar()
        policy = CookieLimitsPolicy(cookie_jar=cookie_jar)
        cookie_jar.set_policy(policy)

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(['Set-Cookie: k={0}'.format('a' * 400)],
                                'http://example.com/')

        cookie_jar.extract_cookies(response, request)

        # A 400-byte value is within the limit, so 'k' is stored.
        self.assertTrue(cookie_jar._cookies['example.com']['/'].get('k'))

        request = urllib.request.Request('http://example.com/')
        response = FakeResponse(['Set-Cookie: k={0}'.format('a' * 5000)],
                                'http://example.com/')

        cookie_jar.extract_cookies(response, request)

        # The oversized value must be rejected, so 'k' keeps its previous
        # 400-byte value.  (The original assertion checked the never-set
        # key 'k2', which was vacuously false regardless of the policy.)
        self.assertEqual(
            'a' * 400,
            cookie_jar._cookies['example.com']['/']['k'].value
        )
Example #8
0
def _query_w_or_v_sa(sql,
                     database,
                     filename,
                     file_to_upload,
                     loginurl,
                     sqlurl,
                     login_details,
                     programme_id=None,
                     lowercase=True,
                     extra_post=None):
    """Run a free-form SQL query against a WSA/VSA archive, download the
    FITS result and return it as a pandas DataFrame.

    Adapted from
    http://casu.ast.cam.ac.uk/surveys-projects/wfcam/data-access/wsa-freeform.py

    :param sql: the SQL statement to execute.
    :param database: archive database name to query.
    :param filename: if truthy, save the downloaded FITS file here;
        otherwise use a temporary file that is removed afterwards.
    :param file_to_upload: optional path of a SQL file to upload.
    :param loginurl: login URL template taking (username, password,
        community).
    :param sqlurl: URL of the free-form SQL endpoint.
    :param login_details: dict with 'username', 'password' and 'community'.
    :param programme_id: optional archive programme ID to restrict to.
    :param lowercase: lower-case the resulting column names (default True).
    :param extra_post: optional extra fields merged into the POST data.
    :returns: a pandas.DataFrame with the query result.
    :raises RuntimeError: if the archive response contains no result link.
    """
    # Send request to login to the archive

    cj = CookieJar()
    if login_details['username'] is not None and login_details[
            'username'].strip():
        # There are non-empty login details
        q = loginurl % (login_details['username'], login_details['password'],
                        login_details['community'])
        response = urllib.request.urlopen(q)
        request = urllib.request.Request(q)

        # Extract the cookies from the response header and use them for future connections
        cj.extract_cookies(response, request)
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cj))

    # Construct and post the request
    postdata = {
        'formaction': 'freeform',
        'sqlstmt': sql,
        'emailAddress': '',
        'database': database,
        'timeout': 1800,
        'format': 'FITS',
        'compress': 'GZIP',
        'rows': 10,
        'iFmt': 'VOTable'
    }

    if extra_post is not None:
        postdata.update(extra_post)

    if programme_id is not None:
        postdata['programmeID'] = programme_id

    # Keep the upload handle open only for the duration of the POST;
    # previously it was opened and never closed.
    upload_fp = None
    try:
        if file_to_upload is not None:
            upload_fp = open(file_to_upload, 'rb')
            files = {'uploadSQLFile': (file_to_upload, upload_fp)}
        else:
            files = None

        response = requests.post(sqlurl, data=postdata, files=files)
    finally:
        if upload_fp is not None:
            upload_fp.close()

    res = response.text
    # Find where our output file is.  Raw string avoids the invalid
    # '\S' escape in a plain string literal.
    try:
        fitsfile = re.compile(r'<a href="(\S+%s).+' % 'fits.gz').search(
            str(res)).group(1)
    except AttributeError:
        raise RuntimeError('Query Failed, Response: {}'.format(res))

    # Request the fitsfile; close the response handle when done.
    fits_response = opener.open(fitsfile)
    try:
        fitsres = fits_response.read()
    finally:
        fits_response.close()

    # Save file to local disk
    if filename:
        with open(filename, 'wb') as f:
            f.write(fitsres)

    else:
        with tempfile.NamedTemporaryFile(delete=False,
                                         suffix='.fits.gz',
                                         mode='wb') as f:
            f.write(fitsres)

    # FITS files are big endian, while pandas assumes native byte order
    # (i.e. little endian on x86); calling .byteswap().newbyteorder() on a
    # numpy array switches it to native order.
    hdulist = fits.open(f.name)
    try:
        df = pd.DataFrame(
            np.array(hdulist[1].data).byteswap().newbyteorder())
    finally:
        # Previously the HDU list was never closed.
        hdulist.close()

    if lowercase:
        df.columns = map(str.lower, df.columns)

    if not filename:
        os.remove(f.name)

    return df
Example #9
0
class CookieTransport(TimeoutTransport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    # Shared cookie jar; created lazily on the first send_cookies() call.
    cookiejar = None
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        '''Write any applicable Cookie headers onto the open connection.'''
        if self.cookiejar is None:
            # First call: start with an empty jar (nothing to send yet).
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    def single_request(self, host, handler, request_body, verbose=1):
        '''Issue one XML-RPC request, round-tripping cookies via the jar.'''
        # issue XML-RPC request

        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # Synthetic urllib request used purely for cookie bookkeeping.
        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib.request.Request(request_url)

        try:
            self.send_request(h, handler, request_body)
            self.send_host(h, host)
            self.send_cookies(
                h, cookie_request)  # ADDED. creates cookiejar if None.
            self.send_user_agent(h)
            self.send_content(h, request_body)

            # NOTE(review): `buffering` is a Python 2 httplib extension;
            # Python 3's http.client getresponse() accepts no such keyword
            # — confirm the intended runtime.
            response = h.getresponse(buffering=True)

            # ADDED: parse headers and get cookies here
            # fake a response object that we can fill with the headers above
            class CookieResponse:
                # Minimal stand-in providing the info() method that
                # CookieJar.extract_cookies() expects of a response.
                def __init__(self, headers):
                    self.headers = headers

                def info(self):
                    return self.headers

            cookie_response = CookieResponse(response.msg)
            # Okay, extract the cookies from the headers
            self.cookiejar.extract_cookies(cookie_response, cookie_request)
            # And write back any changes
            if hasattr(self.cookiejar, 'save'):
                self.cookiejar.save(self.cookiejar.filename)

            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)
        except xmlrpc.client.Fault:
            raise
        except Exception:
            # All unexpected errors leave connection in
            # a strange state, so we clear it.
            self.close()
            raise

        # discard any response data and raise exception
        if (response.getheader("content-length", 0)):
            response.read()
        raise xmlrpc.client.ProtocolError(
            host + handler,
            response.status,
            response.reason,
            response.msg,
        )
Example #10
0
    def _on_log_in(self, _widget):
        """Show a modal WebKit window for the GMail login, then capture the
        session cookies and store them as the account credentials."""
        import gi
        gi.require_version('WebKit2', '4.0')
        from gi.repository import Gtk, WebKit2, Soup
        loop = GLib.MainLoop()
        window = Gtk.Window()
        window.set_title("Log in to GMail")
        window.set_default_size(800, 600)
        window.set_destroy_with_parent(True)
        window.set_transient_for(self._window)
        window.set_modal(True)
        scrolled_window = Gtk.ScrolledWindow()
        window.add(scrolled_window)
        webview = WebKit2.WebView()
        webview.load_uri(GMAIL_URL)
        scrolled_window.add(webview)
        window.connect("destroy", lambda _: loop.quit())
        got_to_inbox = False

        def _on_load_changed(_, load_event):
            # Close the dialog once navigation lands on the inbox, which
            # signals a completed login.
            if load_event != WebKit2.LoadEvent.FINISHED:
                return
            uri = webview.get_uri()
            nonlocal got_to_inbox
            if uri == "https://mail.google.com/mail/u/0/#inbox":
                # (A redundant duplicate `nonlocal` declaration was here.)
                got_to_inbox = True
                window.destroy()

        webview.connect("load-changed", _on_load_changed)
        window.show_all()
        loop.run()
        cookie_headers = None

        def _got_cookies(cookies, result, data):
            try:
                headers = []
                for cookie in cookies.get_cookies_finish(result):
                    headers.append(cookie.to_set_cookie_header())
                nonlocal cookie_headers
                cookie_headers = headers
            except Exception:
                # Narrowed from a bare `except:` so SystemExit /
                # KeyboardInterrupt are not swallowed.
                logging.warning("getting cookies failed", exc_info=True)
            loop.quit()

        webview.get_context().get_cookie_manager().get_cookies(
            GMAIL_RSS_URL, None, _got_cookies, None)
        loop.run()
        window.destroy()
        if not cookie_headers:
            logging.warning("could not set cookies from login")
            return
        if not got_to_inbox:
            logging.warning("login dialog closed before login done")
            return
        cookie_jar = CookieJar()
        cookie_jar.extract_cookies(FakeHTTPResonse(cookie_headers),
                                   urllib.request.Request(GMAIL_RSS_URL))
        self._set_gmail_credentials(generate_gmail_cookies_key(), cookie_jar)

        self._on_entry_changed(None)
Example #11
0
class Cookies(MutableMapping):
    """
    HTTP Cookies, as a mutable mapping.
    """

    def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None:
        if cookies is None or isinstance(cookies, dict):
            # Start from a fresh jar; a dict seeds it with simple
            # name/value cookies.
            self.jar = CookieJar()
            if isinstance(cookies, dict):
                for key, value in cookies.items():
                    self.set(key, value)
        elif isinstance(cookies, Cookies):
            # Copy the cookies out of another Cookies instance.
            self.jar = CookieJar()
            for cookie in cookies.jar:
                self.jar.set_cookie(cookie)
        else:
            # Assume a CookieJar was passed in and use it directly.
            self.jar = cookies

    def extract_cookies(self, response: Response) -> None:
        """
        Loads any cookies based on the response `Set-Cookie` headers.
        """
        urlib_response = self._CookieCompatResponse(response)
        urllib_request = self._CookieCompatRequest(response.request)

        self.jar.extract_cookies(urlib_response, urllib_request)  # type: ignore

    def set_cookie_header(self, request: Request) -> None:
        """
        Sets an appropriate 'Cookie:' HTTP header on the `Request`.
        """
        urllib_request = self._CookieCompatRequest(request)
        self.jar.add_cookie_header(urllib_request)

    def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
        """
        Set a cookie value by name. May optionally include domain and path.
        """
        # Build the full constructor signature http.cookiejar.Cookie
        # requires; everything beyond name/value/domain/path is defaulted.
        kwargs = {
            "version": 0,
            "name": name,
            "value": value,
            "port": None,
            "port_specified": False,
            "domain": domain,
            "domain_specified": bool(domain),
            "domain_initial_dot": domain.startswith("."),
            "path": path,
            "path_specified": bool(path),
            "secure": False,
            "expires": None,
            "discard": True,
            "comment": None,
            "comment_url": None,
            "rest": {"HttpOnly": None},
            "rfc2109": False,
        }
        cookie = Cookie(**kwargs)  # type: ignore
        self.jar.set_cookie(cookie)

    def get(  # type: ignore
        self,
        name: str,
        default: typing.Optional[str] = None,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> typing.Optional[str]:
        """
        Get a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to retrieve.

        Raises `CookieConflict` if more than one cookie matches.
        """
        value = None
        for cookie in self.jar:
            if cookie.name == name:
                if domain is None or cookie.domain == domain:  # type: ignore
                    if path is None or cookie.path == path:
                        if value is not None:
                            message = f"Multiple cookies exist with name={name}"
                            raise CookieConflict(message)
                        value = cookie.value

        if value is None:
            return default
        return value

    def delete(
        self,
        name: str,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> None:
        """
        Delete a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to delete.
        """
        if domain is not None and path is not None:
            # Fully qualified: the jar can remove it directly.
            return self.jar.clear(domain, path, name)

        # Partially qualified: collect matches first, then remove them,
        # so the jar is not mutated while being iterated.
        remove = []
        for cookie in self.jar:
            if cookie.name == name:
                if domain is None or cookie.domain == domain:  # type: ignore
                    if path is None or cookie.path == path:
                        remove.append(cookie)

        for cookie in remove:
            self.jar.clear(cookie.domain, cookie.path, cookie.name)  # type: ignore

    def clear(
        self,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> None:
        """
        Delete all cookies. Optionally include a domain and path in
        order to only delete a subset of all the cookies.
        """
        args = []
        if domain is not None:
            args.append(domain)
        if path is not None:
            # CookieJar.clear() requires a domain whenever a path is given.
            assert domain is not None
            args.append(path)
        self.jar.clear(*args)

    def update(self, cookies: typing.Optional[CookieTypes] = None) -> None:  # type: ignore
        cookies = Cookies(cookies)
        for cookie in cookies.jar:
            self.jar.set_cookie(cookie)

    def __setitem__(self, name: str, value: str) -> None:
        return self.set(name, value)

    def __getitem__(self, name: str) -> str:
        value = self.get(name)
        if value is None:
            raise KeyError(name)
        return value

    def __delitem__(self, name: str) -> None:
        return self.delete(name)

    def __len__(self) -> int:
        return len(self.jar)

    def __iter__(self) -> typing.Iterator[str]:
        return (cookie.name for cookie in self.jar)

    def __bool__(self) -> bool:
        # Truthy when the jar holds at least one cookie, without
        # materializing the whole jar.
        for _ in self.jar:
            return True
        return False

    class _CookieCompatRequest(urllib.request.Request):
        """
        Wraps a `Request` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, request: Request) -> None:
            super().__init__(
                url=str(request.url),
                headers=dict(request.headers),
                method=request.method,
            )
            self.request = request

        def add_unredirected_header(self, key: str, value: str) -> None:
            # Mirror headers the jar sets back onto the wrapped request.
            super().add_unredirected_header(key, value)
            self.request.headers[key] = value

    class _CookieCompatResponse:
        """
        Wraps a `Request` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, response: Response):
            self.response = response

        def info(self) -> email.message.Message:
            # CookieJar.extract_cookies() reads Set-Cookie headers via
            # the info() message object.
            info = email.message.Message()
            for key, value in self.response.headers.items():
                info[key] = value
            return info
Example #12
0
class HttpTransport(Transport):
    """
    Basic HTTP transport implemented using using urllib2, that provides for
    cookies & proxies but no authentication.

    """
    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An HTTP proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the URL open timeout (seconds).
                    - type: I{float}
                    - default: 90

        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        # Jar shared across requests: send() both adds and extracts cookies.
        self.cookiejar = CookieJar()
        self.proxy = {}
        # Optional pre-built opener; when None, u2opener() builds a fresh
        # one per call.
        self.urlopener = None

    def open(self, request):
        """Open the request URL and return the file-like response."""
        try:
            url = self.__get_request_url_for_urllib(request)
            log.debug('opening (%s)', url)
            u2request = urllib.request.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except urllib.error.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        """Send the request message and return a Reply, round-tripping
        cookies through the jar."""
        url = self.__get_request_url_for_urllib(request)
        msg = request.message
        headers = request.headers
        try:
            u2request = urllib.request.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            headers = fp.headers
            # Python 2 returns a mimetools.Message; .dict gives a plain dict.
            if sys.version_info < (3, 0):
                headers = headers.dict
            reply = Reply(http.client.OK, headers, fp.read())
            log.debug('received:\n%s', reply)
            return reply
        except urllib.error.HTTPError as e:
            # 202/204 carry no body and are not errors for our purposes.
            if e.code not in (http.client.ACCEPTED, http.client.NO_CONTENT):
                raise TransportError(e.msg, e.code, e.fp)

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.

        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        @return: The opened file-like urllib2 object.
        @rtype: fp

        """
        tm = self.options.timeout
        url = self.u2opener()
        # urllib2 before Python 2.6 has no per-call timeout; fall back to
        # the global socket default.
        if (sys.version_info < (3, 0)) and (self.u2ver() < 2.6):
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.

        @return: An opener.
        @rtype: I{OpenerDirector}

        """
        # NOTE(review): a freshly built opener is returned but never cached
        # in self.urlopener — confirm whether caching was intended.
        if self.urlopener is None:
            return urllib.request.build_opener(*self.u2handlers())
        return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.

        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]

        """
        return [urllib.request.ProxyHandler(self.proxy)]

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.

        @return: The urllib2 version.
        @rtype: float

        """
        # NOTE(review): urllib.request in Python 3 has no __version__
        # attribute, so this presumably always takes the except branch
        # there and returns 0 — confirm on the target runtime.
        try:
            part = urllib.request.__version__.split('.', 1)
            return float('.'.join(part))
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo={}):
        # NOTE(review): mutable default `memo={}` is a Python anti-pattern;
        # harmless here only because memo is never used.
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone

    @staticmethod
    def __get_request_url_for_urllib(request):
        """
        Returns the given request's URL, properly encoded for use with urllib.

        We expect that the given request object already verified that the URL
        contains ASCII characters only and stored it as a native str value.

        urllib accepts URL information as a native str value and may break
        unexpectedly if given URL information in another format.

        Python 3.x httplib.client implementation must be given a unicode string
        and not a bytes object and the given string is internally converted to
        a bytes object using an explicitly specified ASCII encoding.

        Python 2.7 httplib implementation expects the URL passed to it to not
        be a unicode string. If it is, then passing it to the underlying
        httplib Request object will cause that object to forcefully convert all
        of its data to unicode, assuming that data contains ASCII data only and
        raising a UnicodeDecodeError exception if it does not (caused by simple
        unicode + string concatenation).

        Python 2.4 httplib implementation does not really care about this as it
        does not use the internal optimization present in the Python 2.7
        implementation causing all the requested data to be converted to
        unicode.

        """
        assert isinstance(request.url, str)
        return request.url
Example #13
0
class HttpTransport(Transport):
    """
    Basic HTTP transport implemented using using urllib2, that provides for
    cookies & proxies but no authentication.

    """

    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An HTTP proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the URL open timeout (seconds).
                    - type: I{float}
                    - default: 90

        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        # Jar shared across requests: send() both adds and extracts cookies.
        self.cookiejar = CookieJar()
        self.proxy = {}
        # Optional pre-built opener; when None, u2opener() builds a fresh
        # one per call.
        self.urlopener = None

    def open(self, request):
        """Open the request URL and return the file-like response."""
        try:
            url = self.__get_request_url_for_urllib(request)
            log.debug('opening (%s)', url)
            u2request = urllib.request.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except urllib.error.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        """Send the request message and return a Reply, round-tripping
        cookies through the jar."""
        url = self.__get_request_url_for_urllib(request)
        msg = request.message
        headers = request.headers
        try:
            u2request = urllib.request.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            headers = fp.headers
            # Python 2 returns a mimetools.Message; .dict gives a plain dict.
            if sys.version_info < (3, 0):
                headers = headers.dict
            reply = Reply(http.client.OK, headers, fp.read())
            log.debug('received:\n%s', reply)
            return reply
        except urllib.error.HTTPError as e:
            # 202/204 carry no body and are not errors for our purposes.
            if e.code not in (http.client.ACCEPTED, http.client.NO_CONTENT):
                raise TransportError(e.msg, e.code, e.fp)

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.

        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.

        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.

        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        @return: The opened file-like urllib2 object.
        @rtype: fp

        """
        tm = self.options.timeout
        url = self.u2opener()
        # urllib2 before Python 2.6 has no per-call timeout; fall back to
        # the global socket default.
        if (sys.version_info < (3, 0)) and (self.u2ver() < 2.6):
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.

        @return: An opener.
        @rtype: I{OpenerDirector}

        """
        if self.urlopener is None:
            return urllib.request.build_opener(*self.u2handlers())
        return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.

        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]

        """
        return [urllib.request.ProxyHandler(self.proxy)]

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.

        @return: The urllib2 version.
        @rtype: float

        """
        try:
            # Fixed: the previous `urllib2.__version__` referenced an
            # undefined name under these urllib.request imports, so this
            # always fell through to the except branch.  Now consistent
            # with the sibling HttpTransport implementation.
            part = urllib.request.__version__.split('.', 1)
            return float('.'.join(part))
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo=None):
        # `memo` is accepted for the copy.deepcopy protocol but unused;
        # the previous mutable default `memo={}` was a Python anti-pattern.
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone

    @staticmethod
    def __get_request_url_for_urllib(request):
        """
        Returns the given request's URL, properly encoded for use with urllib.

        We expect that the given request object already verified that the URL
        contains ASCII characters only and stored it as a native str value.

        urllib accepts URL information as a native str value and may break
        unexpectedly if given URL information in another format.

        Python 3.x httplib.client implementation must be given a unicode string
        and not a bytes object and the given string is internally converted to
        a bytes object using an explicitly specified ASCII encoding.

        Python 2.7 httplib implementation expects the URL passed to it to not
        be a unicode string. If it is, then passing it to the underlying
        httplib Request object will cause that object to forcefully convert all
        of its data to unicode, assuming that data contains ASCII data only and
        raising a UnicodeDecodeError exception if it does not (caused by simple
        unicode + string concatenation).

        Python 2.4 httplib implementation does not really care about this as it
        does not use the internal optimization present in the Python 2.7
        implementation causing all the requested data to be converted to
        unicode.

        """
        assert isinstance(request.url, str)
        return request.url
Example #14
0
class CookieTransport(TimeoutTransport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''
    # Class-level jar, shared until an instance assigns its own; created
    # lazily on the first send_cookies() call.
    cookiejar = None
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        '''Put applicable Cookie headers from the jar onto the connection.

        On the very first call the jar does not exist yet, so it is
        created and nothing is sent: a brand-new jar is empty and
        therefore falsy, which is why the elif skips it.
        '''
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    # This is the same request() method from xmlrpclib.Transport,
    # with a couple additions noted below
    def request(self, host, handler, request_body, verbose=0):
        '''Issue a single XML-RPC request, sending and capturing cookies.

        Mirrors xmlrpclib.Transport.request; the cookie-specific lines
        are marked ADDED below.  Raises xmlrpc.client.ProtocolError for
        any non-200 response.
        '''
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # Synthetic urllib request: exists only so the CookieJar has a
        # proper URL object to match and extract cookies against.
        request_url = "%s://%s/" % (self.scheme, host)
        cookie_request = urllib.request.Request(request_url)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h, cookie_request)  # ADDED. creates cookiejar if None.
        self.send_user_agent(h)
        self.send_content(h, request_body)

        errcode, errmsg, headers = h.getreply()

        # ADDED: parse headers and get cookies here
        # fake a response object that we can fill with the headers above
        class CookieResponse:
            def __init__(self, headers):
                self.headers = headers

            def info(self):
                # CookieJar.extract_cookies() reads headers via info().
                return self.headers
        cookie_response = CookieResponse(headers)
        # Okay, extract the cookies from the headers
        self.cookiejar.extract_cookies(cookie_response, cookie_request)
        # And write back any changes
        if hasattr(self.cookiejar, 'save'):
            # e.g. a FileCookieJar-style jar that persists to disk
            self.cookiejar.save(self.cookiejar.filename)

        if errcode != 200:
            raise xmlrpc.client.ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
            )

        self.verbose = verbose

        try:
            # Reach into the connection for the raw socket; older/newer
            # httplib layouts may not expose it, hence the fallback.
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock)
Example #15
0
    def register(self):
        """Log in to the student portal and reserve the user's chosen meals.

        Drives a headless Firefox (via pyvirtualdisplay) after first
        authenticating over urllib so the session cookie can be handed
        to the browser.

        Returns:
            None on success or when there is nothing to reserve,
            "wup" when the credentials are rejected,
            otherwise the stringified exception that aborted the run.
        """
        # Nothing selected for any meal -> nothing to do.
        if not (any(self.user.breakfast) or any(self.user.lunch) or any(self.user.dinner)):
            print("\tNothing to reserve.")
            return None
        try:
            # Headless X display for the Firefox instance.
            display = Display(visible=False, size=(1600, 1200))
            display.start()
            with contextlib.closing(webdriver.Firefox()) as browser:
                # Authenticate with urllib first; the captcha solver needs
                # the same cookie jar as the login request.
                cj = CookieJar()
                opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
                opener.addheaders = [('User-agent', 'Mozilla/5.0')]
                authentication_url = "https://stu.iust.ac.ir/j_security_check"
                payload = {"j_username": self.user.stu_username,
                           "j_password": self.user.stu_password,
                           "captcha_input": get_captcha(cj),
                           "login": u"ورود", }
                data = urllib.parse.urlencode(payload)
                binary_data = data.encode('UTF-8')
                request = urllib.request.Request(authentication_url, binary_data)
                response = opener.open(request)
                cj.extract_cookies(response, request)
                print(cj._cookies)
                contents = str(response.read(), 'utf-8')

                # The portal shows a warning icon on failed logins.
                if u'iconWarning.gif' in contents:
                    print("wrong user pass")
                    return "wup"

                # Hand the authenticated JSESSIONID over to the browser so
                # selenium shares the urllib session.
                new_cookie = {'expiry': None, 'value':cj._cookies['stu.iust.ac.ir']['/']['JSESSIONID'].value, 'name': 'JSESSIONID', 'secure': True, 'path': '/', 'domain': 'stu.iust.ac.ir'}

                #browser = webdriver.Firefox()
                # Must visit the domain before add_cookie() will accept it.
                browser.get("https://stu.iust.ac.ir")
                browser.add_cookie(new_cookie)

                # TODO: handle wrong user or pass
                browser.get("https://stu.iust.ac.ir/nurture/user/multi/reserve/showPanel.rose")
                browser.find_element_by_id("nextWeekBtn").click()
                import time

                # One pass per distinct dining hall ("self"); 0 means none.
                for self_id in set(self.user.breakfast + self.user.lunch + self.user.dinner) - {0}:
                    # browser.get("https://stu.iust.ac.ir/nurture/user/multi/reserve/showPanel.rose")
                    # Poll for the hidden-id element while the page loads
                    # (up to ~3 seconds).  NOTE(review): bare except hides
                    # all errors here, not only "element not found".
                    self_hidden_id = None
                    for i in range(10):
                        try:
                            self_hidden_id = browser.find_element_by_id("selfHiddenId")
                            break
                        except:
                            time.sleep(0.3)

                    # Switch the page's dining-hall selector if needed.
                    if self_hidden_id.get_attribute('value') != self_id:
                        try:
                            browser.find_element_by_id("selfId").find_element_by_xpath(
                                "//option[@value='" + str(self_id) + "']").click()
                        except NoSuchElementException:
                            print("\tERR - Invalid self: {} self:{}".format(self.user.stu_username, self_id))
                            continue
                    foods_to_register = []
                    food_chart = _get_foods(browser.page_source)

                    # Columns 0/1/2 of the chart appear to be
                    # breakfast/lunch/dinner respectively — TODO confirm
                    # against _get_foods().
                    for i, day in enumerate(self.user.breakfast):
                        if day == self_id:
                            foods_in_day = food_chart[i][0]
                            chosen = choose_food(self.user, foods_in_day)
                            if chosen is not None:
                                foods_to_register.append(chosen)
                    for i, day in enumerate(self.user.lunch):
                        if day == self_id:
                            foods_in_day = food_chart[i][1]
                            chosen = choose_food(self.user, foods_in_day)
                            if chosen is not None:
                                foods_to_register.append(chosen)
                    for i, day in enumerate(self.user.dinner):
                        if day == self_id:
                            foods_in_day = food_chart[i][2]
                            chosen = choose_food(self.user, foods_in_day)
                            if chosen is not None:
                                foods_to_register.append(chosen)

                    # Tick every chosen food's checkbox, then submit.
                    for index, food_to_check in foods_to_register:
                        print("\t->" + str(food_to_check))
                        browser.find_element_by_id("userWeekReserves.selected" + str(index)).click()
                    browser.find_element_by_id("doReservBtn").click()

        except Exception as e:
            # NOTE(review): returning here skips display.stop() below, so
            # the virtual display leaks on any failure — consider finally.
            return str(e)
            # browser.quit()
        display.stop()
        return None
Example #16
0
def unshort_url(url, parse_documents=False, enable_cookies=None, **kwargs):
    """Try to unshort the given URL (follow http redirects).

    Parameters:
        url (`str`):
            Shortened URL.

        parse_documents (`bool`, *optional*):
            If True, Unalix will also try to obtain the unshortened URL from the
            response's body.

        enable_cookies (`bool`, *optional*):
            True: Unalix will handle cookies for all requests.
            False: Unalix will not handle cookies.
            None (default): Unalix will handle cookies only if needed.

            In most cases, cookies returned in HTTP responses are useless.
            They do not need to be stored or sent back to the server.

            Keeping this as "None" should be enough for you. Only set this parameter
            to True if you get stuck at some redirect loop due to missing cookies.

        **kwargs (`bool`, *optional*):
            Optional arguments that `parse_rules` takes.

    Raises:
        ConnectionError: In case some error occurred during the request.

        TooManyRedirects: In case the request exceeded maximum allowed redirects.

        InvalidURL: In case the provided *url* is not a valid URL or hostname.

        InvalidScheme: In case the provided *url* has a invalid or unknown scheme.

        InvalidContentEncoding: In case the "Content-Enconding" header has a invalid value.

    Usage:
      >>> from unalix import unshort_url
      >>> unshort_url("https://bitly.is/Pricing-Pop-Up")
      'https://bitly.com/pages/pricing'
    """
    url = parse_rules(parse_url(url), **kwargs)

    # Pick the cookie policy up front; the jar itself is reused for the
    # whole redirect chain.
    if enable_cookies is None:
        policy = allow_cookies_if_needed
    elif enable_cookies is True:
        policy = allow_all_cookies
    else:
        policy = deny_all_cookies

    cookies = CookieJar(policy=policy)
    total_redirects = 0

    while True:

        if total_redirects > max_redirects:
            raise TooManyRedirects(
                "The request exceeded maximum allowed redirects", url
            )

        scheme, netloc, path, params, query, fragment = urlparse(url)
        connection = create_connection(scheme, netloc)
        add_missing_attributes(url, connection)

        if query:
            path = f"{path}?{query}"

        cookies.add_cookie_header(connection)

        headers = connection.headers
        headers.update(default_headers)

        try:
            connection.request("GET", path, headers=headers)
            response = connection.getresponse()
        except Exception as exception:
            raise ConnectionError(str(exception), url)

        cookies.extract_cookies(response, connection)

        redirect_url = handle_redirects(url, response)

        # An HTTP redirect: clean the target and go around again.
        if isinstance(redirect_url, str):
            total_redirects += 1
            url = parse_rules(redirect_url, **kwargs)
            continue

        # Optionally look for the destination inside the response body.
        if parse_documents:
            extracted_url = extract_url(url, response)
            if isinstance(extracted_url, str):
                url = parse_rules(extracted_url, **kwargs)
                continue

        break

    if not response.isclosed():
        response.close()

    return url
Example #17
0
class HttpTransport(Transport):
    """
    HTTP transport using urllib2.  Provided basic http transport
    that provides for cookies, proxies but no authentication.
    """

    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An http proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the url open timeout (seconds).
                    - type: I{float}
                    - default: 90
        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        self.cookiejar = CookieJar()
        self.proxy = {}
        self.urlopener = None

    def open(self, request):
        """
        Open the URL named by the request (no message body is sent).
        @param request: A transport request with a url attribute.
        @return: An open file-like urllib2 response object.
        @raise TransportError: On any HTTP error response.
        """
        try:
            url = request.url
            log.debug('opening (%s)', url)
            u2request = u2.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        """
        Send the request message, round-tripping cookies through the jar.
        @param request: A transport request with url, message and headers.
        @return: A Reply on success; None for 202/204 (no content) replies.
        @raise TransportError: On any other HTTP error response.
        """
        result = None
        url = request.url
        msg = request.message
        headers = request.headers
        try:
            u2request = u2.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            # Reflect any headers urllib added (e.g. cookies) back onto
            # the transport request for logging/inspection.
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            result = Reply(200, fp.headers, fp.read())
            log.debug('received:\n%s', result)
        except HTTPError as e:
            # 202 Accepted / 204 No Content are successful but bodiless.
            if e.code in (202, 204):
                result = None
            else:
                raise TransportError(e.msg, e.code, e.fp)
        return result

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Requet.
        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Requet.
        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Requet.
        @return: The opened file-like urllib2 object.
        @rtype: fp
        """
        tm = self.options.timeout
        url = self.u2opener()
        if self.u2ver() < 2.6:
            # Older urllib2 has no per-call timeout; fall back to the
            # process-wide socket default.
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        else:
            return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.
        @return: An opener.
        @rtype: I{OpenerDirector}
        """
        if self.urlopener is None:
            return u2.build_opener(*self.u2handlers())
        else:
            return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.
        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]
        """
        handlers = []
        handlers.append(u2.ProxyHandler(self.proxy))
        return handlers

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.
        @return: The urllib2 version, or 0 when it cannot be parsed.
        @rtype: float
        """
        try:
            part = u2.__version__.split('.', 1)
            n = float('.'.join(part))
            return n
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo=None):
        """
        Create a clone of this transport with a copy of its options.

        @param memo: Standard deepcopy memo dict; unused but kept for
            protocol compatibility.  Default changed from the mutable
            ``{}`` literal (shared across calls) to ``None``.
        @return: A new instance of this class with copied options.
        """
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone
Example #18
0
class QuartClient:
    """A client bound to an app, for testing purposes.

    Makes requests against the app and, when *use_cookies* is True,
    carries cookies between requests via a :class:`CookieJar`.  Best
    obtained via :attr:`~quart.app.Quart.test_client`.
    """

    http_connection_class = TestHTTPConnection
    websocket_connection_class = TestWebsocketConnection

    def __init__(self, app: "Quart", use_cookies: bool = True) -> None:
        self.app = app
        self.cookie_jar: Optional[CookieJar]
        if use_cookies:
            self.cookie_jar = CookieJar()
        else:
            self.cookie_jar = None
        self.preserve_context = False
        self.push_promises: List[Tuple[str, Headers]] = []

    async def open(
        self,
        path: str,
        *,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        data: Optional[AnyStr] = None,
        form: Optional[dict] = None,
        query_string: Optional[dict] = None,
        json: Any = sentinel,
        scheme: str = "http",
        follow_redirects: bool = False,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> Response:
        """Make a request against the app, optionally following redirects.

        See :meth:`_make_request` for how the request itself is built;
        this wrapper additionally resets the push-promise list and, with
        ``follow_redirects=True``, chases 3XX responses.
        """
        self.push_promises = []
        response = await self._make_request(
            path, method, headers, data, form, query_string, json, scheme, root_path, http_version
        )
        if follow_redirects:
            # NOTE(review): there is no cap on redirects followed here; a
            # redirect loop in the app under test would hang this loop.
            while response.status_code >= 300 and response.status_code <= 399:
                # Most browsers respond to an HTTP 302 with a GET request to the new location,
                # despite what the HTTP spec says. HTTP 303 should always be responded to with
                # a GET request.
                if response.status_code == 302 or response.status_code == 303:
                    method = "GET"
                response = await self._make_request(
                    response.location,
                    method,
                    headers,
                    data,
                    form,
                    query_string,
                    json,
                    scheme,
                    root_path,
                    http_version,
                )
        if self.preserve_context:
            _request_ctx_stack.push(self.app._preserved_context)
        return response

    def request(
        self,
        path: str,
        *,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        query_string: Optional[dict] = None,
        scheme: str = "http",
        root_path: str = "",
        http_version: str = "1.1",
    ) -> TestHTTPConnection:
        """Create a test HTTP connection for fine-grained request control."""
        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string
        )
        scope = self._build_scope(
            "http", path, method, headers, query_string_bytes, scheme, root_path, http_version
        )
        return self.http_connection_class(self.app, scope, _preserve_context=self.preserve_context)

    def websocket(
        self,
        path: str,
        *,
        headers: Optional[Union[dict, Headers]] = None,
        query_string: Optional[dict] = None,
        scheme: str = "ws",
        subprotocols: Optional[List[str]] = None,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> TestWebsocketConnection:
        """Create a test websocket connection to the app."""
        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string
        )
        scope = self._build_scope(
            "websocket", path, "GET", headers, query_string_bytes, scheme, root_path, http_version
        )
        return self.websocket_connection_class(self.app, scope)

    async def delete(self, *args: Any, **kwargs: Any) -> Response:
        """Make a DELETE request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="DELETE", **kwargs)

    async def get(self, *args: Any, **kwargs: Any) -> Response:
        """Make a GET request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="GET", **kwargs)

    async def head(self, *args: Any, **kwargs: Any) -> Response:
        """Make a HEAD request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="HEAD", **kwargs)

    async def options(self, *args: Any, **kwargs: Any) -> Response:
        """Make a OPTIONS request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="OPTIONS", **kwargs)

    async def patch(self, *args: Any, **kwargs: Any) -> Response:
        """Make a PATCH request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="PATCH", **kwargs)

    async def post(self, *args: Any, **kwargs: Any) -> Response:
        """Make a POST request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="POST", **kwargs)

    async def put(self, *args: Any, **kwargs: Any) -> Response:
        """Make a PUT request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="PUT", **kwargs)

    async def trace(self, *args: Any, **kwargs: Any) -> Response:
        """Make a TRACE request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="TRACE", **kwargs)

    def set_cookie(
        self,
        server_name: str,
        key: str,
        value: str = "",
        max_age: Optional[Union[int, timedelta]] = None,
        expires: Optional[Union[int, float, datetime]] = None,
        path: str = "/",
        domain: Optional[str] = None,
        secure: bool = False,
        httponly: bool = False,
        samesite: Optional[str] = None,
        charset: str = "utf-8",
    ) -> None:
        """Set a cookie in the cookie jar.

        The arguments are the standard cookie morsels and this is a
        wrapper around the stdlib SimpleCookie code.
        """
        cookie = dump_cookie(  # type: ignore
            key,
            value=value,
            max_age=max_age,
            expires=expires,
            path=path,
            domain=domain,
            secure=secure,
            httponly=httponly,
            charset=charset,
            samesite=samesite,
        )
        # Feed the rendered Set-Cookie header through the jar as if it
        # had arrived in a real response from server_name.
        self.cookie_jar.extract_cookies(
            _TestCookieJarResponse(Headers([("set-cookie", cookie)])),  # type: ignore
            U2Request(f"http://{server_name}{path}"),
        )

    def delete_cookie(
        self, server_name: str, key: str, path: str = "/", domain: Optional[str] = None
    ) -> None:
        """Delete a cookie (set to expire immediately)."""
        self.set_cookie(server_name, key, expires=0, max_age=0, path=path, domain=domain)

    @asynccontextmanager
    async def session_transaction(
        self,
        path: str = "/",
        *,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        query_string: Optional[dict] = None,
        scheme: str = "http",
        data: Optional[AnyStr] = None,
        form: Optional[dict] = None,
        json: Any = sentinel,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> AsyncGenerator[Session, None]:
        """Open, yield for mutation, then save the app's session.

        Any changes made to the yielded session are saved back through
        the app's session interface and the resulting cookies stored in
        this client's jar.
        """
        if self.cookie_jar is None:
            raise RuntimeError("Session transactions only make sense with cookies enabled.")

        # Normalize headers to a Headers instance (the original also had
        # a redundant `headers = headers` branch for Headers input).
        if headers is None:
            headers = Headers()
        elif not isinstance(headers, Headers):
            headers = Headers(headers)
        for cookie in self.cookie_jar:
            headers.add("cookie", f"{cookie.name}={cookie.value}")

        original_request_ctx = _request_ctx_stack.top
        async with self.app.test_request_context(
            path,
            method=method,
            headers=headers,
            query_string=query_string,
            scheme=scheme,
            data=data,
            form=form,
            json=json,
            root_path=root_path,
            http_version=http_version,
        ) as ctx:
            session_interface = self.app.session_interface
            session = await session_interface.open_session(self.app, ctx.request)
            if session is None:
                # Typo fix: was "sesion".
                raise RuntimeError("Error opening the session. Check the secret_key?")

            # Yield with the caller's context on top so session edits see
            # the expected request context.
            _request_ctx_stack.push(original_request_ctx)
            try:
                yield session
            finally:
                _request_ctx_stack.pop()

            response = self.app.response_class(b"")
            if not session_interface.is_null_session(session):
                await session_interface.save_session(self.app, session, response)
            self.cookie_jar.extract_cookies(
                _TestCookieJarResponse(response.headers),  # type: ignore
                U2Request(ctx.request.url),
            )

    async def __aenter__(self) -> "QuartClient":
        if self.preserve_context:
            raise RuntimeError("Cannot nest client invocations")
        self.preserve_context = True
        return self

    async def __aexit__(self, exc_type: type, exc_value: BaseException, tb: TracebackType) -> None:
        self.preserve_context = False

        # Pop every context preserved during the with-block.
        while True:
            top = _request_ctx_stack.top

            if top is not None and top.preserved:
                await top.pop(None)
            else:
                break

    async def _make_request(
        self,
        path: str,
        method: str,
        headers: Optional[Union[dict, Headers]],
        data: Optional[AnyStr],
        form: Optional[dict],
        query_string: Optional[dict],
        json: Any,
        scheme: str,
        root_path: str,
        http_version: str,
    ) -> Response:
        """Build the ASGI scope, run one request, and collect cookies."""
        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string
        )
        request_data, body_headers = make_test_body_with_headers(data, form, json, self.app)
        headers.update(**body_headers)  # type: ignore

        if self.cookie_jar is not None:
            for cookie in self.cookie_jar:
                headers.add("cookie", f"{cookie.name}={cookie.value}")

        scope = self._build_scope(
            "http", path, method, headers, query_string_bytes, scheme, root_path, http_version
        )
        async with self.http_connection_class(
            self.app, scope, _preserve_context=self.preserve_context
        ) as connection:
            await connection.send(request_data)
            await connection.send_complete()
        response = await connection.as_response()
        if self.cookie_jar is not None:
            self.cookie_jar.extract_cookies(
                _TestCookieJarResponse(response.headers),  # type: ignore
                U2Request(f"{scheme}://{headers['host']}{path}"),
            )
        self.push_promises.extend(connection.push_promises)
        return response

    def _build_scope(
        self,
        type_: str,
        path: str,
        method: str,
        headers: Headers,
        query_string: bytes,
        scheme: str,
        root_path: str,
        http_version: str,
    ) -> dict:
        """Assemble an ASGI scope dict for an http or websocket request."""
        scope = {
            "type": type_,
            "http_version": http_version,
            "asgi": {"spec_version": "2.1"},
            "method": method,
            "scheme": scheme,
            "path": path,
            "raw_path": path.encode("ascii"),
            "query_string": query_string,
            "root_path": root_path,
            "headers": encode_headers(headers),
            "_quart._preserve_context": self.preserve_context,
        }
        if type_ == "http" and http_version in {"2", "3"}:
            scope["extensions"] = {"http.response.push": {}}
        elif type_ == "websocket":
            scope["extensions"] = {"websocket.http.response": {}}
        return scope
Example #19
0
class QuartClient:
    """A Client bound to an app for testing.

    This should be used to make requests and receive responses from
    the app for testing purposes. This is best used via
    :attr:`~quart.app.Quart.test_client` method.
    """
    def __init__(self, app: "Quart", use_cookies: bool = True) -> None:
        # The cookie jar mimics a browser's cookie store across
        # requests made with this client; it is None when cookie
        # support is disabled.
        self.cookie_jar: Optional[CookieJar]
        if use_cookies:
            self.cookie_jar = CookieJar()
        else:
            self.cookie_jar = None
        self.app = app
        self.push_promises: List[Tuple[str, Headers]] = []
        self.preserve_context = False

    async def open(
        self,
        path: str,
        *,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        data: Optional[AnyStr] = None,
        form: Optional[dict] = None,
        query_string: Optional[dict] = None,
        json: Any = sentinel,
        scheme: str = "http",
        follow_redirects: bool = False,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> Response:
        """Open a request to the app associated with this client.

        Arguments:
            path:
                The path to request. If the query_string argument is not
                defined this argument will be partitioned on a '?' with the
                following part being considered the query_string.
            method:
                The method to make the request with, defaults to 'GET'.
            headers:
                Headers to include in the request.
            data:
                Raw data to send in the request body.
            form:
                Data to send form encoded in the request body.
            query_string:
                To send as a dictionary, alternatively the query_string can be
                determined from the path.
            json:
                Data to send json encoded in the request body.
            scheme:
                The scheme to use in the request, default http.
            follow_redirects:
                Whether or not a redirect response should be followed, defaults
                to False.
            root_path:
                The ASGI root_path for the request, defaults to "".
            http_version:
                The HTTP version for the request, defaults to "1.1".

        Returns:
            The response from the app handling the request.
        """
        response = await self._make_request(path, method, headers, data, form,
                                            query_string, json, scheme,
                                            root_path, http_version)
        if follow_redirects:
            while response.status_code >= 300 and response.status_code <= 399:
                # Most browsers respond to an HTTP 302 with a GET request to the new location,
                # despite what the HTTP spec says. HTTP 303 should always be responded to with
                # a GET request.
                if response.status_code == 302 or response.status_code == 303:
                    method = "GET"
                response = await self._make_request(
                    response.location,
                    method,
                    headers,
                    data,
                    form,
                    query_string,
                    json,
                    scheme,
                    root_path,
                    http_version,
                )
        return response

    async def _make_request(
        self,
        path: str,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        data: Optional[AnyStr] = None,
        form: Optional[dict] = None,
        query_string: Optional[dict] = None,
        json: Any = sentinel,
        scheme: str = "http",
        root_path: str = "",
        http_version: str = "1.1",
    ) -> Response:
        """Build a request, pass it to the app, and return the response.

        Cookies stored in the jar are attached to the request, and any
        Set-Cookie headers in the response are stored back in the jar.
        """
        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string)

        request_data, body_headers = make_test_body_with_headers(
            data, form, json, self.app)
        # Replace with headers.update(**body_headers) when Werkzeug
        # supports https://github.com/pallets/werkzeug/pull/1687
        for key, value in body_headers.items():
            headers[key] = value

        if self.cookie_jar is not None:
            for cookie in self.cookie_jar:
                headers.add("cookie", f"{cookie.name}={cookie.value}")

        request = self.app.request_class(
            method,
            scheme,
            path,
            query_string_bytes,
            headers,
            root_path,
            http_version,
            send_push_promise=self._send_push_promise,
        )
        request.body.set_result(request_data)
        response = await self._handle_request(request)
        if self.cookie_jar is not None:
            self.cookie_jar.extract_cookies(
                _TestCookieJarResponse(response.headers),  # type: ignore
                U2Request(request.url),
            )
        return response

    async def _handle_request(self, request: Request) -> Response:
        # In order to preserve the context it must be copied from the
        # inner task.
        async def _inner() -> Tuple[Response, RequestContext]:
            response = await self.app.handle_request(
                request, _preserve=self.preserve_context)
            ctx = None
            if self.preserve_context:
                ctx = _request_ctx_stack.top.copy()
            return (response, ctx)

        response, ctx = await asyncio.ensure_future(_inner())
        if self.preserve_context:
            _request_ctx_stack.push(ctx)
        return response

    async def _send_push_promise(self, path: str, headers: Headers) -> None:
        # Record the push promise so tests can assert on it later.
        self.push_promises.append((path, headers))

    async def delete(self, *args: Any, **kwargs: Any) -> Response:
        """Make a DELETE request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="DELETE", **kwargs)

    async def get(self, *args: Any, **kwargs: Any) -> Response:
        """Make a GET request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="GET", **kwargs)

    async def head(self, *args: Any, **kwargs: Any) -> Response:
        """Make a HEAD request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="HEAD", **kwargs)

    async def options(self, *args: Any, **kwargs: Any) -> Response:
        """Make a OPTIONS request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="OPTIONS", **kwargs)

    async def patch(self, *args: Any, **kwargs: Any) -> Response:
        """Make a PATCH request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="PATCH", **kwargs)

    async def post(self, *args: Any, **kwargs: Any) -> Response:
        """Make a POST request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="POST", **kwargs)

    async def put(self, *args: Any, **kwargs: Any) -> Response:
        """Make a PUT request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="PUT", **kwargs)

    async def trace(self, *args: Any, **kwargs: Any) -> Response:
        """Make a TRACE request.

        See :meth:`~quart.testing.QuartClient.open` for argument
        details.
        """
        return await self.open(*args, method="TRACE", **kwargs)

    def set_cookie(
        self,
        server_name: str,
        key: str,
        value: str = "",
        max_age: Optional[Union[int, timedelta]] = None,
        expires: Optional[Union[int, float, datetime]] = None,
        path: str = "/",
        domain: Optional[str] = None,
        secure: bool = False,
        httponly: bool = False,
        samesite: Optional[str] = None,
        charset: str = "utf-8",
    ) -> None:
        """Set a cookie in the cookie jar.

        The arguments are the standard cookie morsels and this is a
        wrapper around the stdlib SimpleCookie code.
        """
        # NOTE(review): this assumes the client was created with
        # use_cookies=True; with a None cookie_jar it raises
        # AttributeError — confirm whether a clearer error is wanted.
        cookie = dump_cookie(  # type: ignore
            key,
            value=value,
            max_age=max_age,
            expires=expires,
            path=path,
            domain=domain,
            secure=secure,
            httponly=httponly,
            charset=charset,
            samesite=samesite,
        )
        self.cookie_jar.extract_cookies(
            _TestCookieJarResponse(Headers([("set-cookie", cookie)
                                            ])),  # type: ignore
            U2Request(f"http://{server_name}{path}"),
        )

    def delete_cookie(self,
                      server_name: str,
                      key: str,
                      path: str = "/",
                      domain: Optional[str] = None) -> None:
        """Delete a cookie (set to expire immediately)."""
        self.set_cookie(server_name,
                        key,
                        expires=0,
                        max_age=0,
                        path=path,
                        domain=domain)

    @asynccontextmanager
    async def websocket(
        self,
        path: str,
        *,
        headers: Optional[Union[dict, Headers]] = None,
        query_string: Optional[dict] = None,
        scheme: str = "ws",
        subprotocols: Optional[List[str]] = None,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> AsyncGenerator[_TestingWebsocket, None]:
        """Open a websocket connection to the app.

        Yields a testing websocket the caller can use to exchange
        messages; the handling task is cancelled on exit.
        """
        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string)

        if self.cookie_jar is not None:
            for cookie in self.cookie_jar:
                headers.add("cookie", f"{cookie.name}={cookie.value}")

        queue: asyncio.Queue = asyncio.Queue()
        websocket_client = _TestingWebsocket(queue)

        subprotocols = subprotocols or []
        websocket = self.app.websocket_class(
            path,
            query_string_bytes,
            scheme,
            headers,
            root_path,
            http_version,
            subprotocols,
            queue.get,
            websocket_client.local_queue.put,
            websocket_client.accept,
        )
        adapter = self.app.create_url_adapter(websocket)
        try:
            adapter.match()
        except WBadRequest:
            raise BadRequest()

        websocket_client.task = asyncio.ensure_future(
            self.app.handle_websocket(websocket,
                                      _preserve=self.preserve_context))

        try:
            yield websocket_client
        finally:
            websocket_client.task.cancel()
            try:
                await websocket_client.task
            except asyncio.CancelledError:
                pass

    @asynccontextmanager
    async def session_transaction(
        self,
        path: str = "/",
        *,
        method: str = "GET",
        headers: Optional[Union[dict, Headers]] = None,
        query_string: Optional[dict] = None,
        scheme: str = "http",
        data: Optional[AnyStr] = None,
        form: Optional[dict] = None,
        json: Any = sentinel,
        root_path: str = "",
        http_version: str = "1.1",
    ) -> AsyncGenerator[Session, None]:
        """Open, yield, and then save a session for direct manipulation.

        Any changes made to the yielded session are saved back via the
        cookie jar, so a following request sees them.
        """
        if self.cookie_jar is None:
            raise RuntimeError(
                "Session transactions only make sense with cookies enabled.")

        headers, path, query_string_bytes = make_test_headers_path_and_query_string(
            self.app, path, headers, query_string)
        # Pass self.app for consistency with _make_request's invocation.
        request_body, body_headers = make_test_body_with_headers(
            data, form, json, self.app)
        headers.update(**body_headers)  # type: ignore

        if self.cookie_jar is not None:
            for cookie in self.cookie_jar:
                headers.add("cookie", f"{cookie.name}={cookie.value}")

        request = self.app.request_class(
            method,
            scheme,
            path,
            query_string_bytes,
            headers,
            root_path,
            http_version,
            send_push_promise=self._send_push_promise,
        )
        request.body.set_result(request_body)

        original_request_ctx = _request_ctx_stack.top
        async with self.app.request_context(request) as ctx:  # type: ignore
            session_interface = self.app.session_interface
            session = await session_interface.open_session(
                self.app, ctx.request)
            if session is None:
                raise RuntimeError(
                    "Error opening the session. Check the secret_key?")

            _request_ctx_stack.push(original_request_ctx)
            try:
                yield session
            finally:
                _request_ctx_stack.pop()

            response = self.app.response_class(b"")
            if not session_interface.is_null_session(session):
                await session_interface.save_session(self.app, session,
                                                     response)
            self.cookie_jar.extract_cookies(
                _TestCookieJarResponse(response.headers),  # type: ignore
                U2Request(ctx.request.url),
            )

    async def __aenter__(self) -> "QuartClient":
        if self.preserve_context:
            raise RuntimeError("Cannot nest client invocations")
        self.preserve_context = True
        return self

    async def __aexit__(self, exc_type: type, exc_value: BaseException,
                        tb: TracebackType) -> None:
        self.preserve_context = False

        # Pop any contexts preserved by requests made within the block.
        while True:
            top = _request_ctx_stack.top

            if top is not None and top.preserved:
                await top.pop(None)
            else:
                break
Example #20
0
class Session:
    """An HTTP session against a Jenkins instance.

    Handles cookies, optional HTTP basic authentication, and the CSRF
    crumb header required by CSRF-protected Jenkins servers.
    """
    def __init__(self, base, auth=None):
        self.auth = auth
        self.headers = {'User-Agent': 'foobar'}
        self.context = init_ssl()
        self.jar = CookieJar()
        # Reduce the base URL to scheme://netloc so callers can append
        # well-formed paths to it.
        split = urlsplit(base)
        self.base = '{}://{}'.format(split.scheme, split.netloc)

        if self.auth:
            auth = ':'.join(self.auth)
            if sys.version_info >= (3,):
                basic = base64.b64encode(auth.encode('ascii')).decode('ascii')
            else:
                basic = base64.b64encode(auth)
            self.headers['Authorization'] = 'Basic {}'.format(basic)

        self._get_crumb()

    def _get_crumb(self):
        """
        Get the necessary crumb header if our Jenkins instance is CSRF
        protected, and automatically add it to this session's default headers.
        """
        try:
            args = 'xpath=concat(//crumbRequestField,":",//crumb)'
            resp = self.get_url(self.base + '/crumbIssuer/api/xml?' + args)
        except HTTPError as err:
            # only ignore the error if it's a 404 (i.e. Jenkins is not CSRF
            # protected)
            if err.code != 404:
                raise
        else:
            # Split on the first ':' only so a crumb value containing a
            # colon is preserved intact.
            key, value = resp.text.split(':', 1)
            self.headers[key] = value

    def get_url(self, url, data=None, stream=False, retries=5):
        """
        Fetch a URL using the session's headers and cookie jar.

        If ``data`` is given the request is form-encoded and sent as a
        POST (never retried). Returns the response with a ``text``
        attribute, or a streaming wrapper when ``stream`` is true.
        """
        headers = self.headers.copy()
        if data is not None:
            data = urlencode(data).encode('utf-8')
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
            retries = 1  # do not retry POSTs
        req = Request(url, data, headers=headers)
        self.jar.add_cookie_header(req)
        for i in range(retries):  # pragma: nocover
            try:
                response = urlopen(req, context=self.context)
            except HTTPError:
                if i == retries - 1:
                    raise
                time.sleep(0.1)
            else:
                break
        self.jar.extract_cookies(response, req)
        if sys.version_info >= (3,):
            response.headers = CaseInsensitiveDict(response.headers._headers)
        else:
            response.headers = CaseInsensitiveDict(response.headers.dict)
        if stream:
            return stream_response(response)
        else:
            response.text = response.read().decode('utf-8')
            return response

    def get_job_params(self, url):
        """
        Get the list of allowed parameters and their respective choices.
        """
        url = url.rstrip('/') + '/api/json'
        response = self.get_url(url)
        response = json.loads(response.text)
        props = response.get('property', [])
        definition_prop = 'hudson.model.ParametersDefinitionProperty'
        defs = next(
            (
                p['parameterDefinitions']
                for p in props
                if p.get('_class', '') == definition_prop
            ),
            [],
        )
        if not defs:
            return {}

        params = {}
        for definition in defs:
            params[definition['name']] = definition.get('choices', None)
        return params

    def launch_build(self, url, params=None):
        """
        Submit job and return the queue item location.
        """
        url = url.rstrip('/') + '/'
        job_params = self.get_job_params(url)
        validate_params(job_params, params)

        url += 'buildWithParameters' if job_params else 'build'
        url += '?delay=0'
        log('Sending build request')
        data = params or ""  # urllib will send a POST with an empty string
        response = self.get_url(url, data=data)

        assert (
            'Location' in response.headers
        ), 'Something went wrong with the Jenkins API'
        location = response.headers['Location']

        assert 'queue' in location, 'Something went wrong with the Jenkins API'
        return location

    def get_queue_status(self, location):
        """
        Check the status of a queue item. Returns the build url if the job is
        already executing, or None if it's still in the queue.
        """
        queue = location.rstrip('/') + '/api/json'
        response = self.get_url(queue)
        response = json.loads(response.text)
        if response.get('cancelled', False):
            raise RuntimeError('Build was cancelled')
        if response.get('executable', False):
            return response['executable']['url']
        return None

    @deprecate(instead='wait_queue')
    def wait_queue_item(self, *args, **kwargs):
        pass

    def wait_queue(self, location, interval=5.0):
        """
        Wait until the item starts building.
        """
        while True:
            job_url = self.get_queue_status(location)
            if job_url is not None:
                break
            show_progress('Job queued', interval)
        log('')
        return job_url

    @deprecate(instead='job_status')
    def get_job_status(self, *args, **kwargs):
        pass

    def job_status(self, build_url):
        """
        Check the status of a running build.

        Returns a tuple with the status of the build and the current stage.
        The status is True on successful exit, False on failure or None if the
        build is still running.
        """
        poll_url = build_url.rstrip('/') + '/wfapi/describe'
        try:
            response = self.get_url(poll_url)
        except HTTPError as error:
            if error.code == 404:
                build_number = build_url.rstrip('/').rpartition('/')[2]
                error.msg = 'Build #%s does not exist' % build_number
            raise
        response = json.loads(response.text)

        status = response.get('status', '')
        stages = response.get('stages', [{}])
        if status == 'NOT_EXECUTED':
            if response.get('durationMillis', 0) == 0:
                # Build has just been launched. Report it as in_progress
                return None, {}
            # Build finished as not_executed. Probably an error in your
            # Jenkinsfile
            return False, stages[-1]
        elif status == 'IN_PROGRESS':
            in_progress = [
                s for s in stages if s.get('status', '') == 'IN_PROGRESS'
            ]
            in_progress = in_progress or [{}]
            return None, in_progress[0]
        else:
            # Jenkins returns false negatives in the 'status' field sometimes.
            # Instead of trusting 'status', we will determine if the build
            # failed by checking if any of the stages failed.
            last = stages[-1]
            status = all(
                s.get('status', '') in ('SUCCESS', 'NOT_EXECUTED')
                for s in stages
            )
            return status, last

    @deprecate(instead='wait_job')
    def wait_for_job(self, *args, **kwargs):
        pass

    def wait_job(self, build_url, interval=5.0):
        """
        Wait until the build finishes.
        """
        name = '#' + build_url.rstrip('/').split('/')[-1]
        last_stage = None
        while True:
            status, stage = self.job_status(build_url)
            if status is not None:
                status_name = 'SUCCESS' if status else 'FAILURE'
                log('\nJob', name, 'ended in', status_name)
                return status

            stage_name = stage.get('name', '')
            msg = stage_name or 'Build %s in progress' % name
            millis = stage.get('durationMillis', None)
            if stage_name != last_stage:
                last_stage = stage_name
                msg = '\n' + msg
            show_progress(msg, interval, millis=millis)

    def retrieve_log(self, build_url):
        """
        Get the build log and return it as a string.
        """
        build_url = build_url.rstrip('/') + '/'
        url = build_url + 'consoleText'
        log = ''.join(
            block.text.decode('utf-8', errors='ignore')
            for block in self.get_url(url, stream=True)
        )
        return log

    @deprecate(instead='dump_log')
    def save_log_to_file(self, *args, **kwargs):
        pass

    def dump_log(self, build_url, filename=None):
        """
        Save the build log to a file.
        """
        build_url = build_url.rstrip('/') + '/'
        if filename:
            file = filename
        elif CONFIG['output'] and CONFIG['output'] is not True:
            file = CONFIG['output']
        else:
            # Derive a filename from the /job/... part of the build URL.
            job_name = build_url[build_url.find('/job/') :]
            job_name = (
                job_name.replace('/', '_').replace('_job_', '_').strip('_')
            )
            file = job_name + '.txt'

        isfile = hasattr(file, 'write')
        if not isfile:
            file = io.open(file, 'w', encoding='utf-8')
        file.write(self.retrieve_log(build_url))

        if not isfile:
            file.close()
            log('Job output saved to', file)
Example #21
0
class HttpTransport(Transport):
    """
    HTTP transport using urllib2.  Provided basic http transport
    that provides for cookies, proxies but no authentication.
    """
    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An http proxy to be specified on requests.
                 The proxy is defined as {protocol:proxy,}
                    - type: I{dict}
                    - default: {}
            - B{timeout} - Set the url open timeout (seconds).
                    - type: I{float}
                    - default: 90
        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        self.cookiejar = CookieJar()
        self.proxy = {}
        self.urlopener = None

    def open(self, request):
        """
        Open the request's URL and return the file-like response.
        @param request: A transport request.
        @raise TransportError: On HTTP errors from the server.
        """
        try:
            url = request.url
            log.debug('opening (%s)', url)
            u2request = urllib.request.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except urllib.error.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        """
        Send the request message and return the reply.
        @param request: A transport request with url, message and headers.
        @return: A Reply, or None for 202/204 responses.
        @raise TransportError: On other HTTP errors.
        """
        result = None
        url = request.url
        msg = request.message
        headers = request.headers
        try:
            u2request = urllib.request.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            # NOTE(review): fp.headers.dict is a Python 2 mimetools API;
            # under Python 3 this would need dict(fp.headers) — confirm
            # the target runtime before relying on this path.
            result = Reply(200, fp.headers.dict, fp.read())
            log.debug('received:\n%s', result)
        except urllib.error.HTTPError as e:
            # 202/204 carry no body; treat them as an empty reply.
            if e.code in (202, 204):
                result = None
            else:
                raise TransportError(e.msg, e.code, e.fp)
        return result

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Requet.
        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Requet.
        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Requet.
        @return: The opened file-like urllib2 object.
        @rtype: fp
        """
        tm = self.options.timeout
        url = self.u2opener()
        # urllib2 before 2.6 lacks a per-call timeout; fall back to the
        # module level socket default.
        if self.u2ver() < 2.6:
            socket.setdefaulttimeout(tm)
            return url.open(u2request)
        else:
            return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.
        @return: An opener.
        @rtype: I{OpenerDirector}
        """
        if self.urlopener is None:
            return urllib.request.build_opener(*self.u2handlers())
        else:
            return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.
        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]
        """
        handlers = []
        handlers.append(urllib.request.ProxyHandler(self.proxy))
        return handlers

    def u2ver(self):
        """
        Get the major/minor version of the urllib2 lib.
        @return: The urllib2 version.
        @rtype: float
        """
        try:
            part = urllib.request.__version__.split('.', 1)
            n = float('.'.join(part))
            return n
        except Exception as e:
            log.exception(e)
            return 0

    def __deepcopy__(self, memo=None):
        """
        Clone this transport with a copy of its options.

        memo defaults to None rather than a mutable {} default (which
        would be shared between calls); it is accepted only to satisfy
        the copy protocol and is otherwise unused.
        """
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone
Example #22
0
class HttpClient(ObjectWithLogger):
    """An HTTP client.

    Provides retrying connections, cookie handling, redirect following
    and conditional file downloads over http.client connections.
    """

    ATTEMPTS = 10  # default retry count for connects and downloads
    LOGGER_NAME = 'HTTP'
    SLEEP_TIME = 1  # s; pause between retries
    TIMEOUT = 3  # s; per-connection socket timeout

    # Content-negotiation headers sent with GET/HEAD requests.
    DEFAULT_ACCEPT_HEADERS = {
        'Accept': '*/*;q=0.1',
        'Accept-Charset': 'utf-8;q=1.0, *;q=0.1',
        'Accept-Encoding': 'gzip, br, deflate;q=1.0, *;q=0.5',
        'Accept-Language': 'en-US, en;q=1.0, *;q=0.5'
    }
    # CORS preflight headers sent with OPTIONS requests.
    DEFAULT_ACCESS_CONTROL_REQUEST_HEADERS = {
        'Access-Control-Request-Headers':
        'Accept, Accept-Charset, Accept-Encoding, Accept-Language,'
        ' Connection, Content-Type, DNT',
        'Access-Control-Request-Method':
        'GET, HEAD, OPTIONS'
    }
    # Headers attached to every request regardless of method.
    DEFAULT_HEADERS = {
        'Connection': 'keep-alive',
        'DNT': '1',
        'Upgrade-Insecure-Requests': '1'
    }
    DEFAULT_HEAD_HEADERS = dict(**DEFAULT_HEADERS, **DEFAULT_ACCEPT_HEADERS)
    DEFAULT_OPTIONS_HEADERS = dict(**DEFAULT_HEADERS,
                                   **DEFAULT_ACCESS_CONTROL_REQUEST_HEADERS)

    # Tri-state flags: None = unknown until requestServerOptions()
    # probes the server, then True/False.
    isHeadAllowed = None
    isOptionsAllowed = None

    def __init__(self):
        """Initialize an HttpClient instance."""
        super().__init__()

        # Cookie jar shared by all requests made through this client.
        self.cookieJar = CookieJar()

    @contextmanager
    def connect(self, server, protocol='https', attempts=ATTEMPTS):
        """Connect to an HTTP server.

        Context manager: connects to *server*, negotiates server options
        and yields the open connection, retrying up to *attempts* times.
        Raises ExitException when no connection can be established.
        """
        if not can_connect_to_internet():
            self.logError('No Internet connection.')

            raise ExitException()

        # Normalize the (protocol, server) pair into a SplitResult.
        self.url = urlsplit(urlunsplit((protocol, server, '', '', '')))

        if self.url.scheme == 'https':
            connectionClass = HTTPSConnection
            port = HTTPS_PORT
        elif self.url.scheme == 'http':
            connectionClass = HTTPConnection
            port = HTTP_PORT
        else:
            self.logError('Unknown protocol.')

            raise ExitException()

        while attempts:
            try:
                # NOTE(review): netloc may itself contain an explicit
                # port; passing port= too assumes it is host-only.
                self.connection = connectionClass(self.url.netloc,
                                                  port=port,
                                                  timeout=self.TIMEOUT)

                self.logInfo('Connecting to ' + self.url.netloc + '...')
                self.connection.connect()
                self.logInfo('...connected; checking server options...')
                self.requestServerOptions()
                self.logInfo('...connection negotiated; yielding...')

                yield self.connection
            except gaierror as error:
                self.logDebug('...error getting address info; error code ' +
                              str(error.errno) + ': "' + error.strerror +
                              '"...')
                if error.errno == -2:  # Name or service not known
                    self.logDebug('netloc = ' + str(self.url.netloc))
                # DNS failure will not fix itself: make this the last
                # attempt (decremented to 0 below).
                attempts = 1
            except HTTPException:
                raise
            except HttpRedirect as redirect:
                # NOTE(review): if HttpRedirect subclasses HTTPException,
                # the clause above re-raises it first -- confirm hierarchy.
                self.logInfo('...following redirect...')
                with self.connect(redirect.url.netloc, attempts=attempts) \
                        as connection:
                    yield connection
            else:
                # Consumer finished without raising; the finally clause
                # still closes the connection.
                return
            finally:
                self.logInfo('...closing connection...')
                self.connection.close()

            attempts -= 1
            if attempts:
                self.logDebug('...retrying (' + str(attempts) +
                              ' more attempts)...')
                sleep(self.SLEEP_TIME)

        self.logError('...couldn\'t connect.')

        raise ExitException()

    def download(self, downloads, baseDownloadPath):
        """Download every file listed in *downloads* (a mapping of
        server -> iterable of {'url', 'saveAs'} dicts) under
        *baseDownloadPath*, creating that directory if needed."""
        make_directory_if_not_exists(baseDownloadPath)

        for server, fileEntries in downloads.items():
            with self.connect(server):
                for entry in fileEntries:
                    destination = baseDownloadPath / entry['saveAs']
                    self.downloadFile(entry['url'], destination)

    def downloadFile(self,
                     source,
                     destination,
                     attempts=ATTEMPTS,
                     force=False):
        """Download *source* to the local path *destination*.

        Skips the transfer when the remote file is unmodified (304) or
        appears already present, unless *force* is true.  Follows
        same-origin redirects; retries up to *attempts* times.
        """
        self.logInfo('Downloading ' + source + '...')

        if isinstance(source, str):
            self.url = urlsplit(source)
        self.logDebug(self.url)

        doHead = self.isHeadAllowed
        doOptions = self.isOptionsAllowed
        headers = dict(**self.DEFAULT_HEADERS, **self.DEFAULT_ACCEPT_HEADERS)
        # Origin-form request target: path + query only.
        resource = urlunsplit(('', '', self.url.path, self.url.query, ''))

        if not force:
            last_modified = get_last_modified_time(destination)
            if last_modified:
                headers['If-Modified-Since'] = last_modified
            # TODO: use ETag
            # etag = etag(destination)
            # if etag:
            #     headers['If-None-Match'] = etag

        while attempts:
            if doOptions:
                with self.requestOptions(resource,
                                         headers=headers) as response:
                    pass

            if doHead:
                with self.requestHead(resource, headers=headers) as response:
                    # TODO: if 'Last-Modified' >
                    if not force and response.status == 304:
                        self.logInfo('...remote file not modified; skipping.')

                        return

                    # NOTE(review): getheader() returns a string; this
                    # equality only holds if get_file_size() also
                    # returns a string -- confirm against its definition.
                    if response.getheader('Content-Length') \
                            == get_file_size(destination):
                        self.logInfo('...file already exists; skipping.')

                        return

            try:
                with self.request(resource, headers=headers) as response:
                    if response.status in REDIRECT_STATUS_CODES:
                        raise HttpRedirect(response.status,
                                           response.getheader('Location'))
                    elif not force and response.status == 304:
                        self.logInfo('...remote file not modified; skipping.')

                        return
                    elif response.status == 200:
                        self.logInfo('...saving as ' + str(destination) +
                                     '...')
                        with destination.open('w+b') as destinationFile:
                            destinationFile.write(response.read())
                            # for chunk in response.iter_content(ONE_MEGABYTE):
                            #     if chunk:
                            #         destinationFile.write(chunk)
                            #         destinationFile.flush()

                        # TODO: store ETag
                        # if response.getheader('ETag'):
                        #     save_etag(response.headers['ETag'])
            except HTTPException as error:
                self.logError(error.args[0])
                self.logError('...HTTP error...')
            except HttpRedirect as redirect:
                # Cross-origin redirects are escalated to the caller.
                if redirect.url.scheme != self.url.scheme \
                        or redirect.url.netloc != self.url.netloc:
                    raise redirect

                # Same-origin redirect: retarget and retry immediately
                # without consuming an attempt.
                self.url = redirect.url
                resource = \
                    urlunsplit(('', '', self.url.path, self.url.query, ''))
                doHead = False

                continue
            else:
                self.logInfo('...' + source + ' downloaded.')

                return

            attempts -= 1
            if attempts:
                self.logDebug('...retrying (' + str(attempts) +
                              ' more attempts)...')
                sleep(self.SLEEP_TIME)

        self.logWarning('...couldn\'t download ' + source + '.')

    @staticmethod
    def formatHeader(header, argument):
        """Format an HTTP header for logging."""
        # Delegates to the module-level helper.
        return format_http_header(header, argument)

    @contextmanager
    def request(self, resource, method=GET, headers=None, messageBody=None):
        """Send one HTTP request and yield the response.

        Context manager: sends *method* for *resource* over the current
        connection with cookies from the jar attached, then yields the
        response; cookies set by the response are stored back.
        """
        if headers is None:
            headers = {}
            skipAcceptEncoding = skipHost = False
        else:
            # http.client auto-generates Host and Accept-Encoding unless
            # told to skip them; skip when the caller supplied its own.
            skipAcceptEncoding = 'Accept-Encoding' in headers
            skipHost = 'Host' in headers
        # The urllib Request is only a carrier for cookie-policy checks;
        # the wire request is built manually below.
        request = Request(urlunsplit(self.url), headers=headers, method=method)
        self.cookieJar.add_cookie_header(request)
        headers = sorted(request.header_items())

        self.logInfo('...starting request...')
        self.logDebug(method + ' ' + resource + ' HTTP/1.1')
        self.connection.putrequest(method,
                                   resource,
                                   skip_host=skipHost,
                                   skip_accept_encoding=skipAcceptEncoding)

        if headers:
            self.logDebug('...sending headers...')
            for header, argument in headers:
                self.logDebug(self.formatHeader(header, argument))
                self.connection.putheader(header, argument)

        if messageBody:
            self.logDebug('...sending message body...')
            # self.logDebug(messageBody)
            self.connection.endheaders(messageBody)
        else:
            self.connection.endheaders()

        self.logDebug('...getting response...')
        with self.connection.getresponse() as response:
            self.logInfo('...response received...')
            self.logDebug(str(response.status) + ' ' + response.reason)
            for header, argument in response.getheaders():
                self.logDebug(self.formatHeader(header, argument))
            # self.logDebug(response.read())

            self.cookieJar.extract_cookies(response, request)

            yield response

    def requestHead(self, resource, headers=None):
        """Issue a HEAD request for *resource*, defaulting to the
        standard HEAD header set when none are given."""
        chosenHeaders = headers if headers is not None \
            else self.DEFAULT_HEAD_HEADERS

        return self.request(resource, HEAD, headers=chosenHeaders)

    def requestOptions(self, resource, headers=None):
        """Issue an OPTIONS request for *resource*, defaulting to the
        standard OPTIONS header set when none are given."""
        chosenHeaders = headers if headers is not None \
            else self.DEFAULT_OPTIONS_HEADERS

        return self.request(resource, OPTIONS, headers=chosenHeaders)

    def requestServerOptions(self, headers=None):
        """Probe the server with a server-wide OPTIONS request to learn
        which methods it allows, updating isHeadAllowed /
        isOptionsAllowed; raises HttpRedirect when redirected."""
        if headers is None:
            headers = self.DEFAULT_HEADERS

        with self.request(SERVER_WIDE_REQUEST_TARGET, OPTIONS,
                          headers=headers) as response:
            if response.status == 200:
                if HEAD in response.getheader('Allow'):
                    self.isHeadAllowed = True
            elif response.status in REDIRECT_STATUS_CODES:
                location = response.getheader('Location')
                # NOTE(review): self.server is never assigned in this
                # class (self.url is) -- this branch likely raises
                # AttributeError; confirm and fix.
                if location.split('://')[1] \
                        == self.server + SERVER_WIDE_REQUEST_TARGET:
                    location = location[:-1]

                raise HttpRedirect(response.status, location)
            elif response.status >= 400:
                # Server rejected the server-wide target; fall back to
                # probing the server root instead.
                with self.requestOptions(SERVER_ROOT) as rootOptionsResponse:
                    if rootOptionsResponse.status == 200:
                        self.isOptionsAllowed = True
                    elif rootOptionsResponse.status >= 400:
                        self.isOptionsAllowed = False

                if not self.isOptionsAllowed:
                    with self.requestHead(SERVER_ROOT) as rootHeadResponse:
                        if rootHeadResponse.status == 200:
                            self.isHeadAllowed = True
                        # NOTE(review): statuses 400-403 leave
                        # isHeadAllowed unset -- possibly meant >= 400.
                        elif rootHeadResponse.status >= 404:
                            self.isHeadAllowed = False