Example no. 1
0
def _read_from_url(url):
    """Reads data from *url* with an HTTP *GET*.

    This function supports fetching from resources which use basic HTTP auth as
    laid out by RFC1738 § 3.1. See § 5 for grammar definitions for URLs.

    .. seealso:

       https://www.ietf.org/rfc/rfc1738.txt

    :param url: URL of an HTTP resource
    :type url: ``str``

    :return: data read from resource described by *url*
    :rtype: ``file``-like object
    """
    url, username, password = _strip_basic_auth(url)
    if username is not None and password is not None:
        # case: url contains basic auth creds
        password_mgr = request.HTTPPasswordMgrWithDefaultRealm()
        # realm=None registers the credentials for any realm rooted at the
        # (now credential-stripped) URL.
        password_mgr.add_password(None, url, username, password)
        handler = request.HTTPBasicAuthHandler(password_mgr)
        # BUG FIX: build_opener() takes handlers as individual positional
        # arguments, not as a single list.  Passing the list itself made
        # add_handler() choke on a ``list`` object at open time.
        opener = request.build_opener(*(default_handlers + [handler]))
    else:
        opener = default_opener

    return opener.open(url)
Example no. 2
0
def pytest_unconfigure(config):
    """
    Called after all tests are completed.

    Converts the collected function profiles to Codespeed's JSON format and
    POSTs them to the configured ``submit_url`` (with HTTP basic auth when a
    username is configured).

    :param config: pytest config object (required by the hook signature,
        unused here).
    :raises HTTPError: re-raised after saving the server's HTML error page
        to a temp file for inspection.
    :raises ValueError: if the server does not acknowledge the submission.
    """
    global enabled, submit_url, commit, branch, environment, username, password

    if not enabled:
        return
    # e.g. "CPython-3.9.1-Linux": identifies the benchmark executable.
    executable = "{}-{}-{}".format(platform.python_implementation(),
                                   platform.python_version(),
                                   platform.system())
    data = [
        x.to_codespeed_dict(commit=commit,
                            branch=branch,
                            environment=environment,
                            executable=executable)
        for x in function_profile_list
    ]

    try:
        json_submit_url = submit_url + 'result/add/json/'

        if username:
            password_mgr = urllib_request.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, json_submit_url, username,
                                      password)
            handler = urllib_request.HTTPBasicAuthHandler(password_mgr)
            opener = urllib_request.build_opener(handler)
        else:
            opener = urllib_request.build_opener()

        # use the opener to fetch a URL
        f = opener.open(json_submit_url,
                        urlencode({
                            'json': json.dumps(data)
                        }).encode('UTF-8'))
        response = f.read()
    except HTTPError as e:
        print('Error while connecting to Codespeed:')
        print('Exception: {}'.format(str(e)))
        # Dump the server's error page so the user can inspect what went
        # wrong server-side.
        fd, name = tempfile.mkstemp(suffix='.html')
        os.close(fd)
        with open(name, 'wb') as f:
            f.write(e.read())
        print('HTTP Response written to {}'.format(name))
        # Bare raise preserves the original traceback (``raise e`` was
        # redundant and re-anchors the traceback here).
        raise

    if not response.startswith(
            'All result data saved successfully'.encode('UTF-8')):
        print("Unexpected response while connecting to Codespeed:")
        raise ValueError(
            'Unexpected response from Codespeed server: {}'.format(response))
    else:
        # Typo fix: was "sumbitted".
        print("{} test benchmarks submitted.".format(
            len(function_profile_list)))
Example no. 3
0
def download_tile(tile, url, pid, version, username, password):
    """Download a single NASADEM tile archive into the current directory.

    :param tile: tile name, e.g. ``n40e012``
    :param url: base URL of the NASADEM server
    :param pid: process id (kept for interface compatibility; unused here)
    :param version: dataset version path component
    :param username: NASA Earthdata user name
    :param password: NASA Earthdata password
    :return: 1 on success, 0 on any failure (best-effort contract)
    """
    grass.debug("Download tile: %s" % tile, debug=1)
    local_tile = "NASADEM_HGT_" + str(tile) + ".zip"

    urllib2.urlcleanup()

    remote_tile = str(url) + "/" + version + "/2000.02.11/" + local_tile
    goturl = 1

    try:
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        # Credentials are registered for the Earthdata login host, which
        # the download request is redirected through.
        password_manager.add_password(
            None, "https://urs.earthdata.nasa.gov", username, password
        )

        # A cookie jar is needed so the Earthdata auth redirect round-trip
        # can carry its session cookie.
        cookie_jar = CookieJar()

        opener = urllib2.build_opener(
            urllib2.HTTPBasicAuthHandler(password_manager),
            # urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
            # urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
            urllib2.HTTPCookieProcessor(cookie_jar),
        )
        urllib2.install_opener(opener)

        request = urllib2.Request(remote_tile)
        response = urllib2.urlopen(request)

        # BUG FIX: the original called "fo.close" without parentheses,
        # which never closed the file.  "with" guarantees the close.
        with open(local_tile, "w+b") as fo:
            fo.write(response.read())
        time.sleep(0.5)
    except Exception:
        # Best-effort contract: any failure simply reports "not downloaded".
        # Narrowed from a bare except so Ctrl-C still interrupts.
        goturl = 0

    return goturl
Example no. 4
0
def download_tile(tile, url, pid, srtmv3, one, username, password):
    """Download one SRTM tile archive into the current directory.

    :param tile: tile name, e.g. ``N40E012``
    :param url: base URL of the SRTM server
    :param pid: process id used to build the temporary map name
    :param srtmv3: truthy to fetch SRTM v3 (requires Earthdata auth)
    :param one: truthy for 1-arcsec tiles, else 3-arcsec (v3 only)
    :param username: NASA Earthdata user name (v3 only)
    :param password: NASA Earthdata password (v3 only)
    :return: 1 on success, 0 if the tile could not be fetched anywhere
    """
    grass.debug("Download tile: %s" % tile, debug = 1)
    output = tile + '.r.in.srtm.tmp.' + str(pid)
    if srtmv3:
        if one:
            local_tile = str(tile) + '.SRTMGL1.hgt.zip'
        else:
            local_tile = str(tile) + '.SRTMGL3.hgt.zip'
    else:
        local_tile = str(tile) + '.hgt.zip'

    urllib2.urlcleanup()

    if srtmv3:
        remote_tile = str(url) + local_tile
        goturl = 1

        try:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            # Earthdata auth: the download is redirected through this host.
            password_manager.add_password(None, "https://urs.earthdata.nasa.gov", username, password)

            cookie_jar = CookieJar()

            opener = urllib2.build_opener(
                urllib2.HTTPBasicAuthHandler(password_manager),
                #urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
                #urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
                urllib2.HTTPCookieProcessor(cookie_jar))
            urllib2.install_opener(opener)

            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)

            # BUG FIX: "fo.close" without parentheses never closed the
            # file; "with" guarantees the close.
            with open(local_tile, 'w+b') as fo:
                fo.write(response.read())
            time.sleep(0.5)
        except Exception:
            # Best-effort: failure just means "tile not available".
            goturl = 0

        return goturl

    # SRTM subdirs: Africa, Australia, Eurasia, Islands, North_America, South_America
    for srtmdir in ('Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America'):
        remote_tile = str(url) + str(srtmdir) + '/' + local_tile
        goturl = 1

        try:
            # BUG FIX: the original called urllib2.urlopen(request), but
            # "request" is only bound on the srtmv3 path above, so every
            # attempt here raised NameError that the bare except swallowed
            # -- non-v3 tiles could never be downloaded.  Open the URL
            # string directly instead.
            response = urllib2.urlopen(remote_tile)
            with open(local_tile, 'w+b') as fo:
                fo.write(response.read())
            time.sleep(0.5)
            # does not work:
            #urllib.urlretrieve(remote_tile, local_tile, data = None)
        except Exception:
            goturl = 0

        if goturl == 1:
            return 1

    return 0
Example no. 5
0
    def _open_url(self, url):
        """
        Open an URL 'url' and return the file-like object of the opened URL.

        Supports "ssh://" URLs (delegated to '_open_url_ssh()') and HTTP(S)
        URLs with embedded basic-auth credentials.  The opened file-like
        object is appended to 'self._f_objs'.
        """
        def _print_warning(timeout):
            """
            This is a small helper function for printing a warning if we cannot
            open the URL for some time.
            """
            _log.warning("failed to open the URL with %d sec timeout, is the "
                         "proxy configured correctly? Keep trying ..." %
                         timeout)

        import socket

        from six.moves import http_client as httplib
        from six.moves.urllib import request as urllib
        from six.moves.urllib.error import URLError

        parsed_url = urlparse.urlparse(url)

        if parsed_url.scheme == "ssh":
            # Unfortunately, urllib2 does not handle "ssh://" URLs
            self._open_url_ssh(parsed_url)
            return

        username = parsed_url.username
        password = parsed_url.password

        if username and password:
            # Unfortunately, in order to handle URLs which contain user name
            # and password (e.g., http://user:[email protected]), we need to
            # do few extra things.
            new_url = list(parsed_url)
            if parsed_url.port:
                new_url[1] = "%s:%s" % (parsed_url.hostname, parsed_url.port)
            else:
                new_url[1] = parsed_url.hostname
            # Rebuild the URL without the credentials embedded in it.
            url = urlparse.urlunparse(new_url)

            # Build an URL opener which will do the authentication
            password_manager = urllib.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(None, url, username, password)
            auth_handler = urllib.HTTPBasicAuthHandler(password_manager)
            opener = urllib.build_opener(auth_handler)
        else:
            opener = urllib.build_opener()

        opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
        urllib.install_opener(opener)

        # Open the URL. First try with a short timeout, and print a message
        # which should supposedly give the user a clue that something may be
        # going wrong.
        # The overall purpose of this is to improve user experience. For
        # example, if one tries to open a file but did not setup the proxy
        # environment variables properly, there will be a very long delay
        # before the failure message. And it is much nicer to pre-warn the
        # user early about something possibly being wrong.
        for timeout in (10, None):
            try:
                f_obj = opener.open(url, timeout=timeout)
            # Handling the timeout case in Python 2.7
            except socket.timeout as err:
                if timeout is not None:
                    _print_warning(timeout)
                else:
                    raise Error("cannot open URL '%s': %s" % (url, err))
            except URLError as err:
                # Handling the timeout case in Python 2.6
                if timeout is not None and \
                   isinstance(err.reason, socket.timeout):
                    _print_warning(timeout)
                else:
                    raise Error("cannot open URL '%s': %s" % (url, err))
            except (IOError, ValueError, httplib.InvalidURL) as err:
                raise Error("cannot open URL '%s': %s" % (url, err))
            except httplib.BadStatusLine:
                raise Error("cannot open URL '%s': server responds with an "
                            "HTTP status code that we don't understand" % url)
            else:
                # BUG FIX: without this break, a successful open at the
                # 10-second timeout fell through and opened the URL a
                # second time with no timeout, leaking the first response.
                break

        self.is_url = True
        self._f_objs.append(f_obj)
Example no. 6
0
 def url_auth(self, url):
     """Return an ``HTTPBasicAuthHandler`` for *url*, or ``None`` when no
     password manager is configured for that URL."""
     manager = self.password_manager_for_url(url)
     if manager is None:
         return None
     return urlrequest.HTTPBasicAuthHandler(manager)
Example no. 7
0
    def get_handlers(self):
        """Assemble the list of urllib handlers used to open portal URLs.

        The returned list bundles, in order: an HTTPS handler with TLS
        verification disabled (only when ``self._verify_cert`` is ``False``),
        a redirect handler, Basic/Digest (and, on Windows, IWA) auth
        handlers, PKI client-certificate handlers, an optional proxy
        handler, and finally a cookie processor.

        :return: list of handler instances/classes suitable for passing to
            ``request.build_opener``.
        """
        handlers = []
        if self._verify_cert == False:
            # Caller explicitly opted out of certificate checks: build a
            # context with hostname checking and verification disabled.
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
            handler = request.HTTPSHandler(context=ctx)
            handlers.append(handler)

        from urllib.request import HTTPRedirectHandler
        # Raise the redirect limits well above urllib's defaults.
        redirect_handler = HTTPRedirectHandler()
        redirect_handler.max_redirections = 30
        redirect_handler.max_repeats = 30
        handlers.append(redirect_handler)
        if self._username and self._password:

            # Register the same credentials for Basic and Digest auth;
            # each handler gets its own password manager instance.
            passman = request.HTTPPasswordMgrWithDefaultRealm()
            passman.add_password(None, self._parsed_org_url, self._username,
                                 self._password)
            handlers.append(request.HTTPBasicAuthHandler(passman))
            passman = request.HTTPPasswordMgrWithDefaultRealm()
            passman.add_password(None, self._parsed_org_url, self._username,
                                 self._password)
            handlers.append(request.HTTPDigestAuthHandler(passman))
            # NOTE(review): this IWA branch runs for *every* user with a
            # username/password, so non-Windows users always get the
            # "Integrated Windows Authentication" error log below even
            # when they are not using IWA -- confirm this is intended.
            if os.name == 'nt':
                try:
                    from arcgis._impl.common._iwa import NtlmSspiAuthHandler, KerberosSspiAuthHandler

                    auth_krb = KerberosSspiAuthHandler()
                    handlers.append(auth_krb)

                    try:
                        auth_NTLM = NtlmSspiAuthHandler()
                        handlers.append(auth_NTLM)
                    except:
                        # NTLM support is optional; Kerberos alone may do.
                        pass

                except Error as err:
                    _log.error(
                        "winkerberos packages is required for IWA authentication (NTLM and Kerberos)."
                    )
                    _log.error(
                        "Please install it:\n\tconda install winkerberos")
                    _log.error(str(err))
            else:
                _log.error(
                    'The GIS uses Integrated Windows Authentication which is currently only supported on the Windows platform'
                )


        if self._auth == "PKI" or \
           (self.cert_file is not None and self.key_file is not None):
            # Client-certificate (PKI) authentication for this connection.
            handlers.append(
                HTTPSClientAuthHandler(self.key_file, self.cert_file))
        elif self._portal_connection and \
             self._portal_connection.cert_file is not None and \
             self._portal_connection.key_file is not None:
            # Fall back to the portal connection's client certificate.
            # NOTE(review): key/cert argument order differs from the branch
            # above -- confirm against HTTPSClientAuthHandler's signature.
            handlers.append(
                HTTPSClientAuthHandler(self._portal_connection.key_file,
                                       self._portal_connection.cert_file))

        cj = cookiejar.CookieJar()

        if self.proxy_host:  # Simple Proxy Support
            from urllib.request import ProxyHandler
            if self.proxy_port is None:
                self.proxy_port = 80
            proxies = {
                "http": "http://%s:%s" % (self.proxy_host, self.proxy_port),
                "https": "https://%s:%s" % (self.proxy_host, self.proxy_port)
            }
            proxy_support = ProxyHandler(proxies)
            handlers.append(proxy_support)

        # Cookie processor goes last so every request carries the jar.
        handlers.append(request.HTTPCookieProcessor(cj))
        return handlers
Example no. 8
0
def open_url(url,
             data=None,
             headers=None,
             method=None,
             use_proxy=True,
             force=False,
             last_mod_time=None,
             timeout=10,
             validate_certs=True,
             url_username=None,
             url_password=None,
             http_agent=None,
             force_basic_auth=False,
             follow_redirects='urllib2',
             client_cert=None,
             client_key=None,
             cookies=None):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment

    :param url: URL to open; basic-auth credentials embedded in the netloc
        ("user:pass@host") are extracted and stripped from the URL.
    :param data: optional request body (converted to bytes).
    :param headers: optional dict of extra headers; applied last, so they
        override anything computed here (including Authorization).
    :param method: optional HTTP method name; must be a known verb.
    :param use_proxy: when False, an empty ProxyHandler disables proxies.
    :param force: when True, send "cache-control: no-cache".
    :param last_mod_time: datetime for an If-Modified-Since header
        (ignored when *force* is set).
    :param timeout: socket timeout in seconds (only on Python >= 2.6).
    :param validate_certs: when False (and SSLContext is available), TLS
        certificate verification is disabled.
    :param url_username: explicit user name; takes precedence over
        credentials embedded in *url*.
    :param url_password: password paired with *url_username*.
    :param http_agent: value for the User-agent header.
    :param force_basic_auth: send the Authorization header up front
        instead of waiting for a 401 challenge.
    :param follow_redirects: redirect policy name handed to
        RedirectHandlerFactory.
    :param client_cert: optional client TLS certificate file.
    :param client_key: optional client TLS key file.
    :param cookies: optional CookieJar to collect/replay cookies.
    :return: the file-like response object returned by urlopen.
    :raises ConnectionError: for an unrecognized HTTP *method*.
    :raises ValueError: when *headers* is not a dict.
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    parsed = generic_urlparse(urlparse(url))
    if parsed.scheme != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            # Explicit credentials win over anything embedded in the URL.
            password = url_password
            netloc = parsed.netloc
        elif '@' in parsed.netloc:
            # Credentials were embedded in the URL itself; split them out.
            credentials, netloc = parsed.netloc.split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed_list = parsed.as_list()
            parsed_list[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed_list)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for  urls
            # for which `theurl` is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)
            digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)
            handlers.append(digest_authhandler)

        elif username and force_basic_auth:
            # Pre-emptive basic auth: no 401 round-trip.
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            # No credentials supplied -- fall back to ~/.netrc (or the
            # file named by $NETRC) for this host.
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed.hostname)
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(
                        username, password)

    if not use_proxy:
        # An empty proxy mapping disables all proxy handling.
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key,
                                   context=context))
    elif client_cert:
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    # add some nicer cookie handling
    if cookies is not None:
        handlers.append(urllib_request.HTTPCookieProcessor(cookies))

    opener = urllib_request.build_opener(*handlers)
    # NOTE(review): installing the opener globally affects every later
    # urlopen() call in the process, not just this request.
    urllib_request.install_opener(opener)

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' %
                                  method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    if http_agent:
        request.add_header('User-agent', http_agent)

    # Cache control
    # Either we directly force a cache refresh
    if force:
        request.add_header('cache-control', 'no-cache')
    # or we do it if the original is more recent than our copy
    elif last_mod_time:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r