 def _get_opener(self):
     if not self.opener:
         if (CONF.dashboard.disable_ssl_certificate_validation and
            self._ssl_default_context_supported()):
             ctx = ssl.create_default_context()
             ctx.check_hostname = False
             ctx.verify_mode = ssl.CERT_NONE
             self.opener = request.build_opener(
                 request.HTTPSHandler(context=ctx),
                 request.HTTPCookieProcessor())
         else:
             self.opener = request.build_opener(
                 request.HTTPCookieProcessor())
     return self.opener
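
A minimal standalone sketch of the same pattern (the target URL is illustrative): build an opener that skips TLS certificate validation while still carrying cookies between requests.

import ssl
from urllib import request

ctx = ssl.create_default_context()
ctx.check_hostname = False            # accept any hostname
ctx.verify_mode = ssl.CERT_NONE       # skip certificate validation
opener = request.build_opener(
    request.HTTPSHandler(context=ctx),
    request.HTTPCookieProcessor())    # a cookie jar is created implicitly
# opener.open('https://dashboard.example.com/')  # illustrative URL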
Example #2
    def _set_cookies(self, src):
        '''
        function that returns a urllib2 opener for retrieving data from *src*

        input:
            *src* : 'asos' or 'wunderground' or 'wunder_nonairport'
        '''
        jar = http_cookiejar.CookieJar()
        handler = request.HTTPCookieProcessor(jar)
        opener = request.build_opener(handler)
        try:
            if src.lower() == 'wunderground':
                url1 = 'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?' % self.sta_id
                url2 = 'http://www.wunderground.com/cgi-bin/findweather/getForecast?setpref=SHOWMETAR&value=1'
                url3 = 'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?&&theprefset=SHOWMETAR&theprefvalue=1&format=1' % self.sta_id

                opener.open(url1)
                opener.open(url2)
                opener.open(url3)

            elif src.lower() == 'asos':
                url = 'ftp://ftp.ncdc.noaa.gov/pub/data/asos-fivemin/'
                opener.open(url)

            elif src.lower() == 'wunder_nonairport':
                url = 'http://www.wunderground.com/weatherstation/WXDailyHistory.asp?ID=MEGKO3&day=1&year=2013&month=1&graphspan=day&format=1'
                opener.open(url)

        except error.URLError:
            print('connection to %s not available. working locally' % src)

        return opener
Example #3
    def user_login(self, username, password):
        self.opener = request.build_opener(request.HTTPCookieProcessor())
        response = self.opener.open(CONF.dashboard.dashboard_url).read()

        # Grab the CSRF token and default region
        parser = HorizonHTMLParser()
        parser.feed(response)

        # construct login url for dashboard, discovery accommodates non-/ web
        # root for dashboard
        login_url = parse.urljoin(CONF.dashboard.dashboard_url, parser.login)

        # Prepare login form request
        req = request.Request(login_url)
        req.add_header('Content-type', 'application/x-www-form-urlencoded')
        req.add_header('Referer', CONF.dashboard.dashboard_url)

        # Pass the default domain name regardless of the auth version in order
        # to test the scenario of when horizon is running with keystone v3
        params = {
            'username': username,
            'password': password,
            'region': parser.region,
            'domain': CONF.auth.default_credentials_domain_name,
            'csrfmiddlewaretoken': parser.csrf_token
        }
        self.opener.open(req, parse.urlencode(params))
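
The form body here is the return value of parse.urlencode, which is a str; on Python 3 urllib expects the POST body as bytes, so a condensed standalone sketch of the same login flow would encode it (URL, field names, and token value are illustrative):

from urllib import parse, request

opener = request.build_opener(request.HTTPCookieProcessor())
form = parse.urlencode({'username': 'demo',
                        'password': 'secret',
                        'csrfmiddlewaretoken': 'TOKEN'}).encode('utf-8')
req = request.Request('https://dashboard.example.com/auth/login/', data=form)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
req.add_header('Referer', 'https://dashboard.example.com/')
# opener.open(req)  # the session cookie stays in the opener's jar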
Example #4
    def opener(self, purl, puser=None, ppass=None, pscheme=('http', 'https')):
        if REDIRECT:
            h1, h2 = REDIRECT.BindableHTTPHandler, REDIRECT.BindableHTTPSHandler
        else:
            h1, h2 = request.HTTPHandler, request.HTTPSHandler

        handlers = [h1(), h2(), request.HTTPCookieProcessor(self.cookies)]

        if self.pref != 'noproxy' and purl and len(purl) > 1:
            # if not 'noproxy' and a proxy URL is provided (and it is not the placeholder URL, i.e. noproxy=':'), add a proxy handler
            handlers += [
                request.ProxyHandler({ps: purl
                                      for ps in pscheme}),
            ]
            #handlers += [request.ProxyHandler({ps:purl}) for ps in pscheme]

            if puser and ppass:
                # if a proxy user/pass is provided, add a proxy auth handler
                pm = request.HTTPPasswordMgrWithDefaultRealm()
                pm.add_password(None, purl, puser, ppass)
                handlers += [
                    request.ProxyBasicAuthHandler(pm),
                ]

        return request.build_opener(*handlers)
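
A standalone sketch of the same proxy wiring using only the standard library (proxy address and credentials are placeholders): one ProxyHandler routes both schemes through the proxy, and a ProxyBasicAuthHandler answers the proxy's 407 challenge.

from http import cookiejar
from urllib import request

proxy_url = 'http://proxy.example.com:3128'   # placeholder proxy
pm = request.HTTPPasswordMgrWithDefaultRealm()
pm.add_password(None, proxy_url, 'proxy_user', 'proxy_pass')
opener = request.build_opener(
    request.ProxyHandler({'http': proxy_url, 'https': proxy_url}),
    request.ProxyBasicAuthHandler(pm),
    request.HTTPCookieProcessor(cookiejar.CookieJar()))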
Example #5
    def do_call(self,
                url,
                data=None,
                method=None,
                calltimeout=constants.SOCKET_TIMEOUT):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.
        """
        if self.url:
            url = self.url + url
        if "xx/sessions" not in url:
            LOG.debug(
                'Request URL: %(url)s\n'
                'Call Method: %(method)s\n'
                'Request Data: %(data)s\n', {
                    'url': url,
                    'method': method,
                    'data': data
                })
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)
        result = None

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=calltimeout)
            res = res_temp.read().decode("utf-8")

            LOG.debug('Response Data: %(res)s.', {'res': res})

        except Exception as err:
            LOG.error(
                _LE('\nBad response from server: %(url)s.'
                    ' Error: %(err)s'), {
                        'url': url,
                        'err': err
                    })
            res = '{"error":{"code":%s,' \
                  '"description":"Connect server error"}}' \
                  % constants.ERROR_CONNECT_TO_SERVER

        try:
            result = jsonutils.loads(res)
        except Exception as err:
            err_msg = (_('JSON transfer error: %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidInput(reason=err_msg)

        return result
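
Overriding get_method, as above, is the usual urllib trick for sending verbs other than GET and POST; a minimal standalone illustration (URL and payload are placeholders):

from urllib import request

req = request.Request('https://array.example.com/rest/lun/1',
                      data=b'{"NAME": "lun1"}',
                      headers={'Content-Type': 'application/json'})
req.get_method = lambda: 'PUT'   # force the HTTP verb
# request.urlopen(req, timeout=30)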
Example #6
File: http.py Project: qtacore/QT4S
 def create_session(self):
     if self.use_requests:
         session = requests.Session()
         session.verify = self._ssl_verify
         session.cert = self._ssl_cert
         session.trust_env = False  # let qt4s handle proxying
         cookie_jar = session.cookies
     else:
         from six.moves.urllib import request
         cookie_jar = CookieJar()
         cookie_processor = request.HTTPCookieProcessor(cookie_jar)
         session = request.build_opener(request.ProxyHandler({}),
                                        cookie_processor)
     return session, cookie_jar
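
In the urllib branch above, ProxyHandler({}) with an empty mapping keeps urllib from picking up proxy settings from the environment, mirroring trust_env = False on the requests side; a standalone sketch of that branch:

from http.cookiejar import CookieJar
from urllib import request

cookie_jar = CookieJar()
opener = request.build_opener(request.ProxyHandler({}),   # ignore proxy env vars
                              request.HTTPCookieProcessor(cookie_jar))
# opener.open('http://example.com/')  # cookies accumulate in cookie_jar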
Example #7
 def __init__(self, configuration, debug=True):
     super(XMLAPIConnector, self).__init__()
     self.storage_ip = configuration.emc_nas_server
     self.username = configuration.emc_nas_login
     self.password = configuration.emc_nas_password
     self.debug = debug
     self.auth_url = 'https://' + self.storage_ip + '/Login'
     self._url = ('https://' + self.storage_ip +
                  '/servlets/CelerraManagementServices')
     https_handler = url_request.HTTPSHandler()
     cookie_handler = url_request.HTTPCookieProcessor(
         http_cookiejar.CookieJar())
     self.url_opener = url_request.build_opener(https_handler,
                                                cookie_handler)
     self._do_setup()
Example #8
    def _check_cookie_session_persistence(self):
        """Check cookie persistence types by injecting cookies in requests."""

        # Send first request and get cookie from the server's response
        cj = cookielib.CookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        opener.open("http://{0}/".format(self.vip_ip))
        resp = []
        # Send 10 subsequent requests with the cookie inserted in the headers.
        for count in range(10):
            request = urllib2.Request("http://{0}/".format(self.vip_ip))
            cj.add_cookie_header(request)
            response = urllib2.urlopen(request)
            resp.append(response.read())
        self.assertEqual(len(set(resp)), 1, message=resp)
Example #9
    def user_login(self, username, password):
        self.opener = request.build_opener(request.HTTPCookieProcessor())
        response = self.opener.open(CONF.dashboard.dashboard_url).read()

        # Grab the CSRF token and default region
        parser = HorizonHTMLParser()
        parser.feed(response)

        # Prepare login form request
        req = request.Request(CONF.dashboard.login_url)
        req.add_header('Content-type', 'application/x-www-form-urlencoded')
        req.add_header('Referer', CONF.dashboard.dashboard_url)
        params = {
            'username': username,
            'password': password,
            'region': parser.region,
            'csrfmiddlewaretoken': parser.csrf_token
        }
        self.opener.open(req, parse.urlencode(params))
Example #10
    def __init__(self, base_url, endpoint, username, password):
        self.base_url = base_url.strip()
        # remove left '/' if any
        self.endpoint = endpoint.strip().lstrip('/')
        self.username = username.strip()
        self.password = password.strip()

        # set other things
        self.services_link = "%s/%s" % (base_url, endpoint)
        self.services_session_token = None
        self.http_user_agent = 'DrupalComputingAgent'
        self.http_content_type = 'application/json'

        # set cookie handler

        # first, create an opener that has cookie support.
        opener = urllib_request.build_opener(
            urllib_request.HTTPCookieProcessor())
        # then install the opener into the request module so it is used instead of the default opener.
        urllib_request.install_opener(opener)
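
Once install_opener has been called, module-level urlopen calls share the cookie-aware opener; a minimal standalone illustration (the URL is a placeholder):

from urllib import request as urllib_request

opener = urllib_request.build_opener(urllib_request.HTTPCookieProcessor())
urllib_request.install_opener(opener)
# urllib_request.urlopen('https://example.com/')  # reuses the shared cookie jar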
Example #11
 def __init__(self, configuration, debug=True):
     super(XMLAPIConnector, self).__init__()
     self.storage_ip = enas_utils.convert_ipv6_format_if_needed(
         configuration.emc_nas_server)
     self.username = configuration.emc_nas_login
     self.password = configuration.emc_nas_password
     self.debug = debug
     self.auth_url = 'https://' + self.storage_ip + '/Login'
     self._url = 'https://{}/servlets/CelerraManagementServices'.format(
         self.storage_ip)
     context = enas_utils.create_ssl_context(configuration)
     if context:
         https_handler = url_request.HTTPSHandler(context=context)
     else:
         https_handler = url_request.HTTPSHandler()
     cookie_handler = url_request.HTTPCookieProcessor(
         http_cookiejar.CookieJar())
     self.url_opener = url_request.build_opener(https_handler,
                                                cookie_handler)
     self._do_setup()
Example #12
    def call(self, url, data=None, method=None):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.
        """
        if "xx/sessions" not in url:
            LOG.debug(
                'Request URL: %(url)s\n'
                'Call Method: %(method)s\n'
                'Request Data: %(data)s\n', {
                    'url': url,
                    'method': method,
                    'data': data
                })
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=constants.SOCKET_TIMEOUT)
            res = res_temp.read().decode("utf-8")

            LOG.debug('Response Data: %(res)s.', {'res': res})

        except Exception as err:
            LOG.error(_LE('Bad response from server: %s.') % err)
            raise err

        try:
            res_json = jsonutils.loads(res)
        except Exception as err:
            err_msg = (_('JSON transfer error: %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidShare(reason=err_msg)

        return res_json
Example #13
def download_tile(tile, url, pid, version, username, password):

    grass.debug("Download tile: %s" % tile, debug=1)
    local_tile = "NASADEM_HGT_" + str(tile) + ".zip"

    urllib2.urlcleanup()

    remote_tile = str(url) + "/" + version + "/2000.02.11/" + local_tile
    goturl = 1

    try:
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(
            None, "https://urs.earthdata.nasa.gov", username, password
        )

        cookie_jar = CookieJar()

        opener = urllib2.build_opener(
            urllib2.HTTPBasicAuthHandler(password_manager),
            # urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
            # urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
            urllib2.HTTPCookieProcessor(cookie_jar),
        )
        urllib2.install_opener(opener)

        request = urllib2.Request(remote_tile)
        response = urllib2.urlopen(request)

        fo = open(local_tile, "w+b")
        fo.write(response.read())
        fo.close()
        time.sleep(0.5)
    except:
        goturl = 0
        pass

    return goturl
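
A condensed standalone sketch of the Earthdata login pattern above (the URS host is real; username, password, and download URL are placeholders): the basic-auth handler answers the URS challenge and the cookie jar carries the resulting session cookie through the redirect chain.

from http.cookiejar import CookieJar
from urllib import request

pm = request.HTTPPasswordMgrWithDefaultRealm()
pm.add_password(None, 'https://urs.earthdata.nasa.gov', 'user', 'pass')
opener = request.build_opener(request.HTTPBasicAuthHandler(pm),
                              request.HTTPCookieProcessor(CookieJar()))
# opener.open('https://example.com/NASADEM_HGT_n00e006.zip')  # placeholder URL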
Example #14
def request(url, timeout=None, debug=False):
    """Request the given URL using LIGO.ORG SAML authentication.

    This requires an active Kerberos ticket for the user, to get one:

        >>> from ligo.org import kinit
        >>> kinit('albert.einstein')

    Then request as follows

        >>> from ligo.org import request
        >>> response = request(myurl)
        >>> print(response.read())

    Parameters
    ----------
    url : `str`
        URL path for request
    timeout : `int`, optional, default: no timeout
        number of seconds to wait for server response,
    debug : `bool`, optional, default: `False`
        Query in verbose debugging mode

    Returns
    -------
    response : `file`-like
        the raw response from the URL, probably XML/HTML or JSON

    Examples
    --------
    >>> from ligo.org import request
    >>> response = request('https://ldas-jobs.ligo.caltech.edu/')
    >>> print(response.read())
    """
    # set debug to 1 to see all HTTP(s) traffic
    debug = int(debug)

    # need an instance of HTTPS handler to do HTTPS
    httpshandler = urllib2.HTTPSHandler(debuglevel=debug)

    # use a cookie jar to store session cookies
    jar = http_cookiejar.LWPCookieJar()

    # if a cookie jar exists open it and read the cookies
    # and make sure it has the right permissions
    if os.path.exists(COOKIE_JAR):
        os.chmod(COOKIE_JAR, stat.S_IRUSR | stat.S_IWUSR)
        # set ignore_discard so that session cookies are preserved
        try:
            jar.load(COOKIE_JAR, ignore_discard=True)
        except http_cookiejar.LoadError as e:
            warnings.warn('http_cookiejar.LoadError caught: %s' % str(e))

    # create a cookie handler from the cookie jar
    cookiehandler = urllib2.HTTPCookieProcessor(jar)
    # need a redirect handler to follow redirects
    redirecthandler = urllib2.HTTPRedirectHandler()

    # need an auth handler that can do negotiation.
    # input parameter is the Kerberos service principal.
    auth_handler = HTTPNegotiateAuthHandler(service_principal='HTTP@%s' %
                                            LIGO_LOGIN_URL)

    # create the opener.
    opener = urllib2.build_opener(auth_handler, cookiehandler, httpshandler,
                                  redirecthandler)

    # prepare the request object
    req = urllib2.Request(url)

    # use the opener and the request object to make the request.
    if timeout is None:
        timeout = socket._GLOBAL_DEFAULT_TIMEOUT
    response = opener.open(req, timeout=timeout)

    # save the session cookies to a file so that they can
    # be used again without having to authenticate
    jar.save(COOKIE_JAR, ignore_discard=True)

    return response
Example #15
def download_tile(tile, url, pid, srtmv3, one, username, password):

    grass.debug("Download tile: %s" % tile, debug = 1)
    output = tile + '.r.in.srtm.tmp.' + str(pid)
    if srtmv3:
        if one:
            local_tile = str(tile) + '.SRTMGL1.hgt.zip'
        else:
            local_tile = str(tile) + '.SRTMGL3.hgt.zip'
    else:
        local_tile = str(tile) + '.hgt.zip'

    urllib2.urlcleanup()

    if srtmv3:
        remote_tile = str(url) + local_tile
        goturl = 1

        try:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(None, "https://urs.earthdata.nasa.gov", username, password)

            cookie_jar = CookieJar()

            opener = urllib2.build_opener(
                urllib2.HTTPBasicAuthHandler(password_manager),
                #urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
                #urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
                urllib2.HTTPCookieProcessor(cookie_jar))
            urllib2.install_opener(opener)

            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)

            fo = open(local_tile, 'w+b')
            fo.write(response.read())
            fo.close()
            time.sleep(0.5)
        except:
            goturl = 0
            pass

        return goturl

    # SRTM subdirs: Africa, Australia, Eurasia, Islands, North_America, South_America
    for srtmdir in ('Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America'):
        remote_tile = str(url) + str(srtmdir) + '/' + local_tile
        goturl = 1

        try:
            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)
            fo = open(local_tile, 'w+b')
            fo.write(response.read())
            fo.close()
            time.sleep(0.5)
            # does not work:
            #urllib.urlretrieve(remote_tile, local_tile, data = None)
        except:
            goturl = 0
            pass

        if goturl == 1:
            return 1

    return 0
Example #16
File: urls.py Project: alikins/mazer_old
def open_url(url,
             data=None,
             headers=None,
             method=None,
             use_proxy=True,
             force=False,
             last_mod_time=None,
             timeout=10,
             validate_certs=True,
             url_username=None,
             url_password=None,
             http_agent=None,
             force_basic_auth=False,
             follow_redirects='urllib2',
             client_cert=None,
             client_key=None,
             cookies=None):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    parsed = generic_urlparse(urlparse(url))
    if parsed.scheme != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed.netloc
        elif '@' in parsed.netloc:
            credentials, netloc = parsed.netloc.split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed_list = parsed.as_list()
            parsed_list[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed_list)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for  urls
            # for which `theurl` is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)
            digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)
            handlers.append(digest_authhandler)

        elif username and force_basic_auth:
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed.hostname)
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(
                        username, password)

    if not use_proxy:
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key,
                                   context=context))
    elif client_cert:
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    # add some nicer cookie handling
    if cookies is not None:
        handlers.append(urllib_request.HTTPCookieProcessor(cookies))

    opener = urllib_request.build_opener(*handlers)
    urllib_request.install_opener(opener)

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' %
                                  method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    if http_agent:
        request.add_header('User-agent', http_agent)

    # Cache control
    # Either we directly force a cache refresh
    if force:
        request.add_header('cache-control', 'no-cache')
    # or we do it if the original is more recent than our copy
    elif last_mod_time:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r
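
An illustrative call to open_url (the URL and agent string are placeholders), passing a CookieJar so cookies set by the server survive across calls:

from http.cookiejar import CookieJar

jar = CookieJar()
resp = open_url('https://galaxy.example.com/api/v1/roles/',
                http_agent='mazer-example',
                timeout=30,
                cookies=jar)
body = resp.read()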
Example #17
def request(url,
            endpoint=IDP_ENDPOINTS['LIGO.ORG'],
            use_kerberos=None,
            debug=False):
    """Request the given URL using ECP shibboleth authentication

    This requires an active Kerberos ticket for the user, to get one:

        >>> from ligo.org import kinit
        >>> kinit('albert.einstein')

    Then request as follows

        >>> from ligo.org import request
        >>> response = request(myurl)
        >>> print(response.read())

    Adapted from
    https://wiki.shibboleth.net/confluence/download/attachments/4358416/ecp.py

    Parameters
    ----------
    url : `str`
        URL path for request

    endpoint : `str`
        ECP endpoint URL for request

    use_kerberos : `bool`, optional
        use existing kerberos credential for login, default is to try, but
        fall back to username/password prompt

    debug : `bool`, optional, default: `False`
        query in verbose debugging mode

    Returns
    -------
    response : `str`
        the raw (decoded) response from the URL, probably XML/HTML or JSON
    """
    login_host = urlparse(endpoint).netloc

    # create a cookie jar and cookie handler (and read existing cookies)
    cookie_jar = ECPCookieJar()

    if os.path.exists(COOKIE_JAR):
        try:
            cookie_jar.load(COOKIE_JAR, ignore_discard=True)
        except http_cookiejar.LoadError as e:
            warnings.warn('Caught error loading ECP cookie: %s' % str(e))

    cookie_handler = urllib_request.HTTPCookieProcessor(cookie_jar)

    # need an instance of HTTPS handler to do HTTPS
    httpsHandler = urllib_request.HTTPSHandler(debuglevel=0)
    if debug:
        httpsHandler.set_http_debuglevel(1)

    # create the base opener object
    opener = urllib_request.build_opener(cookie_handler, httpsHandler)

    # get kerberos credentials if available
    if use_kerberos is None:
        try:
            creds = klist()
        except KerberosError:
            use_kerberos = False
        else:
            if creds:
                use_kerberos = True
            else:
                use_kerberos = False
    if use_kerberos:
        opener.add_handler(
            HTTPNegotiateAuthHandler(service_principal='HTTP@%s' % login_host))

    # -- initiate ECP request --------------------

    # headers needed to indicate to the SP an ECP request
    headers = {
        'Accept':
        'text/html; application/vnd.paos+xml',
        'PAOS':
        'ver="urn:liberty:paos:2003-08";'
        '"urn:oasis:names:tc:SAML:2.0:profiles:SSO:ecp"',
    }

    # request target from SP
    request = urllib_request.Request(url=url, headers=headers)
    response = opener.open(request)

    # convert the SP response from string to etree Element object
    sp_response = etree.XML(response.read())

    # pick out the relay state element from the SP so that it can
    # be included later in the response to the SP
    namespaces = {
        'ecp': 'urn:oasis:names:tc:SAML:2.0:profiles:SSO:ecp',
        'S': 'http://schemas.xmlsoap.org/soap/envelope/',
        'paos': 'urn:liberty:paos:2003-08'
    }

    relay_state = sp_response.xpath("//ecp:RelayState",
                                    namespaces=namespaces)[0]

    # make a deep copy of the SP response and then remove the header
    # in order to create the package for the IdP
    idp_request = deepcopy(sp_response)
    header = idp_request[0]
    idp_request.remove(header)

    # -- authenticate with endpoint -------------

    request = urllib_request.Request(endpoint,
                                     data=etree.tostring(idp_request))
    request.get_method = lambda: 'POST'
    request.add_header('Content-Type', 'text/xml; charset=utf-8')

    # get credentials for non-kerberos request
    if not use_kerberos:
        # prompt the user for a password
        login = input("Enter username for %s: " % login_host)
        password = getpass.getpass("Enter password for login '%s': " % login)
        # combine the login and password, base64 encode, and send
        # using the Authorization header
        base64string = base64.encodestring(
            ('%s:%s' % (login, password)).encode()).decode().replace('\n', '')
        request.add_header('Authorization', 'Basic %s' % base64string)

    response = opener.open(request)
    idp_response = etree.XML(response.read())

    assertion_consumer_service = idp_response.xpath(
        "/S:Envelope/S:Header/ecp:Response/@AssertionConsumerServiceURL",
        namespaces=namespaces)[0]

    # make a deep copy of the IdP response and replace its
    # header contents with the relay state initially sent by
    # the SP
    sp_package = deepcopy(idp_response)
    sp_package[0][0] = relay_state

    headers = {'Content-Type': 'application/vnd.paos+xml'}

    # POST the package to the SP
    request = urllib_request.Request(url=assertion_consumer_service,
                                     data=etree.tostring(sp_package),
                                     headers=headers)
    request.get_method = lambda: 'POST'
    response = opener.open(request)

    # -- cache cookies --------------------------

    cookie_jar.save(COOKIE_JAR, ignore_discard=True)

    # -- actually send GET ----------------------

    request = urllib_request.Request(url=url)
    response = opener.open(request)
    return response.read()
Example #18
File: _base.py Project: phpmaps/ArcREST
    def _get(self,
             url,
             param_dict={},
             securityHandler=None,
             additional_headers=[],
             handlers=[],
             proxy_url=None,
             proxy_port=None,
             compress=True,
             custom_handlers=[],
             out_folder=None,
             file_name=None):
        """
        Performs a GET operation
        Inputs:

        Output:
           returns dictionary, string or None
        """
        self._last_method = "GET"
        CHUNK = 4056
        param_dict, handler, cj = self._processHandler(securityHandler,
                                                       param_dict)
        headers = [] + additional_headers
        if compress:
            headers.append(('Accept-encoding', 'gzip'))
        else:
            headers.append(('Accept-encoding', ''))
        headers.append(('User-Agent', self.useragent))
        if len(param_dict.keys()) == 0:
            param_dict = None
        if handlers is None:
            handlers = []
        if handler is not None:
            handlers.append(handler)
        handlers.append(RedirectHandler())
        if cj is not None:
            handlers.append(request.HTTPCookieProcessor(cj))
        if proxy_url is not None:
            if proxy_port is None:
                proxy_port = 80
            proxies = {
                "http": "http://%s:%s" % (proxy_url, proxy_port),
                "https": "https://%s:%s" % (proxy_url, proxy_port)
            }
            proxy_support = request.ProxyHandler(proxies)
            handlers.append(proxy_support)
        opener = request.build_opener(*handlers)
        opener.addheaders = headers
        if param_dict is None:
            resp = opener.open(url, data=param_dict)
        elif len(str(urlencode(param_dict))) + len(url) >= 1999:
            resp = opener.open(url.encode('ascii'), data=urlencode(param_dict))
        else:
            format_url = url.encode('ascii') + "?%s" % urlencode(param_dict)
            resp = opener.open(fullurl=format_url)
        self._last_code = resp.getcode()
        self._last_url = resp.geturl()
        #  Get some headers from the response
        maintype = self._mainType(resp)
        contentDisposition = resp.headers.get('content-disposition')
        contentEncoding = resp.headers.get('content-encoding')
        contentType = resp.headers.get('content-Type').split(';')[0].lower()
        contentLength = resp.headers.get('content-length')
        if maintype.lower() in ('image',
                                'application/x-zip-compressed') or \
           contentType == 'application/x-zip-compressed' or \
           (contentDisposition is not None and \
            contentDisposition.lower().find('attachment;') > -1):

            fname = self._get_file_name(contentDisposition=contentDisposition,
                                        url=url)
            if out_folder is None:
                out_folder = tempfile.gettempdir()
            if contentLength is not None:
                max_length = int(contentLength)
                if max_length < CHUNK:
                    CHUNK = max_length
            file_name = os.path.join(out_folder, fname)
            with open(file_name, 'wb') as writer:
                for data in self._chunk(response=resp, size=CHUNK):
                    writer.write(data)
                    writer.flush()
                writer.flush()
                del writer
            return file_name
        else:
            read = ""
            for data in self._chunk(response=resp, size=CHUNK):
                try:
                    read += data.decode('ascii')
                except:
                    read += data.decode('utf-8')

                del data
            try:
                results = json.loads(read)
                if 'error' in results:
                    if 'message' in results['error']:
                        if results['error'][
                                'message'] == 'Request not made over ssl':
                            if url.startswith('http://'):
                                url = url.replace('http://', 'https://')
                                return self._get(url, param_dict,
                                                 securityHandler,
                                                 additional_headers, handlers,
                                                 proxy_url, proxy_port,
                                                 compress, custom_handlers,
                                                 out_folder, file_name)
                return results
            except:
                return read
Example #19
File: _base.py Project: phpmaps/ArcREST
    def _post(self,
              url,
              param_dict={},
              files={},
              securityHandler=None,
              additional_headers={},
              custom_handlers=[],
              proxy_url=None,
              proxy_port=80,
              compress=True,
              out_folder=None,
              file_name=None):
        """
        Performs a POST operation on a URL.

        Inputs:
           param_dict - key/value pair of values
              ex: {"foo": "bar"}
           files - key/value pair of file objects where the key is
              the input name and the value is the file path
              ex: {"file": r"c:\temp\myfile.zip"}
           securityHandler - object that handles the token or other site
              security.  It must inherit from the base security class.
              ex: arcrest.AGOLSecurityHandler("SomeUsername", "SOMEPASSWORD")
           additional_headers - are additional key/value headers that a user
              wants to pass during the operation.
              ex: {"accept-encoding": "gzip"}
           custom_handlers - this is additional web operation handlers as a
              list of objects.
              Ex: [CustomAuthHandler]
           proxy_url - url of the proxy
           proxy_port - default 80, port number of the proxy
           compress - default true, determines if gzip should be used or not for
              the web operation.
           out_folder - if the URL requested returns a file, this will be the
              disk save location
           file_name - if the operation returns a file and the file name is not
             given in the header or a user wishes to override the return saved
             file name, provide value here.
        Output:
           returns dictionary or string depending on web operation.
        """
        self._last_method = "POST"
        headers = {}
        opener = None
        return_value = None
        handlers = [RedirectHandler()]
        param_dict, handler, cj = self._processHandler(securityHandler,
                                                       param_dict)
        if handler is not None:
            handlers.append(handler)
        if cj is not None:
            handlers.append(request.HTTPCookieProcessor(cj))
        if isinstance(custom_handlers, list) and \
           len(custom_handlers) > 0:
            for h in custom_handlers:
                handlers.append(h)
        if compress:
            headers['Accept-Encoding'] = 'gzip'
        else:
            headers['Accept-Encoding'] = ''
        for k, v in additional_headers.items():
            headers[k] = v
            del k, v
        opener = request.build_opener(*handlers)
        request.install_opener(opener)
        opener.addheaders = [(k, v) for k, v in headers.items()]
        if len(files) == 0:
            data = urlencode(param_dict)
            if self.PY3:
                data = data.encode('ascii')
            opener.data = data
            resp = opener.open(url.encode('ascii'), data=data)
        else:
            mpf = MultiPartForm(param_dict=param_dict, files=files)
            req = request.Request(url.encode('ascii'))
            body = mpf.make_result
            req.add_header('User-agent', self.useragent)
            req.add_header('Content-type', mpf.get_content_type())
            req.add_header('Content-length', len(body))
            req.data = body
            resp = request.urlopen(req)
            del body, mpf
        self._last_code = resp.getcode()
        self._last_url = resp.geturl()
        return_value = self._process_response(resp=resp, out_folder=out_folder)
        if isinstance(return_value, dict):
            if "error" in return_value and \
               'message' in return_value['error']:
                if return_value['error']['message'].lower(
                ) == 'request not made over ssl':
                    if url.startswith('http://'):
                        url = url.replace('http://', 'https://')
                        return self._post(url, param_dict, files,
                                          securityHandler, additional_headers,
                                          custom_handlers, proxy_url,
                                          proxy_port, compress, out_folder,
                                          file_name)
        return return_value
Example #20
 def __init__(self, encoding='utf-8'):
     self.cj = cookielib.CookieJar()
     self.opener = request.build_opener(request.HTTPCookieProcessor(
         self.cj))
     self.encoding = encoding
Example #21
    def get_handlers(self):
        handlers = []
        if self._verify_cert == False:
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
            handler = request.HTTPSHandler(context=ctx)
            handlers.append(handler)

        from urllib.request import HTTPRedirectHandler
        redirect_handler = HTTPRedirectHandler()
        redirect_handler.max_redirections = 30
        redirect_handler.max_repeats = 30
        handlers.append(redirect_handler)
        if self._username and self._password:

            passman = request.HTTPPasswordMgrWithDefaultRealm()
            passman.add_password(None, self._parsed_org_url, self._username,
                                 self._password)
            handlers.append(request.HTTPBasicAuthHandler(passman))
            passman = request.HTTPPasswordMgrWithDefaultRealm()
            passman.add_password(None, self._parsed_org_url, self._username,
                                 self._password)
            handlers.append(request.HTTPDigestAuthHandler(passman))
            if os.name == 'nt':
                try:
                    from arcgis._impl.common._iwa import NtlmSspiAuthHandler, KerberosSspiAuthHandler

                    auth_krb = KerberosSspiAuthHandler()
                    handlers.append(auth_krb)

                    try:
                        auth_NTLM = NtlmSspiAuthHandler()
                        handlers.append(auth_NTLM)
                    except:
                        pass

                except Error as err:
                    _log.error(
                        "winkerberos packages is required for IWA authentication (NTLM and Kerberos)."
                    )
                    _log.error(
                        "Please install it:\n\tconda install winkerberos")
                    _log.error(str(err))
            else:
                _log.error(
                    'The GIS uses Integrated Windows Authentication which is currently only supported on the Windows platform'
                )


        if self._auth == "PKI" or \
           (self.cert_file is not None and self.key_file is not None):
            handlers.append(
                HTTPSClientAuthHandler(self.key_file, self.cert_file))
        elif self._portal_connection and \
             self._portal_connection.cert_file is not None and \
             self._portal_connection.key_file is not None:
            handlers.append(
                HTTPSClientAuthHandler(self._portal_connection.key_file,
                                       self._portal_connection.cert_file))

        cj = cookiejar.CookieJar()

        if self.proxy_host:  # Simple Proxy Support
            from urllib.request import ProxyHandler
            if self.proxy_port is None:
                self.proxy_port = 80
            proxies = {
                "http": "http://%s:%s" % (self.proxy_host, self.proxy_port),
                "https": "https://%s:%s" % (self.proxy_host, self.proxy_port)
            }
            proxy_support = ProxyHandler(proxies)
            handlers.append(proxy_support)

        handlers.append(request.HTTPCookieProcessor(cj))
        return handlers
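
The handler list returned above is typically combined into a single opener; a hedged sketch of a caller (conn stands for an instance of the surrounding class, and the URL is illustrative):

from urllib import request

handlers = conn.get_handlers()            # auth, proxy, redirect and cookie handlers
opener = request.build_opener(*handlers)
# opener.open('https://example.org/portal/sharing/rest/info?f=json')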