Ejemplo n.º 1
0
def _create_opener():
    """
    Create a urllib opener for internet access.

    Attaches the :class:`CachingRedirectHandler` to the opener and sets
    its User-agent to ``Mozilla/5.0``.

    If the network proxy type is ``http``, a :class:`ProxyHandler` for
    both ``http`` and ``https`` schemes is attached as well.

    The opener is probed with :func:`_test_opener` and the result is
    stored in the module-level ``_internet_connected`` flag.

    :return: the configured opener.
    """
    proxy_handler = None

    if NETWORK_PROXY_TYPE == 'http':
        proxyurl = _get_http_proxy_url()
        proxy_handler = ProxyHandler({'http': proxyurl,
                                      'https': proxyurl})
    if proxy_handler is not None:
        openr = build_opener(HTTPHandler(), HTTPSHandler(),
                             proxy_handler, CachingRedirectHandler)
    else:
        # BUG FIX: the original passed HTTPSHandler() twice and never
        # installed a plain HTTPHandler for http:// URLs.
        openr = build_opener(HTTPHandler(), HTTPSHandler(),
                             CachingRedirectHandler)
    openr.addheaders = [('User-agent', 'Mozilla/5.0')]
    global _internet_connected
    _internet_connected = _test_opener(openr)
    return openr
Ejemplo n.º 2
0
def putDataUrllib2(url, payload, timeout=900, logger=None):
    """
    Wrapper method for urllib that issues an HTTP PUT to *url*.

    Parameters
    ----------
    url : `string`
        Ex: 'https://dqsegdb5.phy.syr.edu/L1/DMT-SCIENCE/1'
    payload : `string`
        JSON formatted string
    timeout : `int`, optional
        Socket default timeout in seconds (default 900).
    logger : optional
        If provided, debug messages are emitted before and after the call.

    Returns
    -------
    url : `string`
        The url that was called, on success.

    Raises
    ------
    HTTPError / URLError
        Re-raised after being reported via handleHTTPError() / warn().
    """
    socket.setdefaulttimeout(timeout)
    # HTTPS endpoints need the client-certificate auth handler.
    if urlparse(url).scheme == 'https':
        opener = urllib_request.build_opener(HTTPSClientAuthHandler)
    else:
        opener = urllib_request.build_opener(urllib_request.HTTPHandler)
    # BUG FIX: urllib on Python 3 requires the request body to be bytes.
    if isinstance(payload, str):
        payload = payload.encode('utf-8')
    request = urllib_request.Request(url, data=payload)
    request.add_header('Content-Type', 'JSON')
    request.get_method = lambda: 'PUT'
    if logger:
        logger.debug("Beginning url call: %s" % url)
    try:
        opener.open(request)
    except urllib_error.HTTPError as e:
        # Report (and possibly translate) the error, then let callers decide.
        handleHTTPError("PUT", url, e)
        raise
    except urllib_error.URLError as e:
        warnmsg = "Warning: Issue accessing url: %s" % url
        warnmsg += "; "
        warnmsg += str(e.reason)
        warnmsg += "; "
        warnmsg += "May be handled cleanly by calling instance: otherwise will result in an error."
        warn(warnmsg)
        raise
    if logger:
        logger.debug("Completed url call: %s" % url)
    return url
Ejemplo n.º 3
0
def open_no_proxy(*args, **kwargs):
    """Open a URL while bypassing any configured proxies.

    NOTE(jamespage): on interpreters implementing PEP-0476 (>= 2.7.9)
    certificate-chain verification is explicitly disabled so existing
    callers keep working; see
    https://github.com/python/peps/blob/master/pep-0476.txt
    """
    handlers = [urlrequest.ProxyHandler({})]
    if hasattr(ssl, "_create_unverified_context"):
        handlers.append(
            urlrequest.HTTPSHandler(context=ssl._create_unverified_context()))
    opener = urlrequest.build_opener(*handlers)
    return opener.open(*args, **kwargs)
 def _get_opener(self):
     """Lazily build and cache the urllib opener used by this client.

     When SSL certificate validation is disabled in config (and the
     runtime supports default SSL contexts) the opener carries an
     HTTPSHandler with hostname/certificate checks turned off;
     otherwise only a cookie processor is attached.
     """
     if self.opener:
         return self.opener
     skip_verify = (CONF.dashboard.disable_ssl_certificate_validation and
                    self._ssl_default_context_supported())
     if skip_verify:
         ctx = ssl.create_default_context()
         ctx.check_hostname = False
         ctx.verify_mode = ssl.CERT_NONE
         handlers = [request.HTTPSHandler(context=ctx),
                     request.HTTPCookieProcessor()]
     else:
         handlers = [request.HTTPCookieProcessor()]
     self.opener = request.build_opener(*handlers)
     return self.opener
Ejemplo n.º 5
0
def url_get(base_url, password_mgr=None, pathspec=None, params=None):
    """Make contact with the uri provided and return any contents."""
    # Honour system proxy settings when present.
    handlers = [urlrequest.ProxyHandler()]
    if password_mgr is not None:
        handlers.append(urlrequest.HTTPDigestAuthHandler(password_mgr))
    urlopener = urlrequest.build_opener(*handlers)
    urlrequest.install_opener(urlopener)
    full_url = build_url(base_url, pathspec=pathspec, params=params)
    response = urlopener.open(full_url)
    content = response.read()
    response.close()
    return content
Ejemplo n.º 6
0
def set_proxy(proxy, user=None, password=''):
    """
    Set the HTTP proxy for Python to download through.

    If ``proxy`` is None then tries to set proxy from environment or system
    settings.

    :param proxy: The HTTP proxy server to use. For example:
        'http://proxy.example.com:3128/'
    :param user: The username to authenticate with. Use None to disable
        authentication.
    :param password: The password to authenticate with.
    :raises ValueError: if *proxy* is None and no system proxy could be
        detected.
    """
    # BUG FIX: removed unused ``from nltk import compat`` import.
    if proxy is None:
        # Try and find the system proxy settings
        try:
            proxy = getproxies()['http']
        except KeyError:
            raise ValueError('Could not detect default proxy settings')

    # Set up the proxy handler
    proxy_handler = ProxyHandler({'https': proxy, 'http': proxy})
    opener = build_opener(proxy_handler)

    if user is not None:
        # Set up basic proxy authentication if provided
        password_manager = HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(realm=None, uri=proxy, user=user, passwd=password)
        opener.add_handler(ProxyBasicAuthHandler(password_manager))
        opener.add_handler(ProxyDigestAuthHandler(password_manager))

    # Override the existing url opener
    install_opener(opener)
def __init__(self, url, username=None, password=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    # Delegate normal connection setup to the parent transport.
    super().__init__(url, username, password, timeout)

    # Setting ssl key verification as false: build an SSL context with
    # hostname checking disabled and install it as the process-wide
    # default opener, so HTTPS requests skip certificate verification.
    # NOTE(review): ssl._create_stdlib_context is a private CPython API
    # -- confirm it exists on all targeted Python versions.
    context = ssl._create_stdlib_context(check_hostname=False)
    unverified_handler = HTTPSHandler(context=context, check_hostname=False)
    install_opener(build_opener(unverified_handler))
Ejemplo n.º 8
0
def _read_from_url(url):
    """Reads data from *url* with an HTTP *GET*.

    This function supports fetching from resources which use basic HTTP auth as
    laid out by RFC1738 § 3.1. See § 5 for grammar definitions for URLs.

    .. seealso:

       https://www.ietf.org/rfc/rfc1738.txt

    :param url: URL of an HTTP resource
    :type url: ``str``

    :return: data read from resource described by *url*
    :rtype: ``file``-like object
    """
    url, username, password = _strip_basic_auth(url)
    if username is not None and password is not None:
        # case: url contains basic auth creds
        password_mgr = request.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, url, username, password)
        handler = request.HTTPBasicAuthHandler(password_mgr)
        # BUG FIX: build_opener takes handlers as *args; passing the
        # list itself made it try to register the list as a handler.
        opener = request.build_opener(*(default_handlers + [handler]))
    else:
        opener = default_opener

    return opener.open(url)
Ejemplo n.º 9
0
 def __init__(self, settings, address, handler_class, backend_port, username):
     # type: (CtlSettings, Tuple[str, int], Type[BaseHTTPRequestHandler], int, str) -> None
     """Record per-proxy state and start the underlying HTTP server."""
     self.username = username
     self.backend_port = backend_port
     self.user_auth_header = settings.user_auth_header
     # Opener that refuses to follow redirects, so backend responses
     # are relayed to the client as-is.
     self.opener = build_opener(NoRedirectHandler)
     super(ProxyServer, self).__init__(address, handler_class)
Ejemplo n.º 10
0
 def open(self, proxy=None, cache=True):
     """Open the resource at ``self.url`` and return a file-like object.

     Resolution order:
       1. local file path -> opened directly;
       2. an existing on-disk cache copy (when *cache* is true) -> opened;
       3. remote fetch, optionally through a SOCKS proxy; the payload is
          base64-decoded when it looks encoded, decoded to text, and
          written back to the cache file when one is configured.

     ``self._last_update_time`` is refreshed on every path.
     """
     if self.local:
         self._last_update_time = os.stat(self.url).st_mtime
         return open(self.url)
     elif cache and os.path.exists(self.cache):
         self._last_update_time = os.stat(self.cache).st_mtime
         return open(self.cache)
     else:
         r = Request(self.url)
         if proxy or self.proxy:
             # The explicit argument wins over the instance-level proxy.
             proxy = proxy if proxy else self.proxy
             opener = build_opener(SocksiPyHandler(
                 socks.PROXY_TYPES[proxy['type'].upper()],
                 proxy['ip'],
                 proxy['port'],
             ))
             data_io = opener.open(r)
         else:
             data_io = urlopen(r)
         data = data_io.read()
         if self.is_base64(data):
             logger.debug('BASE64 decode...')
             data = base64.b64decode(data)
         if not isinstance(data, str):
             data = data.decode('utf-8')
         if self.cache:
             # Persist the fetched payload, then serve it from disk so
             # the caller always gets a real file object.
             with open(self.cache, 'w') as f:
                 f.write(data)
             self._last_update_time = os.stat(self.cache).st_mtime
             return open(self.cache)
         self._last_update_time = time.time()
         return StringIO(data)
Ejemplo n.º 11
0
    def user_login(self, username, password):
        """Log in to the Horizon dashboard and retain the session cookie.

        Fetches the login page through a cookie-aware opener to scrape
        the CSRF token and default region, then POSTs the login form
        through the same opener (``self.opener``).
        """
        self.opener = request.build_opener(request.HTTPCookieProcessor())
        response = self.opener.open(CONF.dashboard.dashboard_url).read()

        # Grab the CSRF token and default region
        parser = HorizonHTMLParser()
        # BUG FIX: on Python 3 urllib returns bytes but HTMLParser.feed()
        # requires str.
        if isinstance(response, bytes):
            response = response.decode('utf-8')
        parser.feed(response)

        # construct login url for dashboard, discovery accommodates non-/ web
        # root for dashboard
        login_url = parse.urljoin(CONF.dashboard.dashboard_url, parser.login)

        # Prepare login form request
        req = request.Request(login_url)
        req.add_header("Content-type", "application/x-www-form-urlencoded")
        req.add_header("Referer", CONF.dashboard.dashboard_url)

        # Pass the default domain name regardless of the auth version in order
        # to test the scenario of when horizon is running with keystone v3
        params = {
            "username": username,
            "password": password,
            "region": parser.region,
            "domain": CONF.auth.default_credentials_domain_name,
            "csrfmiddlewaretoken": parser.csrf_token,
        }
        # BUG FIX: POST bodies must be bytes on Python 3.
        self.opener.open(req, parse.urlencode(params).encode('utf-8'))
Ejemplo n.º 12
0
def srtm_login_or_skip(monkeypatch):
    """Install an authenticated opener for NASA SRTM downloads.

    Skips the calling test when SRTM_USERNAME / SRTM_PASSWORD are not
    present in the environment; otherwise patches ``cartopy.io.urlopen``
    with an opener carrying Earthdata basic-auth credentials plus a
    cookie jar.
    """
    import os
    srtm_username = os.environ.get('SRTM_USERNAME')
    if srtm_username is None:
        pytest.skip('SRTM_USERNAME environment variable is unset.')
    srtm_password = os.environ.get('SRTM_PASSWORD')
    if srtm_password is None:
        pytest.skip('SRTM_PASSWORD environment variable is unset.')

    from six.moves.urllib.request import (HTTPBasicAuthHandler,
                                          HTTPCookieProcessor,
                                          HTTPPasswordMgrWithDefaultRealm,
                                          build_opener)
    from six.moves.http_cookiejar import CookieJar

    password_manager = HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(
        None,
        "https://urs.earthdata.nasa.gov",
        srtm_username,
        srtm_password)
    auth_handler = HTTPBasicAuthHandler(password_manager)
    cookie_handler = HTTPCookieProcessor(CookieJar())
    opener = build_opener(auth_handler, cookie_handler)

    monkeypatch.setattr(cartopy.io, 'urlopen', opener.open)
Ejemplo n.º 13
0
    def _set_cookies(self, src):
        '''
        function that returns a urllib2 opener for retrieving data from *src*

        input:
            *src* : 'asos' or 'wunderground' or 'wunder_nonairport'
        '''
        jar = http_cookiejar.CookieJar()
        opener = request.build_opener(request.HTTPCookieProcessor(jar))
        try:
            key = src.lower()
            if key == 'wunderground':
                # Hitting these three URLs primes the cookie jar so that
                # subsequent requests return data in METAR/CSV format.
                targets = [
                    'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?' % self.sta_id,
                    'http://www.wunderground.com/cgi-bin/findweather/getForecast?setpref=SHOWMETAR&value=1',
                    'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?&&theprefset=SHOWMETAR&theprefvalue=1&format=1' % self.sta_id,
                ]
            elif key == 'asos':
                targets = ['ftp://ftp.ncdc.noaa.gov/pub/data/asos-fivemin/']
            elif key == 'wunder_nonairport':
                targets = ['http://www.wunderground.com/weatherstation/WXDailyHistory.asp?ID=MEGKO3&day=1&year=2013&month=1&graphspan=day&format=1']
            else:
                targets = []
            for target in targets:
                opener.open(target)
        except error.URLError:
            print(('connection to %s not available. working locally' % src))

        return opener
Ejemplo n.º 14
0
def func(args):
    """Download each URL in ``args.url`` and copy the bodies to the
    output document (``args.outputdocument``) or stdout.

    Sends a custom User-agent ('Cmdix/<version>' unless overridden via
    ``args.useragent``).  HTTP errors are reported per-URL and the
    remaining URLs are still fetched.
    """
    if args.outputdocument:
        fdout = open(args.outputdocument, 'w')
    else:
        fdout = sys.stdout

    if args.useragent:
        useragent = args.useragent
    else:
        useragent = 'Cmdix/' + cmdix.__version__

    opener = build_opener()
    opener.addheaders = [('User-agent', useragent)]

    try:
        for url in args.url:
            try:
                fdin = opener.open(url)
            except HTTPError as e:
                # NOTE(review): StdErrException appears to report the
                # error on construction -- confirm.
                exception.StdErrException(
                    "HTTP error opening {0}: {1}".format(url, e))
                # BUG FIX: the original fell through and read the unbound
                # ``fdin``; skip to the next URL instead.
                continue

            length = int(fdin.headers['content-length'])
            print("Getting {0} bytes from {1}...".format(length, url))

            shutil.copyfileobj(fdin, fdout)
            print("Done")
    finally:
        # BUG FIX: close the output file when we opened it ourselves.
        if fdout is not sys.stdout:
            fdout.close()
Ejemplo n.º 15
0
 def __init__(self, url, keystone_url, credentials, **kwargs):
     """Remember endpoints and credentials; build a plain HTTP opener.

     Keystone authentication is deferred: ``self.keystone`` stays None
     until it is needed.
     """
     logger.info('Initiate HTTPClient with url %s', url)
     self.url = url
     self.keystone_url = keystone_url
     # Merge the explicit credential mapping with keyword overrides.
     self.creds = dict(credentials, **kwargs)
     self.keystone = None
     self.opener = request.build_opener(request.HTTPHandler)
Ejemplo n.º 16
0
def pytest_unconfigure(config):
    """
    Called after all tests are completed.

    Submits the collected benchmark profiles to a Codespeed server as
    JSON, using HTTP basic auth when a username is configured.  On an
    HTTP error the response body is written to a temp .html file for
    inspection and the error is re-raised; an unexpected success body
    raises ValueError.
    """
    global enabled, submit_url, commit, branch, environment, username, password

    if not enabled:
        return
    executable = "{}-{}-{}".format(platform.python_implementation(),
                                   platform.python_version(),
                                   platform.system())
    data = [x.to_codespeed_dict(commit=commit,
                                branch=branch,
                                environment=environment,
                                executable=executable,
                                project=project)
            for x in function_profile_list]

    try:
        json_submit_url = submit_url + 'result/add/json/'

        if username:
            password_mgr = urllib_request.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, json_submit_url, username, password)
            handler = urllib_request.HTTPBasicAuthHandler(password_mgr)
            opener = urllib_request.build_opener(handler)
        else:
            opener = urllib_request.build_opener()

        # use the opener to fetch a URL
        f = opener.open(json_submit_url, urlencode({'json': json.dumps(data)}).encode('UTF-8'))
        response = f.read()
    except HTTPError as e:
        print('Error while connecting to Codespeed:')
        print('Exception: {}'.format(str(e)))
        # Save the server's HTML error page so the failure can be inspected.
        fd, name = tempfile.mkstemp(suffix='.html')
        os.close(fd)
        with open(name, 'wb') as f:
            f.write(e.read())
        print('HTTP Response written to {}'.format(name))
        raise e

    if not response.startswith('All result data saved successfully'.encode('UTF-8')):
        print("Unexpected response while connecting to Codespeed:")
        raise ValueError('Unexpected response from Codespeed server: {}'.format(response))
    else:
        # BUG FIX: typo "sumbitted" -> "submitted" in the success message.
        print("{} test benchmarks submitted.".format(len(function_profile_list)))
Ejemplo n.º 17
0
	def reload(self, store):
		"""
		Force a reload of this response

		Fetches ``self.url`` again, rebuilds this object in place from
		the fresh response, and persists the refreshed copy to *store*.
		"""
		opener = request.build_opener()
		cr = self.from_response(opener.open(self.url))
		# Adopt every attribute of the freshly-parsed response in place.
		self.__dict__.update(vars(cr))
		store.set(self.url, self.save())
Ejemplo n.º 18
0
 def __insert_binary_file(self, url):
     """
     Insert a binary file. First download the file and then insert.
     """
     # Download the raw bytes and return them as a hex-encoded string.
     page = build_opener().open(url)
     hex_bytes = binascii.b2a_hex(page.read())
     return hex_bytes.decode()
Ejemplo n.º 19
0
    def _make_opener(self, realm, base_url, username, password):
        """uTorrent API need HTTP Basic Auth and cookie support for token verify.

        Builds a single opener carrying both a basic-auth handler (scoped
        to *realm*/*base_url*) and a cookie processor, installs it as the
        global default opener, and returns it.
        """
        auth_handler = HTTPBasicAuthHandler()
        auth_handler.add_password(realm=realm,
                                  uri=base_url,
                                  user=username,
                                  passwd=password)

        cookie_jar = CookieJar()
        cookie_handler = HTTPCookieProcessor(cookie_jar)

        opener = build_opener(auth_handler, cookie_handler)
        # BUG FIX: the original installed an auth-only opener globally and
        # then built a second auth+cookie opener; install the full opener
        # so global urlopen() calls also carry the cookies.
        install_opener(opener)
        return opener
Ejemplo n.º 20
0
 def __init__(self, url):
     """Deprecated Zabbix HTTP client: store *url* and build an opener."""
     msg = ('HTTPClientZabbix is deprecated and not used now. '
            'It will be dropped in short term period.')
     warn(msg, DeprecationWarning)
     self.url = url
     self.opener = request.build_opener(request.HTTPHandler)
Ejemplo n.º 21
0
def get_cookie_opener( gs_username, gs_token, gs_toolname=None ):
    """ Create a GenomeSpace cookie opener """
    cj = http_cookiejar.CookieJar()
    # Build "super-cookies" valid for every domain so the token and
    # username accompany all GenomeSpace requests.
    for cookie_name, cookie_value in ( ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ):
        cj.set_cookie( http_cookiejar.Cookie(
            version=0, name=cookie_name, value=cookie_value, port=None,
            port_specified=False, domain='', domain_specified=False,
            domain_initial_dot=False, path='/', path_specified=True,
            secure=False, expires=None, discard=True, comment=None,
            comment_url=None, rest={'HttpOnly': None}, rfc2109=False ) )
    cookie_opener = build_opener( HTTPCookieProcessor( cj ) )
    cookie_opener.addheaders.append( ( 'gs-toolname', gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME ) )
    return cookie_opener
Ejemplo n.º 22
0
 def __init__(self, server_url, opener=None):
     """Create a client bound to *server_url*.

     Builds a default HTTPS-capable opener when *opener* is omitted;
     otherwise the supplied opener must be an OpenerDirector.
     """
     self.logger = logging.getLogger(__name__ + '.Client')
     self.server_url = server_url
     if opener is None:
         opener = build_opener(create_urllib_https_handler())
     if not isinstance(opener, OpenerDirector):
         raise TypeError('opener must be {0.__module__}.{0.__name__}, not '
                         '{1!r}'.format(OpenerDirector, opener))
     self.opener = opener
     self.public_keys = PublicKeyDict(self)
Ejemplo n.º 23
0
 def debug():
     """
     Activate debugging on urllib2.
     """
     if not six.PY2:
         # Python 3: enable verbose output at the http.client layer.
         http_client.HTTPConnection.debuglevel = 1
     else:
         # Python 2: install a global opener with a debugging handler.
         install_opener(build_opener(HTTPSHandler(debuglevel=1)))
Ejemplo n.º 24
0
 def authfetch(username,password,path="/",realm=realm):
     """Fetch *path* from the test server using HTTP digest auth."""
     server.accept(2)
     import socket
     socket.setdefaulttimeout(5)
     uri = ("http://%s:%s" % server.server_address) + path
     handler = HTTPDigestAuthHandler()
     handler.add_password(realm, uri, username, password)
     result = build_opener(handler).open(uri)
     return result.read()
Ejemplo n.º 25
0
        def request():
            """POST the (closed-over) *payload* to the configured endpoint.

            Uses the closure's ``config`` for endpoint and optional proxy
            settings; logs a warning when the server answers non-200.
            """
            endpoint = config.endpoint
            if "://" not in endpoint:
                # Bare host given; let the config assemble a full URL.
                endpoint = config.get_endpoint()

            req = Request(endpoint, payload.encode("utf-8", "replace"), {"Content-Type": "application/json"})

            if config.proxy_host:
                proxies = ProxyHandler({"https": config.proxy_host, "http": config.proxy_host})

                opener = build_opener(proxies)
            else:
                opener = build_opener()

            resp = opener.open(req)
            status = resp.getcode()

            if status != 200:
                bugsnag.logger.warning("Notification to %s failed, status %d" % (config.endpoint, status))
Ejemplo n.º 26
0
def build_request_with_data(url, data, api_key, method):
    """Build a request with the received method.

    Returns an ``(opener, request)`` tuple: the opener follows redirects
    while preserving *method*, and the request carries *data* serialized
    as JSON with the api key folded into the URL.
    """
    http_redirect_with_data_handler = HTTPRedirectWithDataHandler(method=method)
    opener = build_opener(http_redirect_with_data_handler)
    install_opener(opener)
    url = make_url(url, api_key=api_key, args=None)
    # BUG FIX: urllib on Python 3 requires a bytes body; json.dumps
    # returns str.
    body = json.dumps(data).encode('utf-8')
    request = Request(url, headers={'Content-Type': 'application/json'}, data=body)
    # Force the non-GET/POST verb onto the request when needed.
    if request.get_method() != method:
        request.get_method = lambda: method
    return opener, request
Ejemplo n.º 27
0
 def clear_all_queries(self, cluster_name=DEFAULT_CLUSTER):
     """
     Clear all the primed queries from a particular cluster
     :param cluster_name: cluster to clear queries from
     """
     req = Request("http://{0}/{1}/{2}".format(
         self.admin_addr, "prime", cluster_name))
     # The admin API clears primes via the DELETE verb.
     req.get_method = lambda: 'DELETE'
     connection = build_opener(HTTPHandler).open(req)
     return connection.read()
Ejemplo n.º 28
0
    def submit_request(self, query):
        """POST *query* to the admin endpoint and return the decoded body."""
        body = json.dumps(query.fetch_json()).encode('utf8')

        req = Request("http://{}/{}{}".format(
            self.admin_addr, query.path, query.fetch_url_params()), data=body)
        req.get_method = lambda: 'POST'
        req.add_header("Content-Type", 'application/json')
        req.add_header("Content-Length", len(body))

        connection = build_opener(HTTPHandler).open(req)
        return connection.read().decode('utf-8')
Ejemplo n.º 29
0
 def __init__(self, configuration, debug=True):
     """Set up an XML API connection to the EMC NAS server.

     Builds a cookie-aware HTTPS opener (the cookie jar keeps the
     management session alive) and runs the initial setup handshake.
     """
     super(XMLAPIConnector, self).__init__()
     self.storage_ip = configuration.emc_nas_server
     self.user_name = configuration.emc_nas_login
     self.pass_word = configuration.emc_nas_password
     self.debug = debug
     base = "https://" + self.storage_ip
     self.auth_url = base + "/Login"
     self._url = base + "/servlets/CelerraManagementServices"
     self.url_opener = url_request.build_opener(
         url_request.HTTPSHandler(),
         url_request.HTTPCookieProcessor(http_cookiejar.CookieJar()))
     self._do_setup()
Ejemplo n.º 30
0
 def open_with_basic_auth(url, auth):
     """
     opens an url protected with basic http authentication
     :param url: string - the url to open
     :param auth: (user, password) tuple
     :return: the opened response object
     """
     user, passwd = auth
     pwd_mgr = six.moves.urllib.request.HTTPPasswordMgrWithDefaultRealm()
     pwd_mgr.add_password(None, url, user, passwd)
     auth_handler = six.moves.urllib.request.HTTPBasicAuthHandler(pwd_mgr)
     opener = request.build_opener(auth_handler)
     # Also make this opener the process-wide default.
     request.install_opener(opener)
     return opener.open(url)
Ejemplo n.º 31
0
def main():
    """Command-line entry point: deploy a Scrapy project egg to a target.

    Supports listing targets/projects, building an egg only, deploying
    to every configured target, or building and deploying to a single
    target.  Exits the process with a non-zero code on failure.
    """
    opts, args = parse_opts()
    exitcode = 0
    if not inside_project():
        _log("Error: no Scrapy project found in this location")
        sys.exit(1)

    # Make subsequent urlopen() calls follow redirects.
    install_opener(build_opener(HTTPRedirectHandler))

    if opts.list_targets:
        for name, target in _get_targets().items():
            print("%-20s %s" % (name, target['url']))
        return

    if opts.list_projects:
        target = _get_target(opts.list_projects)
        req = Request(_url(target, 'listprojects.json'))
        _add_auth_header(req, target)
        f = urlopen(req)
        projects = json.loads(f.read())['projects']
        print(os.linesep.join(projects))
        return

    tmpdir = None

    if opts.build_egg:  # build egg only
        egg, tmpdir = _build_egg()
        _log("Writing egg to %s" % opts.build_egg)
        shutil.copyfile(egg, opts.build_egg)
    elif opts.deploy_all_targets:
        # Reuse the first computed version so every target in this run
        # gets the same version string.
        version = None
        for name, target in _get_targets().items():
            if version is None:
                version = _get_version(target, opts)
            _build_egg_and_deploy_target(target, version, opts)
    else:  # build egg and deploy
        target_name = _get_target_name(args)
        target = _get_target(target_name)
        version = _get_version(target, opts)
        exitcode, tmpdir = _build_egg_and_deploy_target(target, version, opts)

    if tmpdir:
        if opts.debug:
            _log("Output dir not removed: %s" % tmpdir)
        else:
            shutil.rmtree(tmpdir)

    sys.exit(exitcode)
Ejemplo n.º 32
0
def DetectGce():
    """Determine whether or not we're running on GCE.

    This is based on:
      https://cloud.google.com/compute/docs/metadata#runninggce

    Returns:
      True iff we're running on a GCE instance.
    """
    metadata_request = urllib_request.Request('http://metadata.google.internal')
    # Bypass any configured proxies; the metadata host is link-local.
    opener = urllib_request.build_opener(urllib_request.ProxyHandler({}))
    try:
        response = opener.open(metadata_request)
    except urllib_error.URLError:
        return False
    return (response.getcode() == http_client.OK
            and response.headers.get('metadata-flavor') == 'Google')
Ejemplo n.º 33
0
    def opener(self):
        """
        Create an opener object.

        By calling :meth:`add_authentication` before calling this property for
        the first time, authentication credentials can be set.

        EXAMPLES::

            sage: from sage.dev.digest_transport import DigestTransport
            sage: DigestTransport().opener
            <urllib2.OpenerDirector instance at 0x...>
        """
        if self._opener is None:
            # Lazily build a default (credential-less) digest opener;
            # add_authentication() pre-seeds self._opener instead.
            self._opener = build_opener(HTTPDigestAuthHandler())
        return self._opener
Ejemplo n.º 34
0
def setup_urllib_proxies():
    """Install urllib proxy handlers for the detected system proxies.

    Idempotent: the first call flips a module-level guard so later calls
    are no-ops.  Does nothing when no system proxies were detected.
    """
    global _urllib_proxies_installed, SYSTEM_PROXIES

    if _urllib_proxies_installed:
        return
    _urllib_proxies_installed = True
    if not SYSTEM_PROXIES:
        return
    from six.moves.urllib.request import ProxyHandler, build_opener, install_opener

    proxies = {
        scheme: "%s://%s:%s" % (scheme, SYSTEM_PROXIES[scheme][0],
                                SYSTEM_PROXIES[scheme][1])
        for scheme in SYSTEM_PROXIES
    }
    install_opener(build_opener(ProxyHandler(proxies)))
Ejemplo n.º 35
0
    def run(self, args, opts):
        """Deploy the current Scrapy project to a Scrapyd target.

        Handles --list-targets / --list-projects queries, --build-egg
        (build only), a prebuilt --egg, and otherwise builds an egg and
        uploads it; sets ``self.exitcode`` to 1 when the upload fails.
        """
        try:
            import setuptools
        except ImportError:
            raise UsageError("setuptools not installed")

        # Make subsequent urlopen() calls follow redirects.
        request.install_opener(request.build_opener(HTTPRedirectHandler))

        if opts.list_targets:
            for name, target in _get_targets().items():
                print("%-20s %s" % (name, target['url']))
            return

        if opts.list_projects:
            target = _get_target(opts.list_projects)
            req = request.Request(_url(target, 'listprojects.json'))
            _add_auth_header(req, target)
            f = request.urlopen(req)
            projects = json.loads(f.read())['projects']
            print(os.linesep.join(projects))
            return

        tmpdir = None

        if opts.build_egg:  # build egg only
            egg, tmpdir = _build_egg()
            _log("Writing egg to %s" % opts.build_egg)
            shutil.copyfile(egg, opts.build_egg)
        else:  # build egg and deploy
            target_name = _get_target_name(args)
            target = _get_target(target_name)
            project = _get_project(target, opts)
            version = _get_version(target, opts)
            if opts.egg:
                _log("Using egg: %s" % opts.egg)
                egg = opts.egg
            else:
                _log("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            if not _upload_egg(target, egg, project, version):
                self.exitcode = 1

        if tmpdir:
            if opts.debug:
                _log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir)
Ejemplo n.º 36
0
def _follow_redirects(uri_list, http_timeout):
    """ Follow HTTP redirects from servers.  Needed so that we can create
        RewriteRules for all repository URLs that pkg clients may encounter.

        We return a sorted list of URIs that were found having followed all
        redirects in 'uri_list'.  We also return a boolean, True if we timed out
        when following any of the URIs.
        """

    ret_uris = set(uri_list)
    timed_out = False

    class SysrepoRedirectHandler(HTTPRedirectHandler):
        """ A HTTPRedirectHandler that saves URIs we've been
                redirected to along the path to our eventual destination."""
        def __init__(self):
            self.redirects = set()

        def redirect_request(self, req, fp, code, msg, hdrs, newurl):
            # Record the hop, then defer to the stock redirect logic.
            self.redirects.add(newurl)
            return HTTPRedirectHandler.redirect_request(
                self, req, fp, code, msg, hdrs, newurl)

    for uri in uri_list:
        handler = SysrepoRedirectHandler()
        opener = build_opener(handler)
        # Non-http URIs (e.g. file:) cannot redirect; pass them through.
        if not uri.startswith("http:"):
            ret_uris.update([uri])
            continue

        # otherwise, open a known url to check for redirects
        try:
            opener.open("{0}/versions/0".format(uri), None, http_timeout)
            # Strip the probe path so we record repository roots only.
            ret_uris.update(
                set([
                    item.replace("/versions/0", "").rstrip("/")
                    for item in handler.redirects
                ]))
        except URLError as err:
            # We need to log this, and carry on - the url
            # could become available at a later date.
            # NOTE(review): any URLError sets timed_out, not only actual
            # timeouts -- the flag name overstates what we know here.
            msg(
                _("WARNING: unable to access {uri} when checking "
                  "for redirects: {err}").format(**locals()))
            timed_out = True

    return sorted(list(ret_uris)), timed_out
Ejemplo n.º 37
0
    def try_del(httpd, querystr):
        """Try DEL calls to the server."""

        num_requests["del_handler"] = 0

        req = Request(httpd_url(httpd, "/api/resource/1", querystr))
        # Force the non-standard "DEL" verb onto the request.
        req.get_method = lambda: "DEL"
        f = build_opener(HTTPHandler).open(req)

        assert f.getcode() == 200
        expected = {
            "called": 1,
            "id": "1",
            "query": querystr
        }
        assert json.loads(f.read()) == expected
        assert num_requests["del_handler"] == 1
Ejemplo n.º 38
0
 def __init__(self):
     """Set up a test client bound to the module-level server/handler.

     Normalizes the server URL, creates (if needed) and loads a
     persistent cookie jar via the module-level ``tc`` helper
     (presumably a twill-like browser -- confirm), and builds an opener
     sharing those cookies.
     """
     self.server = server
     self.handler = handler
     self.waited = -1
     self.tool = 'echo_' + handler
     self._hda_id = None
     self._hda_state = None
     self._history_id = None
     if not self.server.startswith('http'):
         self.server = 'http://' + self.server
     self.cookie_jar = os.path.join(var_dir, "cookie_jar")
     dprint("cookie jar path: %s" % self.cookie_jar)
     if not os.access(self.cookie_jar, os.R_OK):
         dprint("no cookie jar at above path, creating")
         tc.save_cookies(self.cookie_jar)
     tc.load_cookies(self.cookie_jar)
     self.opener = build_opener(HTTPCookieProcessor(tc.get_browser().cj))
Ejemplo n.º 39
0
Archivo: cdash.py Proyecto: zygyz/spack
    def upload(self, filename):
        """Upload *filename* to the configured CDash URL via HTTP PUT.

        No-op when ``self.cdash_upload_url`` is unset.
        """
        if not self.cdash_upload_url:
            return

        # Compute md5 checksum for the contents of this file.
        md5sum = checksum(hashlib.md5, filename, block_size=8192)

        opener = build_opener(HTTPHandler)
        with open(filename, 'rb') as f:
            put_url = "{0}&MD5={1}".format(self.cdash_upload_url, md5sum)
            req = Request(put_url, data=f)
            req.add_header('Content-Type', 'text/xml')
            req.add_header('Content-Length', os.path.getsize(filename))
            # By default, urllib2 only support GET and POST.
            # CDash needs expects this file to be uploaded via PUT.
            req.get_method = lambda: 'PUT'
            opener.open(req)
Ejemplo n.º 40
0
    def add_authentication(self, realm, url, username, password):
        """
        Set authentication credentials for the opener returned by
        :meth:`opener`.

        EXAMPLES::

            sage: from sage.dev.digest_transport import DigestTransport
            sage: dt = DigestTransport()
            sage: dt.add_authentication("realm", "url", "username", "password")
            sage: dt.opener
            <urllib2.OpenerDirector instance at 0x...>
        """
        # The opener is built lazily; credentials may only be supplied
        # before its first use.
        assert self._opener is None
        handler = HTTPDigestAuthHandler()
        handler.add_password(realm, url, username, password)
        self._opener = build_opener(handler)
Ejemplo n.º 41
0
def populate_buildgroup(job_names, group_name, project, site, credentials,
                        cdash_url):
    """Create the 'Daily' and 'Latest' buildgroups on CDash and populate the
    'Latest' group with dynamic rows matching *job_names*.

    Raises SpackError if either buildgroup cannot be created/retrieved or
    the final PUT does not return HTTP 200.
    """
    url = "{0}/api/v1/buildgroup.php".format(cdash_url)
    headers = {
        'Authorization': 'Bearer {0}'.format(credentials),
        'Content-Type': 'application/json',
    }

    opener = build_opener(HTTPHandler)

    parent_group_id = _create_buildgroup(
        opener, headers, url, project, group_name, 'Daily')
    group_id = _create_buildgroup(
        opener, headers, url, project,
        'Latest {0}'.format(group_name), 'Latest')

    if not parent_group_id or not group_id:
        msg = 'Failed to create or retrieve buildgroups for {0}'.format(
            group_name)
        raise SpackError(msg)

    data = {
        'project': project,
        'buildgroupid': group_id,
        'dynamiclist': [
            {'match': name, 'parentgroupid': parent_group_id, 'site': site}
            for name in job_names
        ],
    }

    enc_data = json.dumps(data).encode('utf-8')

    request = Request(url, data=enc_data, headers=headers)
    request.get_method = lambda: 'PUT'

    response = opener.open(request)
    response_code = response.getcode()

    if response_code != 200:
        msg = 'Error response code ({0}) in populate_buildgroup'.format(
            response_code)
        raise SpackError(msg)
Ejemplo n.º 42
0
def http_call(method, url, data=None):
    """Utility method for making HTTP requests.

    Sends *data* (if any) as a JSON body using the given HTTP *method*
    and returns the JSON-decoded response.  Raises RomanaException for
    HTTP status codes >= 400.
    """
    LOG.debug("http_call(): Calling %s %s" % (method, url))
    opener = build_opener(HTTPHandler)
    if data:
        data = simplejson.dumps(data)
        LOG.debug("http_call(): With body: %s" % data)
        # urllib on Python 3 requires the request body to be bytes.
        data = data.encode('utf-8')
    request = Request(url, data)
    request.add_header('Accept', 'application/json')
    if data:
        request.add_header('Content-Type', 'application/json')
    # urllib only speaks GET/POST natively; force the caller's verb.
    request.get_method = lambda: method
    resp = opener.open(request)
    if resp.getcode() >= 400:
        # Interpolate the message: the original passed the format arguments
        # straight through to the exception, leaving the %s placeholders
        # unfilled.
        raise exceptions.RomanaException(
            "Error in %s %s with payload %s: %s" % (method, url, data, resp))
    body = resp.read()
    return simplejson.loads(body)
Ejemplo n.º 43
0
    def read(self, size=-1):
        """Read *size* bytes from the current position via an HTTP Range
        request; read all remaining bytes when *size* is negative.

        Advances ``self.pos`` by the number of bytes actually returned.
        """
        if self.fileobj:
            self.fileobj.close()
        opener = build_opener(HTTPRangeHandler)
        install_opener(opener)

        if size < 0:
            rangeheader = {'Range': 'bytes=%s-' % (self.pos)}
        else:
            rangeheader = {'Range': 'bytes=%s-%s' % (self.pos, self.pos + size - 1)}

        req = Request(self.url, headers=rangeheader)
        res = urlopen(req)

        data = res.read()
        # Advance by the bytes actually read: the original did
        # ``self.pos += size``, which moved the position *backwards* for
        # size=-1 and overshot whenever the server returned fewer bytes
        # than requested.
        self.pos += len(data)

        return data
Ejemplo n.º 44
0
    def http_request(self, url, method, data="", headers=None, timeout=None):
        """Issue an HTTP request with an arbitrary *method* and return the
        raw response object.

        Prefixes ``http://`` when *url* lacks the scheme.  *timeout* (in
        seconds) is applied via the opener rather than urlopen.
        """
        if url[0:7].lower() != "http://":
            url = "http://%s" % url

        # Request() iterates the headers mapping, so the None default from
        # the signature would crash it; normalize to an empty dict.
        if headers is None:
            headers = {}

        if hasattr(self, 'logger') and self.logger is not None:
            self.logger.debug("Sending http request. Url: %s, Data: %s, Headers: %s" % (url, str(data), str(headers)))

        req = Request(url, data, headers)
        req.get_method = lambda: method
        # The timeout parameter in urllib2.urlopen has strange behavior, and
        # seems to raise errors when set to a number. Using an opener works however.
        opener = build_opener()
        if timeout is None:
            response = opener.open(req)
        else:
            response = opener.open(req, timeout=timeout)

        return response
Ejemplo n.º 45
0
    def do_call(self, url, data=None, method=None,
                calltimeout=constants.SOCKET_TIMEOUT):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.

        :param url: request path; appended to ``self.url`` when that is set.
        :param data: request body, passed straight through to urllib.
        :param method: optional HTTP verb override (otherwise GET/POST).
        :param calltimeout: per-call socket timeout in seconds.
        :returns: the JSON-decoded response; on connection failure, a
            synthesized error dict carrying ERROR_CONNECT_TO_SERVER.
        :raises exception.InvalidInput: when the response body (or the
            synthesized error body) is not valid JSON.
        """
        if self.url:
            url = self.url + url
        # Skip logging for session (login) URLs so credentials in the
        # request data are not written to the debug log.
        if "xx/sessions" not in url:
            LOG.debug('Request URL: %(url)s\n'
                      'Call Method: %(method)s\n'
                      'Request Data: %(data)s\n',
                      {'url': url,
                       'method': method,
                       'data': data})
        # NOTE(review): installs a cookie-aware opener as the process-wide
        # default on every call; the urlopen() below then picks it up.
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)
        result = None

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                # Force the HTTP verb; urllib only does GET/POST natively.
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=calltimeout)
            res = res_temp.read().decode("utf-8")

            LOG.debug('Response Data: %(res)s.', {'res': res})

        except Exception as err:
            LOG.error(_LE('\nBad response from server: %(url)s.'
                          ' Error: %(err)s'), {'url': url, 'err': err})
            # Fabricate a JSON error body so the caller always gets a
            # uniform structure instead of an exception.
            res = '{"error":{"code":%s,' \
                  '"description":"Connect server error"}}' \
                  % constants.ERROR_CONNECT_TO_SERVER

        try:
            result = jsonutils.loads(res)
        except Exception as err:
            err_msg = (_('JSON transfer error: %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidInput(reason=err_msg)

        return result
Ejemplo n.º 46
0
 def __init__(self, configuration, debug=True):
     """Set up an authenticated XML API connection to the EMC NAS server.

     Builds an opener that keeps session cookies and, when the driver
     configuration provides one, a custom SSL context; then performs the
     initial login via :meth:`_do_setup`.
     """
     super(XMLAPIConnector, self).__init__()
     self.storage_ip = configuration.emc_nas_server
     self.username = configuration.emc_nas_login
     self.password = configuration.emc_nas_password
     self.debug = debug
     self.auth_url = 'https://' + self.storage_ip + '/Login'
     self._url = 'https://{}/servlets/CelerraManagementServices'.format(
         self.storage_ip)
     # Honor driver-specific SSL settings (cert verification etc.); fall
     # back to the library-default HTTPS handler when none are configured.
     context = enas_utils.create_ssl_context(configuration)
     if context:
         https_handler = url_request.HTTPSHandler(context=context)
     else:
         https_handler = url_request.HTTPSHandler()
     # Session cookies returned by /Login must be replayed on later calls.
     cookie_handler = url_request.HTTPCookieProcessor(
         http_cookiejar.CookieJar())
     self.url_opener = url_request.build_opener(https_handler,
                                                cookie_handler)
     # NOTE(review): performs network I/O (login) inside the constructor.
     self._do_setup()
Ejemplo n.º 47
0
    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        # Endpoint and timing settings, falling back to client defaults.
        self.client.address = self.settings.get("address", self.client.address)
        self.client.data_address = self.settings.get("data-address", self.client.data_address)
        self.client.timeout = dehumanize_time(self.settings.get("timeout", self.client.timeout))
        self.send_interval = dehumanize_time(self.settings.get("send-interval", self.send_interval))
        self.browser_open = self.settings.get("browser-open", self.browser_open)
        token = self.settings.get("token", "")
        # Optional proxy: installed process-wide for all urllib requests.
        proxy_settings = self.engine.config.get("settings").get("proxy")
        if proxy_settings:
            if proxy_settings.get("address"):
                proxy_url = urlsplit(proxy_settings.get("address"))
                username = proxy_settings.get("username")
                pwd = proxy_settings.get("password")
                if username and pwd:
                    # Embed the credentials into the proxy URI.
                    proxy_uri = "%s://%s:%s@%s" % (proxy_url.scheme, username, pwd, proxy_url.netloc)
                else:
                    proxy_uri = "%s://%s" % (proxy_url.scheme, proxy_url.netloc)
                proxy_handler = ProxyHandler({"https": proxy_uri, "http": proxy_uri})
                opener = build_opener(proxy_handler)
                install_opener(opener)

        if not token:
            self.log.warning("No BlazeMeter API key provided, will upload anonymously")
        self.client.token = token

        # Session/test identity may be pre-set via CLI parameters.
        self.client.active_session_id = self.parameters.get("session-id", None)
        self.client.test_id = self.parameters.get("test-id", None)
        self.client.user_id = self.parameters.get("user-id", None)
        self.client.data_signature = self.parameters.get("signature", None)

        if not self.client.test_id:
            test_name = self.parameters.get("test", "Taurus Test")  # TODO: provide a way to put datetime into test name
            try:
                self.client.ping()  # to check connectivity and auth
                if token:
                    # NOTE(review): this assigns self.test_id while the
                    # guard above checks self.client.test_id -- possibly
                    # meant to be self.client.test_id; confirm against the
                    # consumers of these attributes before changing.
                    self.test_id = self.client.test_by_name(test_name, {"type": "external"})
            except HTTPError:
                self.log.error("Cannot reach online results storage, maybe the address/token is wrong")
                raise
Ejemplo n.º 48
0
def fixture_httpd_with_proxy_handler(docroot):
    """Yields a started MozHttpd server for the proxy test."""
    server = mozhttpd.MozHttpd(port=0, docroot=str(docroot))
    server.start(block=False)

    # Route all plain-HTTP traffic through the freshly started server.
    proxy_port = server.httpd.server_port
    install_opener(build_opener(ProxyHandler({
        "http": "http://127.0.0.1:{port:d}".format(port=proxy_port),
    })))

    yield server

    server.stop()

    # Reset proxy opener in case it changed
    install_opener(None)
Ejemplo n.º 49
0
    def user_login(self, username, password):
        """Log in to the Horizon dashboard through its HTML form.

        Fetches the login page to obtain the CSRF token and default region,
        then POSTs the credentials; cookies are kept in ``self.opener``.
        """
        self.opener = request.build_opener(request.HTTPCookieProcessor())
        # Decode: read() returns bytes on Python 3 but HTMLParser.feed()
        # requires text.
        response = self.opener.open(
            CONF.dashboard.dashboard_url).read().decode('utf-8')

        # Grab the CSRF token and default region
        parser = HorizonHTMLParser()
        parser.feed(response)

        # Prepare login form request
        req = request.Request(CONF.dashboard.login_url)
        req.add_header('Content-type', 'application/x-www-form-urlencoded')
        req.add_header('Referer', CONF.dashboard.dashboard_url)
        params = {
            'username': username,
            'password': password,
            'region': parser.region,
            'csrfmiddlewaretoken': parser.csrf_token
        }
        # Encode: urlopen request bodies must be bytes on Python 3.
        self.opener.open(req, parse.urlencode(params).encode('utf-8'))
Ejemplo n.º 50
0
    def try_del(self, server_port, querystr):
        """Issue a DEL request against resource 1 and verify the delete
        handler ran exactly once with the expected id and query string."""
        self.resource_del_called = 0

        opener = build_opener(HTTPHandler)
        req = Request(self.get_url('/api/resource/1', server_port, querystr))
        req.get_method = lambda: 'DEL'
        response = opener.open(req)

        try:
            self.assertEqual(response.getcode(), 200)
        except AttributeError:
            pass  # python 2.4
        self.assertEqual(
            json.loads(response.read()),
            {'called': 1, 'id': str(1), 'query': querystr})
        self.assertEqual(self.resource_del_called, 1)
Ejemplo n.º 51
0
def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
                        cdashids_mirror_url):
    """Record 'depends on' relationships in CDash between *job_build_id* and
    the build of every dependency listed in ``spec_map['deps']``.

    No-op when *job_build_id* is falsy.  Raises SpackError when CDash
    answers with anything other than HTTP 200/201.
    """
    if not job_build_id:
        return

    dep_map = spec_map['deps']

    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }

    cdash_api_url = '{0}/api/v1/relateBuilds.php'.format(cdash_base_url)

    # The opener is loop-invariant; build it once instead of once per
    # dependency as the original did.
    opener = build_opener(HTTPHandler)

    for dep_pkg_name in dep_map:
        tty.debug('Fetching cdashid file for {0}'.format(dep_pkg_name))
        dep_spec = dep_map[dep_pkg_name]
        dep_build_id = read_cdashid_from_mirror(dep_spec, cdashids_mirror_url)

        payload = {
            "project": cdash_project,
            "buildid": job_build_id,
            "relatedid": dep_build_id,
            "relationship": "depends on"
        }

        enc_data = json.dumps(payload).encode('utf-8')

        request = Request(cdash_api_url, data=enc_data, headers=headers)

        response = opener.open(request)
        response_code = response.getcode()

        if response_code not in (200, 201):
            msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
                job_build_id, dep_build_id, response_code)
            raise SpackError(msg)

        response_text = response.read()
        tty.debug('Relate builds response: {0}'.format(response_text))
Ejemplo n.º 52
0
def wait_for_spark_workers(num_of_expected_workers, timeout):
    """
    This queries the spark master and checks for the expected number of workers

    Polls once per second.  Returns True when the expected worker count is
    reached, False when *timeout* seconds elapse first (the original
    returned True on both paths, making a timeout indistinguishable from
    success, and its greedy '.*(\\d+)' captured only the last digit of the
    worker count).
    """
    start_time = time.time()
    # Loop-invariant: build the opener once.
    opener = build_opener(HTTPHandler)
    while True:
        request = Request("http://{0}:7080".format(CASSANDRA_IP))
        request.get_method = lambda: 'GET'
        connection = opener.open(request)
        # Raw string + non-greedy '.*?' so the group captures the whole
        # number rather than its final digit.
        match = re.search(r'Alive Workers:.*?(\d+)</li>',
                          connection.read().decode('utf-8'))
        if match and int(match.group(1)) == num_of_expected_workers:
            return True
        if time.time() - start_time > timeout:
            return False
        time.sleep(1)
Ejemplo n.º 53
0
def get_file(url):
    """Open *url* and return a closing() wrapper around the response,
    translating HTTP and network failures into pgxnclient exceptions."""
    opener = build_opener()
    opener.addheaders = [('User-agent', 'pgxnclient/%s' % __version__)]
    logger.debug('opening url: %s', url)
    try:
        return closing(opener.open(url))
    except HTTPError as e:
        # Map the well-known status codes onto dedicated exceptions.
        if e.code == 404:
            raise ResourceNotFound(_("resource not found: '%s'") % e.url)
        if e.code == 400:
            raise BadRequestError(_("bad request on '%s'") % e.url)
        if e.code == 500:
            raise NetworkError(_("server error"))
        if e.code == 503:
            raise NetworkError(_("service unavailable"))
        raise NetworkError(
            _("unexpected response %d for '%s'") % (e.code, e.url))
    except URLError as e:
        raise NetworkError(_("network error: %s") % e.reason)
Ejemplo n.º 54
0
    def __init__(self, base_url, endpoint, username, password):
        """Store connection settings for the Drupal Services endpoint and
        install a cookie-aware opener as the process-wide default."""
        self.base_url = base_url.strip()
        # remove left '/' if any
        self.endpoint = endpoint.strip().lstrip('/')
        self.username = username.strip()
        self.password = password.strip()

        # Derived settings.  NOTE: services_link is built from the raw
        # (unstripped) arguments, unlike the attributes above.
        self.services_link = "%s/%s" % (base_url, endpoint)
        self.services_session_token = None
        self.http_user_agent = 'DrupalComputingAgent'
        self.http_content_type = 'application/json'

        # Cookie support: build an opener with a cookie processor and make
        # it the default for urllib requests instead of the BaseHandler.
        cookie_opener = urllib_request.build_opener(
            urllib_request.HTTPCookieProcessor())
        urllib_request.install_opener(cookie_opener)
Ejemplo n.º 55
0
def make_image_object_from_url(image_url):
    """Fetch *image_url* and return a PIL Image object, or None when the
    image cannot be retrieved or decoded (deliberate best-effort)."""
    # parse url
    parsed_url = urlparse(image_url)

    # handle absolute and relative urls, Assuming http for now.
    if not parsed_url.scheme:
        image_url = '%s%s' % (get_setting('site', 'global', 'siteurl'), image_url)

    request = Request(image_url)
    request.add_header('User-Agent', settings.TENDENCI_USER_AGENT)
    opener = build_opener()

    # make image object
    try:
        # NOTE: sets a process-wide socket timeout, not just for this call.
        socket.setdefaulttimeout(1.5)
        data = opener.open(request).read()  # get data
        im = Image.open(BytesIO(data))
    except Exception:
        # Narrowed from a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.  Any fetch/decode failure => None.
        im = None
    return im
Ejemplo n.º 56
0
def test_nonexistent_resources(httpd_no_urlhandlers):
    """A server with no API handlers must 404 for GET, POST and arbitrary
    (DEL) methods on an unregistered endpoint."""
    # GET: Return 404 for non-existent endpoint
    with pytest.raises(HTTPError) as excinfo:
        urlopen(httpd_url(httpd_no_urlhandlers, "/api/resource/"))
    assert excinfo.value.code == 404

    # POST: POST should also return 404.  The body must be bytes --
    # urlopen raises TypeError for str data on Python 3.
    with pytest.raises(HTTPError) as excinfo:
        urlopen(httpd_url(httpd_no_urlhandlers, "/api/resource/"),
                data=json.dumps({}).encode('utf-8'))
    assert excinfo.value.code == 404

    # DEL: DEL should also return 404
    opener = build_opener(HTTPHandler)
    request = Request(httpd_url(httpd_no_urlhandlers, "/api/resource/"))
    request.get_method = lambda: "DEL"

    with pytest.raises(HTTPError) as excinfo:
        opener.open(request)
    assert excinfo.value.code == 404
Ejemplo n.º 57
0
def http_emitter(message, log, url):
    """Send payload

    POSTs the measurements in *message* as JSON to ``<url>/intake`` with no
    proxy.  HTTP 202 is treated as "accepted"; other HTTP errors re-raise;
    non-HTTP failures are logged as a forwarder outage.
    """
    log.debug('http_emitter: attempting postback to ' + url)

    # Post back the data
    partial_payload = [measurement for measurement in message]

    payload = json.dumps(partial_payload)
    if PY3:
        payload = payload.encode('utf-8')
    url = "%s/intake" % url
    headers = post_headers(payload)

    # Make sure no proxy is autodetected for this localhost connection
    proxy_handler = ProxyHandler({})
    # Should this be installed as the default opener and reused?
    opener = build_opener(proxy_handler)
    request = Request(url, payload, headers)
    response = None
    try:
        response = opener.open(request)
        log.debug('http_emitter: postback response: ' +
                  str(response.read()))
    except HTTPError as e:
        # In the original, this handler sat *outside* a generic
        # ``except Exception`` that had already swallowed HTTPError, so
        # the 202-accepted path was unreachable.  Handle it first.
        if e.code == 202:
            log.debug("http payload accepted")
        else:
            raise
    except Exception as exc:
        log.error("""Forwarder at {0} is down or not responding...
                  Error is {1}
                  Please restart the monasca-agent.""".format(
            url, repr(exc)))
    finally:
        if response:
            response.close()
Ejemplo n.º 58
0
def check_nltk():
    """Ensure the NLTK 'punkt' tokenizer data is available, downloading it
    (honoring HTTP_PROXY and the sslVerify flag) when tokenization fails."""
    try:
        from nltk.tokenize import word_tokenize
        word_tokenize('It\'s.')
    except Exception:
        import nltk
        if not sslVerify:
            from ssl import _create_unverified_context
            from six.moves.urllib.request import install_opener, HTTPSHandler, build_opener
            # TODO: This needs still proxy support !
            unverified_ctx = _create_unverified_context()
            install_opener(build_opener(HTTPSHandler(context=unverified_ctx)))

        if 'HTTP_PROXY' in os.environ:
            nltk.set_proxy(os.environ.get('HTTP_PROXY'))

        nltk.download('punkt')
Ejemplo n.º 59
0
    def test_nonexistent_resources(self):
        """Unregistered endpoints must return 404 for GET, POST and
        arbitrary (DEL) methods."""
        # Create a server with a placeholder handler so we don't fall back
        # to serving local files
        httpd = mozhttpd.MozHttpd(port=0)
        httpd.start(block=False)
        try:
            server_port = httpd.httpd.server_port

            # GET: Return 404 for non-existent endpoint
            exception_thrown = False
            try:
                urlopen(self.get_url('/api/resource/', server_port, None))
            except HTTPError as e:
                self.assertEqual(e.code, 404)
                exception_thrown = True
            self.assertTrue(exception_thrown)

            # POST: POST should also return 404 (the body must be bytes on
            # Python 3)
            exception_thrown = False
            try:
                urlopen(
                    self.get_url('/api/resource/', server_port, None),
                    data=json.dumps({}).encode('utf-8'),
                )
            except HTTPError as e:
                self.assertEqual(e.code, 404)
                exception_thrown = True
            self.assertTrue(exception_thrown)

            # DEL: DEL should also return 404
            exception_thrown = False
            try:
                opener = build_opener(HTTPHandler)
                request = Request(self.get_url('/api/resource/', server_port,
                                               None))
                request.get_method = lambda: 'DEL'
                opener.open(request)
            except HTTPError as e:
                # Bug fix: the original wrote ``except HTTPError:`` without
                # binding ``e``, so this branch raised NameError instead of
                # asserting the status code.
                self.assertEqual(e.code, 404)
                exception_thrown = True
            self.assertTrue(exception_thrown)
        finally:
            # Shut the server down; the original leaked the server thread.
            httpd.stop()
Ejemplo n.º 60
0
def main():
    """Validate HTML pages (the command-line arguments, or google.com by
    default) against the W3C validator via multipart file upload."""
    import tempfile, sys, os

    validatorURL = "http://validator.w3.org/check"
    opener = build_opener(MultipartPostHandler)

    def validateFile(url):
        # Fetch the page into a temp file.  mkstemp returns an open
        # OS-level fd: close it after writing (the original leaked it).
        fd, path = tempfile.mkstemp(suffix=".html")
        try:
            write(fd, opener.open(url).read())
        finally:
            os.close(fd)
        uploaded = open(path, "rb")
        params = {
            "ss": "0",  # show source
            "doctype": "Inline",
            "uploaded_file": uploaded
        }
        print(opener.open(validatorURL, params).read())
        uploaded.close()
        remove(path)

    if len(sys.argv[1:]) > 0:
        for arg in sys.argv[1:]:
            validateFile(arg)
    else:
        validateFile("http://www.google.com")