Example #1
def __init__(self, url, username=None, password=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        super().__init__(url, username, password, timeout)

        # Disable SSL certificate and hostname verification
        context = ssl._create_stdlib_context(check_hostname=False)
        unverified_handler = HTTPSHandler(context=context, check_hostname=False)
        install_opener(build_opener(unverified_handler))
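
The snippet above leans on the private helper ssl._create_stdlib_context. A minimal standalone sketch of the same idea, assuming only ssl._create_unverified_context, the stdlib's own shortcut for a no-verify context:

import ssl
from urllib.request import HTTPSHandler, build_opener, install_opener

# Build a context that skips certificate and hostname checks, then install
# a global opener so every later urlopen() call uses it.
context = ssl._create_unverified_context()
install_opener(build_opener(HTTPSHandler(context=context)))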
Example #2
def set_proxy(proxy, user=None, password=''):
    """
    Set the HTTP proxy for Python to download through.

    If ``proxy`` is None then tries to set proxy from environment or system
    settings.

    :param proxy: The HTTP proxy server to use. For example:
        'http://proxy.example.com:3128/'
    :param user: The username to authenticate with. Use None to disable
        authentication.
    :param password: The password to authenticate with.
    """
    from nltk import compat

    if proxy is None:
        # Try and find the system proxy settings
        try:
            proxy = getproxies()['http']
        except KeyError:
            raise ValueError('Could not detect default proxy settings')

    # Set up the proxy handler
    proxy_handler = ProxyHandler({'https': proxy, 'http': proxy})
    opener = build_opener(proxy_handler)

    if user is not None:
        # Set up basic proxy authentication if provided
        password_manager = HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(realm=None, uri=proxy, user=user, passwd=password)
        opener.add_handler(ProxyBasicAuthHandler(password_manager))
        opener.add_handler(ProxyDigestAuthHandler(password_manager))

    # Override the existing URL opener
    install_opener(opener)
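
A hedged usage sketch; the proxy address and credentials are placeholders:

# Route all subsequent urlopen() calls through an authenticated proxy.
set_proxy('http://proxy.example.com:3128/', user='alice', password='s3cret')

# Or fall back to the system/environment proxy settings:
set_proxy(None)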
Example #3
def debug():
    """
    Activate debugging on urllib2.
    """
    if six.PY2:
        handler = HTTPSHandler(debuglevel=1)
        opener = build_opener(handler)
        install_opener(opener)
    else:
        http_client.HTTPConnection.debuglevel = 1
Example #4
def build_request_with_data(url, data, api_key, method):
    """Build a request with the received method."""
    http_redirect_with_data_handler = HTTPRedirectWithDataHandler(method=method)
    opener = build_opener(http_redirect_with_data_handler)
    install_opener(opener)
    url = make_url(url, api_key=api_key, args=None)
    request = Request(url, headers={'Content-Type': 'application/json'}, data=json.dumps(data))
    request_method = request.get_method()
    if request_method != method:
        request.get_method = lambda: method
    return opener, request
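
The get_method override above is the classic urllib trick for sending verbs other than GET and POST. A minimal sketch of just that trick (URL and payload are placeholders):

from urllib.request import Request

req = Request('https://api.example.com/item/1', data=b'{}',
              headers={'Content-Type': 'application/json'})
req.get_method = lambda: 'PUT'  # force PUT instead of the default POST

On Python 3.3+ the same effect is available directly with Request(..., method='PUT').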
Example #5
def quick_test():
	"""Quick test/example of CacheHandler"""
	from httplib2 import FileCache
	logging.basicConfig(level=logging.DEBUG)
	store = FileCache(".cache")
	opener = request.build_opener(CacheHandler(store))
	request.install_opener(opener)
	response = request.urlopen("http://www.google.com/")
	print(response.headers)
	print("Response:", response.read()[:100], '...\n')

	response.reload(store)
	print(response.headers)
	print("After reload:", response.read()[:100], '...\n')
Example #6
def open_with_basic_auth(url, auth):
    """
    Opens a URL protected with HTTP basic authentication.
    :param url: string - the url to open
    :param auth: tuple - (user, password)
    :return: the opened response
    """
    user, passwd = auth
    p = six.moves.urllib.request.HTTPPasswordMgrWithDefaultRealm()
    p.add_password(None, url, user, passwd)
    auth_handler = six.moves.urllib.request.HTTPBasicAuthHandler(p)
    opener = request.build_opener(auth_handler)
    request.install_opener(opener)
    return opener.open(url)
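
A hedged usage sketch (URL and credentials are placeholders):

response = open_with_basic_auth('https://example.com/protected',
                                ('alice', 's3cret'))
print(response.read()[:100])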
Example #7
def url_get(base_url, password_mgr=None, pathspec=None, params=None):
    """Make contact with the uri provided and return any contents."""
    # Uses system proxy settings if they exist.
    proxy = urlrequest.ProxyHandler()
    if password_mgr is not None:
        auth = urlrequest.HTTPDigestAuthHandler(password_mgr)
        urlopener = urlrequest.build_opener(proxy, auth)
    else:
        urlopener = urlrequest.build_opener(proxy)
    urlrequest.install_opener(urlopener)
    full_url = build_url(base_url, pathspec=pathspec, params=params)
    response = urlopener.open(full_url)
    content = response.read()
    response.close()
    return content
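
A hedged usage sketch; the endpoint and credentials are placeholders, and realm=None makes the password manager match any realm:

from six.moves.urllib import request as urlrequest

mgr = urlrequest.HTTPPasswordMgrWithDefaultRealm()
mgr.add_password(None, 'https://api.example.com', 'alice', 's3cret')
content = url_get('https://api.example.com', password_mgr=mgr,
                  pathspec='status')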
Example #8
    def run(self, args, opts):
        try:
            import setuptools
        except ImportError:
            raise UsageError("setuptools not installed")

        request.install_opener(request.build_opener(HTTPRedirectHandler))

        if opts.list_targets:
            for name, target in _get_targets().items():
                print("%-20s %s" % (name, target['url']))
            return

        if opts.list_projects:
            target = _get_target(opts.list_projects)
            req = request.Request(_url(target, 'listprojects.json'))
            _add_auth_header(req, target)
            f = request.urlopen(req)
            projects = json.loads(f.read())['projects']
            print(os.linesep.join(projects))
            return

        tmpdir = None

        if opts.build_egg: # build egg only
            egg, tmpdir = _build_egg()
            _log("Writing egg to %s" % opts.build_egg)
            shutil.copyfile(egg, opts.build_egg)
        else:  # build egg and deploy
            target_name = _get_target_name(args)
            target = _get_target(target_name)
            project = _get_project(target, opts)
            version = _get_version(target, opts)
            if opts.egg:
                _log("Using egg: %s" % opts.egg)
                egg = opts.egg
            else:
                _log("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            if not _upload_egg(target, egg, project, version):
                self.exitcode = 1

        if tmpdir:
            if opts.debug:
                _log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir)
Example #9
    def _make_opener(self, realm, base_url, username, password):
        """uTorrent API need HTTP Basic Auth and cookie support for token verify."""

        auth_handler = HTTPBasicAuthHandler()
        auth_handler.add_password(realm=realm,
                                  uri=base_url,
                                  user=username,
                                  passwd=password)
        opener = build_opener(auth_handler)
        install_opener(opener)

        cookie_jar = CookieJar()
        cookie_handler = HTTPCookieProcessor(cookie_jar)

        handlers = [auth_handler, cookie_handler]
        opener = build_opener(*handlers)
        return opener
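
A hedged usage sketch; client stands for an instance of the surrounding class, and the host, credentials, and token URL are placeholders modeled on the usual uTorrent Web UI layout:

opener = client._make_opener('uTorrent', 'http://localhost:8080/gui/',
                             'admin', 's3cret')
token_page = opener.open('http://localhost:8080/gui/token.html').read()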
Example #10
    def do_call(self, url, data=None, method=None,
                calltimeout=constants.SOCKET_TIMEOUT):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.
        """
        if self.url:
            url = self.url + url
        if "xx/sessions" not in url:
            LOG.debug('Request URL: %(url)s\n'
                      'Call Method: %(method)s\n'
                      'Request Data: %(data)s\n',
                      {'url': url,
                       'method': method,
                       'data': data})
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)
        result = None

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=calltimeout)
            res = res_temp.read().decode("utf-8")

            LOG.debug('Response Data: %(res)s.', {'res': res})

        except Exception as err:
            LOG.error(_LE('\nBad response from server: %(url)s.'
                          ' Error: %(err)s'), {'url': url, 'err': err})
            res = '{"error":{"code":%s,' \
                  '"description":"Connect server error"}}' \
                  % constants.ERROR_CONNECT_TO_SERVER

        try:
            result = jsonutils.loads(res)
        except Exception as err:
            err_msg = (_('JSON transfer error: %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidInput(reason=err_msg)

        return result
Example #11
def _get_initial_token(url):
    """
    Create initial connection to get authentication token for future
    requests.

    Returns a string to be used in subsequent connections with the
    X-CSRFToken header or the empty string if we didn't find any token in
    the cookies.
    """
    cookiejar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookiejar))
    install_opener(opener)
    opener.open(url)

    for cookie in cookiejar:
        if cookie.name == 'csrftoken':
            return cookie.value

    return ''
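
A hedged usage sketch (the login URL is a placeholder):

token = _get_initial_token('https://courses.example.org/login')
headers = {'X-CSRFToken': token} if token else {}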
Example #12
    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        self.client.address = self.settings.get("address", self.client.address)
        self.client.data_address = self.settings.get("data-address", self.client.data_address)
        self.client.timeout = dehumanize_time(self.settings.get("timeout", self.client.timeout))
        self.send_interval = dehumanize_time(self.settings.get("send-interval", self.send_interval))
        self.browser_open = self.settings.get("browser-open", self.browser_open)
        token = self.settings.get("token", "")
        proxy_settings = self.engine.config.get("settings").get("proxy")
        if proxy_settings:
            if proxy_settings.get("address"):
                proxy_url = urlsplit(proxy_settings.get("address"))
                username = proxy_settings.get("username")
                pwd = proxy_settings.get("password")
                if username and pwd:
                    proxy_uri = "%s://%s:%s@%s" % (proxy_url.scheme, username, pwd, proxy_url.netloc)
                else:
                    proxy_uri = "%s://%s" % (proxy_url.scheme, proxy_url.netloc)
                proxy_handler = ProxyHandler({"https": proxy_uri, "http": proxy_uri})
                opener = build_opener(proxy_handler)
                install_opener(opener)

        if not token:
            self.log.warning("No BlazeMeter API key provided, will upload anonymously")
        self.client.token = token

        self.client.active_session_id = self.parameters.get("session-id", None)
        self.client.test_id = self.parameters.get("test-id", None)
        self.client.user_id = self.parameters.get("user-id", None)
        self.client.data_signature = self.parameters.get("signature", None)

        if not self.client.test_id:
            test_name = self.parameters.get("test", "Taurus Test")  # TODO: provide a way to put datetime into test name
            try:
                self.client.ping()  # to check connectivity and auth
                if token:
                    self.test_id = self.client.test_by_name(test_name, {"type": "external"})
            except HTTPError:
                self.log.error("Cannot reach online results storage, maybe the address/token is wrong")
                raise
Example #13
    def __init__(self, go_server, path=''):
        self.go_server = go_server

        self.logger = logging.getLogger(__name__+'.URL')

        self.contents = []

        full_url = go_server.url + path

        if self.go_server.user and self.go_server.password:
            self.logger.debug("logging in as %s"%self.go_server.user)
            passman = HTTPPasswordMgrWithDefaultRealm()
            passman.add_password(None, full_url,
                    self.go_server.user,
                    self.go_server.password)
            install_opener(build_opener(HTTPBasicAuthHandler(passman)))

        self.logger.debug("reading url %s"%full_url)

        file_handle = urlopen(full_url)
        if six.PY2:
            for line in file_handle:
                self.contents.append(line)
        else:
            for line in file_handle:
                self.contents.append(line.decode())
        self.logger.debug('line count: %d'%len(self.contents))

        file_handle.close()

        path_parts = urlsplit(full_url).path.split('/')
        last = path_parts[-1]

        # /path/to/something/
        if last == '':
            path_parts.pop()
            last = path_parts[-1]

        self.path_parts = path_parts
        self.file_name = last
        self.file_path = '/'.join(path_parts[0:-1])
Example #14
def _get_initial_token(url):
    """
    Create initial connection to get authentication token for future
    requests.

    Returns a string to be used in subsequent connections with the
    X-CSRFToken header or the empty string if we didn't find any token in
    the cookies.
    """
    logging.info("Getting initial CSRF token.")

    cookiejar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookiejar))
    install_opener(opener)
    opener.open(url)

    for cookie in cookiejar:
        if cookie.name == "csrftoken":
            logging.info("Found CSRF token.")
            return cookie.value

    logging.warning("Did not find the CSRF token.")
    return ""
Example #15
    def call(self, url, data=None, method=None):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.
        """
        if "xx/sessions" not in url:
            LOG.debug('Request URL: %(url)s\n'
                      'Call Method: %(method)s\n'
                      'Request Data: %(data)s\n',
                      {'url': url,
                       'method': method,
                       'data': data})
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=constants.SOCKET_TIMEOUT)
            res = res_temp.read().decode("utf-8")

            LOG.debug('Response Data: %(res)s.', {'res': res})

        except Exception as err:
            LOG.error(_LE('Bad response from server: %s.') % err)
            raise err

        try:
            res_json = jsonutils.loads(res)
        except Exception as err:
            err_msg = (_('JSON transfer error: %s.') % err)
            LOG.error(err_msg)
            raise exception.InvalidShare(reason=err_msg)

        return res_json
Example #16
    def do_call(self, url, data=None, method=None):
        """Send requests to server.

        Send HTTPS call, get response in JSON.
        Convert response into Python Object and return it.
        """
        if "xx/sessions" not in url:
            LOG.debug(
                "Request URL: %(url)s\n" "Call Method: %(method)s\n" "Request Data: %(data)s\n",
                {"url": url, "method": method, "data": data},
            )
        opener = urlreq.build_opener(urlreq.HTTPCookieProcessor(self.cookie))
        urlreq.install_opener(opener)
        result = None

        try:
            req = urlreq.Request(url, data, self.headers)
            if method:
                req.get_method = lambda: method
            res_temp = urlreq.urlopen(req, timeout=constants.SOCKET_TIMEOUT)
            res = res_temp.read().decode("utf-8")

            LOG.debug("Response Data: %(res)s.", {"res": res})

        except Exception as err:
            LOG.error(_LE("\nBad response from server: %(url)s." " Error: %(err)s"), {"url": url, "err": err})
            res = '{"error":{"code":%s,' '"description":"Connect server error"}}' % constants.ERROR_CONNECT_TO_SERVER

        try:
            result = jsonutils.loads(res)
        except Exception as err:
            err_msg = _("JSON transfer error: %s.") % err
            LOG.error(err_msg)
            raise exception.InvalidInput(reason=err_msg)

        return result
Example #17
    def __init__(self, url, cookie_file=None, username=None, password=None,
                 api_token=None, agent=None, session=None, disable_proxy=False,
                 auth_callback=None, otp_token_callback=None,
                 verify_ssl=True, save_cookies=True):
        if not url.endswith('/'):
            url += '/'

        self.url = url + 'api/'

        self.save_cookies = save_cookies

        if self.save_cookies:
            self.cookie_jar, self.cookie_file = create_cookie_jar(
                cookie_file=cookie_file)

            try:
                self.cookie_jar.load(ignore_expires=True)
            except IOError:
                pass
        else:
            self.cookie_jar = CookieJar()
            self.cookie_file = None

        # Get the cookie domain from the url. If the domain
        # does not contain a '.' (e.g. 'localhost'), we assume
        # it is a local domain and suffix it (See RFC 2109).
        parsed_url = urlparse(url)
        self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

        if self.domain.count('.') < 1:
            self.domain = '%s.local' % self.domain

        if session:
            cookie = Cookie(
                version=0,
                name=RB_COOKIE_NAME,
                value=session,
                port=None,
                port_specified=False,
                domain=self.domain,
                domain_specified=True,
                domain_initial_dot=True,
                path=parsed_url[2],
                path_specified=True,
                secure=False,
                expires=None,
                discard=False,
                comment=None,
                comment_url=None,
                rest={'HttpOnly': None})
            self.cookie_jar.set_cookie(cookie)

            if self.save_cookies:
                self.cookie_jar.save()

        if username:
            # If the username parameter is given, we have to clear the session
            # cookie manually or it will override the username:password
            # combination retrieved from the authentication callback.
            try:
                self.cookie_jar.clear(self.domain, parsed_url[2],
                                      RB_COOKIE_NAME)
            except KeyError:
                pass

        # Set up the HTTP libraries to support all of the features we need.
        password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                                  username,
                                                  password,
                                                  api_token,
                                                  auth_callback,
                                                  otp_token_callback)
        self.preset_auth_handler = PresetHTTPAuthHandler(self.url,
                                                         password_mgr)

        handlers = []

        if not verify_ssl:
            context = ssl._create_unverified_context()
            handlers.append(HTTPSHandler(context=context))

        if disable_proxy:
            handlers.append(ProxyHandler({}))

        handlers += [
            HTTPCookieProcessor(self.cookie_jar),
            ReviewBoardHTTPBasicAuthHandler(password_mgr),
            HTTPDigestAuthHandler(password_mgr),
            self.preset_auth_handler,
            ReviewBoardHTTPErrorProcessor(),
        ]

        if agent:
            self.agent = agent
        else:
            self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

        opener = build_opener(*handlers)
        opener.addheaders = [
            (b'User-agent', self.agent),
        ]
        install_opener(opener)

        self._cache = None
        self._urlopen = urlopen
Example #18
    def __init__(self,
                 url,
                 cookie_file=None,
                 username=None,
                 password=None,
                 api_token=None,
                 agent=None,
                 session=None,
                 disable_proxy=False,
                 auth_callback=None,
                 otp_token_callback=None,
                 verify_ssl=True,
                 save_cookies=True,
                 ext_auth_cookies=None):
        if not url.endswith('/'):
            url += '/'

        self.url = url + 'api/'

        self.save_cookies = save_cookies
        self.ext_auth_cookies = ext_auth_cookies

        if self.save_cookies:
            self.cookie_jar, self.cookie_file = create_cookie_jar(
                cookie_file=cookie_file)

            try:
                self.cookie_jar.load(ignore_expires=True)
            except IOError:
                pass
        else:
            self.cookie_jar = CookieJar()
            self.cookie_file = None

        if self.ext_auth_cookies:
            try:
                self.cookie_jar.load(ext_auth_cookies, ignore_expires=True)
            except IOError as e:
                logging.critical(
                    'There was an error while loading a '
                    'cookie file: %s', e)
                pass

        # Get the cookie domain from the url. If the domain
        # does not contain a '.' (e.g. 'localhost'), we assume
        # it is a local domain and suffix it (See RFC 2109).
        parsed_url = urlparse(url)
        self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

        if self.domain.count('.') < 1:
            self.domain = '%s.local' % self.domain

        if session:
            cookie = Cookie(version=0,
                            name=RB_COOKIE_NAME,
                            value=session,
                            port=None,
                            port_specified=False,
                            domain=self.domain,
                            domain_specified=True,
                            domain_initial_dot=True,
                            path=parsed_url[2],
                            path_specified=True,
                            secure=False,
                            expires=None,
                            discard=False,
                            comment=None,
                            comment_url=None,
                            rest={'HttpOnly': None})
            self.cookie_jar.set_cookie(cookie)

            if self.save_cookies:
                self.cookie_jar.save()

        if username:
            # If the username parameter is given, we have to clear the session
            # cookie manually or it will override the username:password
            # combination retrieved from the authentication callback.
            try:
                self.cookie_jar.clear(self.domain, parsed_url[2],
                                      RB_COOKIE_NAME)
            except KeyError:
                pass

        # Set up the HTTP libraries to support all of the features we need.
        password_mgr = ReviewBoardHTTPPasswordMgr(self.url, username, password,
                                                  api_token, auth_callback,
                                                  otp_token_callback)
        self.preset_auth_handler = PresetHTTPAuthHandler(
            self.url, password_mgr)

        handlers = []

        if not verify_ssl:
            context = ssl._create_unverified_context()
            handlers.append(HTTPSHandler(context=context))

        if disable_proxy:
            handlers.append(ProxyHandler({}))

        handlers += [
            HTTPCookieProcessor(self.cookie_jar),
            ReviewBoardHTTPBasicAuthHandler(password_mgr),
            HTTPDigestAuthHandler(password_mgr),
            self.preset_auth_handler,
            ReviewBoardHTTPErrorProcessor(),
        ]

        if agent:
            self.agent = agent
        else:
            self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

        opener = build_opener(*handlers)
        opener.addheaders = [
            (str('User-agent'), str(self.agent)),
        ]
        install_opener(opener)

        self._cache = None
        self._urlopen = urlopen
Example #19
    def setUp(self):
        """ Set up the mock objects to do our unit tests """
        my_opener = urllib2.build_opener(MyHTTPHandler)
        urllib2.install_opener(my_opener)
        self.p = dlipower.PowerSwitch(hostname='lpc.digital-loggers.com')
Example #20
def getPageHTML2(
        sURL, dSendHeaders = {}, tProxy = ('', -1 ), doFrames = True, bUnZip = True ):
    #
    """
    Returns dReceiveHeaders and the page HTML.
    Gets the page HTML (content) and related data with synchronous
    (blocking) operation.
    If the page cannot be fetched, returns an empty string.
    """
    #
    from String.Get     import getUnZipped
    from String.Test    import isGzipped
   #from Utils.Both2n3  import Request, urlopen, build_opener, install_opener, URLError
    from Web.Address    import UrlMustHaveSchemeAndPath
    from Web.HTML       import oFindFrameBeg, oFindiFrameEnd, oFindiFrameBeg, \
                            oFindFrameSpec
    from Web.Zip        import getCompressedOffChunks
    #
    sURL                = UrlMustHaveSchemeAndPath( sURL )
    #
    sHTML               = ''
    dReceiveHeaders     = {}
    #
    sProxy              = ''
    #
    sRealURL            = sURL
    #
    sReason = sErrorCode = sComment = ''
    #
    if tProxy != ('', -1 ) and isDotQuadPortTuple( tProxy ):
        #
        # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/456195
        #
        sProxy          = '%s:%s' % tProxy
        #
        opener  = build_opener(
                    ConnectHTTPHandler( proxy = sProxy ), ConnectHTTPSHandler( proxy = sProxy ) )
        #
        install_opener(opener)
        #
    #
    try:
        #
        oRequest        = Request( sURL, headers = dSendHeaders )
        #
        oPage           = urlopen( oRequest )
        #
    except URLError:
        #
        error, msg, traceback = exc_info()
        #
        if hasattr(msg, 'reason'):
            sComment    = 'We failed to reach the server.'
            sReason     = msg.reason
        elif hasattr(msg, 'code'):
            sComment    = 'The server couldn\'t fulfill the request.'
            sErrorCode  = str( msg.code )
        #
    except SocketError:
        #
        error, msg, traceback = exc_info()
        #
        sComment        = 'We hit a socket error on open.'
        sReason         = str( msg )
        #
    except Exception:
        #
        error, msg, traceback = exc_info()
        #
        sComment        = 'We hit some unknown error.'
        sReason         = str( msg )
        #
    else:
        #
        try:
            sHTML           = oPage.read()
            dReceiveHeaders = oPage.info()
            sRealURL        = oPage.geturl()
        except (ValueError, SocketError):
            error, msg, traceback = exc_info()
            sComment        = 'We hit a socket error on read.'
            sReason         = str( msg )
        #
    #
    dResult = dict(
                sReason     = sReason,
                sErrorCode  = sErrorCode,
                sComment    = sComment,
                sRealURL    = sRealURL )
    #
    if bUnZip and (
                dReceiveHeaders.get( 'content-encoding' ) == 'gzip' or
                isGzipped( sHTML ) ):
        #
        bUnZipped, sHTML = getUnZipped( sHTML )
        #
        if not bUnZipped and \
                dReceiveHeaders.get( 'transfer-encoding' ) == 'chunked':
            #
            sHTML       = getCompressedOffChunks( sHTML )
            #
            bUnZipped, sHTML = getUnZipped( sHTML )
            #
        #
    #
    if sHTML and doFrames:
        #
        liFrames        = oFindiFrameBeg.findall( sHTML )
        lFrames         = oFindFrameBeg.findall(  sHTML )
        #
        sCookie         = dReceiveHeaders.get( 'cookie' )
        #
        if liFrames:
            #
            liFrameSpecs = [ lFrame[ 1 ] for lFrame in
                                [ oFindFrameSpec.split( sFrame ) for sFrame in liFrames ]
                                if len( lFrame ) > 1 ]
            #
            sHTML = _getFrameText(
                        sHTML, liFrameSpecs, sURL, dSendHeaders, sCookie, tProxy,
                        oFindiFrameBeg, oFindiFrameEnd )
            #
        #
        if lFrames:
            #
            lParts      = oFindFrameBeg.split( sHTML )
            #
            if lParts: del lParts[ 0 ]
            #
            lFrameSpecs     = [ lFrame[ 1 ] for lFrame in
                                [ oFindFrameSpec.split( sPart ) for sPart in lParts ]
                                if len( lFrame ) > 1 ]
            #
            sHTML = _getFrameText(
                        sHTML, lFrameSpecs, sURL, dSendHeaders, sCookie, tProxy, oFindFrameBeg )
            #
    #
    return sHTML, dReceiveHeaders, dResult
Example #21
def download_scripts(proxies=None, install_dir=None):
    import visdom
    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded:
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        # - js
        '%[email protected]/dist/jquery.min.js' % b: 'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b: 'bootstrap.min.js',
        '%[email protected]/umd/react.production.min.js' % b: 'react-react.min.js',
        '%[email protected]/umd/react-dom.production.min.js' % b: 'react-dom.min.js',  # noqa
        '%[email protected]/dist/react-modal.min.js' % b: 'react-modal.min.js',  # noqa
        'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_SVG':  # noqa
            'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js': 'plotly-plotly.min.js',
        # Stanford Javascript Crypto Library for Password Hashing
        '%[email protected]/sjcl.js' % b: 'sjcl.js',

        # - css
        '%[email protected]/css/styles.css' % b: 'react-resizable-styles.css',  # noqa
        '%[email protected]/css/styles.css' % b: 'react-grid-layout-styles.css',  # noqa
        '%scss/bootstrap.min.css' % bb: 'bootstrap.min.css',

        # - fonts
        '%[email protected]' % b: 'classnames',
        '%[email protected]' % b: 'layout_bin_packer',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
            'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
            'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
            'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
            'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:  # noqa
            'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
        else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    built_path = os.path.join(here, 'static/version.built')
    is_built = False
    if os.path.exists(built_path):
        with open(built_path, 'r') as build_file:
            build_version = build_file.read().strip()
        if build_version == visdom.__version__:
            is_built = True
        else:
            os.remove(built_path)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory:
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file:
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename) or not is_built:
            req = request.Request(key,
                                  headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.reason, key))

    if not is_built:
        with open(built_path, 'w+') as build_file:
            build_file.write(visdom.__version__)
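
A hedged usage sketch; the proxy addresses are placeholders:

download_scripts(proxies={'http': 'http://proxy.example.com:3128',
                          'https': 'http://proxy.example.com:3128'})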
Example #22
    def _post(self, url,
              param_dict={},
              files={},
              securityHandler=None,
              additional_headers={},
              custom_handlers=[],
              proxy_url=None,
              proxy_port=80,
              compress=True,
              out_folder=None,
              file_name=None):
        """
        Performs a POST operation on a URL.

        Inputs:
           param_dict - key/value pair of values
              ex: {"foo": "bar"}
           files - key/value pair of file objects where the key is
              the input name and the value is the file path
              ex: {"file": r"c:\temp\myfile.zip"}
           securityHandler - object that handles the token or other site
              security.  It must inherit from the base security class.
              ex: arcrest.AGOLSecurityHandler("SomeUsername", "SOMEPASSWORD")
           additional_headers - are additional key/value headers that a user
              wants to pass during the operation.
              ex: {"accept-encoding": "gzip"}
           custom_handlers - this is additional web operation handlers as a
              list of objects.
              Ex: [CustomAuthHandler]
           proxy_url - url of the proxy
           proxy_port - default 80, port number of the proxy
           compress - default true, determines if gzip should be used or not
              for the web operation.
           out_folder - if the URL requested returns a file, this will be the
              disk save location
           file_name - if the operation returns a file and the file name is not
             given in the header or a user wishes to override the return saved
             file name, provide value here.
        Output:
           returns dictionary or string depending on web operation.
        """
        self._last_method = "POST"
        headers = {}
        opener = None
        return_value = None
        handlers = [RedirectHandler()]
        param_dict, handler, cj = self._processHandler(securityHandler, param_dict)
        if handler is not None:
            handlers.append(handler)
        if cj is not None:
            handlers.append(request.HTTPCookieProcessor(cj))
        if isinstance(custom_handlers, list) and \
           len(custom_handlers) > 0:
            for h in custom_handlers:
                handlers.append(h)
        if compress:
            headers['Accept-Encoding'] = 'gzip'
        else:
            headers['Accept-Encoding'] = ''
        for k,v in additional_headers.items():
            headers[k] = v
            del k,v
        opener = request.build_opener(*handlers)
        request.install_opener(opener)
        opener.addheaders = [(k,v) for k,v in headers.items()]
        if len(files) == 0:
            data = urlencode(param_dict)
            if self.PY3:
                data = data.encode('ascii')
            opener.data = data
            resp = opener.open(url.encode('ascii'), data=data)
        else:
            mpf = MultiPartForm(param_dict=param_dict,
                                files=files)
            req = request.Request(url.encode('ascii'))
            body = mpf.make_result
            req.add_header('User-agent', self.useragent)
            req.add_header('Content-type', mpf.get_content_type())
            req.add_header('Content-length', len(body))
            req.data = body
            resp = request.urlopen(req)
            del body, mpf
        self._last_code = resp.getcode()
        self._last_url = resp.geturl()
        return_value = self._process_response(resp=resp,
                                              out_folder=out_folder)
        if isinstance(return_value, dict):
            if "error" in return_value and \
               'message' in return_value['error']:
                if return_value['error']['message'].lower() == 'request not made over ssl':
                    if url.startswith('http://'):
                        url = url.replace('http://', 'https://')
                        return self._post(url,
                                          param_dict,
                                          files,
                                          securityHandler,
                                          additional_headers,
                                          custom_handlers,
                                          proxy_url,
                                          proxy_port,
                                          compress,
                                          out_folder,
                                          file_name)
        return return_value
Example #23
# -*- coding: utf-8 -*-
"""Exposes urllib imports with additional request handlers."""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals, with_statement)

# pylint: disable=import-error
from six.moves.urllib import error, parse, request  # noqa: F401

from watchmaker.utils.urllib.request_handlers import HAS_BOTO3, S3Handler

if HAS_BOTO3:
    request.install_opener(request.build_opener(S3Handler))
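
Once the S3 opener is installed, s3:// URLs go through the same urllib interface as any other scheme. A hedged sketch (bucket and key are placeholders):

response = request.urlopen('s3://my-bucket/path/to/object')
data = response.read()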
Example #24
from six.moves.urllib import request
from docutils import nodes
from docutils.utils import relative_path

import sphinx
from sphinx.locale import _
from sphinx.builders.html import INVENTORY_FILENAME


handlers = [request.ProxyHandler(), request.HTTPRedirectHandler(), request.HTTPHandler()]
try:
    handlers.append(request.HTTPSHandler)
except AttributeError:
    pass

request.install_opener(request.build_opener(*handlers))

UTF8StreamReader = codecs.lookup("utf-8")[2]


def read_inventory_v1(f, uri, join):
    f = UTF8StreamReader(f)
    invdata = {}
    line = next(f)
    projname = line.rstrip()[11:]
    line = next(f)
    version = line.rstrip()[11:]
    for line in f:
        name, type, location = line.rstrip().split(None, 2)
        location = join(uri, location)
        # version 1 did not add anchors to the location
Example #25
    def conn(self, req):
        '''URL connection wrapper catching common exceptions and retrying
        where necessary.
        :param req: either a url string or a request object
        '''
        sr = self.__sec, 'Connection Manager'
        self.setRequest(req)
        req_str = self.getRequestStr()

        #if self.auth is set it should have been added to the request header... might be legacy where that hasn't happened
        if self.auth:
            self.addRequestHeader("Authorization", self.auth)

        request.install_opener(self.opener(purl=self.openerstrs_ntlm))

        retry = INIT_MAX_RETRY_ATTEMPTS
        while retry > 0:
            retry -= 1
            try:
                handle = request.urlopen(self.getRequest())  #,data)
                if handle:
                    if handle.geturl() != req_str:
                        msg = 'Redirect Warning'
                        #cannot easily mask redirected url so logging original
                        LM.info(
                            msg, LM._LogExtra(*sr,
                                              exc=None,
                                              url=req_str,
                                              rty=0))
                    return handle
                #self.setResponse(handle)
                #break
            except HTTPError as he:
                last_exc = he
                if re.search('429', str(he)):
                    msg = 'RateLimit Error {0}. Sleep awaiting 429 expiry. Attempt {1}'.format(
                        he, MAX_RETRY_ATTEMPTS - retry)
                    LM.error(msg,
                             LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                    LDSAPI.sleepIncr(retry)
                    continue
                elif retry:
                    # I'm leaving this code here to test with because LDS was
                    # somehow throwing exceptions as well as redirecting
                    #
                    #if re.search('301',str(he)):
                    #	msg = 'Redirect Error {0}'.format(he)
                    #	#if we have a valid response and its a 301 see if it contains a redirect-to
                    #	if handle and handle.geturl():
                    #		retry = 1
                    #		self.setRequest(handle.geturl()) #TODO reauth?
                    #		msg += '. Attempting alternate connect'
                    #	else:
                    #		retry = 0
                    #	LM.error(msg,LM._LogExtra(*sr,exc=he,url=self.getRequestStr(),rty=0))
                    #	continue
                    if re.search('401|500', str(he)):
                        msg = 'HTTP Error {0} Returns {1}. Attempt {2}'.format(
                            req_str, he, MAX_RETRY_ATTEMPTS - retry)
                        LM.error(
                            msg,
                            LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                        continue
                    elif re.search('403', str(he)):
                        msg = 'HTTP Error {0} Returns {1}. Attempt {2} (consider proxy)'.format(
                            req_str, he, MAX_RETRY_ATTEMPTS - retry)
                        LM.error(
                            msg,
                            LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                        continue
                    elif re.search('502', str(he)):
                        msg = 'Proxy Error {0} Returns {1}. Attempt {2}'.format(
                            req_str, he, MAX_RETRY_ATTEMPTS - retry)
                        LM.error(
                            msg,
                            LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                        continue
                    elif re.search('410', str(he)):
                        msg = 'Layer removed {0} Returns {1}. Attempt {2}'.format(
                            req_str, he, MAX_RETRY_ATTEMPTS - retry)
                        LM.error(
                            msg,
                            LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                        retry = 0
                        continue
                    else:
                        msg = 'Error with request {0} returns {1}'.format(
                            req_str, he)
                        LM.error(
                            msg,
                            LM._LogExtra(*sr, exc=he, url=req_str, rty=retry))
                        continue
                else:
                    # Retries have been exhausted; re-raise the active HTTP error
                    raise he
            except HttpResponseError as rd:
                LM.warning('Disconnect. {}'.format(rd),
                           LM._LogExtra(*sr, exc=rd, url=req_str, rty=retry))
                LDSAPI.sleepIncr(retry)
                continue
            except URLError as ue:
                LM.warning('URL error on connect {}'.format(ue),
                           LM._LogExtra(*sr, exc=ue, url=req_str, rty=retry))
                if re.search('Connection refused|violation of protocol',
                             str(ue)):
                    LDSAPI.sleepIncr(retry)
                continue
                #raise ue
            except ConnectionError as ce:
                LM.warning('Error on connection. {}'.format(ce),
                           LM._LogExtra(*sr, exc=ce, url=req_str, rty=retry))
                LDSAPI.sleepIncr(retry)
                continue
            except ValueError as ve:
                LM.error('Value error on connect {}'.format(ve),
                         LM._LogExtra(*sr, exc=ve, url=req_str, rty=retry))
                raise ve
            except Exception as xe:
                LM.error('Other error on connect {}'.format(xe),
                         LM._LogExtra(*sr, exc=xe, url=req_str, rty=retry))
                raise xe
        else:
            raise last_exc
Example #26
def download_scripts(proxies=None, install_dir=None):

    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        import visdom
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded:
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        '%[email protected]/dist/jquery.min.js' % b: 'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b: 'bootstrap.min.js',
        '%[email protected]/css/styles.css' % b: 'react-resizable-styles.css',
        '%[email protected]/css/styles.css' % b: 'react-grid-layout-styles.css',
        '%[email protected]/dist/react.min.js' % b: 'react-react.min.js',
        '%[email protected]/dist/react-dom.min.js' % b: 'react-dom.min.js',
        '%[email protected]' % b: 'classnames',
        '%[email protected]' % b: 'layout_bin_packer',
        'https://raw.githubusercontent.com/STRML/react-grid-layout/0.14.0/dist/' +
        'react-grid-layout.min.js': 'react-grid-layout.min.js',
        'https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_SVG':
            'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js':
            'plotly-plotly.min.js',
        '%scss/bootstrap.min.css' % bb: 'bootstrap.min.css',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
            'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
            'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
            'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
            'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:
            'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
         else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory:
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file:
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename):
            req = request.Request(key, headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(exc.reason, key))
Example #27
def download_scripts(proxies=None, install_dir=None):

    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        import visdom
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded:
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        '%[email protected]/dist/jquery.min.js' % b:
        'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b:
        'bootstrap.min.js',
        '%[email protected]/css/styles.css' % b:
        'react-resizable-styles.css',
        '%[email protected]/css/styles.css' % b:
        'react-grid-layout-styles.css',
        '%[email protected]/dist/react-modal.min.js' % b:
        'react-modal.min.js',
        '%[email protected]/dist/react.min.js' % b:
        'react-react.min.js',
        '%[email protected]/dist/react-dom.min.js' % b:
        'react-dom.min.js',
        '%[email protected]' % b:
        'classnames',
        '%[email protected]' % b:
        'layout_bin_packer',
        'https://raw.githubusercontent.com/STRML/react-grid-layout/0.14.0/dist/' + 'react-grid-layout.min.js':
        'react-grid-layout.min.js',
        'https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_SVG':
        'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js':
        'plotly-plotly.min.js',
        '%scss/bootstrap.min.css' % bb:
        'bootstrap.min.css',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
        'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
        'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
        'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
        'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:
        'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
         else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory:
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file:
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename):
            req = request.Request(key,
                                  headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.reason, key))
Example #28
    def setUp(self):
        self.http_mock = Mock(side_effect=[])
        opener = OpenerDirector()
        opener.add_handler(TestHandler(self.http_mock))
        install_opener(opener)
Example #29
def install(buildout=None, pwd_path=None):
    pwdsf = StringIO()
    combined_creds = []
    github_creds = None
    creds = []
    local_pwd_path = ''
    github_repos = None
    if buildout is not None:
        local_pwd_path = os.path.join(
            buildout['buildout']['directory'],
            '.httpauth')
        if 'github-repos' in buildout['buildout']:
            github_repos = buildout['buildout']['github-repos'].split('\n')
    system_pwd_path = os.path.join(
        os.path.expanduser('~'),
        '.buildout',
        '.httpauth')

    def combine_cred_file(file_path, combined_creds):
        if file_path is None or not os.path.exists(file_path):
            return
        cred_file = open(file_path)
        combined_creds += [l.strip()
                            for l in cred_file.readlines() if l.strip()]
        cred_file.close()
    # combine all the possible .httpauth files together
    combine_cred_file(pwd_path, combined_creds)
    combine_cred_file(local_pwd_path, combined_creds)
    combine_cred_file(system_pwd_path, combined_creds)
    pwdsf_len = pwdsf.write(u"\n".join(combined_creds))
    pwdsf.seek(0)
    if not pwdsf_len:
        pwdsf = None
        log.warn('Could not load authentication information')
    try:
        auth_handler = CredHandler()
        github_creds = get_github_credentials()
        new_handlers = []
        if github_creds:
            new_handlers.append(GithubHandler(github_creds, github_repos))
        if pwdsf:
            for l, row in enumerate(csv.reader(pwdsf)):
                if len(row) == 3:
                    realm, uris, user = (el.strip() for el in row)
                    password = prompt_passwd(realm, user)
                elif len(row) == 4:
                    realm, uris, user, password = (el.strip() for el in row)
                else:
                    raise RuntimeError(
                        "Authentication file cannot be parsed %s:%s" % (
                            pwd_path, l + 1))
                creds.append((realm, uris, user, password))
                log.debug('Added credentials %r, %r' % (realm, uris))
                auth_handler.add_password(realm, uris, user, password)
        if creds:
            new_handlers.append(auth_handler)
        if creds or github_creds:
            download.url_opener = URLOpener(creds, github_creds, github_repos)
        if new_handlers:
            if url_opener is not None:
                handlers = url_opener.handlers[:]
                handlers[:0] = new_handlers
            else:
                handlers = new_handlers
            urllib_request.build_opener = lambda *a: original_build_opener(*handlers)
            urllib_request.install_opener(urllib_request.build_opener())
    finally:
        if pwdsf:
            pwdsf.close()
Example #30
def _mule_fixup():
    from six.moves.urllib.request import install_opener
    install_opener(None)
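
Passing None to install_opener() clears the module-level opener, so the next urlopen() call lazily rebuilds the default OpenerDirector; this is a cheap way to undo a previously installed custom opener.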
Example #31
    def tearDown(self):
        # Uninstall custom opener
        install_opener(None)
Example #32
    def setup(self):
        # Can't import this at the top because it's circular.
        # XXX Someone smarter than me, please figure out the right
        # XXX way to do this.
        from spambayes.FileCorpus import ExpiryFileCorpus, FileMessageFactory

        username = options["globals", "proxy_username"]
        password = options["globals", "proxy_password"]
        server = options["globals", "proxy_server"]
        if server.find(":") != -1:
            server, port = server.split(':', 1)
        else:
            port = 8080
        if server:
            # Build a new opener that uses a proxy requiring authorization
            proxy_support = urllib2.ProxyHandler({"http" : \
                                                  "http://%s:%s@%s:%d" % \
                                                  (username, password,
                                                   server, port)})
            opener = urllib2.build_opener(proxy_support,
                                          urllib2.HTTPHandler)
        else:
            # Build a new opener without any proxy information.
            opener = urllib2.build_opener(urllib2.HTTPHandler)

        # Install it
        urllib2.install_opener(opener)

        # Setup the cache for retrieved urls
        age = options["URLRetriever", "x-cache_expiry_days"]*24*60*60
        dir = options["URLRetriever", "x-cache_directory"]
        if not os.path.exists(dir):
            # Create the directory.
            if options["globals", "verbose"]:
                print("Creating URL cache directory", file=sys.stderr)
            os.makedirs(dir)

        self.urlCorpus = ExpiryFileCorpus(age, FileMessageFactory(),
                                          dir, cacheSize=20)
        # Kill any old information in the cache
        self.urlCorpus.removeExpiredMessages()

        # Setup caches for unretrievable urls
        self.bad_url_cache_name = os.path.join(dir, "bad_urls.pck")
        self.http_error_cache_name = os.path.join(dir, "http_error_urls.pck")
        if os.path.exists(self.bad_url_cache_name):
            try:
                self.bad_urls = pickle_read(self.bad_url_cache_name)
            except (IOError, ValueError):
                # Something went wrong loading it (bad pickle,
                # probably).  Start afresh.
                if options["globals", "verbose"]:
                    print("Bad URL pickle, using new.", file=sys.stderr)
                self.bad_urls = {"url:non_resolving": (),
                                 "url:non_html": (),
                                 "url:unknown_error": ()}
        else:
            if options["globals", "verbose"]:
                print("URL caches don't exist: creating")
            self.bad_urls = {"url:non_resolving": (),
                        "url:non_html": (),
                        "url:unknown_error": ()}
        if os.path.exists(self.http_error_cache_name):
            try:
                self.http_error_urls = pickle_read(self.http_error_cache_name)
            except (IOError, ValueError):
                # Something went wrong loading it (bad pickle,
                # probably).  Start afresh.
                if options["globals", "verbose"]:
                    print("Bad HHTP error pickle, using new.", file=sys.stderr)
                self.http_error_urls = {}
        else:
            self.http_error_urls = {}
Example #33
def _mule_fixup():
    from six.moves.urllib.request import install_opener
    install_opener(None)
Example #34
def download_scripts(proxies=None, install_dir=None):
    import visdom
    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded:
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        # - js
        '%[email protected]/dist/jquery.min.js' % b: 'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b: 'bootstrap.min.js',
        '%[email protected]/umd/react.production.min.js' % b: 'react-react.min.js',
        '%[email protected]/umd/react-dom.production.min.js' % b: 'react-dom.min.js',  # noqa
        '%[email protected]/dist/react-modal.min.js' % b: 'react-modal.min.js',  # noqa
        'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_SVG':  # noqa
            'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js': 'plotly-plotly.min.js',
        # Stanford Javascript Crypto Library for Password Hashing
        '%[email protected]/sjcl.js' % b: 'sjcl.js',

        # - css
        '%[email protected]/css/styles.css' % b: 'react-resizable-styles.css',  # noqa
        '%[email protected]/css/styles.css' % b: 'react-grid-layout-styles.css',  # noqa
        '%scss/bootstrap.min.css' % bb: 'bootstrap.min.css',

        # - fonts
        '%[email protected]' % b: 'classnames',
        '%[email protected]' % b: 'layout_bin_packer',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
            'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
            'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
            'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
            'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:  # noqa
            'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
        else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    built_path = os.path.join(here, 'static/version.built')
    is_built = visdom.__version__ == 'no_version_file'
    if os.path.exists(built_path):
        with open(built_path, 'r') as build_file:
            build_version = build_file.read().strip()
        if build_version == visdom.__version__:
            is_built = True
        else:
            os.remove(built_path)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory:
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file:
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename) or not is_built:
            req = request.Request(key,
                                  headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.reason, key))

    if not is_built:
        with open(built_path, 'w+') as build_file:
            build_file.write(visdom.__version__)
Example #35
def unload(buildout=None):
    urllib_request.build_opener = original_build_opener
    urllib_request.install_opener(urllib_request.build_opener())