Exemplo n.º 1
0
 def test_proxy_cgi_ignore(self):
     """HTTP_PROXY must be honoured normally but ignored once
     REQUEST_METHOD marks a CGI context (httpoxy mitigation)."""
     try:
         # Outside CGI, HTTP_PROXY is picked up as the 'http' proxy.
         self.env.set('HTTP_PROXY', 'http://somewhere:3128')
         env_proxies = urllib.getproxies_environment()
         self.assertEqual('http://somewhere:3128', env_proxies['http'])
         # Simulate CGI; the HTTP_PROXY variable must now be dropped.
         self.env.set('REQUEST_METHOD', 'GET')
         env_proxies = urllib.getproxies_environment()
         self.assertNotIn('http', env_proxies)
     finally:
         self.env.unset('REQUEST_METHOD')
         self.env.unset('HTTP_PROXY')
Exemplo n.º 2
0
 def test_proxy_cgi_ignore(self):
     """Verify the httpoxy guard: getproxies_environment() stops
     trusting HTTP_PROXY as soon as REQUEST_METHOD is present."""
     try:
         # Normal (non-CGI) operation: HTTP_PROXY is used.
         self.env.set('HTTP_PROXY', 'http://somewhere:3128')
         self.assertEqual('http://somewhere:3128',
                          urllib.getproxies_environment()['http'])
         # With REQUEST_METHOD set we look like a CGI script, so the
         # client-supplied Proxy header must be ignored.
         self.env.set('REQUEST_METHOD', 'GET')
         self.assertNotIn('http', urllib.getproxies_environment())
     finally:
         self.env.unset('REQUEST_METHOD')
         self.env.unset('HTTP_PROXY')
Exemplo n.º 3
0
def testHost(url):
    '''
    Test if we can connect to the host of `url` (not to the complete URL).
    '''
    (scheme, netloc, _path, _params, _query, _fragment) = urlparse(str(url))

    if len(urllib.getproxies_environment()) > 0:
        return testUrl(scheme + "://" + netloc)

    if ":" in netloc:
        (host, port) = netloc.split(":")
    else:
        host = netloc
        if scheme == "https":
            port = "443"
        else:
            port = "80"

    logger = ObsLightPrintManager.getLogger()
    logger.info("Testing connection to '%s:%s'", host, port)
    conn = createConn(host, port, scheme)

    try:
        conn.connect()
    except BaseException, e:
        message = "Could not connect to %s: %s" % (str(host), str(e))
        logger.warning(message)
        return False
Exemplo n.º 4
0
    def __init__(self, service, operation, region_name, endpoint_url=None, session=None,
                 connect_timeout=None, request_timeout=None):
        """Build an async client bound to one botocore service operation.

        Proxy settings are read from the environment's 'https' entry
        (HTTPS_PROXY / https_proxy) when present.
        """
        # Fall back to a fresh botocore session when none was supplied.
        session = session or botocore.session.get_session()
        self.client = session.create_client(
            service, region_name=region_name, endpoint_url=endpoint_url)
        # NOTE(review): relies on a private botocore attribute.
        self.endpoint = self.client._endpoint
        self.operation = operation
        self.http_client = AsyncHTTPClient()
        self.request_timeout = request_timeout
        self.connect_timeout = connect_timeout

        self.proxy_host = None
        self.proxy_port = None
        https_proxy = getproxies_environment().get('https')
        if https_proxy:
            pieces = https_proxy.split(':')
            if len(pieces) == 2 and pieces[-1].isdigit():
                # Bare "host:port" form.
                self.proxy_host = pieces[0]
                self.proxy_port = int(pieces[1])
            else:
                # Full URL form, e.g. "http://host:port".
                parsed = urlparse(https_proxy)
                self.proxy_host = parsed.hostname
                self.proxy_port = parsed.port
Exemplo n.º 5
0
    def __init__(self, service, operation, region_name, endpoint_url=None, session=None,
                 connect_timeout=None, request_timeout=None):
        """Create an async client for one botocore service operation.

        When an https proxy is configured in the environment, the
        curl-based Tornado HTTP client is enabled (required for proxy
        support) and the proxy host/port are recorded.
        """
        # Use the caller's session, or build a default one.
        session = session or botocore.session.get_session()
        self.client = session.create_client(
            service,
            region_name=region_name,
            endpoint_url=endpoint_url,
        )
        # Newer botocore exposes .endpoint publicly; older releases only
        # have the private attribute.
        try:
            self.endpoint = self.client.endpoint
        except AttributeError:
            self.endpoint = self.client._endpoint

        self.operation = operation
        self.http_client = AsyncHTTPClient()

        self.proxy_host = None
        self.proxy_port = None
        https_proxy = getproxies_environment().get('https')
        if https_proxy:
            # Proxying needs the curl backend.
            self._enable_curl_httpclient()

            host_port = https_proxy.split(':')
            if len(host_port) == 2 and host_port[-1].isdigit():
                # "host:port" with no scheme prefix.
                self.proxy_host = host_port[0]
                self.proxy_port = int(host_port[1])
            else:
                # A full proxy URL such as "http://host:port".
                parts = urlparse(https_proxy)
                self.proxy_host = parts.hostname
                self.proxy_port = parts.port

        self.request_timeout = request_timeout
        self.connect_timeout = connect_timeout
Exemplo n.º 6
0
def importCert(url):
    '''
    Import the SSL certificate of an HTTPS server into osc configuration.

    Connects to the server named in `url` (directly, or through the
    https proxy from the environment), fetches its peer certificate and
    saves it as a PEM file under ~/.config/osc/trusted-certs.
    Does nothing for plain http URLs.
    '''
    (scheme, netloc, _path, _params, _query, _fragment) = urlparse(str(url))
    if scheme == "http":
        return
    if ":" in netloc:
        (host, port) = netloc.split(":")
        port = int(port)
    else:
        host = netloc
        port = 443

    # Accept any certificate: the whole point is to fetch an unknown one.
    ctx = SSL.Context()
    ctx.set_allow_unknown_ca(True)
    ctx.set_verify(SSL.verify_none, 1)

    proxyEnv = urllib.getproxies_environment()
    # If there is a proxy environment and the host is not in no_proxy
    # NOTE(review): proxyEnv.get('no', []) is a comma-separated *string*
    # when no_proxy is set, so `host not in ...` is a substring test, not
    # a list-membership test — confirm this is intended.
    useProxy = len(proxyEnv) > 0 and host not in proxyEnv.get('no', [])

    if useProxy:
        # Tunnel through the https proxy.
        # NOTE(review): assumes the proxy value is "host:port" at netloc
        # position; the port stays a string here — verify the API accepts it.
        valProxy = proxyEnv['https']
        netlocProxy = urlparse(valProxy)[1]
        [__PROXYHOST__, __PROXYPORT__] = netlocProxy.split(":")
        conn = M2Crypto.httpslib.ProxyHTTPSConnection(host=__PROXYHOST__,
                                                      port=__PROXYPORT__)
        conn.ssl_ctx = ctx
        conn.putrequest('HEAD', url)
        try:
            conn.connect()
        except:
            # No handling needed: propagate the connection error as-is.
            raise

    else:
        # Direct SSL connection with read/write timeouts.
        conn = SSL.Connection(ctx)
        conn.postConnectionCheck = None
        timeout = SSL.timeout(SOCKETTIMEOUT)
        conn.set_socket_read_timeout(timeout)
        conn.set_socket_write_timeout(timeout)
        try:
            conn.connect((host, port))
        except:
            raise

    # The proxy connection wraps the SSL socket one level deeper.
    if useProxy:
        cert = conn.sock.get_peer_cert()
    else:
        cert = conn.get_peer_cert()

    # if the peer did not provide a certificate chain, cert is None.
    if cert is not None:
        dirpath = expanduser('~/.config/osc/trusted-certs')
        if not exists(dirpath):
            makedirs(dirpath)
        filePath = dirpath + '/%s_%d.pem' % (host, port)
        cert.save_pem(filePath)
    conn.close()
Exemplo n.º 7
0
 def test_getproxies_environment_keep_no_proxies(self):
     """NO_PROXY must survive into the proxy mapping under the key 'no'
     and drive proxy bypass matching."""
     self.env.set('NO_PROXY', 'localhost')
     # Keys come back lower-cased with the '_proxy' suffix stripped.
     self.assertEqual('localhost',
                      urllib.getproxies_environment()['no'])
     # A comma+space separated list must match any listed domain.
     self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com')
     self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
Exemplo n.º 8
0
 def test_getproxies_environment_keep_no_proxies(self):
     """NO_PROXY entries are exposed under the 'no' key and honoured by
     proxy_bypass_environment, including comma-separated lists."""
     self.env.set('NO_PROXY', 'localhost')
     mapping = urllib.getproxies_environment()
     # getproxies_environment lower-cases keys and drops '_proxy'.
     self.assertEqual('localhost', mapping['no'])
     # Entries may be separated by ', '.
     self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com')
     self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
Exemplo n.º 9
0
 def test_getproxies_environment_keep_no_proxies(self):
     """NO_PROXY is preserved (key 'no') and feeds bypass matching."""
     self.env.set("NO_PROXY", "localhost")
     env_map = urllib.getproxies_environment()
     # The mapping key is the lower-cased variable name minus '_proxy'.
     self.assertEqual("localhost", env_map["no"])
     self.env.set("NO_PROXY", "localhost, anotherdomain.com, newdomain.com")
     # Any domain from the comma-separated list bypasses the proxy.
     self.assertTrue(urllib.proxy_bypass_environment("anotherdomain.com"))
Exemplo n.º 10
0
def getUrl(scheme, netloc, path):
    """Return the URL to put on the request line.

    Proxies require the absolute URL (scheme://netloc/path); a direct
    connection only needs the bare path.
    """
    if urllib.getproxies_environment():
        # Going through a proxy: build the absolute form, defaulting an
        # empty path to the server root.
        suffix = "/" if path == "" else path
        return scheme + "://" + netloc + suffix
    return path
Exemplo n.º 11
0
    def _request(self, url, method):
        """Fetch `url` with HTTP `method` and return a
        (status, reason, data, headers) tuple.

        At most self.max_size bytes of the body are read; GET requests
        also advertise that limit with a Range header.  Text responses
        are decoded via the declared charset, then utf-8 for text/*,
        with chardet-based guessing as a last resort.
        (Python 2 code: `except ..., e` syntax, 2.x stdlib.)
        """
        scheme, host = urlparse(url)[:2]
        scheme = scheme.lower()
        # When the environment configures a proxy for this scheme,
        # connect to the proxy's host instead of the target's.
        proxies = getproxies_environment()
        if scheme in proxies:
            scheme, host = urlparse(proxies[scheme])[:2]
            scheme = scheme.lower()

        kwargs = {}
        # Python >= 2.6 supports per-connection timeouts; older versions
        # need the process-wide default (restored in the finally block).
        if version_info[1] >= 6:
            kwargs['timeout'] = self.timeout
        else:
            socket.setdefaulttimeout(self.timeout)

        if scheme == "https":
            conn = HTTPSConnection(host, **kwargs)
        else:
            conn = HTTPConnection(host, **kwargs)

        headers = {}
        if method == 'GET':
            # Ask the server to cap the body at max_size bytes.
            headers['Range'] = 'bytes=0-%s' % self.max_size

        try:
            try:
                conn.request(method.upper(),
                             url_to_bytestring(url),
                             headers=headers)
                response = conn.getresponse()
                data = response.read(self.max_size)
                conn.close()
            except socket.error, e:
                # Normalise socket failures into the HTTP error type.
                raise HTTPException(e.message or e.args[1])
        finally:
            if version_info[1] < 6:
                socket.setdefaulttimeout(None)

        contenttype = response.getheader('Content-Type', None)
        if contenttype:
            # NOTE(review): this only matches when the header *starts*
            # with 'charset=', so 'text/html; charset=x' will not match —
            # confirm whether that is intended.
            match = re.search('^charset=([a-zA-Z0-9-]+)', contenttype)
            try:
                if match:
                    data = data.decode(match.group(1))
                elif contenttype.startswith('text/'):
                    data = data.decode('utf-8')
            except UnicodeDecodeError:
                # Fall back to charset detection on decode failure.
                guessed = detect(data)
                if guessed['confidence'] > 0.5:
                    charset = guessed['encoding']
                    # Common guessing mistake:
                    if charset.startswith('ISO-8859') and '\x92' in data:
                        charset = 'windows-1252'
                    data = unicode(data, charset, errors='replace')

        return response.status, response.reason, data, response.getheaders()
Exemplo n.º 12
0
    def _request(self, url, method):
        """Issue `method` against `url`, returning
        (status, reason, data, headers).

        Reads at most self.max_size bytes (GET also sends a matching
        Range header).  Bodies are decoded from the declared charset,
        utf-8 for text/*, or a chardet guess when decoding fails.
        (Python 2 code: `except ..., e` syntax, 2.x stdlib.)
        """
        scheme, host = urlparse(url)[:2]
        scheme = scheme.lower()
        # Route through the environment's proxy for this scheme, if any.
        proxies = getproxies_environment()
        if scheme in proxies:
            scheme, host = urlparse(proxies[scheme])[:2]
            scheme = scheme.lower()

        kwargs = {}
        # 2.6+ has per-connection timeouts; earlier versions fall back to
        # the global socket default, restored in `finally`.
        if version_info[1] >= 6:
            kwargs["timeout"] = self.timeout
        else:
            socket.setdefaulttimeout(self.timeout)

        if scheme == "https":
            conn = HTTPSConnection(host, **kwargs)
        else:
            conn = HTTPConnection(host, **kwargs)

        headers = {}
        if method == "GET":
            # Limit the transferred body to max_size bytes.
            headers["Range"] = "bytes=0-%s" % self.max_size

        try:
            try:
                conn.request(method.upper(), iri_to_uri(url), headers=headers)
                response = conn.getresponse()
                data = response.read(self.max_size)
                conn.close()
            except socket.error, e:
                # Re-raise socket problems as HTTP-layer errors.
                raise HTTPException(e.message or e.args[1])
        finally:
            if version_info[1] < 6:
                socket.setdefaulttimeout(None)

        contenttype = response.getheader("Content-Type", None)
        if contenttype:
            # NOTE(review): anchored at the start of the header value, so
            # the common 'text/html; charset=x' form never matches —
            # confirm this is the intended behaviour.
            match = re.search("^charset=([a-zA-Z0-9-]+)", contenttype)
            try:
                if match:
                    data = data.decode(match.group(1))
                elif contenttype.startswith("text/"):
                    data = data.decode("utf-8")
            except UnicodeDecodeError:
                # Last resort: guess the charset from the bytes.
                guessed = detect(data)
                if guessed["confidence"] > 0.5:
                    charset = guessed["encoding"]
                    # Common guessing mistake:
                    if charset.startswith("ISO-8859") and "\x92" in data:
                        charset = "windows-1252"
                    data = unicode(data, charset, errors="replace")

        return response.status, response.reason, data, response.getheaders()
Exemplo n.º 13
0
def createConn(host, port, scheme):
    """Return an unconnected HTTP(S) connection object for host:port.

    When the environment defines a proxy for the relevant scheme, the
    returned connection goes through that proxy instead.

    Fixes over the previous version:
      * the proxy URL from the environment is actually used (the old
        code assigned the literal list ['https'] / ['http']);
      * the proxy netloc is taken from urlparse(...)[1] (netloc) rather
        than [2] (path);
      * str.join() is called with a single tuple (the old two-argument
        call raised TypeError);
      * a leftover httplib.HTTPConnection that was created and then
        immediately overwritten in the https-proxy branch was removed.
    """
    proxies_env = urllib.getproxies_environment()
    if scheme == "https":
        if 'https' in proxies_env or 'http' in proxies_env:
            # Prefer the https proxy; fall back to the http one.
            if 'https' in proxies_env:
                valProxy = proxies_env['https']
            else:
                valProxy = proxies_env['http']
            netlocProxy = urlparse(valProxy)[1]
            [__PROXYHOST__, __PROXYPORT__] = netlocProxy.split(":")
            # Tunnel TLS through the proxy, accepting unknown CAs.
            ctx = SSL.Context()
            ctx.set_allow_unknown_ca(True)
            ctx.set_verify(SSL.verify_none, 1)
            conn = M2Crypto.httpslib.ProxyHTTPSConnection(host=__PROXYHOST__,
                                                          port=__PROXYPORT__)
            conn.ssl_ctx = ctx
            # assumes host/port are strings (as passed by testHost) — TODO confirm
            conn.putrequest('HEAD', scheme + "://" + ":".join((host, port)))
            return conn
        else:
            return httplib.HTTPSConnection(host=host,
                                           port=port,
                                           timeout=SOCKETTIMEOUT)
    else:
        if 'http' in proxies_env:
            valProxy = proxies_env['http']
            netlocProxy = urlparse(valProxy)[1]
            [__PROXYHOST__, __PROXYPORT__] = netlocProxy.split(":")
            return httplib.HTTPConnection(host=__PROXYHOST__,
                                          port=__PROXYPORT__,
                                          timeout=SOCKETTIMEOUT)
        else:
            return httplib.HTTPConnection(host=host,
                                          port=port,
                                          timeout=SOCKETTIMEOUT)
Exemplo n.º 14
0
 def test_getproxies_environment_prefer_lowercase(self):
     """Lower-case *_proxy variables always win over any other casing,
     for both proxy selection and no_proxy bypass matching."""
     # An empty lower-case no_proxy suppresses mixed-case variants.
     os.environ["no_proxy"] = ""
     os.environ["No_Proxy"] = "localhost"
     self.assertFalse(urllib.proxy_bypass_environment("localhost"))
     self.assertFalse(urllib.proxy_bypass_environment("arbitrary"))
     # Likewise an empty http_proxy hides HTTP_PROXY entirely.
     os.environ["http_proxy"] = ""
     os.environ["HTTP_PROXY"] = "http://somewhere:3128"
     self.assertEqual({}, urllib.getproxies_environment())
     # Bypass matching honours the lower-case list, including host:port
     # entries which must match exactly.
     os.environ["no_proxy"] = "localhost, noproxy.com, my.proxy:1234"
     os.environ["No_Proxy"] = "xyz.com"
     self.assertTrue(urllib.proxy_bypass_environment("localhost"))
     self.assertTrue(urllib.proxy_bypass_environment("noproxy.com:5678"))
     self.assertTrue(urllib.proxy_bypass_environment("my.proxy:1234"))
     self.assertFalse(urllib.proxy_bypass_environment("my.proxy"))
     self.assertFalse(urllib.proxy_bypass_environment("arbitrary"))
     # A non-empty lower-case value replaces the mixed-case one.
     os.environ["http_proxy"] = "http://somewhere:3128"
     os.environ["Http_Proxy"] = "http://somewhereelse:3128"
     self.assertEqual("http://somewhere:3128",
                      urllib.getproxies_environment()["http"])
Exemplo n.º 15
0
 def test_getproxies_environment_prefer_lowercase(self):
     """Lower-case *_proxy variables take precedence over other casings,
     for proxy selection and no_proxy bypass matching alike."""
     # Test lowercase preference with removal
     os.environ['no_proxy'] = ''
     os.environ['No_Proxy'] = 'localhost'
     self.assertFalse(urllib.proxy_bypass_environment('localhost'))
     self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
     os.environ['http_proxy'] = ''
     os.environ['HTTP_PROXY'] = 'http://somewhere:3128'
     proxies = urllib.getproxies_environment()
     # Empty lower-case http_proxy hides HTTP_PROXY: no proxies at all.
     self.assertEqual({}, proxies)
     # Test lowercase preference of proxy bypass and correct matching including ports
     os.environ['no_proxy'] = 'localhost, noproxy.com, my.proxy:1234'
     os.environ['No_Proxy'] = 'xyz.com'
     self.assertTrue(urllib.proxy_bypass_environment('localhost'))
     self.assertTrue(urllib.proxy_bypass_environment('noproxy.com:5678'))
     # host:port entries must match exactly (port included).
     self.assertTrue(urllib.proxy_bypass_environment('my.proxy:1234'))
     self.assertFalse(urllib.proxy_bypass_environment('my.proxy'))
     self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
     # Test lowercase preference with replacement
     os.environ['http_proxy'] = 'http://somewhere:3128'
     os.environ['Http_Proxy'] = 'http://somewhereelse:3128'
     proxies = urllib.getproxies_environment()
     self.assertEqual('http://somewhere:3128', proxies['http'])
Exemplo n.º 16
0
 def __init__(self, gitlab_url, gitlab_token):
     """Connect to the GitLab instance at `gitlab_url` using
     `gitlab_token` (API v4) and initialise bookkeeping structures."""
     self.gitlab_url = gitlab_url
     self.gitlab_token = gitlab_token
     self.gitlab = gitlab.Gitlab(gitlab_url, gitlab_token, api_version=4)
     # TODO: This should be automatic but it's not (python-gitlab bug)
     self.gitlab.session.proxies.update(urllib.getproxies_environment())
     self.dag = dag.DAG()
     # group/project path -> Gitlab project object.  The cache is only
     # filled once, so the daemon must be restarted when projects change.
     self.projects = {}
     # cache of pipeline triggers
     self.triggers = {}
     # Projects which must not be built
     self.locked_projects = set()
     self.finished_at = {}
Exemplo n.º 17
0
def testUrl(url):
    '''
    Test if we can reach `url`.
    '''
    logger = ObsLightPrintManager.getLogger()
    logger.info("Testing URL '%s'", url)
    opener = urllib2.build_opener(
        urllib2.ProxyHandler(urllib.getproxies_environment()))
    urllib2.install_opener(opener)
    try:
        _test = urllib2.urlopen(url, timeout=SOCKETTIMEOUT)
        return True
    except urllib2.URLError, e:
        message = "Could not reach %s: %s" % (str(url), str(e))
        logger.warning(message)
        return False
Exemplo n.º 18
0
    def createImage(self):
        """
        Launch the build of an image.

        Propagates any http proxy from the environment into
        /etc/mic/mic.conf (via sudo sed), then runs `sudo mic create`
        with the project's kickstart file, writing the build log into a
        timestamped build directory under the project directory.
        """
        self.failIsUserNotInUserGroup()
        # Timestamp plus the sub-second fraction keeps build dirs unique.
        timeString = time.strftime("%Y-%m-%d_%Hh%Mm") + str(time.time() % 1).split(".")[1]
        logFilePath = os.path.join(self.projectDirectory, "build_" + timeString, "buildLog")
#        cacheDirPath = os.path.join(self.projectDirectory, "cache")
        # mic reads its proxy from its config file rather than the
        # environment, so rewrite the 'proxy =' line when one is set.
        proxies = urllib.getproxies_environment()
        for scheme in proxies.keys():
            if scheme == 'http':
                cmd = "sudo sed -r -i 's,(; *)*proxy =.*,proxy = " + proxies[scheme] + ",' /etc/mic/mic.conf"
                self.__subprocess(cmd)

        cmd = "sudo mic --debug --verbose create " + self.getImageType()
        cmd += " " + self.getKickstartFile()
        cmd += " --logfile=" + logFilePath
#        cmd += " --cachedir=" + cacheDirPath
        cmd += " --outdir=" + self.projectDirectory
        cmd += " --arch=" + self.__architecture
#        cmd += " --release=build_" + timeString
        cmd += " --local-pkgs-path=" + self.localPackagesDirectory
        self.__subprocess(cmd)
Exemplo n.º 19
0
def getproxy(schema='http'):
    """Return the proxy URL configured in the environment for `schema`
    (e.g. 'http' or 'https'), or None when no such proxy is set."""
    return urllib.getproxies_environment().get(schema)
Exemplo n.º 20
0
 def test_getproxies_environment_keep_no_proxies(self):
     """A NO_PROXY variable must appear in the returned mapping under
     the lower-cased, suffix-stripped key 'no'."""
     self.env.set("NO_PROXY", "localhost")
     # Keys come back lower-cased with '_proxy' removed.
     self.assertEqual("localhost", urllib.getproxies_environment()["no"])
Exemplo n.º 21
0
# Module-level configuration defaults for webcheck; several values are
# overwritten from command-line options at startup.

# Current version of webcheck.
VERSION = '1.9.8'

# The homepage of webcheck.
HOMEPAGE = 'http://ch.tudelft.nl/~arthur/webcheck/'

# Whether to consider any URL not starting with the base URL to be external.
# This is the state of the -b command line option.
BASE_URLS_ONLY = False

# Avoid checking external links at all. This is the state of the -a command
# line option.
AVOID_EXTERNAL_LINKS = False

# The proxy configuration, read once from the *_proxy environment
# variables at import time.
PROXIES = urllib.getproxies_environment()

# IO timeout as passed to socket.setdefaulttimeout().
# The value is a float in seconds; None disables the timeout.
IOTIMEOUT = 10.0

# Output directory. This is the state of the -o command line option.
OUTPUT_DIR = '.'

# Whether to try to read a state file to continue from.
CONTINUE = False

# This is the time in seconds to wait between requests. This is the state of
# the -w command line option.
WAIT_BETWEEN_REQUESTS = 0
Exemplo n.º 22
0
# Legacy Python 2 modules (httplib became http.client in Python 3).
import httplib, mimetypes, urllib

# Proxy map captured once at import time; later changes to the
# environment are not picked up.
proxies = urllib.getproxies_environment()

def post_multipart(host, selector, fields):
	"""Post `fields` to `host` as multipart/form-data and return only the
	body of the server's response.

	`fields` is a sequence of (name, filename, value) tuples; a None
	filename marks an ordinary form field, anything else is uploaded as
	a file.
	"""
	_code, _msg, _headers, body = post_multipart_formdata(host, selector, fields)
	return body

def post_multipart_formdata(host, selector, fields):
	"""Encode `fields` as multipart/form-data and POST them to
	`selector` on `host`.

	Returns (status_code, reason, headers, body).  Python 2 code: uses
	the legacy httplib.HTTP compatibility class.
	"""
	content_type, body = encode_multipart_formdata(fields)
	# Send through the environment's http proxy when one is configured.
	# NOTE(review): the proxy value is a full URL ("http://host:port");
	# passing it straight to httplib.HTTP looks wrong — confirm.
	try:
		realhost = proxies["http"]
	except KeyError:
		realhost = host
	h = httplib.HTTP(realhost)
	h.putrequest('POST', selector)
	h.putheader('content-type', content_type)
	h.putheader('content-length', str(len(body)))
	# The Host header names the real target even when proxied.
	h.putheader('host', host)
	h.endheaders()
	h.send(body)
	retcode, retmsg, headers = h.getreply()
	return retcode, retmsg, headers, h.file.read()

def encode_multipart_formdata(fields):
# Legacy Python 2 modules (httplib became http.client in Python 3).
import httplib, mimetypes, urllib

# Proxy map captured once at import time; later environment changes are
# not picked up.
proxies = urllib.getproxies_environment()


def post_multipart(host, selector, fields):
    """Convenience wrapper around post_multipart_formdata() returning
    just the response body.

    Each element of `fields` is (name, filename, value); a None
    filename means a plain form field, otherwise a file upload.
    """
    response = post_multipart_formdata(host, selector, fields)
    return response[3]


def post_multipart_formdata(host, selector, fields):
    """POST `fields`, encoded as multipart/form-data, to `selector` on
    `host` and return (status_code, reason, headers, body).

    Python 2 code: relies on the legacy httplib.HTTP class.
    """
    content_type, body = encode_multipart_formdata(fields)
    # Route through the environment's http proxy when one exists.
    # NOTE(review): the proxy value is a full URL ("http://host:port");
    # handing it to httplib.HTTP unmodified looks wrong — confirm.
    try:
        realhost = proxies["http"]
    except KeyError:
        realhost = host
    h = httplib.HTTP(realhost)
    h.putrequest('POST', selector)
    h.putheader('content-type', content_type)
    h.putheader('content-length', str(len(body)))
    # Host header always names the real target, even when proxied.
    h.putheader('host', host)
    h.endheaders()
    h.send(body)
    retcode, retmsg, headers = h.getreply()
    return retcode, retmsg, headers, h.file.read()
Exemplo n.º 24
0
 def test_getproxies_environment_keep_no_proxies(self):
     """NO_PROXY must be kept in the proxy mapping under the key 'no'.

     FIX: use assertEqual — assertEquals is a deprecated alias that was
     removed in Python 3.12.
     """
     self.env.set('NO_PROXY', 'localhost')
     proxies = urllib.getproxies_environment()
     # getproxies_environment use lowered case truncated (no '_proxy') keys
     self.assertEqual('localhost', proxies['no'])
Exemplo n.º 25
0
 def test_getproxies_environment_keep_no_proxies(self):
     """The 'no' entry (from NO_PROXY) survives into the proxy mapping."""
     self.env.set('NO_PROXY', 'localhost')
     result = urllib.getproxies_environment()
     # The key is lower-cased and stripped of its '_proxy' suffix.
     self.assertEqual('localhost', result['no'])
Exemplo n.º 26
0
# Module-level configuration defaults for webcheck; several values are
# overwritten from command-line options at startup.

# Current version of webcheck.
VERSION = "1.9.6"

# The homepage of webcheck.
HOMEPAGE = "http://ch.tudelft.nl/~arthur/webcheck/"

# Whether to consider any URL not starting with the base URL to be external.
# This is the state of the -b command line option.
BASE_URLS_ONLY = False

# Avoid checking external links at all. This is the state of the -a command
# line option.
AVOID_EXTERNAL_LINKS = False

# The proxy configuration, read once from the *_proxy environment
# variables at import time.
PROXIES = urllib.getproxies_environment()

# Output directory. This is the state of the -o command line option.
OUTPUT_DIR = "."

# This is the time in seconds to wait between requests. This is the state of
# the -w command line option.
WAIT_BETWEEN_REQUESTS = 0

# Redirect depth, the number of redirects to follow. This is the state of the
# -r command line option.
REDIRECT_DEPTH = 5

# The list of plugins that will be used to generate the report.
PLUGINS = ["sitemap",
           "urllist",
def proxied(value):
    """Return True when `value`'s host would be fetched through an
    environment-configured proxy, i.e. some proxy is set and the host
    is not on the bypass (no_proxy) list."""
    host = urlparse(value).netloc
    if not getproxies_environment():
        return False
    return not proxy_bypass(host)