def test_generic_urlparse():
    url = 'https://ansible.com/blog'
    parts = urlparse(url)
    generic_parts = generic_urlparse(parts)
    assert generic_parts.as_list() == list(parts)

    assert urlunparse(generic_parts.as_list()) == url
def update_url_query(url, **kwargs):
    parse_result = urlparse(url)
    query = parse_qs(parse_result.query)
    query.update({k: v for k, v in kwargs.items() if v is not None})
    query_string = urlencode(query, doseq=True)
    parse_result = parse_result._replace(query=query_string)
    return urlunparse(parse_result)
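A brief usage sketch (the URL and parameter names are invented for illustration): update_url_query parses the URL, merges the keyword arguments into the existing query while dropping None values, re-encodes with doseq=True so list values become repeated keys, and reassembles the URL with urlunparse.

# Hypothetical usage, assuming update_url_query as defined above:
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

print(update_url_query('https://example.com/api?page=1', page=2, tags=['a', 'b']))
# -> https://example.com/api?page=2&tags=a&tags=b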
Example #4
def check(module, name, state, service_id, integration_key, api_key, incident_key=None, http_call=fetch_url):
    url = 'https://api.pagerduty.com/incidents'
    headers = {
        "Content-type": "application/json",
        "Authorization": "Token token=%s" % api_key,
        'Accept': 'application/vnd.pagerduty+json;version=2'
    }

    params = {
        'service_ids[]': service_id,
        'sort_by': 'incident_number:desc',
        'time_zone': 'UTC'
    }
    if incident_key:
        params['incident_key'] = incident_key

    url_parts = list(urlparse(url))
    url_parts[4] = urlencode(params, True)

    url = urlunparse(url_parts)

    response, info = http_call(module, url, method='get', headers=headers)

    if info['status'] != 200:
        module.fail_json(msg="failed to check current incident status."
                             "Reason: %s" % info['msg'])
    json_out = json.loads(response.read())["incidents"][0]

    if state != json_out["status"]:
        return json_out, True
    return json_out, False
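The query string here is attached by converting the parse result to a list and overwriting index 4, the query component of the 6-tuple. A standalone sketch of just that step, with a made-up service ID:

from urllib.parse import urlparse, urlencode, urlunparse

parts = list(urlparse('https://api.pagerduty.com/incidents'))
parts[4] = urlencode({'service_ids[]': 'PABC123', 'time_zone': 'UTC'}, True)
print(urlunparse(parts))
# -> https://api.pagerduty.com/incidents?service_ids%5B%5D=PABC123&time_zone=UTC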
Example #5
def do_notify_bearychat(module, url, payload):
    response, info = fetch_url(module, url, data=payload)
    if info['status'] != 200:
        url_info = urlparse(url)
        obscured_incoming_webhook = urlunparse(
            (url_info.scheme, url_info.netloc, '[obscured]', '', '', ''))
        module.fail_json(msg=" failed to send %s to %s: %s" %
                         (payload, obscured_incoming_webhook, info['msg']))
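A minimal sketch of the obscuring step above, with an invented webhook URL: only the scheme and netloc survive, so the secret token carried in the path never reaches the failure message.

from urllib.parse import urlparse, urlunparse

url_info = urlparse('https://hook.bearychat.com/incoming/secret-token')
print(urlunparse((url_info.scheme, url_info.netloc, '[obscured]', '', '', '')))
# -> https://hook.bearychat.com/[obscured]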
def test_generic_urlparse_netloc():
    url = 'https://ansible.com:443/blog'
    parts = urlparse(url)
    generic_parts = generic_urlparse(parts)
    assert generic_parts.hostname == parts.hostname
    assert generic_parts.hostname == 'ansible.com'
    assert generic_parts.port == 443
    assert urlunparse(generic_parts.as_list()) == url
def test_generic_urlparse_no_netloc():
    url = 'https://user:passwd@ansible.com:443/blog'
    parts = list(urlparse(url))
    generic_parts = generic_urlparse(parts)
    assert generic_parts.hostname == 'ansible.com'
    assert generic_parts.port == 443
    assert generic_parts.username == 'user'
    assert generic_parts.password == 'passwd'
    assert urlunparse(generic_parts.as_list()) == url
Example #11
def update_qsl(url, params):
    ''' Add or update a URL query string '''

    if HAS_URLPARSE:
        url_parts = list(urlparse(url))
        query = dict(parse_qsl(url_parts[4]))
        query.update(params)
        url_parts[4] = urlencode(query)
        return urlunparse(url_parts)
    elif '?' in url:
        return url + '&' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
    else:
        return url + '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
Example #13
def install_package(module, pkg, pkg_file, upgrade):
    check_build_environment(module)
    git_source = module.params['git_source']
    gpg_key = module.params['gpg_key']

    if not upgrade and query_package(module, pkg):
        module.exit_json(changed=False, msg="package already installed", package=pkg)

    if gpg_key:
        if should_add_gpg_key(module, gpg_key):
            add_gpg_key(module, gpg_key)

    if git_source:
        pkg_path = make_package(module, pkg, None)
    elif not pkg_file:
        # this is an aur pkg
        rpc_params = urllib.urlencode({"type": "info", "arg": pkg})
        rpc_req = urlunparse((AUR_SCHEME, AUR_NETLOC, AUR_RPC_PATH, "", rpc_params, ""))
        rsp, info = fetch_url(module, rpc_req)

        # fail if the AUR RPC request did not succeed
        if info['status'] != 200:
            module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=rpc_req)

        pkg_info = json.load(rsp)

        if pkg_info['resultcount'] < 1:
            module.fail_json(msg="AUR query found no matching packages", package=pkg)

        # check pkg cache before install
        pkg_ver = pkg_info['results']['Version']
        pkg_path = query_package_dir(module, pkg, pkg_ver)
        if not pkg_path:
            pkg_path = make_package(module, pkg, None)

    else:
        pkg_path = make_package(module, pkg, pkg_file)

    if module.params['as_deps']:
        params = '-U --asdeps %s' % pkg_path
    else:
        params = '-U %s' % pkg_path

    cmd = "pacman %s --noconfirm" % (params)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)

    if rc != 0:
        module.fail_json(msg="failed to install package", pkg=pkg, stderr=stderr)

    module.exit_json(changed=True, msg="installed package", pkg=pkg)
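A standalone sketch of the AUR RPC URL built above; the scheme, netloc, and path constants are module-level values not shown in this excerpt, so the ones below are assumptions:

from urllib.parse import urlencode, urlunparse

AUR_SCHEME = 'https'              # assumed
AUR_NETLOC = 'aur.archlinux.org'  # assumed
AUR_RPC_PATH = '/rpc.php'         # assumed

rpc_params = urlencode({"type": "info", "arg": "somepkg"})
print(urlunparse((AUR_SCHEME, AUR_NETLOC, AUR_RPC_PATH, "", rpc_params, "")))
# -> https://aur.archlinux.org/rpc.php?type=info&arg=somepkg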
Example #15
def prepare_aur_dir(module, build_dir, pkg):
    pkg_file_basename = "%s.tar.gz" % pkg
    pkg_file_dest = os.path.join(build_dir, pkg_file_basename)
    aur_req = urlunparse((AUR_SCHEME, AUR_NETLOC, "%s/%s" % (AUR_SNAPSHOT_PATH, pkg_file_basename), "", "", ""))
    rsp, info = fetch_url(module, aur_req)

    if info['status'] != 200:
        module.fail_json(msg="Request failed", url=aur_req, status_code=info['status'], response=info['msg'])
    # save response to archive
    f = open(pkg_file_dest, 'wb')
    try:
        shutil.copyfileobj(rsp, f)
    except Exception as err:
        os.remove(pkg_file_dest)
        module.fail_json(msg="failed to create package archive file: %s" % str(err))
    f.close()
    rsp.close()
    extract_pkg(module, pkg, pkg_file_dest, build_dir)
def format_url(use_ssl, hostname, port, path=''):
    """ Format url based on ssl flag, hostname, port and path

        Args:
            use_ssl (bool): is ssl enabled
            hostname (str): hostname
            port (str): port
            path (str): url path
        Returns:
            str: The generated url string
    """
    scheme = 'https' if use_ssl else 'http'
    netloc = hostname
    if (use_ssl and port != '443') or (not use_ssl and port != '80'):
        netloc += ":%s" % port
    try:
        url = urlparse.urlunparse((scheme, netloc, path, '', '', ''))
    except AttributeError:
        # pylint: disable=undefined-variable
        url = urlunparse((scheme, netloc, path, '', '', ''))
    return url
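A brief usage sketch of format_url: the port is appended to the netloc only when it is not the default for the chosen scheme.

from urllib.parse import urlparse, urlunparse  # assumed imports

print(format_url(True, 'example.com', '443', '/api'))    # -> https://example.com/api
print(format_url(True, 'example.com', '8443', '/api'))   # -> https://example.com:8443/api
print(format_url(False, 'example.com', '80'))             # -> http://example.com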
Example #17
def open_url(url,
             data=None,
             headers=None,
             method=None,
             use_proxy=True,
             force=False,
             last_mod_time=None,
             timeout=10,
             validate_certs=True,
             url_username=None,
             url_password=None,
             http_agent=None,
             force_basic_auth=False,
             follow_redirects='urllib2',
             client_cert=None,
             client_key=None,
             cookies=None):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    # FIXME: change the following to use the generic_urlparse function
    #        to remove the indexed references for 'parsed'
    parsed = urlparse(url)
    if parsed[0] != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed[1]
        elif '@' in parsed[1]:
            credentials, netloc = parsed[1].split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed = list(parsed)
            parsed[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for urls
            # for which the supplied url is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)
            digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)
            handlers.append(digest_authhandler)

        elif username and force_basic_auth:
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed[1])
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(
                        username, password)

    if not use_proxy:
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key,
                                   context=context))
    elif client_cert:
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    # add some nicer cookie handling
    if cookies is not None:
        handlers.append(urllib_request.HTTPCookieProcessor(cookies))

    opener = urllib_request.build_opener(*handlers)
    urllib_request.install_opener(opener)

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' %
                                  method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    if http_agent:
        request.add_header('User-agent', http_agent)

    # Cache control
    # Either we directly force a cache refresh
    if force:
        request.add_header('cache-control', 'no-cache')
    # or we do it if the original is more recent than our copy
    elif last_mod_time:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r
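A standalone sketch of the credential-stripping step above (the URL is invented): the userinfo portion is split off the netloc, the parsed tuple is turned into a list so index 1 can be replaced, and urlunparse rebuilds the URL with the credentials removed before they are handed to the auth handlers.

from urllib.parse import urlparse, urlunparse

parsed = list(urlparse('https://user:secret@api.example.com/v1/items'))
credentials, netloc = parsed[1].split('@', 1)
username, password = credentials.split(':', 1)
parsed[1] = netloc
print(urlunparse(parsed))  # -> https://api.example.com/v1/items
print(username, password)  # -> user secret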
Example #18
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
             force=False, last_mod_time=None, timeout=10, validate_certs=True,
             url_username=None, url_password=None, http_agent=None,
             force_basic_auth=False, follow_redirects='urllib2'):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    # FIXME: change the following to use the generic_urlparse function
    #        to remove the indexed references for 'parsed'
    parsed = urlparse(url)
    if parsed[0] != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed[1]
        elif '@' in parsed[1]:
            credentials, netloc = parsed[1].split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed = list(parsed)
            parsed[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for urls
            # for which the supplied url is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)

        elif username and force_basic_auth:
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed[1])
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(username, password)

    if not use_proxy:
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(urllib_request.HTTPSHandler(context=context))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    opener = urllib_request.build_opener(*handlers)
    urllib_request.install_opener(opener)

    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' % method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    request.add_header('User-agent', http_agent)

    # if we're ok with getting a 304, set the timestamp in the
    # header, otherwise make sure we don't get a cached copy
    if last_mod_time and not force:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)
    else:
        request.add_header('cache-control', 'no-cache')

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r