Example #1
def assert_xml_equals(expected, actual):
    """
    Assert that two ElementTree nodes are equal
    """
    assert xmlutil.to_dict(xmlutil.strip_spaces(expected),
                           True) == xmlutil.to_dict(
                               xmlutil.strip_spaces(actual), True)
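A minimal usage sketch for the helper above (it assumes salt.utils.xmlutil is importable as the xmlutil referenced in its body; the XML strings are illustrative):

import xml.etree.ElementTree as ET
import salt.utils.xmlutil as xmlutil  # assumed import path for the xmlutil used above

expected = ET.fromstring("<node><child>value</child></node>")
actual = ET.fromstring("<node>\n  <child>value</child>\n</node>")

# Whitespace-only differences are stripped before the dict comparison, so this should pass.
assert_xml_equals(expected, actual)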
Example #2
def mksls(src, dst=None):
    '''
    Convert an AutoYAST file to an SLS file
    '''
    with salt.utils.fopen(src, 'r') as fh_:
        ps_opts = xml.to_dict(ET.fromstring(fh_.read()))

    if dst is not None:
        with salt.utils.fopen(dst, 'w') as fh_:
            fh_.write(yaml.safe_dump(ps_opts, default_flow_style=False))
    else:
        return yaml.safe_dump(ps_opts, default_flow_style=False)
Example #3
def mksls(src, dst=None):
    '''
    Convert an AutoYAST file to an SLS file
    '''
    with salt.utils.files.fopen(src, 'r') as fh_:
        ps_opts = xml.to_dict(ET.fromstring(fh_.read()))

    if dst is not None:
        with salt.utils.files.fopen(dst, 'w') as fh_:
            fh_.write(yaml.safe_dump(ps_opts, default_flow_style=False))
    else:
        return yaml.safe_dump(ps_opts, default_flow_style=False)
Example #4
def mksls(src, dst=None):
    """
    Convert an AutoYAST file to an SLS file
    """
    with salt.utils.files.fopen(src, "r") as fh_:
        ps_opts = xml.to_dict(ET.fromstring(fh_.read()))

    if dst is not None:
        with salt.utils.files.fopen(dst, "w") as fh_:
            salt.utils.yaml.safe_dump(ps_opts, fh_, default_flow_style=False)
    else:
        return salt.utils.yaml.safe_dump(ps_opts, default_flow_style=False)
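Examples #2, #3, and #4 are the same converter across Salt releases; they differ only in the file-handling path (salt.utils.fopen vs salt.utils.files.fopen) and in whether yaml or salt.utils.yaml performs the dump. A hypothetical invocation, with illustrative paths:

# Write the converted AutoYaST profile out as an SLS file.
mksls("/root/autoinst.xml", "/srv/salt/autoyast.sls")

# Or get the rendered YAML back as a string.
sls_text = mksls("/root/autoinst.xml")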
Example #5
def query(key, keyid, method='GET', params=None, headers=None,
          requesturl=None, return_url=False, bucket=None, service_url=None,
          path='', return_bin=False, action=None, local_file=None,
          verify_ssl=True, location=None, full_headers=False):
    '''
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
    metadata service will be made.

    A service_url may also be specified in the configuration:

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    SSL verification may also be turned off in the configuration:

        s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.

    A region may be specified:

        s3.location: eu-central-1

    If region is not specified, an attempt to fetch the region from EC2 IAM
    metadata service will be made. Failing that, default is us-east-1
    '''
    if not HAS_REQUESTS:
        log.error('There was an error: requests is required for s3 access')

    if not headers:
        headers = {}

    if not params:
        params = {}

    if not service_url:
        service_url = 's3.amazonaws.com'

    if bucket:
        endpoint = '{0}.{1}'.format(bucket, service_url)
    else:
        endpoint = service_url

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key or not keyid:
        key = salt.utils.aws.IROLE_CODE
        keyid = salt.utils.aws.IROLE_CODE

    if not location:
        location = iam.get_iam_region()
    if not location:
        location = DEFAULT_LOCATION

    data = ''
    if method == 'PUT':
        if local_file:
            with salt.utils.fopen(local_file, 'r') as ifile:
                data = ifile.read()

    if not requesturl:
        requesturl = 'https://{0}/{1}'.format(endpoint, path)
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri='/{0}'.format(path),
            prov_dict={'id': keyid, 'key': key},
            location=location,
            product='s3',
            requesturl=requesturl,
        )

    log.debug('S3 Request: {0}'.format(requesturl))
    log.debug('S3 Headers::')
    log.debug('    Authorization: {0}'.format(headers['Authorization']))

    if not data:
        data = None

    try:
        result = requests.request(method, requesturl, headers=headers,
                                  data=data,
                                  verify=verify_ssl)
        response = result.content
    except requests.exceptions.HTTPError as exc:
        log.error('There was an error::')
        if hasattr(exc, 'code') and hasattr(exc, 'msg'):
            log.error('    Code: {0}: {1}'.format(exc.code, exc.msg))
        log.error('    Content: \n{0}'.format(exc.read()))
        return False

    log.debug('S3 Response Status Code: {0}'.format(result.status_code))

    if method == 'PUT':
        if result.status_code == 200:
            if local_file:
                log.debug('Uploaded from {0} to {1}'.format(local_file, path))
            else:
                log.debug('Created bucket {0}'.format(bucket))
        else:
            if local_file:
                log.debug('Failed to upload from {0} to {1}: {2}'.format(
                                                    local_file,
                                                    path,
                                                    result.status_code,
                                                    ))
            else:
                log.debug('Failed to create bucket {0}'.format(bucket))
        return

    if method == 'DELETE':
        if str(result.status_code).startswith('2'):
            if path:
                log.debug('Deleted {0} from bucket {1}'.format(path, bucket))
            else:
                log.debug('Deleted bucket {0}'.format(bucket))
        else:
            if path:
                log.debug('Failed to delete {0} from bucket {1}: {2}'.format(
                                                    path,
                                                    bucket,
                                                    result.status_code,
                                                    ))
            else:
                log.debug('Failed to delete bucket {0}'.format(bucket))
        return

    # This can be used to save a binary object to disk
    if local_file and method == 'GET':
        log.debug('Saving to local file: {0}'.format(local_file))
        with salt.utils.fopen(local_file, 'w') as out:
            out.write(response)
        return 'Saved to local file: {0}'.format(local_file)

    # This can be used to return a binary object wholesale
    if return_bin:
        return response

    if response:
        items = ET.fromstring(response)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {'headers': []}
        if full_headers:
            ret['headers'] = dict(result.headers)
        else:
            for header in result.headers:
                ret['headers'].append(header.strip())

    return ret
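For a plain GET, the function above parses the XML body and returns a list with one dict per child element (via xml.to_dict). A hedged call sketch with placeholder credentials and bucket name:

# key/keyid are the S3 secret key and key id; values here are placeholders.
ret = query(key='askdjgh...', keyid='GKTADJGHEIQSXMKKRBJ08H', bucket='my-bucket')
# ret is a list of dicts, e.g. [{'Name': 'my-bucket'}, {'Contents': {...}}, ...]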
Example #6
def query(
    key,
    keyid,
    method="GET",
    params=None,
    headers=None,
    requesturl=None,
    return_url=False,
    bucket=None,
    service_url=None,
    path=None,
    return_bin=False,
    action=None,
    local_file=None,
):
    """
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    A service_url may also be specified in the configuration::

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.
    """
    if not headers:
        headers = {}

    if not params:
        params = {}

    if path is None:
        path = ""

    if not service_url:
        service_url = "s3.amazonaws.com"

    if bucket:
        endpoint = "{0}.{1}".format(bucket, service_url)
    else:
        endpoint = service_url

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    token = None
    if not key or not keyid:
        iam_creds = get_iam_metadata()
        key = iam_creds["secret_key"]
        keyid = iam_creds["access_key"]
        token = iam_creds["security_token"]

    if not requesturl:
        x_amz_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
        content_type = "text/plain"
        if method == "GET":
            if bucket:
                can_resource = "/{0}/{1}".format(bucket, path)
            else:
                can_resource = "/"
        elif method == "PUT" or method == "HEAD" or method == "DELETE":
            if path:
                can_resource = "/{0}/{1}".format(bucket, path)
            else:
                can_resource = "/{0}/".format(bucket)

        if action:
            can_resource += "?{0}".format(action)

        log.debug("CanonicalizedResource: {0}".format(can_resource))

        headers["Host"] = endpoint
        headers["Content-type"] = content_type
        headers["Date"] = x_amz_date
        if token:
            headers["x-amz-security-token"] = token

        string_to_sign = "{0}\n".format(method)

        new_headers = []
        for header in sorted(headers.keys()):
            if header.lower().startswith("x-amz"):
                log.debug(header.lower())
                new_headers.append("{0}:{1}".format(header.lower(), headers[header]))
        can_headers = "\n".join(new_headers)
        log.debug("CanonicalizedAmzHeaders: {0}".format(can_headers))

        string_to_sign += "\n{0}".format(content_type)
        string_to_sign += "\n{0}".format(x_amz_date)
        if can_headers:
            string_to_sign += "\n{0}".format(can_headers)
        string_to_sign += "\n{0}".format(can_resource)
        log.debug("String To Sign:: \n{0}".format(string_to_sign))

        hashed = hmac.new(key, string_to_sign, hashlib.sha1)
        sig = binascii.b2a_base64(hashed.digest())
        headers["Authorization"] = "AWS {0}:{1}".format(keyid, sig.strip())

        querystring = urllib.urlencode(params)
        if action:
            if querystring:
                querystring = "{0}&{1}".format(action, querystring)
            else:
                querystring = action
        requesturl = "https://{0}/".format(endpoint)
        if path:
            requesturl += path
        if querystring:
            requesturl += "?{0}".format(querystring)

    req = urllib2.Request(url=requesturl)
    if method == "PUT":
        if local_file:
            with salt.utils.fopen(local_file, "r") as ifile:
                data = ifile.read()
            req = urllib2.Request(url=requesturl, data=data)
        req.get_method = lambda: "PUT"
    elif method == "HEAD":
        req.get_method = lambda: "HEAD"
    elif method == "DELETE":
        req.get_method = lambda: "DELETE"

    log.debug("S3 Request: {0}".format(requesturl))
    log.debug("S3 Headers::")
    for header in sorted(headers.keys()):
        if header == "Authorization":
            continue
        req.add_header(header, headers[header])
        log.debug("    {0}: {1}".format(header, headers[header]))
    log.debug("    Authorization: {0}".format(headers["Authorization"]))
    req.add_header("Authorization", headers["Authorization"])

    try:
        result = urllib2.urlopen(req)
        response = result.read()
    except Exception as exc:
        log.error("There was an error::")
        if hasattr(exc, "code") and hasattr(exc, "msg"):
            log.error("    Code: {0}: {1}".format(exc.code, exc.msg))
        log.error("    Content: \n{0}".format(exc.read()))
        return False

    log.debug("S3 Response Status Code: {0}".format(result.getcode()))
    result.close()

    if method == "PUT":
        if result.getcode() == 200:
            if local_file:
                log.debug("Uploaded from {0} to {1}".format(local_file, path))
            else:
                log.debug("Created bucket {0}".format(bucket))
        else:
            if local_file:
                log.debug("Failed to upload from {0} to {1}: {2}".format(local_file, path, result.getcode()))
            else:
                log.debug("Failed to create bucket {0}".format(bucket))
        return

    if method == "DELETE":
        if str(result.getcode()).startswith("2"):
            if path:
                log.debug("Deleted {0} from bucket {1}".format(path, bucket))
            else:
                log.debug("Deleted bucket {0}".format(bucket))
        else:
            if path:
                log.debug("Failed to delete {0} from bucket {1}: {2}".format(path, bucket, result.getcode()))
            else:
                log.debug("Failed to delete bucket {0}".format(bucket))
        return

    # This can be used to save a binary object to disk
    if local_file and method == "GET":
        log.debug("Saving to local file: {0}".format(local_file))
        with salt.utils.fopen(local_file, "w") as out:
            out.write(response)
        return "Saved to local file: {0}".format(local_file)

    # This can be used to return a binary object wholesale
    if return_bin:
        return response

    if response:
        items = ET.fromstring(response)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        ret = {"headers": []}
        for header in result.headers.headers:
            ret["headers"].append(header.strip())

    return ret
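Example #6 builds the legacy AWS signature inline (HMAC-SHA1 over a canonical string) instead of delegating to salt.utils.aws.sig4 as Example #5 does. A standalone sketch of that signing step; note that on Python 3, hmac.new requires bytes, which the Python 2 era code above does not encode (the canonical string and credentials below are illustrative):

import binascii
import hashlib
import hmac

# method, content-md5 (empty), content-type, date, canonicalized resource
string_to_sign = "GET\n\ntext/plain\nWed, 01 Jan 2020 00:00:00 GMT\n/my-bucket/"
secret_key = b"askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs"

hashed = hmac.new(secret_key, string_to_sign.encode("utf-8"), hashlib.sha1)
sig = binascii.b2a_base64(hashed.digest()).strip()
authorization = "AWS {}:{}".format("GKTADJGHEIQSXMKKRBJ08H", sig.decode("ascii"))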
Example #7
def query(url,
          method="GET",
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type="auto",
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=None,
          cookie_format="lwp",
          persist_session=False,
          session_cookie_jar=None,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node="minion",
          port=80,
          opts=None,
          backend=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          streaming_callback=None,
          header_callback=None,
          handle=False,
          agent=USERAGENT,
          hide_fields=None,
          raise_error=True,
          formdata=False,
          formdata_fieldname=None,
          formdata_filename=None,
          decode_body=True,
          **kwargs):
    """
    Query a resource, and decode the return data
    """
    ret = {}

    if opts is None:
        if node == "master":
            opts = salt.config.master_config(
                os.path.join(salt.syspaths.CONFIG_DIR, "master"))
        elif node == "minion":
            opts = salt.config.minion_config(
                os.path.join(salt.syspaths.CONFIG_DIR, "minion"))
        else:
            opts = {}

    if not backend:
        backend = opts.get("backend", "tornado")

    match = re.match(
        r"https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)",
        url,
    )
    if not match:
        salt.utils.network.refresh_dns()

    if backend == "requests":
        if HAS_REQUESTS is False:
            ret["error"] = ("http.query has been set to use requests, but the "
                            "requests library does not seem to be installed")
            log.error(ret["error"])
            return ret
        else:
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.WARNING)

    # Some libraries don't support separation of url and GET parameters
    # Don't need a try/except block, since Salt depends on tornado
    url_full = salt.ext.tornado.httputil.url_concat(url,
                                                    params) if params else url

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get("verify_ssl", True)

    if cert is None:
        cert = opts.get("cert", None)

    if data_file is not None:
        data = _render(data_file, data_render, data_renderer, template_dict,
                       opts)

    # Make sure no secret fields show up in logs
    log_url = sanitize_url(url_full, hide_fields)

    log.debug("Requesting URL %s using %s method", log_url, method)
    log.debug("Using backend: %s", backend)

    if method == "POST" and log.isEnabledFor(logging.TRACE):
        # Make sure no secret fields show up in logs
        if isinstance(data, dict):
            log_data = data.copy()
            if isinstance(hide_fields, list):
                for item in data:
                    for field in hide_fields:
                        if item == field:
                            log_data[item] = "XXXXXXXXXX"
            log.trace("Request POST Data: %s", pprint.pformat(log_data))
        else:
            log.trace("Request POST Data: %s", pprint.pformat(data))

    if header_file is not None:
        header_tpl = _render(header_file, header_render, header_renderer,
                             template_dict, opts)
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if cookie_jar is None:
        cookie_jar = os.path.join(
            opts.get("cachedir", salt.syspaths.CACHE_DIR), "cookies.txt")
    if session_cookie_jar is None:
        session_cookie_jar = os.path.join(
            opts.get("cachedir", salt.syspaths.CACHE_DIR), "cookies.session.p")

    if persist_session is True and salt.utils.msgpack.HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.files.fopen(session_cookie_jar, "rb") as fh_:
                session_cookies = salt.utils.msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.files.fopen(session_cookie_jar, "wb") as fh_:
                salt.utils.msgpack.dump("", fh_)

    for header in header_list:
        comps = header.split(":")
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if not auth:
        if username and password:
            auth = (username, password)

    if agent == USERAGENT:
        agent = "{} http.query()".format(agent)
    header_dict["User-agent"] = agent

    if backend == "requests":
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace("Request Headers: %s", sess.headers)
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    elif backend == "urllib2":
        sess_cookies = None
    else:
        # Tornado
        sess_cookies = None

    if cookies is not None:
        if cookie_format == "mozilla":
            sess_cookies = http.cookiejar.MozillaCookieJar(cookie_jar)
        else:
            sess_cookies = http.cookiejar.LWPCookieJar(cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        sess_cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret["test"] = True

    if backend == "requests":
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == "0":
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs["prefetch"] = False
            else:
                req_kwargs["stream"] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, str):
                if os.path.exists(cert):
                    req_kwargs["cert"] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs["cert"] = cert
            else:
                log.error(
                    "The client-side certificate path that"
                    " was passed is not valid: %s",
                    cert,
                )

        if formdata:
            if not formdata_fieldname:
                ret["error"] = "formdata_fieldname is required when formdata=True"
                log.error(ret["error"])
                return ret
            result = sess.request(method,
                                  url,
                                  params=params,
                                  files={
                                      formdata_fieldname:
                                      (formdata_filename, io.StringIO(data))
                                  },
                                  **req_kwargs)
        else:
            result = sess.request(method,
                                  url,
                                  params=params,
                                  data=data,
                                  **req_kwargs)
        result.raise_for_status()
        if stream is True:
            # fake a HTTP response header
            header_callback("HTTP/1.0 {} MESSAGE".format(result.status_code))
            # fake streaming the content
            streaming_callback(result.content)
            return {
                "handle": result,
            }

        if handle is True:
            return {
                "handle": result,
                "body": result.content,
            }

        log.debug("Final URL location of Response: %s",
                  sanitize_url(result.url, hide_fields))

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.content
        result_cookies = result.cookies
        body = result.content
        if not isinstance(body, str) and decode_body:
            body = body.decode(result.encoding or "utf-8")
        ret["body"] = body
    elif backend == "urllib2":
        request = urllib.request.Request(url_full, data)
        handlers = [
            urllib.request.HTTPHandler,
            urllib.request.HTTPCookieProcessor(sess_cookies),
        ]

        if url.startswith("https"):
            hostname = request.get_host()
            handlers[0] = urllib.request.HTTPSHandler(1)
            if not HAS_MATCHHOSTNAME:
                log.warning(
                    "match_hostname() not available, SSL hostname checking "
                    "not available. THIS CONNECTION MAY NOT BE SECURE!")
            elif verify_ssl is False:
                log.warning("SSL certificate verification has been explicitly "
                            "disabled. THIS CONNECTION MAY NOT BE SECURE!")
            else:
                if ":" in hostname:
                    hostname, port = hostname.split(":")
                else:
                    port = 443
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, int(port)))
                sockwrap = ssl.wrap_socket(sock,
                                           ca_certs=ca_bundle,
                                           cert_reqs=ssl.CERT_REQUIRED)
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret["error"] = "The certificate was invalid. Error returned was: {}".format(
                        pprint.pformat(exc))
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, str):
                        if os.path.exists(cert):
                            cert_chain = cert
                    elif isinstance(cert, list):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error(
                            "The client-side certificate path that was "
                            "passed is not valid: %s",
                            cert,
                        )
                        return
                    if hasattr(ssl, "SSLContext"):
                        # Python >= 2.7.9
                        context = ssl.SSLContext.load_cert_chain(*cert_chain)
                        handlers.append(
                            urllib.request.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            "host": request.get_host(),
                            "port": port,
                            "cert_file": cert_chain[0],
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs["key_file"] = cert_chain[1]
                        handlers[0] = http.client.HTTPSConnection(
                            **cert_kwargs)

        opener = urllib.request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        try:
            result = opener.open(request)
        except urllib.error.URLError as exc:
            return {"Error": str(exc)}
        if stream is True or handle is True:
            return {
                "handle": result,
                "body": result.content,
            }

        result_status_code = result.code
        result_headers = dict(result.info())
        result_text = result.read()
        if "Content-Type" in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers["Content-Type"])
            if (res_content_type.startswith("text/")
                    and "charset" in res_params
                    and not isinstance(result_text, str)):
                result_text = result_text.decode(res_params["charset"])
        if isinstance(result_text, bytes) and decode_body:
            result_text = result_text.decode("utf-8")
        ret["body"] = result_text
    else:
        # Tornado
        req_kwargs = {}

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, str):
                if os.path.exists(cert):
                    req_kwargs["client_cert"] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs["client_cert"] = cert[0]
                    req_kwargs["client_key"] = cert[1]
            else:
                log.error(
                    "The client-side certificate path that "
                    "was passed is not valid: %s",
                    cert,
                )

        if isinstance(data, dict):
            data = urllib.parse.urlencode(data)

        if verify_ssl:
            req_kwargs["ca_certs"] = ca_bundle

        max_body = opts.get("http_max_body",
                            salt.config.DEFAULT_MINION_OPTS["http_max_body"])
        connect_timeout = opts.get(
            "http_connect_timeout",
            salt.config.DEFAULT_MINION_OPTS["http_connect_timeout"],
        )
        timeout = opts.get(
            "http_request_timeout",
            salt.config.DEFAULT_MINION_OPTS["http_request_timeout"],
        )

        client_argspec = None

        proxy_host = opts.get("proxy_host", None)
        if proxy_host:
            # tornado requires a str for proxy_host, cannot be a unicode str in py2
            proxy_host = salt.utils.stringutils.to_str(proxy_host)
        proxy_port = opts.get("proxy_port", None)
        proxy_username = opts.get("proxy_username", None)
        if proxy_username:
            # tornado requires a str, cannot be unicode str in py2
            proxy_username = salt.utils.stringutils.to_str(proxy_username)
        proxy_password = opts.get("proxy_password", None)
        if proxy_password:
            # tornado requires a str, cannot be unicode str in py2
            proxy_password = salt.utils.stringutils.to_str(proxy_password)
        no_proxy = opts.get("no_proxy", [])

        # Since tornado doesn't support no_proxy, we'll always hand it empty proxies or valid ones
        # except we remove the valid ones if a url has a no_proxy hostname in it
        if urllib.parse.urlparse(url_full).hostname in no_proxy:
            proxy_host = None
            proxy_port = None
            proxy_username = None
            proxy_password = None

        # We want to use curl_http if we have a proxy defined
        if proxy_host and proxy_port:
            if HAS_CURL_HTTPCLIENT is False:
                ret["error"] = (
                    "proxy_host and proxy_port has been set. This requires pycurl and tornado, "
                    "but the libraries does not seem to be installed")
                log.error(ret["error"])
                return ret

            salt.ext.tornado.httpclient.AsyncHTTPClient.configure(
                "tornado.curl_httpclient.CurlAsyncHTTPClient")
            client_argspec = salt.utils.args.get_function_argspec(
                salt.ext.tornado.curl_httpclient.CurlAsyncHTTPClient.initialize
            )
        else:
            salt.ext.tornado.httpclient.AsyncHTTPClient.configure(None)
            client_argspec = salt.utils.args.get_function_argspec(
                salt.ext.tornado.simple_httpclient.SimpleAsyncHTTPClient.
                initialize)

        supports_max_body_size = "max_body_size" in client_argspec.args

        req_kwargs.update({
            "method": method,
            "headers": header_dict,
            "auth_username": username,
            "auth_password": password,
            "body": data,
            "validate_cert": verify_ssl,
            "allow_nonstandard_methods": True,
            "streaming_callback": streaming_callback,
            "header_callback": header_callback,
            "connect_timeout": connect_timeout,
            "request_timeout": timeout,
            "proxy_host": proxy_host,
            "proxy_port": proxy_port,
            "proxy_username": proxy_username,
            "proxy_password": proxy_password,
            "raise_error": raise_error,
            "decompress_response": False,
        })

        # Unicode types will cause a TypeError when Tornado's curl HTTPClient
        # invokes setopt. Therefore, make sure all arguments we pass which
        # contain strings are str types.
        req_kwargs = salt.utils.data.decode(req_kwargs, to_str=True)

        try:
            download_client = (HTTPClient(max_body_size=max_body)
                               if supports_max_body_size else HTTPClient())
            result = download_client.fetch(url_full, **req_kwargs)
        except salt.ext.tornado.httpclient.HTTPError as exc:
            ret["status"] = exc.code
            ret["error"] = str(exc)
            return ret
        except (socket.herror, OSError, socket.timeout,
                socket.gaierror) as exc:
            if status is True:
                ret["status"] = 0
            ret["error"] = str(exc)
            log.debug("Cannot perform 'http.query': %s - %s", url_full,
                      ret["error"])
            return ret

        if stream is True or handle is True:
            return {
                "handle": result,
                "body": result.body,
            }

        result_status_code = result.code
        result_headers = result.headers
        result_text = result.body
        if "Content-Type" in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers["Content-Type"])
            if (res_content_type.startswith("text/")
                    and "charset" in res_params
                    and not isinstance(result_text, str)):
                result_text = result_text.decode(res_params["charset"])
        if isinstance(result_text, bytes) and decode_body:
            result_text = result_text.decode("utf-8")
        ret["body"] = result_text
        if "Set-Cookie" in result_headers and cookies is not None:
            result_cookies = parse_cookie_header(result_headers["Set-Cookie"])
            for item in result_cookies:
                sess_cookies.set_cookie(item)
        else:
            result_cookies = None

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(":")
            result_headers_dict[comps[0].strip()] = ":".join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug("Response Status Code: %s", result_status_code)
    log.trace("Response Headers: %s", result_headers)
    log.trace("Response Cookies: %s", sess_cookies)
    # log.trace("Content: %s", result_text)

    coding = result_headers.get("Content-Encoding", "identity")

    # Requests will always decompress the content, and working around that is annoying.
    if backend != "requests":
        result_text = __decompressContent(coding, result_text)

    try:
        log.trace("Response Text: %s", result_text)
    except UnicodeEncodeError as exc:
        log.trace(
            "Cannot Trace Log Response Text: %s. This may be due to "
            "incompatibilities between requests and logging.",
            exc,
        )

    if text_out is not None:
        with salt.utils.files.fopen(text_out, "w") as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.files.fopen(headers_out, "w") as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and salt.utils.msgpack.HAS_MSGPACK:
        # TODO: See persist_session above
        if "set-cookie" in result_headers:
            with salt.utils.files.fopen(session_cookie_jar, "wb") as fh_:
                session_cookies = result_headers.get("set-cookie", None)
                if session_cookies is not None:
                    salt.utils.msgpack.dump({"Cookie": session_cookies}, fh_)
                else:
                    salt.utils.msgpack.dump("", fh_)

    if status is True:
        ret["status"] = result_status_code

    if headers is True:
        ret["headers"] = result_headers

    if decode is True:
        if decode_type == "auto":
            content_type = result_headers.get("content-type",
                                              "application/json")
            if "xml" in content_type:
                decode_type = "xml"
            elif "json" in content_type:
                decode_type = "json"
            elif "yaml" in content_type:
                decode_type = "yaml"
            else:
                decode_type = "plain"

        valid_decodes = ("json", "xml", "yaml", "plain")
        if decode_type not in valid_decodes:
            ret["error"] = "Invalid decode_type specified. Valid decode types are: {}".format(
                pprint.pformat(valid_decodes))
            log.error(ret["error"])
            return ret

        if decode_type == "json":
            ret["dict"] = salt.utils.json.loads(result_text)
        elif decode_type == "xml":
            ret["dict"] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret["dict"].append(xml.to_dict(item))
        elif decode_type == "yaml":
            ret["dict"] = salt.utils.data.decode(
                salt.utils.yaml.safe_load(result_text))
        else:
            text = True

        if decode_out:
            with salt.utils.files.fopen(decode_out, "w") as dof:
                dof.write(result_text)

    if text is True:
        ret["text"] = result_text

    return ret
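A hedged call sketch for http.query above. The URL is illustrative; when opts is omitted the function loads the minion (or master) config itself, and __opts__ is assumed to be available as it is inside a Salt execution module:

result = query(
    "https://api.example.com/items.json",
    method="GET",
    decode=True,
    decode_type="json",
    status=True,
    opts=__opts__,
)
# With decode=True and a JSON response, the parsed data lands in result["dict"],
# the raw body in result["body"], and (with status=True) the code in result["status"].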
Example #8
File: s3.py Project: bryson/salt
def query(key, keyid, method='GET', params=None, headers=None,
          requesturl=None, return_url=False, bucket=None, service_url=None,
          path='', return_bin=False, action=None, local_file=None,
          verify_ssl=True, full_headers=False, kms_keyid=None,
          location=None, role_arn=None, chunk_size=16384):
    '''
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
    metadata service will be made.

    A service_url may also be specified in the configuration:

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    SSL verification may also be turned off in the configuration:

        s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.

    A region may be specified:

        s3.location: eu-central-1

    If region is not specified, an attempt to fetch the region from EC2 IAM
    metadata service will be made. Failing that, default is us-east-1
    '''
    if not HAS_REQUESTS:
        log.error('There was an error: requests is required for s3 access')

    if not headers:
        headers = {}

    if not params:
        params = {}

    if not service_url:
        service_url = 's3.amazonaws.com'

    if bucket:
        endpoint = '{0}.{1}'.format(bucket, service_url)
    else:
        endpoint = service_url

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key:
        key = salt.utils.aws.IROLE_CODE

    if not keyid:
        keyid = salt.utils.aws.IROLE_CODE

    if kms_keyid is not None and method in ('PUT', 'POST'):
        headers['x-amz-server-side-encryption'] = 'aws:kms'
        headers['x-amz-server-side-encryption-aws-kms-key-id'] = kms_keyid

    if not location:
        location = salt.utils.aws.get_location()

    data = ''
    payload_hash = None
    if method == 'PUT':
        if local_file:
            payload_hash = salt.utils.get_hash(local_file, form='sha256')

    if path is None:
        path = ''

    if not requesturl:
        requesturl = 'https://{0}/{1}'.format(endpoint, path)
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri='/{0}'.format(path),
            prov_dict={'id': keyid, 'key': key},
            role_arn=role_arn,
            location=location,
            product='s3',
            requesturl=requesturl,
            headers=headers,
            payload_hash=payload_hash,
        )

    log.debug('S3 Request: {0}'.format(requesturl))
    log.debug('S3 Headers::')
    log.debug('    Authorization: {0}'.format(headers['Authorization']))

    if not data:
        data = None

    response = None
    if method == 'PUT':
        if local_file:
            data = salt.utils.fopen(local_file, 'r')
        result = requests.request(method,
                                  requesturl,
                                  headers=headers,
                                  data=data,
                                  verify=verify_ssl,
                                  stream=True)
        response = result.content
    elif method == 'GET' and local_file and not return_bin:
        result = requests.request(method,
                                  requesturl,
                                  headers=headers,
                                  data=data,
                                  verify=verify_ssl,
                                  stream=True)
    else:
        result = requests.request(method,
                                  requesturl,
                                  headers=headers,
                                  data=data,
                                  verify=verify_ssl)
        response = result.content

    err_code = None
    err_msg = None
    if result.status_code >= 400:
        # On error the S3 API response should contain error message
        err_text = response or result.content or 'Unknown error'
        log.debug('    Response content: {0}'.format(err_text))

        # Try to get err info from response xml
        try:
            err_data = xml.to_dict(ET.fromstring(err_text))
            err_code = err_data['Code']
            err_msg = err_data['Message']
        except (KeyError, ET.ParseError) as err:
            log.debug('Failed to parse s3 err response. {0}: {1}'.format(
                type(err).__name__, err))
            err_code = 'http-{0}'.format(result.status_code)
            err_msg = err_text

    log.debug('S3 Response Status Code: {0}'.format(result.status_code))

    if method == 'PUT':
        if result.status_code != 200:
            if local_file:
                raise CommandExecutionError(
                    'Failed to upload from {0} to {1}. {2}: {3}'.format(
                        local_file, path, err_code, err_msg))
            raise CommandExecutionError(
                'Failed to create bucket {0}. {1}: {2}'.format(
                    bucket, err_code, err_msg))

        if local_file:
            log.debug('Uploaded from {0} to {1}'.format(local_file, path))
        else:
            log.debug('Created bucket {0}'.format(bucket))
        return

    if method == 'DELETE':
        if not str(result.status_code).startswith('2'):
            if path:
                raise CommandExecutionError(
                    'Failed to delete {0} from bucket {1}. {2}: {3}'.format(
                        path, bucket, err_code, err_msg))
            raise CommandExecutionError(
                'Failed to delete bucket {0}. {1}: {2}'.format(
                    bucket, err_code, err_msg))

        if path:
            log.debug('Deleted {0} from bucket {1}'.format(path, bucket))
        else:
            log.debug('Deleted bucket {0}'.format(bucket))
        return

    # This can be used to save a binary object to disk
    if local_file and method == 'GET':
        if result.status_code < 200 or result.status_code >= 300:
            raise CommandExecutionError(
                'Failed to get file. {0}: {1}'.format(err_code, err_msg))

        log.debug('Saving to local file: {0}'.format(local_file))
        with salt.utils.fopen(local_file, 'wb') as out:
            for chunk in result.iter_content(chunk_size=chunk_size):
                out.write(chunk)
        return 'Saved to local file: {0}'.format(local_file)

    if result.status_code < 200 or result.status_code >= 300:
        raise CommandExecutionError(
            'Failed s3 operation. {0}: {1}'.format(err_code, err_msg))

    # This can be used to return a binary object wholesale
    if return_bin:
        return response

    if response:
        items = ET.fromstring(response)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {'headers': []}
        if full_headers:
            ret['headers'] = dict(result.headers)
        else:
            for header in result.headers:
                ret['headers'].append(header.strip())

    return ret
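Unlike Example #5, this revision raises CommandExecutionError for failed PUT, DELETE, and GET operations instead of only logging them, so callers have to handle the exception. A hedged caller-side sketch; key/keyid are assumed to hold credentials, and the bucket, path, and logger are illustrative:

from salt.exceptions import CommandExecutionError

try:
    query(key=key, keyid=keyid, method='PUT', bucket='my-bucket',
          path='backups/minion.tgz', local_file='/tmp/minion.tgz')
except CommandExecutionError as exc:
    log.error('S3 upload failed: %s', exc)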
Example #9
def test_xml_case_f_legacy(self):
    xmldata = ET.fromstring(self.cases["f"]["xml"])
    defaultdict = xml.to_dict(xmldata, False)
    self.assertEqual(defaultdict, self.cases["f"]["legacy"])
Example #10
def query(
    key,
    keyid,
    method="GET",
    params=None,
    headers=None,
    requesturl=None,
    return_url=False,
    bucket=None,
    service_url=None,
    path="",
    return_bin=False,
    action=None,
    local_file=None,
    verify_ssl=True,
    full_headers=False,
    kms_keyid=None,
    location=None,
    role_arn=None,
    chunk_size=16384,
    path_style=False,
    https_enable=True,
):
    """
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
    metadata service will be made.

    A service_url may also be specified in the configuration:

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    Path style can be enabled:

        s3.path_style: True

    This can be useful if you need to use salt with a proxy for an s3 compatible storage

    You can use either https protocol or http protocol:

        s3.https_enable: True

    SSL verification may also be turned off in the configuration:

        s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.

    A region may be specified:

        s3.location: eu-central-1

    If region is not specified, an attempt to fetch the region from EC2 IAM
    metadata service will be made. Failing that, default is us-east-1
    """
    if not HAS_REQUESTS:
        log.error("There was an error: requests is required for s3 access")

    if not headers:
        headers = {}

    if not params:
        params = {}

    if not service_url:
        service_url = "s3.amazonaws.com"

    if not bucket or path_style:
        endpoint = service_url
    else:
        endpoint = "{}.{}".format(bucket, service_url)

    if path_style and bucket:
        path = "{}/{}".format(bucket, path)

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key:
        key = salt.utils.aws.IROLE_CODE

    if not keyid:
        keyid = salt.utils.aws.IROLE_CODE

    if kms_keyid is not None and method in ("PUT", "POST"):
        headers["x-amz-server-side-encryption"] = "aws:kms"
        headers["x-amz-server-side-encryption-aws-kms-key-id"] = kms_keyid

    if not location:
        location = salt.utils.aws.get_location()

    data = ""
    fh = None
    payload_hash = None
    if method == "PUT":
        if local_file:
            payload_hash = salt.utils.hashutils.get_hash(local_file, form="sha256")

    if path is None:
        path = ""
    path = urllib.parse.quote(path)

    if not requesturl:
        requesturl = (("https" if https_enable else "http") + "://{0}/{1}").format(
            endpoint, path
        )
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri="/{}".format(path),
            prov_dict={"id": keyid, "key": key},
            role_arn=role_arn,
            location=location,
            product="s3",
            requesturl=requesturl,
            headers=headers,
            payload_hash=payload_hash,
        )

    log.debug("S3 Request: %s", requesturl)
    log.debug("S3 Headers::")
    log.debug("    Authorization: %s", headers["Authorization"])

    if not data:
        data = None

    try:
        if method == "PUT":
            if local_file:
                # pylint: disable=resource-leakage
                fh = salt.utils.files.fopen(local_file, "rb")
                # pylint: enable=resource-leakage
                data = fh.read()  # pylint: disable=resource-leakage
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                stream=True,
                timeout=300,
            )
        elif method == "GET" and local_file and not return_bin:
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                stream=True,
                timeout=300,
            )
        else:
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                timeout=300,
            )
    finally:
        if fh is not None:
            fh.close()

    err_code = None
    err_msg = None
    if result.status_code >= 400:
        # On error the S3 API response should contain error message
        err_text = result.content or "Unknown error"
        log.debug("    Response content: %s", err_text)

        # Try to get err info from response xml
        try:
            err_data = xml.to_dict(ET.fromstring(err_text))
            err_code = err_data["Code"]
            err_msg = err_data["Message"]
        except (KeyError, ET.ParseError) as err:
            log.debug(
                "Failed to parse s3 err response. %s: %s", type(err).__name__, err
            )
            err_code = "http-{}".format(result.status_code)
            err_msg = err_text

    log.debug("S3 Response Status Code: %s", result.status_code)

    if method == "PUT":
        if result.status_code != 200:
            if local_file:
                raise CommandExecutionError(
                    "Failed to upload from {} to {}. {}: {}".format(
                        local_file, path, err_code, err_msg
                    )
                )
            raise CommandExecutionError(
                "Failed to create bucket {}. {}: {}".format(bucket, err_code, err_msg)
            )

        if local_file:
            log.debug("Uploaded from %s to %s", local_file, path)
        else:
            log.debug("Created bucket %s", bucket)
        return

    if method == "DELETE":
        if not str(result.status_code).startswith("2"):
            if path:
                raise CommandExecutionError(
                    "Failed to delete {} from bucket {}. {}: {}".format(
                        path, bucket, err_code, err_msg
                    )
                )
            raise CommandExecutionError(
                "Failed to delete bucket {}. {}: {}".format(bucket, err_code, err_msg)
            )

        if path:
            log.debug("Deleted %s from bucket %s", path, bucket)
        else:
            log.debug("Deleted bucket %s", bucket)
        return

    # This can be used to save a binary object to disk
    if local_file and method == "GET":
        if result.status_code < 200 or result.status_code >= 300:
            raise CommandExecutionError(
                "Failed to get file. {}: {}".format(err_code, err_msg)
            )

        log.debug("Saving to local file: %s", local_file)
        with salt.utils.files.fopen(local_file, "wb") as out:
            for chunk in result.iter_content(chunk_size=chunk_size):
                out.write(chunk)
        return "Saved to local file: {}".format(local_file)

    if result.status_code < 200 or result.status_code >= 300:
        raise CommandExecutionError(
            "Failed s3 operation. {}: {}".format(err_code, err_msg)
        )

    # This can be used to return a binary object wholesale
    if return_bin:
        return result.content

    if result.content:
        items = ET.fromstring(result.content)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {"headers": []}
        if full_headers:
            ret["headers"] = dict(result.headers)
        else:
            for header in result.headers:
                ret["headers"].append(header.strip())

    return ret
Example #11
def query(params=None,
          setname=None,
          requesturl=None,
          location=None,
          return_url=False,
          return_root=False,
          opts=None,
          provider=None,
          endpoint=None,
          product='ec2',
          sigver='2'):
    '''
    Perform a query against AWS services using Signature Version 2 Signing
    Process. This is documented at:

    http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html

    Regions and endpoints are documented at:

    http://docs.aws.amazon.com/general/latest/gr/rande.html

    Default ``product`` is ``ec2``. Valid ``product`` names are:

    .. code-block:: yaml

        - autoscaling (Auto Scaling)
        - cloudformation (CloudFormation)
        - ec2 (Elastic Compute Cloud)
        - elasticache (ElastiCache)
        - elasticbeanstalk (Elastic BeanStalk)
        - elasticloadbalancing (Elastic Load Balancing)
        - elasticmapreduce (Elastic MapReduce)
        - iam (Identity and Access Management)
        - importexport (Import/Export)
        - monitoring (CloudWatch)
        - rds (Relational Database Service)
        - simpledb (SimpleDB)
        - sns (Simple Notification Service)
        - sqs (Simple Queue Service)
    '''
    if params is None:
        params = {}

    if opts is None:
        opts = {}

    function = opts.get('function', (None, product))
    providers = opts.get('providers', {})

    if provider is None:
        prov_dict = providers.get(function[1], {}).get(product, {})
        if prov_dict:
            driver = list(list(prov_dict.keys()))[0]
            provider = providers.get(driver, product)
    else:
        prov_dict = providers.get(provider, {}).get(product, {})

    service_url = prov_dict.get('service_url', 'amazonaws.com')

    if not location:
        location = get_location(opts, prov_dict)

    if endpoint is None:
        if not requesturl:
            endpoint = prov_dict.get(
                'endpoint', '{0}.{1}.{2}'.format(product, location,
                                                 service_url))

            requesturl = 'https://{0}/'.format(endpoint)
        else:
            endpoint = urlparse(requesturl).netloc
            if endpoint == '':
                endpoint_err = (
                    'Could not find a valid endpoint in the '
                    'requesturl: {0}. Looking for something '
                    'like https://some.aws.endpoint/?args').format(requesturl)
                log.error(endpoint_err)
                if return_url is True:
                    return {'error': endpoint_err}, requesturl
                return {'error': endpoint_err}

    log.debug('Using AWS endpoint: %s', endpoint)
    method = 'GET'

    aws_api_version = prov_dict.get(
        'aws_api_version',
        prov_dict.get('{0}_api_version'.format(product),
                      DEFAULT_AWS_API_VERSION))

    # Fallback to ec2's id & key if none is found, for this component
    if not prov_dict.get('id', None):
        prov_dict['id'] = providers.get(provider, {}).get('ec2',
                                                          {}).get('id', {})
        prov_dict['key'] = providers.get(provider, {}).get('ec2',
                                                           {}).get('key', {})

    if sigver == '4':
        headers, requesturl = sig4(method,
                                   endpoint,
                                   params,
                                   prov_dict,
                                   aws_api_version,
                                   location,
                                   product,
                                   requesturl=requesturl)
        params_with_headers = {}
    else:
        params_with_headers = sig2(method, endpoint, params, prov_dict,
                                   aws_api_version)
        headers = {}

    MAX_RETRIES = 6
    attempts = 0
    while attempts < MAX_RETRIES:
        log.debug('AWS Request: %s', requesturl)
        log.trace('AWS Request Parameters: %s', params_with_headers)
        try:
            result = requests.get(requesturl,
                                  headers=headers,
                                  params=params_with_headers)
            log.debug('AWS Response Status Code: %s', result.status_code)
            log.trace(
                'AWS Response Text: %s',
                result.text.encode(
                    result.encoding if result.encoding else 'utf-8'))
            result.raise_for_status()
            break
        except requests.exceptions.HTTPError as exc:
            root = ET.fromstring(exc.response.content)
            data = xml.to_dict(root)

            # check to see if we should retry the query
            err_code = data.get('Errors', {}).get('Error', {}).get('Code', '')
            if attempts < MAX_RETRIES and err_code and err_code in AWS_RETRY_CODES:
                attempts += 1
                log.error(
                    'AWS Response Status Code and Error: [%s %s] %s; '
                    'Attempts remaining: %s', exc.response.status_code, exc,
                    data, attempts)
                # backoff an exponential amount of time to throttle requests
                # during "API Rate Exceeded" failures as suggested by the AWS documentation here:
                # https://docs.aws.amazon.com/AWSEC2/latest/APIReference/query-api-troubleshooting.html
                # and also here:
                # https://docs.aws.amazon.com/general/latest/gr/api-retries.html
                # Failure to implement this approach results in a failure rate of >30% when using salt-cloud with
                # "--parallel" when creating 50 or more instances with a fixed delay of 2 seconds.
                # A failure rate of >10% is observed when using the salt-api with an asynchronous client
                # specified (runner_async). A standalone sketch of this backoff policy follows this example.
                time.sleep(random.uniform(1, 2**attempts))
                continue

            log.error('AWS Response Status Code and Error: [%s %s] %s',
                      exc.response.status_code, exc, data)
            if return_url is True:
                return {'error': data}, requesturl
            return {'error': data}
    else:
        log.error('AWS Response Status Code and Error: [%s %s] %s',
                  exc.response.status_code, exc, data)
        if return_url is True:
            return {'error': data}, requesturl
        return {'error': data}

    response = result.text.encode(
        result.encoding if result.encoding else 'utf-8')

    root = ET.fromstring(response)
    items = root[1]
    if return_root is True:
        items = root

    if setname:
        if sys.version_info < (2, 7):
            children_len = len(root.getchildren())
        else:
            children_len = len(root)

        for item in range(0, children_len):
            comps = root[item].tag.split('}')
            if comps[1] == setname:
                items = root[item]

    ret = []
    for item in items:
        ret.append(xml.to_dict(item))

    if return_url is True:
        return ret, requesturl

    return ret
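
The retry comment above describes exponential backoff with jitter; a standalone sketch of that policy follows, under simplified assumptions (a generic callable plus a caller-supplied retryable check instead of the AWS-specific error parsing).

import random
import time

def retry_with_backoff(do_request, is_retryable, max_retries=6):
    # Try the request up to max_retries times; after a retryable failure,
    # sleep a random interval bounded by 2 ** attempt (full jitter) before
    # the next attempt, mirroring time.sleep(random.uniform(1, 2 ** attempts)).
    for attempt in range(1, max_retries + 1):
        try:
            return do_request()
        except Exception as exc:
            if attempt == max_retries or not is_retryable(exc):
                raise
            time.sleep(random.uniform(1, 2 ** attempt))
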
Example #12
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=JARFILE,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=SESSIONJARFILE,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          port=80,
          opts=None,
          requests_lib=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          handle=False,
          agent=USERAGENT,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config(
                os.path.join(syspaths.CONFIG_DIR, 'master')
            )
        elif node == 'minion':
            opts = salt.config.minion_config(
                os.path.join(syspaths.CONFIG_DIR, 'minion')
            )
        else:
            opts = {}

    if requests_lib is None:
        requests_lib = opts.get('requests_lib', False)

    if requests_lib is True:
        if HAS_REQUESTS is False:
            ret['error'] = ('http.query has been set to use requests, but the '
                            'requests library does not seem to be installed')
            log.error(ret['error'])
            return ret
    else:
        requests_log = logging.getLogger('requests')
        requests_log.setLevel(logging.WARNING)

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get('verify_ssl', True)

    if cert is None:
        cert = opts.get('cert', None)

    if data_file is not None:
        data = _render(
            data_file, data_render, data_renderer, template_dict, opts
        )

    log.debug('Using {0} Method'.format(method))
    if method == 'POST':
        log.trace('POST Data: {0}'.format(pprint.pformat(data)))

    if header_file is not None:
        header_tpl = _render(
            header_file, header_render, header_renderer, template_dict, opts
        )
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if persist_session is True and HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.fopen(session_cookie_jar, 'r') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if username and password:
        auth = (username, password)
    else:
        auth = None

    if requests_lib is True:
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace('Request Headers: {0}'.format(sess.headers))
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    else:
        sess_cookies = None

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(cookie_jar)
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        else:
            sess_cookies.load()

    if agent == USERAGENT:
        agent = '{0} http.query()'.format(agent)
    header_dict['User-agent'] = agent

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    if requests_lib is True:
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == '0':
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs['prefetch'] = False
            else:
                req_kwargs['stream'] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, string_types):
                if os.path.exists(cert):
                    req_kwargs['cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['cert'] = cert
            else:
                log.error('The client-side certificate path that was passed is '
                          'not valid: {0}'.format(cert))

        result = sess.request(
            method, url, params=params, data=data, **req_kwargs
        )
        result.raise_for_status()
        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.text
        result_cookies = result.cookies
    else:
        request = urllib_request.Request(url, data)
        handlers = [
            urllib_request.HTTPHandler,
            urllib_request.HTTPCookieProcessor(sess_cookies)
        ]

        if url.startswith('https') or port == 443:
            if not HAS_MATCHHOSTNAME:
                log.warn(('match_hostname() not available, SSL hostname checking '
                         'not available. THIS CONNECTION MAY NOT BE SECURE!'))
            elif verify_ssl is False:
                log.warn(('SSL certificate verification has been explicitly '
                         'disabled. THIS CONNECTION MAY NOT BE SECURE!'))
            else:
                hostname = request.get_host()
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, 443))
                sockwrap = ssl.wrap_socket(
                    sock,
                    ca_certs=ca_bundle,
                    cert_reqs=ssl.CERT_REQUIRED
                )
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret['error'] = (
                        'The certificate was invalid. '
                        'Error returned was: {0}'.format(
                            pprint.pformat(exc)
                        )
                    )
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, string_types):
                        if os.path.exists(cert):
                            cert_chain = (cert,)
                    elif isinstance(cert, tuple):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error('The client-side certificate path that was '
                                  'passed is not valid: {0}'.format(cert))
                        return
                    if hasattr(ssl, 'SSLContext'):
                        # Python >= 2.7.9
                        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                        context.load_cert_chain(*cert_chain)
                        handlers.append(urllib_request.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            'host': request.get_host(),
                            'port': port,
                            'cert_file': cert_chain[0]
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs['key_file'] = cert_chain[1]
                        handlers[0] = salt.ext.six.moves.http_client.HTTPSConnection(**cert_kwargs)

        opener = urllib_request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        result = opener.open(request)
        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.code
        result_headers = result.headers.headers
        result_text = result.read()

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(':')
            result_headers_dict[comps[0].strip()] = ':'.join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug('Response Status Code: {0}'.format(result_status_code))
    log.trace('Response Headers: {0}'.format(result_headers))
    log.trace('Response Cookies: {0}'.format(sess_cookies))
    try:
        log.trace('Response Text: {0}'.format(result_text))
    except UnicodeEncodeError as exc:
        log.trace(('Cannot Trace Log Response Text: {0}. This may be due to '
                  'incompatibilities between requests and logging.').format(exc))

    if text_out is not None and os.path.exists(text_out):
        with salt.utils.fopen(text_out, 'w') as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.fopen(headers_out, 'w') as hof:
            hof.write(pprint.pformat(result_headers))

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and HAS_MSGPACK:
        # TODO: See persist_session above
        if 'set-cookie' in result_headers:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                session_cookies = result_headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result_status_code

    if headers is True:
        ret['headers'] = result_headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result_headers.get(
                'content-type', 'application/json'
            )
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = (
                'Invalid decode_type specified. '
                'Valid decode types are: {0}'.format(
                    pprint.pformat(valid_decodes)
                )
            )
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = json.loads(result_text)
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        else:
            text = True

        if decode_out and os.path.exists(decode_out):
            with salt.utils.fopen(decode_out, 'w') as dof:
                dof.write(result_text)

    if text is True:
        ret['text'] = result_text

    return ret
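
A hedged usage sketch for the query() function above, showing the decode auto-detection path; the URL is a placeholder and only parameters from the signature above are used.

response = query(
    'https://example.com/api/info',   # placeholder URL
    method='GET',
    decode=True,          # populate response['dict'] via the decode logic above
    decode_type='auto',   # picks 'json', 'xml', or 'plain' from the content-type
    status=True,          # include response['status']
)
if response.get('status') == 200:
    info = response.get('dict')
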
Example #13
def call(payload=None):
    '''
    This function captures the query string and sends it to the Palo Alto device.
    '''
    r = None
    try:
        if DETAILS['method'] == 'dev_key':
            # Pass the api key without the target declaration
            conditional_payload = {'key': DETAILS['apikey']}
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=DETAILS["verify_ssl"],
                                        raise_error=True)
        elif DETAILS['method'] == 'dev_pass':
            # Pass credentials without the target declaration
            r = __utils__['http.query'](DETAILS['url'],
                                        username=DETAILS['username'],
                                        password=DETAILS['password'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=DETAILS["verify_ssl"],
                                        raise_error=True)
        elif DETAILS['method'] == 'pan_key':
            # Pass the api key with the target declaration
            conditional_payload = {
                'key': DETAILS['apikey'],
                'target': DETAILS['serial']
            }
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=DETAILS["verify_ssl"],
                                        raise_error=True)
        elif DETAILS['method'] == 'pan_pass':
            # Pass credentials with the target declaration
            conditional_payload = {'target': DETAILS['serial']}
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        username=DETAILS['username'],
                                        password=DETAILS['password'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=DETAILS["verify_ssl"],
                                        raise_error=True)
    except KeyError as err:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host.")

    if not r:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host.")

    xmldata = ET.fromstring(r['text'])

    # If we are pulling the candidate configuration, we need to strip the dirtyId
    if payload['type'] == 'config' and payload['action'] == 'get':
        xmldata = _strip_dirty(xmldata)

    return xml.to_dict(xmldata, True)
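
A sketch of a payload this proxy call() might receive. Only the 'type' and 'action' keys are inspected by call() itself (to decide whether dirtyId markers are stripped); the 'xpath' key and its value are placeholders patterned after the xpaths used elsewhere in these examples.

payload = {
    'type': 'config',
    'action': 'get',
    'xpath': "/config/devices/entry[@name='localhost.localdomain']/vsys",  # placeholder
}
config = call(payload)  # parsed reply returned as a dict via xml.to_dict(..., True)
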
Example #14
def test_xml_case_f_full(self):
    xmldata = ET.fromstring(self.cases['f']['xml'])
    defaultdict = xml.to_dict(xmldata, True)
    self.assertEqual(defaultdict, self.cases['f']['full'])
Example #15
def test_xml_case_f_legacy(self):
    xmldata = ET.fromstring(self.cases['f']['xml'])
    defaultdict = xml.to_dict(xmldata, False)
    self.assertEqual(defaultdict, self.cases['f']['legacy'])
Example #16
def service_group_exists(name,
                         groupname=None,
                         vsys=1,
                         members=None,
                         description=None,
                         commit=False):
    '''
    Ensures that a service group object exists in the configured state. If it does not exist or is not configured with
    the specified attributes, it will be adjusted to match the specified values.

    This module will enforce group membership. If a group exists and contains members this state does not include,
    those members will be removed and replaced with the specified members in the state.

    name: The name of the module function to execute.

    groupname(str): The name of the service group object.  The name is case-sensitive and can have up to 31 characters,
    which can be letters, numbers, spaces, hyphens, and underscores. The name must be unique on a firewall and, on
    Panorama, unique within its device group and any ancestor or descendant device groups.

    vsys(str): The string representation of the VSYS ID. Defaults to VSYS 1.

    members(str, list): The members of the service group. These must be valid service objects or service groups on the
    system that already exist prior to the execution of this state.

    description(str): A description for the policy (up to 255 characters).

    commit(bool): If true the firewall will commit the changes, if false do not commit changes.

    SLS Example:

    .. code-block:: yaml

        panos/service-group/my-group:
            panos.service_group_exists:
              - groupname: my-group
              - vsys: 1
              - members:
                - tcp-80
                - custom-port-group
              - description: A group that needs to exist
              - commit: False

    '''
    ret = _default_ret(name)

    if not groupname:
        ret.update({'comment': "The group name field must be provided."})
        return ret

    # Check if service group object currently exists
    group = __salt__['panos.get_service_group'](groupname, vsys)['result']

    if group and 'entry' in group:
        group = group['entry']
    else:
        group = {}

    # Verify the arguments
    if members:
        element = "<members>{0}</members>".format(_build_members(
            members, True))
    else:
        ret.update({'comment': "The group members must be provided."})
        return ret

    if description:
        element += "<description>{0}</description>".format(description)

    full_element = "<entry name='{0}'>{1}</entry>".format(groupname, element)

    new_group = xml.to_dict(ET.fromstring(full_element), True)

    if group == new_group:
        ret.update({
            'comment':
            'Service group object already exists. No changes required.',
            'result': True
        })
        return ret
    else:
        xpath = "/config/devices/entry[@name=\'localhost.localdomain\']/vsys/entry[@name=\'vsys{0}\']/service-group/" \
                "entry[@name=\'{1}\']".format(vsys, groupname)

        result, msg = _edit_config(xpath, full_element)

        if not result:
            ret.update({'comment': msg})
            return ret

    if commit is True:
        ret.update({
            'changes': {
                'before': group,
                'after': new_group
            },
            'commit': __salt__['panos.commit'](),
            'comment': 'Service group object successfully configured.',
            'result': True
        })
    else:
        ret.update({
            'changes': {
                'before': group,
                'after': new_group
            },
            'comment': 'Service group object successfully configured.',
            'result': True
        })

    return ret
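
The state above decides whether to push an edit by comparing two dicts produced by xml.to_dict. A minimal sketch of that round trip on a hypothetical group; the <member> wrapping is an assumption about what _build_members() emits, and the exact dict layout is whatever salt.utils.xmlutil.to_dict produces.

import xml.etree.ElementTree as ET
import salt.utils.xmlutil as xml  # assumed to be the 'xml' helper used in these examples

desired = ("<entry name='my-group'>"
           "<members><member>tcp-80</member></members>"
           "</entry>")
new_group = xml.to_dict(ET.fromstring(desired), True)

# 'group' would normally come from panos.get_service_group()['result']['entry'];
# the state only calls _edit_config() when the two dicts differ.
group = {}
needs_update = group != new_group
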
Example #17
def call(payload=None):
    """
    This function captures the query string and sends it to the Palo Alto device.
    """
    r = None
    try:
        if DETAILS["method"] == "dev_key":
            # Pass the api key without the target declaration
            conditional_payload = {"key": DETAILS["apikey"]}
            payload.update(conditional_payload)
            r = __utils__["http.query"](
                DETAILS["url"],
                data=payload,
                method="POST",
                decode_type="plain",
                decode=True,
                verify_ssl=False,
                status=True,
                raise_error=True,
            )
        elif DETAILS["method"] == "dev_pass":
            # Pass credentials without the target declaration
            r = __utils__["http.query"](
                DETAILS["url"],
                username=DETAILS["username"],
                password=DETAILS["password"],
                data=payload,
                method="POST",
                decode_type="plain",
                decode=True,
                verify_ssl=False,
                status=True,
                raise_error=True,
            )
        elif DETAILS["method"] == "pan_key":
            # Pass the api key with the target declaration
            conditional_payload = {
                "key": DETAILS["apikey"],
                "target": DETAILS["serial"],
            }
            payload.update(conditional_payload)
            r = __utils__["http.query"](
                DETAILS["url"],
                data=payload,
                method="POST",
                decode_type="plain",
                decode=True,
                verify_ssl=False,
                status=True,
                raise_error=True,
            )
        elif DETAILS["method"] == "pan_pass":
            # Pass credentials with the target declaration
            conditional_payload = {"target": DETAILS["serial"]}
            payload.update(conditional_payload)
            r = __utils__["http.query"](
                DETAILS["url"],
                username=DETAILS["username"],
                password=DETAILS["password"],
                data=payload,
                method="POST",
                decode_type="plain",
                decode=True,
                verify_ssl=False,
                status=True,
                raise_error=True,
            )
    except KeyError as err:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host."
        )

    if not r:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host."
        )

    if six.text_type(r["status"]) not in ["200", "201", "204"]:
        if six.text_type(r["status"]) == "400":
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request due to a client error."
            )
        elif six.text_type(r["status"]) == "401":
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request because it lacks valid authentication "
                "credentials for the target resource."
            )
        elif six.text_type(r["status"]) == "403":
            raise salt.exceptions.CommandExecutionError(
                "The server refused to authorize the request."
            )
        elif six.text_type(r["status"]) == "404":
            raise salt.exceptions.CommandExecutionError(
                "The requested resource could not be found."
            )
        else:
            raise salt.exceptions.CommandExecutionError(
                "Did not receive a valid response from host."
            )

    xmldata = ET.fromstring(r["text"])

    # If we are pulling the candidate configuration, we need to strip the dirtyId
    if payload["type"] == "config" and payload["action"] == "get":
        xmldata = _strip_dirty(xmldata)

    return xml.to_dict(xmldata, True)
Example #18
def query(params=None,
          setname=None,
          requesturl=None,
          location=None,
          return_url=False,
          return_root=False,
          opts=None,
          provider=None,
          endpoint=None,
          product='ec2',
          sigver='2'):
    '''
    Perform a query against AWS services using Signature Version 2 Signing
    Process. This is documented at:

    http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html

    Regions and endpoints are documented at:

    http://docs.aws.amazon.com/general/latest/gr/rande.html

    Default ``product`` is ``ec2``. Valid ``product`` names are:

    .. code-block:: yaml

        - autoscaling (Auto Scaling)
        - cloudformation (CloudFormation)
        - ec2 (Elastic Compute Cloud)
        - elasticache (ElastiCache)
        - elasticbeanstalk (Elastic BeanStalk)
        - elasticloadbalancing (Elastic Load Balancing)
        - elasticmapreduce (Elastic MapReduce)
        - iam (Identity and Access Management)
        - importexport (Import/Export)
        - monitoring (CloudWatch)
        - rds (Relational Database Service)
        - simpledb (SimpleDB)
        - sns (Simple Notification Service)
        - sqs (Simple Queue Service)
    '''
    if params is None:
        params = {}

    if opts is None:
        opts = {}

    function = opts.get('function', (None, product))
    providers = opts.get('providers', {})

    if provider is None:
        prov_dict = providers.get(function[1], {}).get(product, {})
        if prov_dict:
            driver = list(list(prov_dict.keys()))[0]
            provider = providers.get(driver, product)
    else:
        prov_dict = providers.get(provider, {}).get(product, {})

    service_url = prov_dict.get('service_url', 'amazonaws.com')

    if not location:
        location = get_location(opts, provider)

    if endpoint is None:
        if not requesturl:
            endpoint = prov_dict.get(
                'endpoint', '{0}.{1}.{2}'.format(product, location,
                                                 service_url))

            requesturl = 'https://{0}/'.format(endpoint)
        else:
            endpoint = urlparse(requesturl).netloc
            if endpoint == '':
                endpoint_err = (
                    'Could not find a valid endpoint in the '
                    'requesturl: {0}. Looking for something '
                    'like https://some.aws.endpoint/?args').format(requesturl)
                LOG.error(endpoint_err)
                if return_url is True:
                    return {'error': endpoint_err}, requesturl
                return {'error': endpoint_err}

    LOG.debug('Using AWS endpoint: {0}'.format(endpoint))
    method = 'GET'

    aws_api_version = prov_dict.get(
        'aws_api_version',
        prov_dict.get('{0}_api_version'.format(product),
                      DEFAULT_AWS_API_VERSION))

    if sigver == '4':
        headers, requesturl = sig4(method,
                                   endpoint,
                                   params,
                                   prov_dict,
                                   aws_api_version,
                                   location,
                                   product,
                                   requesturl=requesturl)
        params_with_headers = {}
    else:
        params_with_headers = sig2(method, endpoint, params, prov_dict,
                                   aws_api_version)
        headers = {}

    attempts = 5
    while attempts > 0:
        LOG.debug('AWS Request: {0}'.format(requesturl))
        LOG.trace('AWS Request Parameters: {0}'.format(params_with_headers))
        try:
            result = requests.get(requesturl,
                                  headers=headers,
                                  params=params_with_headers)
            LOG.debug('AWS Response Status Code: {0}'.format(
                result.status_code))
            LOG.trace('AWS Response Text: {0}'.format(result.text))
            result.raise_for_status()
            break
        except requests.exceptions.HTTPError as exc:
            root = ET.fromstring(exc.response.content)
            data = xml.to_dict(root)

            # check to see if we should retry the query
            err_code = data.get('Errors', {}).get('Error', {}).get('Code', '')
            if attempts > 0 and err_code and err_code in AWS_RETRY_CODES:
                attempts -= 1
                LOG.error('AWS Response Status Code and Error: [{0} {1}] {2}; '
                          'Attempts remaining: {3}'.format(
                              exc.response.status_code, exc, data, attempts))
                # Wait a bit before continuing to prevent throttling
                time.sleep(2)
                continue

            LOG.error(
                'AWS Response Status Code and Error: [{0} {1}] {2}'.format(
                    exc.response.status_code, exc, data))
            if return_url is True:
                return {'error': data}, requesturl
            return {'error': data}
    else:
        LOG.error('AWS Response Status Code and Error: [{0} {1}] {2}'.format(
            exc.response.status_code, exc, data))
        if return_url is True:
            return {'error': data}, requesturl
        return {'error': data}

    response = result.text

    root = ET.fromstring(response)
    items = root[1]
    if return_root is True:
        items = root

    if setname:
        if sys.version_info < (2, 7):
            children_len = len(root.getchildren())
        else:
            children_len = len(root)

        for item in range(0, children_len):
            comps = root[item].tag.split('}')
            if comps[1] == setname:
                items = root[item]

    ret = []
    for item in items:
        ret.append(xml.to_dict(item))

    if return_url is True:
        return ret, requesturl

    return ret
Example #19
def service_exists(
    name,
    servicename=None,
    vsys=1,
    protocol=None,
    port=None,
    description=None,
    commit=False,
):
    """
    Ensures that a service object exists in the configured state. If it does not exist or is not configured with the
    specified attributes, it will be adjusted to match the specified values.

    name: The name of the module function to execute.

    servicename(str): The name of the service object. The name is case-sensitive and can have up to 31 characters,
    which can be letters, numbers, spaces, hyphens, and underscores. The name must be unique on a firewall and, on
    Panorama, unique within its device group and any ancestor or descendant device groups.

    vsys(str): The string representation of the VSYS ID. Defaults to VSYS 1.

    protocol(str): The protocol that is used by the service object. The only valid options are tcp and udp.

    port(str): The port number that is used by the service object. This can be specified as a single integer or a
    valid range of ports.

    description(str): A description for the policy (up to 255 characters).

    commit(bool): If true the firewall will commit the changes, if false do not commit changes.

    SLS Example:

    .. code-block:: yaml

        panos/service/tcp-80:
            panos.service_exists:
              - servicename: tcp-80
              - vsys: 1
              - protocol: tcp
              - port: 80
              - description: Hypertext Transfer Protocol
              - commit: False

        panos/service/udp-500-550:
            panos.service_exists:
              - servicename: udp-500-550
              - vsys: 3
              - protocol: udp
              - port: 500-550
              - commit: False

    """
    ret = _default_ret(name)

    if not servicename:
        ret.update({"comment": "The service name field must be provided."})
        return ret

    # Check if service object currently exists
    service = __salt__["panos.get_service"](servicename, vsys)["result"]

    if service and "entry" in service:
        service = service["entry"]
    else:
        service = {}

    # Verify the arguments
    if not protocol or protocol not in ["tcp", "udp"]:
        ret.update({
            "comment":
            "The protocol must be provided and must be tcp or udp."
        })
        return ret
    if not port:
        ret.update({"comment": "The port field must be provided."})
        return ret

    element = "<protocol><{0}><port>{1}</port></{0}></protocol>".format(
        protocol, port)

    if description:
        element += "<description>{0}</description>".format(description)

    full_element = "<entry name='{0}'>{1}</entry>".format(servicename, element)

    new_service = xml.to_dict(ET.fromstring(full_element), True)

    if service == new_service:
        ret.update({
            "comment": "Service object already exists. No changes required.",
            "result": True,
        })
        return ret
    else:
        xpath = (
            "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{0}']/service/"
            "entry[@name='{1}']".format(vsys, servicename))

        result, msg = _edit_config(xpath, full_element)

        if not result:
            ret.update({"comment": msg})
            return ret

    if commit is True:
        ret.update({
            "changes": {
                "before": service,
                "after": new_service
            },
            "commit": __salt__["panos.commit"](),
            "comment": "Service object successfully configured.",
            "result": True,
        })
    else:
        ret.update({
            "changes": {
                "before": service,
                "after": new_service
            },
            "comment": "Service object successfully configured.",
            "result": True,
        })

    return ret
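
For reference, the full_element built above for the first SLS example (tcp-80) expands to the string below; it is split across lines here purely for readability.

full_element = (
    "<entry name='tcp-80'>"
    "<protocol><tcp><port>80</port></tcp></protocol>"
    "<description>Hypertext Transfer Protocol</description>"
    "</entry>"
)
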
Example #20
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          decode=True,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=JARFILE,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=SESSIONJARFILE,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          opts=None,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    requests_log = logging.getLogger('requests')
    requests_log.setLevel(logging.WARNING)

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config('/etc/salt/master')
        elif node == 'minion':
            opts = salt.config.minion_config('/etc/salt/minion')
        else:
            opts = {}

    if data_file is not None:
        data = _render(
            data_file, data_render, data_renderer, template_dict, opts
        )
    log.trace('POST Data: {0}'.format(pprint.pformat(data)))

    if header_file is not None:
        header_tpl = _render(
            header_file, header_render, header_renderer, template_dict, opts
        )
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if persist_session is True:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.fopen(session_cookie_jar, 'r') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if username and password:
        auth = (username, password)
    else:
        auth = None

    sess = requests.Session()
    sess.auth = auth
    sess.headers.update(header_dict)
    log.trace('Request Headers: {0}'.format(sess.headers))

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess.cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(cookie_jar)
        else:
            sess.cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess.cookies.save()
        else:
            sess.cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    result = sess.request(
        method, url, params=params, data=data
    )
    log.debug('Response Status Code: {0}'.format(result.status_code))
    log.trace('Response Headers: {0}'.format(result.headers))
    log.trace('Response Text: {0}'.format(result.text))
    log.trace('Response Cookies: {0}'.format(result.cookies.get_dict()))

    if cookies is not None:
        sess.cookies.save()

    if persist_session is True:
        # TODO: See persist_session above
        if 'set-cookie' in result.headers:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                session_cookies = result.headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result.status_code

    if headers is True:
        ret['headers'] = result.headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result.headers.get(
                'content-type', 'application/json'
            )
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = (
                'Invalid decode_type specified. '
                'Valid decode types are: {0}'.format(
                    pprint.pformat(valid_decodes)
                )
            )
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = json.loads(result.text)
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result.text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        else:
            text = True

    if text is True:
        ret['text'] = result.text

    return ret
Example #21
def query(key,
          keyid,
          method='GET',
          params=None,
          headers=None,
          requesturl=None,
          return_url=False,
          bucket=None,
          service_url=None,
          path='',
          return_bin=False,
          action=None,
          local_file=None,
          verify_ssl=True,
          full_headers=False,
          kms_keyid=None,
          location=None,
          role_arn=None,
          chunk_size=16384,
          path_style=False,
          https_enable=True):
    '''
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
    metadata service will be made.

    A service_url may also be specified in the configuration:

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    Path style can be enabled:

        s3.path_style: True

    This can be useful if you need to use salt with a proxy for an s3 compatible storage

    You can use either https protocol or http protocol:

        s3.https_enable: True

    SSL verification may also be turned off in the configuration:

        s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.

    A region may be specified:

        s3.location: eu-central-1

    If region is not specified, an attempt to fetch the region from EC2 IAM
    metadata service will be made. Failing that, default is us-east-1
    '''
    if not HAS_REQUESTS:
        log.error('There was an error: requests is required for s3 access')

    if not headers:
        headers = {}

    if not params:
        params = {}

    if not service_url:
        service_url = 's3.amazonaws.com'

    if not bucket or path_style:
        endpoint = service_url
    else:
        endpoint = '{0}.{1}'.format(bucket, service_url)

    if path_style and bucket:
        path = '{0}/{1}'.format(bucket, path)

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key:
        key = salt.utils.aws.IROLE_CODE

    if not keyid:
        keyid = salt.utils.aws.IROLE_CODE

    if kms_keyid is not None and method in ('PUT', 'POST'):
        headers['x-amz-server-side-encryption'] = 'aws:kms'
        headers['x-amz-server-side-encryption-aws-kms-key-id'] = kms_keyid

    if not location:
        location = salt.utils.aws.get_location()

    data = ''
    payload_hash = None
    if method == 'PUT':
        if local_file:
            payload_hash = salt.utils.hashutils.get_hash(local_file,
                                                         form='sha256')

    if path is None:
        path = ''

    if not requesturl:
        requesturl = (('https' if https_enable else 'http') +
                      '://{0}/{1}').format(endpoint, path)
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri='/{0}'.format(path),
            prov_dict={
                'id': keyid,
                'key': key
            },
            role_arn=role_arn,
            location=location,
            product='s3',
            requesturl=requesturl,
            headers=headers,
            payload_hash=payload_hash,
        )

    log.debug('S3 Request: %s', requesturl)
    log.debug('S3 Headers:')
    log.debug('    Authorization: %s', headers['Authorization'])

    if not data:
        data = None

    try:
        if method == 'PUT':
            if local_file:
                data = salt.utils.files.fopen(local_file, 'r')  # pylint: disable=resource-leakage
            result = requests.request(method,
                                      requesturl,
                                      headers=headers,
                                      data=data,
                                      verify=verify_ssl,
                                      stream=True)
        elif method == 'GET' and local_file and not return_bin:
            result = requests.request(method,
                                      requesturl,
                                      headers=headers,
                                      data=data,
                                      verify=verify_ssl,
                                      stream=True)
        else:
            result = requests.request(method,
                                      requesturl,
                                      headers=headers,
                                      data=data,
                                      verify=verify_ssl)
    finally:
        if data is not None:
            data.close()

    err_code = None
    err_msg = None
    if result.status_code >= 400:
        # On error the S3 API response should contain error message
        err_text = result.content or 'Unknown error'
        log.debug('    Response content: %s', err_text)

        # Try to get err info from response xml
        try:
            err_data = xml.to_dict(ET.fromstring(err_text))
            err_code = err_data['Code']
            err_msg = err_data['Message']
        except (KeyError, ET.ParseError) as err:
            log.debug('Failed to parse s3 err response. %s: %s',
                      type(err).__name__, err)
            err_code = 'http-{0}'.format(result.status_code)
            err_msg = err_text

    log.debug('S3 Response Status Code: %s', result.status_code)

    if method == 'PUT':
        if result.status_code != 200:
            if local_file:
                raise CommandExecutionError(
                    'Failed to upload from {0} to {1}. {2}: {3}'.format(
                        local_file, path, err_code, err_msg))
            raise CommandExecutionError(
                'Failed to create bucket {0}. {1}: {2}'.format(
                    bucket, err_code, err_msg))

        if local_file:
            log.debug('Uploaded from %s to %s', local_file, path)
        else:
            log.debug('Created bucket %s', bucket)
        return

    if method == 'DELETE':
        if not six.text_type(result.status_code).startswith('2'):
            if path:
                raise CommandExecutionError(
                    'Failed to delete {0} from bucket {1}. {2}: {3}'.format(
                        path, bucket, err_code, err_msg))
            raise CommandExecutionError(
                'Failed to delete bucket {0}. {1}: {2}'.format(
                    bucket, err_code, err_msg))

        if path:
            log.debug('Deleted %s from bucket %s', path, bucket)
        else:
            log.debug('Deleted bucket %s', bucket)
        return

    # This can be used to save a binary object to disk
    if local_file and method == 'GET':
        if result.status_code < 200 or result.status_code >= 300:
            raise CommandExecutionError('Failed to get file. {0}: {1}'.format(
                err_code, err_msg))

        log.debug('Saving to local file: %s', local_file)
        with salt.utils.files.fopen(local_file, 'wb') as out:
            for chunk in result.iter_content(chunk_size=chunk_size):
                out.write(chunk)
        return 'Saved to local file: {0}'.format(local_file)

    if result.status_code < 200 or result.status_code >= 300:
        raise CommandExecutionError('Failed s3 operation. {0}: {1}'.format(
            err_code, err_msg))

    # This can be used to return a binary object wholesale
    if return_bin:
        return result.content

    if result.content:
        items = ET.fromstring(result.content)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {'headers': []}
        if full_headers:
            ret['headers'] = dict(result.headers)
        else:
            for header in result.headers:
                ret['headers'].append(header.strip())

    return ret
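
A distilled sketch of the endpoint and path handling above, showing how the request URL differs between virtual-hosted and path-style addressing; the signing step is omitted and the bucket/key values in the comments are examples.

def build_request_url(bucket, path, service_url='s3.amazonaws.com',
                      path_style=False, https_enable=True):
    # Mirrors the endpoint/path logic near the top of query() above.
    if not bucket or path_style:
        endpoint = service_url
    else:
        endpoint = '{0}.{1}'.format(bucket, service_url)
    if path_style and bucket:
        path = '{0}/{1}'.format(bucket, path)
    scheme = 'https' if https_enable else 'http'
    return '{0}://{1}/{2}'.format(scheme, endpoint, path)

# build_request_url('my-bucket', 'some/key.txt')
#   -> 'https://my-bucket.s3.amazonaws.com/some/key.txt'
# build_request_url('my-bucket', 'some/key.txt', path_style=True)
#   -> 'https://s3.amazonaws.com/my-bucket/some/key.txt'
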
Example #22
def address_exists(
    name,
    addressname=None,
    vsys=1,
    ipnetmask=None,
    iprange=None,
    fqdn=None,
    description=None,
    commit=False,
):
    """
    Ensures that an address object exists in the configured state. If it does not exist or is not configured with the
    specified attributes, it will be adjusted to match the specified values.

    This module will only process a single address type (ip-netmask, ip-range, or fqdn). It will process the specified
    value in the following order: ip-netmask, ip-range, fqdn. For proper execution, only specify a single address
    type.

    name: The name of the module function to execute.

    addressname(str): The name of the address object.  The name is case-sensitive and can have up to 31 characters,
    which can be letters, numbers, spaces, hyphens, and underscores. The name must be unique on a firewall and, on
    Panorama, unique within its device group and any ancestor or descendant device groups.

    vsys(str): The string representation of the VSYS ID. Defaults to VSYS 1.

    ipnetmask(str): The IPv4 or IPv6 address or IP address range using the format ip_address/mask or ip_address where
    the mask is the number of significant binary digits used for the network portion of the address. Ideally, for IPv6,
    you specify only the network portion, not the host portion.

    iprange(str): A range of addresses using the format ip_address-ip_address where both addresses can be IPv4 or both
    can be IPv6.

    fqdn(str): A fully qualified domain name format. The FQDN initially resolves at commit time. Entries are
    subsequently refreshed when the firewall performs a check every 30 minutes; all changes in the IP address for the
    entries are picked up at the refresh cycle.

    description(str): A description for the policy (up to 255 characters).

    commit(bool): If true the firewall will commit the changes, if false do not commit changes.

    SLS Example:

    .. code-block:: yaml

        panos/address/h-10.10.10.10:
            panos.address_exists:
              - addressname: h-10.10.10.10
              - vsys: 1
              - ipnetmask: 10.10.10.10
              - commit: False

        panos/address/10.0.0.1-10.0.0.50:
            panos.address_exists:
              - addressname: r-10.0.0.1-10.0.0.50
              - vsys: 1
              - iprange: 10.0.0.1-10.0.0.50
              - commit: False

        panos/address/foo.bar.com:
            panos.address_exists:
              - addressname: foo.bar.com
              - vsys: 1
              - fqdn: foo.bar.com
              - description: My fqdn object
              - commit: False

    """
    ret = _default_ret(name)

    if not addressname:
        ret.update({"comment": "The address name field must be provided."})
        return ret

    # Check if address object currently exists
    address = __salt__["panos.get_address"](addressname, vsys)["result"]

    if address and "entry" in address:
        address = address["entry"]
    else:
        address = {}

    element = ""

    # Verify the arguments
    if ipnetmask:
        element = "<ip-netmask>{0}</ip-netmask>".format(ipnetmask)
    elif iprange:
        element = "<ip-range>{0}</ip-range>".format(iprange)
    elif fqdn:
        element = "<fqdn>{0}</fqdn>".format(fqdn)
    else:
        ret.update({"comment": "A valid address type must be specified."})
        return ret

    if description:
        element += "<description>{0}</description>".format(description)

    full_element = "<entry name='{0}'>{1}</entry>".format(addressname, element)

    new_address = xml.to_dict(ET.fromstring(full_element), True)

    if address == new_address:
        ret.update({
            "comment": "Address object already exists. No changes required.",
            "result": True,
        })
        return ret
    else:
        xpath = (
            "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{0}']/address/"
            "entry[@name='{1}']".format(vsys, addressname))

        result, msg = _edit_config(xpath, full_element)

        if not result:
            ret.update({"comment": msg})
            return ret

    if commit is True:
        ret.update({
            "changes": {
                "before": address,
                "after": new_address
            },
            "commit": __salt__["panos.commit"](),
            "comment": "Address object successfully configured.",
            "result": True,
        })
    else:
        ret.update({
            "changes": {
                "before": address,
                "after": new_address
            },
            "comment": "Service object successfully configured.",
            "result": True,
        })

    return ret
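
Note on the idempotency check used in the state above: the candidate XML entry is parsed and converted to a
dictionary so it can be compared with the dictionary returned by panos.get_address. The following is a minimal,
standalone sketch of that comparison; it assumes salt.utils.xmlutil provides to_dict as in the examples above, and
the sample values are hypothetical.

import xml.etree.ElementTree as ET
import salt.utils.xmlutil as xml

# Candidate configuration rendered the same way as in address_exists()
full_element = "<entry name='h-10.10.10.10'><ip-netmask>10.10.10.10</ip-netmask></entry>"
new_address = xml.to_dict(ET.fromstring(full_element), True)

# 'current' would normally come from __salt__["panos.get_address"](...)["result"]["entry"]
current = {}
if current == new_address:
    print("Address object already exists. No changes required.")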
Example #23
0
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=None,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=None,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          port=80,
          opts=None,
          backend='tornado',
          requests_lib=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          handle=False,
          agent=USERAGENT,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config(
                os.path.join(syspaths.CONFIG_DIR, 'master'))
        elif node == 'minion':
            opts = salt.config.minion_config(
                os.path.join(syspaths.CONFIG_DIR, 'minion'))
        else:
            opts = {}

    if requests_lib is None:
        requests_lib = opts.get('requests_lib', False)

    if requests_lib is True:
        log.warn('Please set "backend" to "requests" instead of setting '
                 '"requests_lib" to "True"')

        if HAS_REQUESTS is False:
            ret['error'] = ('http.query has been set to use requests, but the '
                            'requests library does not seem to be installed')
            log.error(ret['error'])
            return ret

        backend = 'requests'

    else:
        requests_log = logging.getLogger('requests')
        requests_log.setLevel(logging.WARNING)

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get('verify_ssl', True)

    if cert is None:
        cert = opts.get('cert', None)

    if data_file is not None:
        data = _render(data_file, data_render, data_renderer, template_dict,
                       opts)

    log.debug('Using {0} Method'.format(method))
    if method == 'POST':
        log.trace('POST Data: {0}'.format(pprint.pformat(data)))

    if header_file is not None:
        header_tpl = _render(header_file, header_render, header_renderer,
                             template_dict, opts)
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if cookie_jar is None:
        cookie_jar = os.path.join(opts.get('cachedir', syspaths.CACHE_DIR),
                                  'cookies.txt')
    if session_cookie_jar is None:
        session_cookie_jar = os.path.join(
            opts.get('cachedir', syspaths.CACHE_DIR), 'cookies.session.p')

    if persist_session is True and HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.fopen(session_cookie_jar, 'r') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if username and password:
        auth = (username, password)
    else:
        auth = None

    if agent == USERAGENT:
        agent = '{0} http.query()'.format(agent)
    header_dict['User-agent'] = agent

    if backend == 'requests':
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace('Request Headers: {0}'.format(sess.headers))
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    elif backend == 'urllib2':
        sess_cookies = None
    else:
        # Tornado
        sess_cookies = None

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(
                cookie_jar)
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(
                cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        sess_cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    if backend == 'requests':
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == '0':
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs['prefetch'] = False
            else:
                req_kwargs['stream'] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['cert'] = cert
            else:
                log.error(
                    'The client-side certificate path that was passed is '
                    'not valid: {0}'.format(cert))

        result = sess.request(method,
                              url,
                              params=params,
                              data=data,
                              **req_kwargs)
        result.raise_for_status()
        if stream is True or handle is True:
            return {'handle': result}

        log.debug(result.url)
        log.trace(data)

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.text
        result_cookies = result.cookies
    elif backend == 'urllib2':
        request = urllib_request.Request(url, data)
        handlers = [
            urllib_request.HTTPHandler,
            urllib_request.HTTPCookieProcessor(sess_cookies)
        ]

        if url.startswith('https') or port == 443:
            hostname = request.get_host()
            handlers[0] = urllib_request.HTTPSHandler(1)
            if not HAS_MATCHHOSTNAME:
                log.warn(
                    ('match_hostname() not available, SSL hostname checking '
                     'not available. THIS CONNECTION MAY NOT BE SECURE!'))
            elif verify_ssl is False:
                log.warn(('SSL certificate verification has been explicitly '
                          'disabled. THIS CONNECTION MAY NOT BE SECURE!'))
            else:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, 443))
                sockwrap = ssl.wrap_socket(sock,
                                           ca_certs=ca_bundle,
                                           cert_reqs=ssl.CERT_REQUIRED)
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret['error'] = ('The certificate was invalid. '
                                    'Error returned was: {0}'.format(
                                        pprint.pformat(exc)))
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, six.string_types):
                        if os.path.exists(cert):
                            cert_chain = (cert,)
                    elif isinstance(cert, tuple):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error('The client-side certificate path that was '
                                  'passed is not valid: {0}'.format(cert))
                        return
                    if hasattr(ssl, 'SSLContext'):
                        # Python >= 2.7.9
                        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                        context.load_cert_chain(*cert_chain)
                        handlers.append(
                            urllib_request.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            'host': request.get_host(),
                            'port': port,
                            'cert_file': cert_chain[0]
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs['key_file'] = cert_chain[1]
                        handlers[
                            0] = salt.ext.six.moves.http_client.HTTPSConnection(
                                **cert_kwargs)

        opener = urllib_request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        try:
            result = opener.open(request)
        except URLError as exc:
            return {'Error': str(exc)}
        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.code
        result_headers = result.headers.headers
        result_text = result.read()
    else:
        # Tornado
        req_kwargs = {}

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['client_cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['client_cert'] = cert[0]
                    req_kwargs['client_key'] = cert[1]
            else:
                log.error(
                    'The client-side certificate path that was passed is '
                    'not valid: {0}'.format(cert))

        try:
            result = HTTPClient().fetch(tornado.httputil.url_concat(
                url, params),
                                        method=method,
                                        headers=header_dict,
                                        auth_username=username,
                                        auth_password=password,
                                        body=data,
                                        validate_cert=verify_ssl,
                                        **req_kwargs)
        except tornado.httpclient.HTTPError as exc:
            ret['status'] = exc.code
            ret['error'] = str(exc)
            return ret

        if stream is True or handle is True:
            return {'handle': result}

        result_status_code = result.code
        result_headers = result.headers
        result_text = result.body
        if 'Set-Cookie' in result_headers.keys() and cookies is not None:
            result_cookies = parse_cookie_header(result_headers['Set-Cookie'])
            for item in result_cookies:
                sess_cookies.set_cookie(item)
        else:
            result_cookies = None

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(':')
            result_headers_dict[comps[0].strip()] = ':'.join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug('Response Status Code: {0}'.format(result_status_code))
    log.trace('Response Headers: {0}'.format(result_headers))
    log.trace('Response Cookies: {0}'.format(sess_cookies))
    try:
        log.trace('Response Text: {0}'.format(result_text))
    except UnicodeEncodeError as exc:
        log.trace(
            ('Cannot Trace Log Response Text: {0}. This may be due to '
             'incompatibilities between requests and logging.').format(exc))

    if text_out is not None and os.path.exists(text_out):
        with salt.utils.fopen(text_out, 'w') as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.fopen(headers_out, 'w') as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and HAS_MSGPACK:
        # TODO: See persist_session above
        if 'set-cookie' in result_headers:
            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
                session_cookies = result_headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result_status_code

    if headers is True:
        ret['headers'] = result_headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result_headers.get('content-type',
                                              'application/json')
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = ('Invalid decode_type specified. '
                            'Valid decode types are: {0}'.format(
                                pprint.pformat(valid_decodes)))
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = json.loads(result_text)
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        else:
            text = True

        if decode_out and os.path.exists(decode_out):
            with salt.utils.fopen(decode_out, 'w') as dof:
                dof.write(result_text)

    if text is True:
        ret['text'] = result_text

    return ret
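
For context, a hypothetical call that exercises the decode path of the query() shown above; the module path
salt.utils.http and the URL are assumptions for illustration, not part of the original example.

import salt.utils.http

result = salt.utils.http.query(
    'https://api.example.com/items.xml',  # hypothetical endpoint
    method='GET',
    decode=True,          # parse the body based on the Content-Type header
    decode_type='auto',   # resolves to 'xml' for an XML response
    status=True,
)
# result may then contain 'status' and 'dict' (a list of child elements
# converted with xml.to_dict()) when the response decodes as XML.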
Example #24
0
def address_group_exists(name,
                         groupname=None,
                         vsys=1,
                         members=None,
                         description=None,
                         commit=False):
    """
    Ensures that an address group object exists in the configured state. If it does not exist or is not configured with
    the specified attributes, it will be adjusted to match the specified values.

    This module will enforce group membership. If a group exists and contains members this state does not include,
    those members will be removed and replaced with the specified members in the state.

    name: The name of the module function to execute.

    groupname(str): The name of the address group object. The name is case-sensitive and can have up to 31 characters,
    which can be letters, numbers, spaces, hyphens, and underscores. The name must be unique on a firewall and, on
    Panorama, unique within its device group and any ancestor or descendant device groups.

    vsys(str): The string representation of the VSYS ID. Defaults to VSYS 1.

    members(str, list): The members of the address group. These must be valid address objects or address groups on the
    system that already exist prior to the execution of this state.

    description(str): A description for the address group object (up to 255 characters).

    commit(bool): If True, the firewall will commit the changes; if False, changes will not be committed.

    SLS Example:

    .. code-block:: yaml

        panos/address-group/my-group:
            panos.address_group_exists:
              - groupname: my-group
              - vsys: 1
              - members:
                - my-address-object
                - my-other-address-group
              - description: A group that needs to exist
              - commit: False

    """
    ret = _default_ret(name)

    if not groupname:
        ret.update({"comment": "The group name field must be provided."})
        return ret

    # Check if address group object currently exists
    group = __salt__["panos.get_address_group"](groupname, vsys)["result"]

    if group and "entry" in group:
        group = group["entry"]
    else:
        group = {}

    # Verify the arguments
    if members:
        element = "<static>{0}</static>".format(_build_members(members, True))
    else:
        ret.update({"comment": "The group members must be provided."})
        return ret

    if description:
        element += "<description>{0}</description>".format(description)

    full_element = "<entry name='{0}'>{1}</entry>".format(groupname, element)

    new_group = xml.to_dict(ET.fromstring(full_element), True)

    if group == new_group:
        ret.update({
            "comment":
            "Address group object already exists. No changes required.",
            "result": True,
        })
        return ret
    else:
        xpath = (
            "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{0}']/address-group/"
            "entry[@name='{1}']".format(vsys, groupname))

        result, msg = _edit_config(xpath, full_element)

        if not result:
            ret.update({"comment": msg})
            return ret

    if commit is True:
        ret.update({
            "changes": {
                "before": group,
                "after": new_group
            },
            "commit": __salt__["panos.commit"](),
            "comment": "Address group object successfully configured.",
            "result": True,
        })
    else:
        ret.update({
            "changes": {
                "before": group,
                "after": new_group
            },
            "comment": "Address group object successfully configured.",
            "result": True,
        })

    return ret
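
The group membership above is rendered by the _build_members helper, which is not shown in this example. The
following is a hypothetical stand-in that only illustrates the member XML it would produce; the real helper in the
Salt code base also handles the 'any' keyword and other edge cases.

def _build_members(members, anycheck=False):
    # Hypothetical stand-in: render one <member> element per entry.
    if isinstance(members, str):
        members = [members]
    return "".join("<member>{0}</member>".format(m) for m in members)

element = "<static>{0}</static>".format(
    _build_members(["my-address-object", "my-other-address-group"]))
# element == "<static><member>my-address-object</member><member>my-other-address-group</member></static>"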
Example #25
0
def test_xml_case_f_full(self):
    xmldata = ET.fromstring(self.cases["f"]["xml"])
    defaultdict = xml.to_dict(xmldata, True)
    self.assertEqual(defaultdict, self.cases["f"]["full"])
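
A standalone illustration of the call pattern in the test above; the exact dictionary shape produced by to_dict()
depends on the implementation, so only the calls are shown and the sample XML is hypothetical.

import xml.etree.ElementTree as ET
import salt.utils.xmlutil as xml

node = ET.fromstring("<root><a attr='1'>text</a></root>")
full = xml.to_dict(node, True)    # "full" mode, including attributes, as in the test case above
plain = xml.to_dict(node, False)  # values only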
Example #26
0
def edit_config(name, xpath=None, value=None, commit=False):
    """
    Edits a Palo Alto XPATH to a specific value. This will always overwrite the existing value, even if it is not
    changed.

    You can replace an existing object hierarchy at a specified location in the configuration with a new value. Use
    the xpath parameter to specify the location of the object, including the node to be replaced.

    This is the recommended state to enforce configurations on an XPATH.

    name: The name of the module function to execute.

    xpath(str): The XPATH of the configuration API tree to control.

    value(str): The XML value to edit. This must be a child of the XPATH.

    commit(bool): If True, the firewall will commit the changes; if False, changes will not be committed.

    SLS Example:

    .. code-block:: yaml

        panos/addressgroup:
            panos.edit_config:
              - xpath: /config/devices/entry/vsys/entry[@name='vsys1']/address-group/entry[@name='test']
              - value: <static><entry name='test'><member>abc</member><member>xyz</member></entry></static>
              - commit: True

    """
    ret = _default_ret(name)

    # Verify if the current XPATH is equal to the specified value.
    # If we are equal, no changes required.
    xpath_split = xpath.split("/")

    # Retrieve the head of the xpath for validation.
    if xpath_split:
        head = xpath_split[-1]
        if "[" in head:
            head = head.split("[")[0]

    current_element = __salt__["panos.get_xpath"](xpath)["result"]

    if head and current_element and head in current_element:
        current_element = current_element[head]
    else:
        current_element = {}

    new_element = xml.to_dict(ET.fromstring(value), True)

    if current_element == new_element:
        ret.update({
            "comment": "XPATH is already equal to the specified value.",
            "result": True,
        })
        return ret

    result, msg = _edit_config(xpath, value)

    ret.update({"comment": msg, "result": result})

    if not result:
        return ret

    if commit is True:
        ret.update({
            "changes": {
                "before": current_element,
                "after": new_element
            },
            "commit": __salt__["panos.commit"](),
            "result": True,
        })
    else:
        ret.update({
            "changes": {
                "before": current_element,
                "after": new_element
            },
            "result": True,
        })

    return ret
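
The comparison in edit_config() keys off the last node of the XPATH; a small sketch of that parsing step, using the
xpath from the SLS example above.

xpath = ("/config/devices/entry/vsys/entry[@name='vsys1']"
         "/address-group/entry[@name='test']")

head = xpath.split("/")[-1]    # "entry[@name='test']"
if "[" in head:
    head = head.split("[")[0]  # "entry"
# 'head' is then used to pull the matching child out of the current
# configuration before it is compared with the new value.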
Example #27
0
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=None,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=None,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          port=80,
          opts=None,
          backend='tornado',
          requests_lib=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          streaming_callback=None,
          handle=False,
          agent=USERAGENT,
          hide_fields=None,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config(
                os.path.join(syspaths.CONFIG_DIR, 'master')
            )
        elif node == 'minion':
            opts = salt.config.minion_config(
                os.path.join(syspaths.CONFIG_DIR, 'minion')
            )
        else:
            opts = {}

    if requests_lib is None:
        requests_lib = opts.get('requests_lib', False)

    if requests_lib is True:
        log.warn('Please set "backend" to "requests" instead of setting '
                 '"requests_lib" to "True"')

        if HAS_REQUESTS is False:
            ret['error'] = ('http.query has been set to use requests, but the '
                            'requests library does not seem to be installed')
            log.error(ret['error'])
            return ret

        backend = 'requests'

    else:
        requests_log = logging.getLogger('requests')
        requests_log.setLevel(logging.WARNING)

    # Some libraries don't support separation of url and GET parameters
    # Don't need a try/except block, since Salt depends on tornado
    url_full = tornado.httputil.url_concat(url, params)

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get('verify_ssl', True)

    if cert is None:
        cert = opts.get('cert', None)

    if data_file is not None:
        data = _render(
            data_file, data_render, data_renderer, template_dict, opts
        )

    # Make sure no secret fields show up in logs
    log_url = sanitize_url(url_full, hide_fields)

    log.debug('Requesting URL {0} using {1} method'.format(log_url, method))
    if method == 'POST':
        # Make sure no secret fields show up in logs
        if isinstance(data, dict):
            log_data = data.copy()
            if isinstance(hide_fields, list):
                for item in data:
                    for field in hide_fields:
                        if item == field:
                            log_data[item] = 'XXXXXXXXXX'
            log.trace('Request POST Data: {0}'.format(pprint.pformat(log_data)))
        else:
            log.trace('Request POST Data: {0}'.format(pprint.pformat(data)))

    if header_file is not None:
        header_tpl = _render(
            header_file, header_render, header_renderer, template_dict, opts
        )
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if cookie_jar is None:
        cookie_jar = os.path.join(opts.get('cachedir', syspaths.CACHE_DIR), 'cookies.txt')
    if session_cookie_jar is None:
        session_cookie_jar = os.path.join(opts.get('cachedir', syspaths.CACHE_DIR), 'cookies.session.p')

    if persist_session is True and HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.fopen(session_cookie_jar, 'rb') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.fopen(session_cookie_jar, 'wb') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if not auth:
        if username and password:
            auth = (username, password)

    if agent == USERAGENT:
        agent = '{0} http.query()'.format(agent)
    header_dict['User-agent'] = agent

    if backend == 'requests':
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace('Request Headers: {0}'.format(sess.headers))
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    elif backend == 'urllib2':
        sess_cookies = None
    else:
        # Tornado
        sess_cookies = None

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(cookie_jar)
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        sess_cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    if backend == 'requests':
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == '0':
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs['prefetch'] = False
            else:
                req_kwargs['stream'] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['cert'] = cert
            else:
                log.error('The client-side certificate path that was passed is '
                          'not valid: {0}'.format(cert))

        result = sess.request(
            method, url, params=params, data=data, **req_kwargs
        )
        result.raise_for_status()
        if stream is True or handle is True:
            return {
                'handle': result,
                'body': result.content,
            }

        log.debug('Final URL location of Response: {0}'.format(sanitize_url(result.url, hide_fields)))

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.content
        result_cookies = result.cookies
        ret['body'] = result.content
    elif backend == 'urllib2':
        request = urllib_request.Request(url_full, data)
        handlers = [
            urllib_request.HTTPHandler,
            urllib_request.HTTPCookieProcessor(sess_cookies)
        ]

        if url.startswith('https'):
            hostname = request.get_host()
            handlers[0] = urllib_request.HTTPSHandler(1)
            if not HAS_MATCHHOSTNAME:
                log.warn(('match_hostname() not available, SSL hostname checking '
                         'not available. THIS CONNECTION MAY NOT BE SECURE!'))
            elif verify_ssl is False:
                log.warn(('SSL certificate verification has been explicitly '
                         'disabled. THIS CONNECTION MAY NOT BE SECURE!'))
            else:
                if ':' in hostname:
                    hostname, port = hostname.split(':')
                else:
                    port = 443
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, int(port)))
                sockwrap = ssl.wrap_socket(
                    sock,
                    ca_certs=ca_bundle,
                    cert_reqs=ssl.CERT_REQUIRED
                )
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret['error'] = (
                        'The certificate was invalid. '
                        'Error returned was: {0}'.format(
                            pprint.pformat(exc)
                        )
                    )
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, six.string_types):
                        if os.path.exists(cert):
                            cert_chain = (cert,)
                    elif isinstance(cert, tuple):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error('The client-side certificate path that was '
                                  'passed is not valid: {0}'.format(cert))
                        return
                    if hasattr(ssl, 'SSLContext'):
                        # Python >= 2.7.9
                        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                        context.load_cert_chain(*cert_chain)
                        handlers.append(urllib_request.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            'host': request.get_host(),
                            'port': port,
                            'cert_file': cert_chain[0]
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs['key_file'] = cert_chain[1]
                        handlers[0] = salt.ext.six.moves.http_client.HTTPSConnection(**cert_kwargs)

        opener = urllib_request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        try:
            result = opener.open(request)
        except URLError as exc:
            return {'Error': str(exc)}
        if stream is True or handle is True:
            return {
                'handle': result,
                'body': result.content,
            }

        result_status_code = result.code
        result_headers = result.headers.headers
        result_text = result.read()
        ret['body'] = result_text
    else:
        # Tornado
        req_kwargs = {}

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['client_cert'] = cert
            elif isinstance(cert, tuple):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['client_cert'] = cert[0]
                    req_kwargs['client_key'] = cert[1]
            else:
                log.error('The client-side certificate path that was passed is '
                          'not valid: {0}'.format(cert))

        if isinstance(data, dict):
            data = urllib.urlencode(data)

        if verify_ssl:
            req_kwargs['ca_certs'] = ca_bundle

        max_body = opts.get('http_max_body', salt.config.DEFAULT_MINION_OPTS['http_max_body'])
        timeout = opts.get('http_request_timeout', salt.config.DEFAULT_MINION_OPTS['http_request_timeout'])

        client_argspec = None

        proxy_host = opts.get('proxy_host', None)
        proxy_port = opts.get('proxy_port', None)
        proxy_username = opts.get('proxy_username', None)
        proxy_password = opts.get('proxy_password', None)

        # We want to use curl_http if we have a proxy defined
        if proxy_host and proxy_port:
            if HAS_CURL_HTTPCLIENT is False:
                ret['error'] = ('proxy_host and proxy_port has been set. This requires pycurl, but the '
                                'pycurl library does not seem to be installed')
                log.error(ret['error'])
                return ret

            tornado.httpclient.AsyncHTTPClient.configure('tornado.curl_httpclient.CurlAsyncHTTPClient')
            client_argspec = inspect.getargspec(tornado.curl_httpclient.CurlAsyncHTTPClient.initialize)
        else:
            client_argspec = inspect.getargspec(tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize)

        supports_max_body_size = 'max_body_size' in client_argspec.args

        try:
            if supports_max_body_size:
                result = HTTPClient(max_body_size=max_body).fetch(
                    url_full,
                    method=method,
                    headers=header_dict,
                    auth_username=username,
                    auth_password=password,
                    body=data,
                    validate_cert=verify_ssl,
                    allow_nonstandard_methods=True,
                    streaming_callback=streaming_callback,
                    request_timeout=timeout,
                    proxy_host=proxy_host,
                    proxy_port=proxy_port,
                    proxy_username=proxy_username,
                    proxy_password=proxy_password,
                    **req_kwargs
                )
            else:
                result = HTTPClient().fetch(
                    url_full,
                    method=method,
                    headers=header_dict,
                    auth_username=username,
                    auth_password=password,
                    body=data,
                    validate_cert=verify_ssl,
                    allow_nonstandard_methods=True,
                    streaming_callback=streaming_callback,
                    request_timeout=timeout,
                    proxy_host=proxy_host,
                    proxy_port=proxy_port,
                    proxy_username=proxy_username,
                    proxy_password=proxy_password,
                    **req_kwargs
                )
        except tornado.httpclient.HTTPError as exc:
            ret['status'] = exc.code
            ret['error'] = str(exc)
            return ret

        if stream is True or handle is True:
            return {
                'handle': result,
                'body': result.body,
            }

        result_status_code = result.code
        result_headers = result.headers
        result_text = result.body
        ret['body'] = result.body
        if 'Set-Cookie' in result_headers.keys() and cookies is not None:
            result_cookies = parse_cookie_header(result_headers['Set-Cookie'])
            for item in result_cookies:
                sess_cookies.set_cookie(item)
        else:
            result_cookies = None

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(':')
            result_headers_dict[comps[0].strip()] = ':'.join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug('Response Status Code: {0}'.format(result_status_code))
    log.trace('Response Headers: {0}'.format(result_headers))
    log.trace('Response Cookies: {0}'.format(sess_cookies))
    try:
        log.trace('Response Text: {0}'.format(result_text))
    except UnicodeEncodeError as exc:
        log.trace(('Cannot Trace Log Response Text: {0}. This may be due to '
                  'incompatibilities between requests and logging.').format(exc))

    if text_out is not None and os.path.exists(text_out):
        with salt.utils.fopen(text_out, 'w') as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.fopen(headers_out, 'w') as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and HAS_MSGPACK:
        # TODO: See persist_session above
        if 'set-cookie' in result_headers:
            with salt.utils.fopen(session_cookie_jar, 'wb') as fh_:
                session_cookies = result_headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result_status_code

    if headers is True:
        ret['headers'] = result_headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result_headers.get(
                'content-type', 'application/json'
            )
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            elif 'yaml' in content_type:
                decode_type = 'yaml'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'yaml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = (
                'Invalid decode_type specified. '
                'Valid decode types are: {0}'.format(
                    pprint.pformat(valid_decodes)
                )
            )
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = json.loads(salt.utils.to_str(result_text))
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        elif decode_type == 'yaml':
            ret['dict'] = yaml.safe_load(result_text)
        else:
            text = True

        if decode_out and os.path.exists(decode_out):
            with salt.utils.fopen(decode_out, 'w') as dof:
                dof.write(result_text)

    if text is True:
        ret['text'] = result_text

    return ret
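
A hypothetical call showing the hide_fields masking that this version of query() adds for request logging; the URL
and field values are placeholders, and only the parameter names come from the signature above.

import salt.utils.http

result = salt.utils.http.query(
    'https://api.example.com/login',   # hypothetical endpoint
    method='POST',
    data={'user': 'svc-account', 'password': 'hunter2'},
    hide_fields=['password'],          # logged as XXXXXXXXXX in trace output
    status=True,
)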
Example #28
0
def security_rule_exists(
    name,
    rulename=None,
    vsys="1",
    action=None,
    disabled=None,
    sourcezone=None,
    destinationzone=None,
    source=None,
    destination=None,
    application=None,
    service=None,
    description=None,
    logsetting=None,
    logstart=None,
    logend=None,
    negatesource=None,
    negatedestination=None,
    profilegroup=None,
    datafilter=None,
    fileblock=None,
    spyware=None,
    urlfilter=None,
    virus=None,
    vulnerability=None,
    wildfire=None,
    move=None,
    movetarget=None,
    commit=False,
):
    """
    Ensures that a security rule exists on the device and that all of its options are configured as specified.

    This method will create the rule if it does not exist. If the rule does exist, it will ensure that the
    configurations are set appropriately.

    If the rule does not exist and is created, any option that is not provided will be set to its system default.
    The action, to, from, source, destination, application, and service fields are mandatory and must be provided.

    This will enforce the exact match of the rule. For example, if the rule is currently configured with the log-end
    option, but this option is not specified in the state method, it will be removed and reset to the system default.

    It is strongly recommended to specify all options to ensure proper operation.

    When defining the profile group settings, the device can only support either a profile group or individual settings.
    If both are specified, the profile group will be preferred and the individual settings will be ignored. If neither is
    specified, the value will be set to the system default of none.

    name: The name of the module function to execute.

    rulename(str): The name of the security rule.  The name is case-sensitive and can have up to 31 characters, which
    can be letters, numbers, spaces, hyphens, and underscores. The name must be unique on a firewall and, on Panorama,
    unique within its device group and any ancestor or descendant device groups.

    vsys(str): The string representation of the VSYS ID. Defaults to VSYS 1.

    action(str): The action that the security rule will enforce. Valid options are: allow, deny, drop, reset-client,
    reset-server, reset-both.

    disabled(bool): Controls if the rule is disabled. Set 'True' to disable and 'False' to enable.

    sourcezone(str, list): The source zone(s). The value 'any' will match all zones.

    destinationzone(str, list): The destination zone(s). The value 'any' will match all zones.

    source(str, list): The source address(es). The value 'any' will match all addresses.

    destination(str, list): The destination address(es). The value 'any' will match all addresses.

    application(str, list): The application(s) matched. The value 'any' will match all applications.

    service(str, list): The service(s) matched. The value 'any' will match all services. The value
    'application-default' will match based upon the application defined ports.

    description(str): A description for the policy (up to 255 characters).

    logsetting(str): The name of a valid log forwarding profile.

    logstart(bool): Generates a traffic log entry for the start of a session (disabled by default).

    logend(bool): Generates a traffic log entry for the end of a session (enabled by default).

    negatesource(bool): Match all but the specified source addresses.

    negatedestination(bool): Match all but the specified destination addresses.

    profilegroup(str): A valid profile group name.

    datafilter(str): A valid data filter profile name. Ignored with the profilegroup option set.

    fileblock(str): A valid file blocking profile name. Ignored with the profilegroup option set.

    spyware(str): A valid spyware profile name. Ignored with the profilegroup option set.

    urlfilter(str): A valid URL filtering profile name. Ignored with the profilegroup option set.

    virus(str): A valid virus profile name. Ignored with the profilegroup option set.

    vulnerability(str): A valid vulnerability profile name. Ignored with the profilegroup option set.

    wildfire(str): A valid WildFire analysis profile name. Ignored with the profilegroup option set.

    move(str): An optional argument that ensures the rule is moved to a specific location. Valid options are 'top',
    'bottom', 'before', or 'after'. The 'before' and 'after' options require the use of the 'movetarget' argument
    to define the location of the move request.

    movetarget(str): An optional argument that defines the target of the move operation if the move argument is
    set to 'before' or 'after'.

    commit(bool): If True, the firewall will commit the changes; if False, changes will not be committed.

    SLS Example:

    .. code-block:: yaml

        panos/rulebase/security/rule01:
            panos.security_rule_exists:
              - rulename: rule01
              - vsys: 1
              - action: allow
              - disabled: False
              - sourcezone: untrust
              - destinationzone: trust
              - source:
                - 10.10.10.0/24
                - 1.1.1.1
              - destination:
                - 2.2.2.2-2.2.2.4
              - application:
                - any
              - service:
                - tcp-25
              - description: My test security rule
              - logsetting: logprofile
              - logstart: False
              - logend: True
              - negatesource: False
              - negatedestination: False
              - profilegroup: myprofilegroup
              - move: top
              - commit: False

        panos/rulebase/security/rule01:
            panos.security_rule_exists:
              - rulename: rule01
              - vsys: 1
              - action: allow
              - disabled: False
              - sourcezone: untrust
              - destinationzone: trust
              - source:
                - 10.10.10.0/24
                - 1.1.1.1
              - destination:
                - 2.2.2.2-2.2.2.4
              - application:
                - any
              - service:
                - tcp-25
              - description: My test security rule
              - logsetting: logprofile
              - logstart: False
              - logend: False
              - datafilter: foobar
              - fileblock: foobar
              - spyware: foobar
              - urlfilter: foobar
              - virus: foobar
              - vulnerability: foobar
              - wildfire: foobar
              - move: after
              - movetarget: rule02
              - commit: False
    """
    ret = _default_ret(name)

    if not rulename:
        ret.update({"comment": "The rule name field must be provided."})
        return ret

    # Check if rule currently exists
    rule = __salt__["panos.get_security_rule"](rulename, vsys)["result"]

    if rule and "entry" in rule:
        rule = rule["entry"]
    else:
        rule = {}

    # Build the rule element
    element = ""
    if sourcezone:
        element += "<from>{0}</from>".format(_build_members(sourcezone, True))
    else:
        ret.update({"comment": "The sourcezone field must be provided."})
        return ret

    if destinationzone:
        element += "<to>{0}</to>".format(_build_members(destinationzone, True))
    else:
        ret.update({"comment": "The destinationzone field must be provided."})
        return ret

    if source:
        element += "<source>{0}</source>".format(_build_members(source, True))
    else:
        ret.update({"comment": "The source field must be provided."})
        return ret

    if destination:
        element += "<destination>{0}</destination>".format(
            _build_members(destination, True))
    else:
        ret.update({"comment": "The destination field must be provided."})
        return ret

    if application:
        element += "<application>{0}</application>".format(
            _build_members(application, True))
    else:
        ret.update({"comment": "The application field must be provided."})
        return ret

    if service:
        element += "<service>{0}</service>".format(
            _build_members(service, True))
    else:
        ret.update({"comment": "The service field must be provided."})
        return ret

    if action:
        element += "<action>{0}</action>".format(action)
    else:
        ret.update({"comment": "The action field must be provided."})
        return ret

    if disabled is not None:
        if disabled:
            element += "<disabled>yes</disabled>"
        else:
            element += "<disabled>no</disabled>"

    if description:
        element += "<description>{0}</description>".format(description)

    if logsetting:
        element += "<log-setting>{0}</log-setting>".format(logsetting)

    if logstart is not None:
        if logstart:
            element += "<log-start>yes</log-start>"
        else:
            element += "<log-start>no</log-start>"

    if logend is not None:
        if logend:
            element += "<log-end>yes</log-end>"
        else:
            element += "<log-end>no</log-end>"

    if negatesource is not None:
        if negatesource:
            element += "<negate-source>yes</negate-source>"
        else:
            element += "<negate-source>no</negate-source>"

    if negatedestination is not None:
        if negatedestination:
            element += "<negate-destination>yes</negate-destination>"
        else:
            element += "<negate-destination>no</negate-destination>"

    # Build the profile settings
    profile_string = None
    if profilegroup:
        profile_string = "<group><member>{0}</member></group>".format(
            profilegroup)
    else:
        member_string = ""
        if datafilter:
            member_string += "<data-filtering><member>{0}</member></data-filtering>".format(
                datafilter)
        if fileblock:
            member_string += "<file-blocking><member>{0}</member></file-blocking>".format(
                fileblock)
        if spyware:
            member_string += "<spyware><member>{0}</member></spyware>".format(
                spyware)
        if urlfilter:
            member_string += "<url-filtering><member>{0}</member></url-filtering>".format(
                urlfilter)
        if virus:
            member_string += "<virus><member>{0}</member></virus>".format(
                virus)
        if vulnerability:
            member_string += "<vulnerability><member>{0}</member></vulnerability>".format(
                vulnerability)
        if wildfire:
            member_string += "<wildfire-analysis><member>{0}</member></wildfire-analysis>".format(
                wildfire)
        if member_string != "":
            profile_string = "<profiles>{0}</profiles>".format(member_string)

    if profile_string:
        element += "<profile-setting>{0}</profile-setting>".format(
            profile_string)

    full_element = "<entry name='{0}'>{1}</entry>".format(rulename, element)

    new_rule = xml.to_dict(ET.fromstring(full_element), True)

    config_change = False

    if rule == new_rule:
        ret.update(
            {"comment": "Security rule already exists. No changes required."})
    else:
        config_change = True
        xpath = (
            "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{0}']/rulebase/"
            "security/rules/entry[@name='{1}']".format(vsys, rulename))

        result, msg = _edit_config(xpath, full_element)

        if not result:
            ret.update({"comment": msg})
            return ret

        ret.update({
            "changes": {
                "before": rule,
                "after": new_rule
            },
            "comment": "Security rule verified successfully.",
        })

    if move:
        movepath = (
            "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{0}']/rulebase/"
            "security/rules/entry[@name='{1}']".format(vsys, rulename))
        move_result = False
        move_msg = ""
        if move == "before" and movetarget:
            move_result, move_msg = _move_before(movepath, movetarget)
        elif move == "after":
            move_result, move_msg = _move_after(movepath, movetarget)
        elif move == "top":
            move_result, move_msg = _move_top(movepath)
        elif move == "bottom":
            move_result, move_msg = _move_bottom(movepath)

        if config_change:
            ret.update({
                "changes": {
                    "before": rule,
                    "after": new_rule,
                    "move": move_msg
                }
            })
        else:
            ret.update({"changes": {"move": move_msg}})

        if not move_result:
            ret.update({"comment": move_msg})
            return ret

    if commit is True:
        ret.update({"commit": __salt__["panos.commit"](), "result": True})
    else:
        ret.update({"result": True})

    return ret
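
The boolean rule options above (disabled, log-start, log-end, negate-source, negate-destination) are all rendered as
yes/no elements; a minimal sketch of that mapping, using a hypothetical helper rather than the inline handling above.

def _bool_element(tag, value):
    # Hypothetical helper mirroring the inline yes/no handling above.
    return "<{0}>{1}</{0}>".format(tag, "yes" if value else "no")

element = ""
for tag, value in (("disabled", False), ("log-start", False), ("log-end", True)):
    element += _bool_element(tag, value)
# element == "<disabled>no</disabled><log-start>no</log-start><log-end>yes</log-end>"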
Example #29
0
def query(key,
          keyid,
          method='GET',
          params=None,
          headers=None,
          requesturl=None,
          return_url=False,
          bucket=None,
          service_url=None,
          path='',
          return_bin=False,
          action=None,
          local_file=None,
          verify_ssl=True,
          location=None,
          full_headers=False):
    '''
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
    metadata service will be made.

    A service_url may also be specified in the configuration:

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    SSL verification may also be turned off in the configuration:

        s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.

    A region may be specified:

        s3.location: eu-central-1

    If region is not specified, an attempt to fetch the region from EC2 IAM
    metadata service will be made. Failing that, default is us-east-1
    '''
    if not HAS_REQUESTS:
        log.error('There was an error: requests is required for s3 access')

    if not headers:
        headers = {}

    if not params:
        params = {}

    if not service_url:
        service_url = 's3.amazonaws.com'

    if bucket:
        endpoint = '{0}.{1}'.format(bucket, service_url)
    else:
        endpoint = service_url

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key or not keyid:
        key = salt.utils.aws.IROLE_CODE
        keyid = salt.utils.aws.IROLE_CODE

    data = ''
    if method == 'PUT':
        if local_file:
            with salt.utils.fopen(local_file, 'r') as ifile:
                data = ifile.read()

    if not requesturl:
        requesturl = 'https://{0}/{1}'.format(endpoint, path)
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri='/{0}'.format(path),
            prov_dict={
                'id': keyid,
                'key': key
            },
            location=location,
            product='s3',
            requesturl=requesturl,
        )

    log.debug('S3 Request: {0}'.format(requesturl))
    log.debug('S3 Headers::')
    log.debug('    Authorization: {0}'.format(headers['Authorization']))

    if not data:
        data = None

    try:
        result = requests.request(method,
                                  requesturl,
                                  headers=headers,
                                  data=data,
                                  verify=verify_ssl)
        response = result.content
    except requests.exceptions.HTTPError as exc:
        log.error('Failed to {0} {1}::'.format(method, requesturl))
        log.error('    Exception: {0}'.format(exc))
        if exc.response:
            log.error('    Response content: {0}'.format(exc.response.content))
        return False

    log.debug('S3 Response Status Code: {0}'.format(result.status_code))

    if method == 'PUT':
        if result.status_code == 200:
            if local_file:
                log.debug('Uploaded from {0} to {1}'.format(local_file, path))
            else:
                log.debug('Created bucket {0}'.format(bucket))
        else:
            if local_file:
                log.debug('Failed to upload from {0} to {1}: {2}'.format(
                    local_file,
                    path,
                    result.status_code,
                ))
            else:
                log.debug('Failed to create bucket {0}'.format(bucket))
        return

    if method == 'DELETE':
        if str(result.status_code).startswith('2'):
            if path:
                log.debug('Deleted {0} from bucket {1}'.format(path, bucket))
            else:
                log.debug('Deleted bucket {0}'.format(bucket))
        else:
            if path:
                log.debug('Failed to delete {0} from bucket {1}: {2}'.format(
                    path,
                    bucket,
                    result.status_code,
                ))
            else:
                log.debug('Failed to delete bucket {0}'.format(bucket))
        return

    # This can be used to save a binary object to disk
    if local_file and method == 'GET':
        log.debug('Saving to local file: {0}'.format(local_file))
        with salt.utils.fopen(local_file, 'w') as out:
            out.write(response)
        return 'Saved to local file: {0}'.format(local_file)

    # This can be used to return a binary object wholesale
    if return_bin:
        return response

    if response:
        items = ET.fromstring(response)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {'headers': []}
        if full_headers:
            ret['headers'] = dict(result.headers)
        else:
            for header in result.headers:
                ret['headers'].append(header.strip())

    return ret
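
A minimal usage sketch for the function above; the bucket name and object path are placeholders, the keyid/key values are the example credentials from the docstring, and on EC2 both can be left empty so the IAM instance role is used:

# List the contents of a bucket and get the parsed XML back as a list of dicts.
objects = query(
    key='askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs',
    keyid='GKTADJGHEIQSXMKKRBJ08H',
    bucket='my-bucket',
    path='',
    location='eu-central-1',
)

# Download a single object to disk, falling back to IAM instance metadata
# for credentials because key and keyid are empty.
query(
    key=None,
    keyid=None,
    bucket='my-bucket',
    path='backups/minion.tgz',
    local_file='/tmp/minion.tgz',
)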
Example #30
File: aws.py Project: DaveQB/salt
def query(params=None, setname=None, requesturl=None, location=None,
          return_url=False, return_root=False, opts=None, provider=None,
          endpoint=None, product='ec2', sigver='2'):
    '''
    Perform a query against AWS services using Signature Version 2 Signing
    Process. This is documented at:

    http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html

    Regions and endpoints are documented at:

    http://docs.aws.amazon.com/general/latest/gr/rande.html

    Default ``product`` is ``ec2``. Valid ``product`` names are:

    .. code-block:: yaml

        - autoscaling (Auto Scaling)
        - cloudformation (CloudFormation)
        - ec2 (Elastic Compute Cloud)
        - elasticache (ElastiCache)
        - elasticbeanstalk (Elastic BeanStalk)
        - elasticloadbalancing (Elastic Load Balancing)
        - elasticmapreduce (Elastic MapReduce)
        - iam (Identity and Access Management)
        - importexport (Import/Export)
        - monitoring (CloudWatch)
        - rds (Relational Database Service)
        - simpledb (SimpleDB)
        - sns (Simple Notification Service)
        - sqs (Simple Queue Service)
    '''
    if params is None:
        params = {}

    if opts is None:
        opts = {}

    function = opts.get('function', (None, product))
    providers = opts.get('providers', {})

    if provider is None:
        prov_dict = providers.get(function[1], {}).get(product, {})
        if prov_dict:
            driver = list(list(prov_dict.keys()))[0]
            provider = providers.get(driver, product)
    else:
        prov_dict = providers.get(provider, {}).get(product, {})

    service_url = prov_dict.get('service_url', 'amazonaws.com')

    if not location:
        location = get_location(opts, provider)

    if endpoint is None:
        if not requesturl:
            endpoint = prov_dict.get(
                'endpoint',
                '{0}.{1}.{2}'.format(product, location, service_url)
            )

            requesturl = 'https://{0}/'.format(endpoint)
        else:
            endpoint = urlparse(requesturl).netloc
            if endpoint == '':
                endpoint_err = ('Could not find a valid endpoint in the '
                                'requesturl: {0}. Looking for something '
                                'like https://some.aws.endpoint/?args').format(
                                    requesturl
                                )
                LOG.error(endpoint_err)
                if return_url is True:
                    return {'error': endpoint_err}, requesturl
                return {'error': endpoint_err}

    LOG.debug('Using AWS endpoint: {0}'.format(endpoint))
    method = 'GET'

    aws_api_version = prov_dict.get(
        'aws_api_version', prov_dict.get(
            '{0}_api_version'.format(product),
            DEFAULT_AWS_API_VERSION
        )
    )

    if sigver == '4':
        headers, requesturl = sig4(
            method, endpoint, params, prov_dict, aws_api_version, location, product, requesturl=requesturl
        )
        params_with_headers = {}
    else:
        params_with_headers = sig2(
            method, endpoint, params, prov_dict, aws_api_version
        )
        headers = {}

    attempts = 5
    while attempts > 0:
        LOG.debug('AWS Request: {0}'.format(requesturl))
        LOG.trace('AWS Request Parameters: {0}'.format(params_with_headers))
        try:
            result = requests.get(requesturl, headers=headers, params=params_with_headers)
            LOG.debug(
                'AWS Response Status Code: {0}'.format(
                    result.status_code
                )
            )
            LOG.trace(
                'AWS Response Text: {0}'.format(
                    result.text
                )
            )
            result.raise_for_status()
            break
        except requests.exceptions.HTTPError as exc:
            root = ET.fromstring(exc.response.content)
            data = xml.to_dict(root)

            # check to see if we should retry the query
            err_code = data.get('Errors', {}).get('Error', {}).get('Code', '')
            if attempts > 0 and err_code and err_code in AWS_RETRY_CODES:
                attempts -= 1
                LOG.error(
                    'AWS Response Status Code and Error: [{0} {1}] {2}; '
                    'Attempts remaining: {3}'.format(
                        exc.response.status_code, exc, data, attempts
                    )
                )
                # Wait a bit before continuing to prevent throttling
                time.sleep(2)
                continue

            LOG.error(
                'AWS Response Status Code and Error: [{0} {1}] {2}'.format(
                    exc.response.status_code, exc, data
                )
            )
            if return_url is True:
                return {'error': data}, requesturl
            return {'error': data}
    else:
        LOG.error(
            'AWS Response Status Code and Error: [{0} {1}] {2}'.format(
                exc.response.status_code, exc, data
            )
        )
        if return_url is True:
            return {'error': data}, requesturl
        return {'error': data}

    response = result.text

    root = ET.fromstring(response)
    items = root[1]
    if return_root is True:
        items = root

    if setname:
        if sys.version_info < (2, 7):
            children_len = len(root.getchildren())
        else:
            children_len = len(root)

        for item in range(0, children_len):
            comps = root[item].tag.split('}')
            if comps[1] == setname:
                items = root[item]

    ret = []
    for item in items:
        ret.append(xml.to_dict(item))

    if return_url is True:
        return ret, requesturl

    return ret
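
A minimal usage sketch for the Signature Version 2 helper above; the provider name and region are placeholders, and opts would normally be the loaded Salt configuration (for example __opts__ inside a cloud driver):

# Describe EC2 instances and pull the reservationSet node out of the response.
instances = query(
    params={'Action': 'DescribeInstances'},
    setname='reservationSet',
    location='us-east-1',
    provider='my-ec2-config',   # hypothetical provider name from opts['providers']
    opts=__opts__,
    sigver='2',
)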
Example #31
def query(key, keyid, method='GET', params=None, headers=None,
          requesturl=None, return_url=False, bucket=None, service_url=None,
          path=None, return_bin=False, action=None, local_file=None,
          verify_ssl=True):
    '''
    Perform a query against an S3-like API. This function requires that a
    secret key and the id for that key are passed in. For instance:

        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs

    A service_url may also be specified in the configuration::

        s3.service_url: s3.amazonaws.com

    If a service_url is not specified, the default is s3.amazonaws.com. This
    may appear in various documentation as an "endpoint". A comprehensive list
    for Amazon S3 may be found at::

        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    The service_url will form the basis for the final endpoint that is used to
    query the service.

    SSL verification may also be turned off in the configuration:

    s3.verify_ssl: False

    This is required if using S3 bucket names that contain a period, as
    these will not match Amazon's S3 wildcard certificates. Certificate
    verification is enabled by default.
    '''
    if not headers:
        headers = {}

    if not params:
        params = {}

    if path is None:
        path = ''

    if not service_url:
        service_url = 's3.amazonaws.com'

    if bucket:
        endpoint = '{0}.{1}'.format(bucket, service_url)
    else:
        endpoint = service_url

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    token = None
    if not key or not keyid:
        iam_creds = iam.get_iam_metadata()
        key = iam_creds['secret_key']
        keyid = iam_creds['access_key']
        token = iam_creds['security_token']

    if not requesturl:
        x_amz_date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
        content_type = 'text/plain'
        if method == 'GET':
            if bucket:
                can_resource = '/{0}/{1}'.format(bucket, path)
            else:
                can_resource = '/'
        elif method == 'PUT' or method == 'HEAD' or method == 'DELETE':
            if path:
                can_resource = '/{0}/{1}'.format(bucket, path)
            else:
                can_resource = '/{0}/'.format(bucket)

        if action:
            can_resource += '?{0}'.format(action)

        log.debug('CanonicalizedResource: {0}'.format(can_resource))

        headers['Host'] = endpoint
        headers['Content-type'] = content_type
        headers['Date'] = x_amz_date
        if token:
            headers['x-amz-security-token'] = token

        string_to_sign = '{0}\n'.format(method)

        new_headers = []
        for header in sorted(headers):
            if header.lower().startswith('x-amz'):
                log.debug(header.lower())
                new_headers.append('{0}:{1}'.format(header.lower(),
                                                    headers[header]))
        can_headers = '\n'.join(new_headers)
        log.debug('CanonicalizedAmzHeaders: {0}'.format(can_headers))

        string_to_sign += '\n{0}'.format(content_type)
        string_to_sign += '\n{0}'.format(x_amz_date)
        if can_headers:
            string_to_sign += '\n{0}'.format(can_headers)
        string_to_sign += '\n{0}'.format(can_resource)
        log.debug('String To Sign:: \n{0}'.format(string_to_sign))

        hashed = hmac.new(key, string_to_sign, hashlib.sha1)
        sig = binascii.b2a_base64(hashed.digest())
        headers['Authorization'] = 'AWS {0}:{1}'.format(keyid, sig.strip())

        querystring = urllib.urlencode(params)
        if action:
            if querystring:
                querystring = '{0}&{1}'.format(action, querystring)
            else:
                querystring = action
        requesturl = 'https://{0}/'.format(endpoint)
        if path:
            requesturl += path
        if querystring:
            requesturl += '?{0}'.format(querystring)

    data = None
    if method == 'PUT':
        if local_file:
            with salt.utils.fopen(local_file, 'r') as ifile:
                data = ifile.read()

    log.debug('S3 Request: {0}'.format(requesturl))
    log.debug('S3 Headers::')
    log.debug('    Authorization: {0}'.format(headers['Authorization']))

    try:
        result = requests.request(method, requesturl, headers=headers,
                                  data=data,
                                  verify=verify_ssl)
        response = result.content
    except requests.exceptions.HTTPError as exc:
        log.error('There was an error::')
        if hasattr(exc, 'code') and hasattr(exc, 'msg'):
            log.error('    Code: {0}: {1}'.format(exc.code, exc.msg))
        log.error('    Content: \n{0}'.format(exc.read()))
        return False

    log.debug('S3 Response Status Code: {0}'.format(result.status_code))

    if method == 'PUT':
        if result.status_code == 200:
            if local_file:
                log.debug('Uploaded from {0} to {1}'.format(local_file, path))
            else:
                log.debug('Created bucket {0}'.format(bucket))
        else:
            if local_file:
                log.debug('Failed to upload from {0} to {1}: {2}'.format(
                                                    local_file,
                                                    path,
                                                    result.status_code,
                                                    ))
            else:
                log.debug('Failed to create bucket {0}'.format(bucket))
        return

    if method == 'DELETE':
        if str(result.status_code).startswith('2'):
            if path:
                log.debug('Deleted {0} from bucket {1}'.format(path, bucket))
            else:
                log.debug('Deleted bucket {0}'.format(bucket))
        else:
            if path:
                log.debug('Failed to delete {0} from bucket {1}: {2}'.format(
                                                    path,
                                                    bucket,
                                                    result.status_code,
                                                    ))
            else:
                log.debug('Failed to delete bucket {0}'.format(bucket))
        return

    # This can be used to save a binary object to disk
    if local_file and method == 'GET':
        log.debug('Saving to local file: {0}'.format(local_file))
        with salt.utils.fopen(local_file, 'w') as out:
            out.write(response)
        return 'Saved to local file: {0}'.format(local_file)

    # This can be used to return a binary object wholesale
    if return_bin:
        return response

    if response:
        items = ET.fromstring(response)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {'headers': []}
        for header in result.headers:
            ret['headers'].append(header.strip())

    return ret
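
The header-signing logic above can be reduced to a small standalone sketch of the legacy (pre-SigV4) S3 Authorization header for a plain GET without extra x-amz headers; the bucket, path, and key material are placeholders, and the .encode() calls are only there so the sketch also runs on Python 3, whereas the example above targets Python 2:

import binascii
import datetime
import hashlib
import hmac

def sign_s3_get(keyid, key, bucket, path):
    # Canonicalized resource and date, mirroring the string_to_sign built above.
    date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    content_type = 'text/plain'
    can_resource = '/{0}/{1}'.format(bucket, path)
    string_to_sign = 'GET\n\n{0}\n{1}\n{2}'.format(content_type, date, can_resource)
    # HMAC-SHA1 over the string to sign, base64-encoded, as in the code above.
    hashed = hmac.new(key.encode(), string_to_sign.encode(), hashlib.sha1)
    sig = binascii.b2a_base64(hashed.digest()).strip().decode()
    return {
        'Host': '{0}.s3.amazonaws.com'.format(bucket),
        'Content-type': content_type,
        'Date': date,
        'Authorization': 'AWS {0}:{1}'.format(keyid, sig),
    }

headers = sign_s3_get('GKTADJGHEIQSXMKKRBJ08H',
                      'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs',
                      'my-bucket', 'backups/minion.tgz')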
Example #32
def call(payload=None):
    '''
    This function captures the query string and sends it to the Palo Alto device.
    '''
    r = None
    try:
        if DETAILS['method'] == 'dev_key':
            # Pass the api key without the target declaration
            conditional_payload = {'key': DETAILS['apikey']}
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=False,
                                        status=True,
                                        raise_error=True)
        elif DETAILS['method'] == 'dev_pass':
            # Pass credentials without the target declaration
            r = __utils__['http.query'](DETAILS['url'],
                                        username=DETAILS['username'],
                                        password=DETAILS['password'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=False,
                                        status=True,
                                        raise_error=True)
        elif DETAILS['method'] == 'pan_key':
            # Pass the api key with the target declaration
            conditional_payload = {
                'key': DETAILS['apikey'],
                'target': DETAILS['serial']
            }
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=False,
                                        status=True,
                                        raise_error=True)
        elif DETAILS['method'] == 'pan_pass':
            # Pass credentials with the target declaration
            conditional_payload = {'target': DETAILS['serial']}
            payload.update(conditional_payload)
            r = __utils__['http.query'](DETAILS['url'],
                                        username=DETAILS['username'],
                                        password=DETAILS['password'],
                                        data=payload,
                                        method='POST',
                                        decode_type='plain',
                                        decode=True,
                                        verify_ssl=False,
                                        status=True,
                                        raise_error=True)
    except KeyError as err:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host.")

    if not r:
        raise salt.exceptions.CommandExecutionError(
            "Did not receive a valid response from host.")

    if six.text_type(r['status']) not in ['200', '201', '204']:
        if six.text_type(r['status']) == '400':
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request due to a client error.")
        elif six.text_type(r['status']) == '401':
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request because it lacks valid authentication "
                "credentials for the target resource.")
        elif six.text_type(r['status']) == '403':
            raise salt.exceptions.CommandExecutionError(
                "The server refused to authorize the request.")
        elif six.text_type(r['status']) == '404':
            raise salt.exceptions.CommandExecutionError(
                "The requested resource could not be found.")
        else:
            raise salt.exceptions.CommandExecutionError(
                "Did not receive a valid response from host.")

    xmldata = ET.fromstring(r['text'])

    # If we are pulling the candidate configuration, we need to strip the dirtyId
    if payload['type'] == 'config' and payload['action'] == 'get':
        xmldata = (_strip_dirty(xmldata))

    return xml.to_dict(xmldata, True)
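
A minimal usage sketch for the proxy helper above; the operational command and xpath are placeholders, and DETAILS is assumed to have already been populated by the proxy module's init:

# Run an operational command and get the reply back as a dict.
info = call({
    'type': 'op',
    'cmd': '<show><system><info></info></system></show>',
})

# Pull part of the candidate configuration; dirtyId attributes are stripped
# by _strip_dirty() before the XML is converted.
candidate = call({
    'type': 'config',
    'action': 'get',
    'xpath': "/config/devices/entry[@name='localhost.localdomain']/vsys",
})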
Example #33
def query(url,
          method='GET',
          params=None,
          data=None,
          data_file=None,
          header_dict=None,
          header_list=None,
          header_file=None,
          username=None,
          password=None,
          auth=None,
          decode=False,
          decode_type='auto',
          status=False,
          headers=False,
          text=False,
          cookies=None,
          cookie_jar=None,
          cookie_format='lwp',
          persist_session=False,
          session_cookie_jar=None,
          data_render=False,
          data_renderer=None,
          header_render=False,
          header_renderer=None,
          template_dict=None,
          test=False,
          test_url=None,
          node='minion',
          port=80,
          opts=None,
          backend=None,
          ca_bundle=None,
          verify_ssl=None,
          cert=None,
          text_out=None,
          headers_out=None,
          decode_out=None,
          stream=False,
          streaming_callback=None,
          header_callback=None,
          handle=False,
          agent=USERAGENT,
          hide_fields=None,
          raise_error=True,
          **kwargs):
    '''
    Query a resource, and decode the return data
    '''
    ret = {}

    if opts is None:
        if node == 'master':
            opts = salt.config.master_config(
                os.path.join(salt.syspaths.CONFIG_DIR, 'master'))
        elif node == 'minion':
            opts = salt.config.minion_config(
                os.path.join(salt.syspaths.CONFIG_DIR, 'minion'))
        else:
            opts = {}

    if not backend:
        backend = opts.get('backend', 'tornado')

    match = re.match(
        r'https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)',
        url)
    if not match:
        salt.utils.network.refresh_dns()

    if backend == 'requests':
        if HAS_REQUESTS is False:
            ret['error'] = ('http.query has been set to use requests, but the '
                            'requests library does not seem to be installed')
            log.error(ret['error'])
            return ret
        else:
            requests_log = logging.getLogger('requests')
            requests_log.setLevel(logging.WARNING)

    # Some libraries don't support separation of url and GET parameters
    # Don't need a try/except block, since Salt depends on tornado
    url_full = tornado.httputil.url_concat(url, params) if params else url

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get('verify_ssl', True)

    if cert is None:
        cert = opts.get('cert', None)

    if data_file is not None:
        data = _render(data_file, data_render, data_renderer, template_dict,
                       opts)

    # Make sure no secret fields show up in logs
    log_url = sanitize_url(url_full, hide_fields)

    log.debug('Requesting URL %s using %s method', log_url, method)
    log.debug("Using backend: %s", backend)

    if method == 'POST' and log.isEnabledFor(logging.TRACE):
        # Make sure no secret fields show up in logs
        if isinstance(data, dict):
            log_data = data.copy()
            if isinstance(hide_fields, list):
                for item in data:
                    for field in hide_fields:
                        if item == field:
                            log_data[item] = 'XXXXXXXXXX'
            log.trace('Request POST Data: %s', pprint.pformat(log_data))
        else:
            log.trace('Request POST Data: %s', pprint.pformat(data))

    if header_file is not None:
        header_tpl = _render(header_file, header_render, header_renderer,
                             template_dict, opts)
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if cookie_jar is None:
        cookie_jar = os.path.join(
            opts.get('cachedir', salt.syspaths.CACHE_DIR), 'cookies.txt')
    if session_cookie_jar is None:
        session_cookie_jar = os.path.join(
            opts.get('cachedir', salt.syspaths.CACHE_DIR), 'cookies.session.p')

    if persist_session is True and HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.files.fopen(session_cookie_jar, 'rb') as fh_:
                session_cookies = msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.files.fopen(session_cookie_jar, 'wb') as fh_:
                msgpack.dump('', fh_)

    for header in header_list:
        comps = header.split(':')
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    if not auth:
        if username and password:
            auth = (username, password)

    if agent == USERAGENT:
        agent = '{0} http.query()'.format(agent)
    header_dict['User-agent'] = agent

    if backend == 'requests':
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace('Request Headers: %s', sess.headers)
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    elif backend == 'urllib2':
        sess_cookies = None
    else:
        # Tornado
        sess_cookies = None

    if cookies is not None:
        if cookie_format == 'mozilla':
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(
                cookie_jar)
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(
                cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        sess_cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret['test'] = True

    if backend == 'requests':
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == '0':
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs['prefetch'] = False
            else:
                req_kwargs['stream'] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['cert'] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['cert'] = cert
            else:
                log.error(
                    'The client-side certificate path that'
                    ' was passed is not valid: %s', cert)

        result = sess.request(method,
                              url,
                              params=params,
                              data=data,
                              **req_kwargs)
        result.raise_for_status()
        if stream is True:
            # fake a HTTP response header
            header_callback('HTTP/1.0 {0} MESSAGE'.format(result.status_code))
            # fake streaming the content
            streaming_callback(result.content)
            return {
                'handle': result,
            }

        if handle is True:
            return {
                'handle': result,
                'body': result.content,
            }

        log.debug('Final URL location of Response: %s',
                  sanitize_url(result.url, hide_fields))

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.content
        result_cookies = result.cookies
        body = result.content
        if not isinstance(body, six.text_type):
            body = body.decode(result.encoding or 'utf-8')
        ret['body'] = body
    elif backend == 'urllib2':
        request = urllib_request.Request(url_full, data)
        handlers = [
            urllib_request.HTTPHandler,
            urllib_request.HTTPCookieProcessor(sess_cookies)
        ]

        if url.startswith('https'):
            hostname = request.get_host()
            handlers[0] = urllib_request.HTTPSHandler(1)
            if not HAS_MATCHHOSTNAME:
                log.warning(
                    'match_hostname() not available, SSL hostname checking '
                    'not available. THIS CONNECTION MAY NOT BE SECURE!')
            elif verify_ssl is False:
                log.warning('SSL certificate verification has been explicitly '
                            'disabled. THIS CONNECTION MAY NOT BE SECURE!')
            else:
                if ':' in hostname:
                    hostname, port = hostname.split(':')
                else:
                    port = 443
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, int(port)))
                sockwrap = ssl.wrap_socket(sock,
                                           ca_certs=ca_bundle,
                                           cert_reqs=ssl.CERT_REQUIRED)
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret['error'] = ('The certificate was invalid. '
                                    'Error returned was: %s',
                                    pprint.pformat(exc))
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, six.string_types):
                        if os.path.exists(cert):
                            cert_chain = (cert,)
                    elif isinstance(cert, list):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error(
                            'The client-side certificate path that was '
                            'passed is not valid: %s', cert)
                        return
                    if hasattr(ssl, 'SSLContext'):
                        # Python >= 2.7.9
                        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                        context.load_cert_chain(*cert_chain)
                        handlers.append(
                            urllib_request.HTTPSHandler(context=context))  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            'host': request.get_host(),
                            'port': port,
                            'cert_file': cert_chain[0]
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs['key_file'] = cert_chain[1]
                        handlers[
                            0] = salt.ext.six.moves.http_client.HTTPSConnection(
                                **cert_kwargs)

        opener = urllib_request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        request.get_method = lambda: method
        try:
            result = opener.open(request)
        except URLError as exc:
            return {'Error': six.text_type(exc)}
        if stream is True or handle is True:
            return {
                'handle': result,
                'body': result.content,
            }

        result_status_code = result.code
        result_headers = dict(result.info())
        result_text = result.read()
        if 'Content-Type' in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers['Content-Type'])
            if res_content_type.startswith('text/') and \
                    'charset' in res_params and \
                    not isinstance(result_text, six.text_type):
                result_text = result_text.decode(res_params['charset'])
        if six.PY3 and isinstance(result_text, bytes):
            result_text = result_text.decode('utf-8')
        ret['body'] = result_text
    else:
        # Tornado
        req_kwargs = {}

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, six.string_types):
                if os.path.exists(cert):
                    req_kwargs['client_cert'] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs['client_cert'] = cert[0]
                    req_kwargs['client_key'] = cert[1]
            else:
                log.error(
                    'The client-side certificate path that '
                    'was passed is not valid: %s', cert)

        if isinstance(data, dict):
            data = _urlencode(data)

        if verify_ssl:
            # tornado requires a str, cannot be unicode str in py2
            req_kwargs['ca_certs'] = salt.utils.stringutils.to_str(ca_bundle)

        max_body = opts.get('http_max_body',
                            salt.config.DEFAULT_MINION_OPTS['http_max_body'])
        connect_timeout = opts.get(
            'http_connect_timeout',
            salt.config.DEFAULT_MINION_OPTS['http_connect_timeout'])
        timeout = opts.get(
            'http_request_timeout',
            salt.config.DEFAULT_MINION_OPTS['http_request_timeout'])

        client_argspec = None

        proxy_host = opts.get('proxy_host', None)
        if proxy_host:
            # tornado requires a str for proxy_host, cannot be a unicode str in py2
            proxy_host = salt.utils.stringutils.to_str(proxy_host)
        proxy_port = opts.get('proxy_port', None)
        proxy_username = opts.get('proxy_username', None)
        if proxy_username:
            # tornado requires a str, cannot be unicode str in py2
            proxy_username = salt.utils.stringutils.to_str(proxy_username)
        proxy_password = opts.get('proxy_password', None)
        if proxy_password:
            # tornado requires a str, cannot be unicode str in py2
            proxy_password = salt.utils.stringutils.to_str(proxy_password)
        no_proxy = opts.get('no_proxy', [])

        # Since tornado doesn't support no_proxy, we'll always hand it empty proxies or valid ones
        # except we remove the valid ones if a url has a no_proxy hostname in it
        if urlparse(url_full).hostname in no_proxy:
            proxy_host = None
            proxy_port = None

        # We want to use curl_http if we have a proxy defined
        if proxy_host and proxy_port:
            if HAS_CURL_HTTPCLIENT is False:
                ret['error'] = (
                    'proxy_host and proxy_port have been set. This requires pycurl and tornado, '
                    'but the libraries do not seem to be installed')
                log.error(ret['error'])
                return ret

            tornado.httpclient.AsyncHTTPClient.configure(
                'tornado.curl_httpclient.CurlAsyncHTTPClient')
            client_argspec = salt.utils.args.get_function_argspec(
                tornado.curl_httpclient.CurlAsyncHTTPClient.initialize)
        else:
            client_argspec = salt.utils.args.get_function_argspec(
                tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize)

        supports_max_body_size = 'max_body_size' in client_argspec.args

        try:
            download_client = HTTPClient(max_body_size=max_body) \
                if supports_max_body_size \
                else HTTPClient()
            result = download_client.fetch(
                url_full,
                method=method,
                headers=header_dict,
                auth_username=username,
                auth_password=password,
                body=data,
                validate_cert=verify_ssl,
                allow_nonstandard_methods=True,
                streaming_callback=streaming_callback,
                header_callback=header_callback,
                connect_timeout=connect_timeout,
                request_timeout=timeout,
                proxy_host=proxy_host,
                proxy_port=proxy_port,
                proxy_username=proxy_username,
                proxy_password=proxy_password,
                raise_error=raise_error,
                decompress_response=False,
                **req_kwargs)
        except tornado.httpclient.HTTPError as exc:
            ret['status'] = exc.code
            ret['error'] = six.text_type(exc)
            return ret
        except socket.gaierror as exc:
            if status is True:
                ret['status'] = 0
            ret['error'] = six.text_type(exc)
            return ret

        if stream is True or handle is True:
            return {
                'handle': result,
                'body': result.body,
            }

        result_status_code = result.code
        result_headers = result.headers
        result_text = result.body
        if 'Content-Type' in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers['Content-Type'])
            if res_content_type.startswith('text/') and \
                    'charset' in res_params and \
                    not isinstance(result_text, six.text_type):
                result_text = result_text.decode(res_params['charset'])
        if six.PY3 and isinstance(result_text, bytes):
            result_text = result_text.decode('utf-8')
        ret['body'] = result_text
        if 'Set-Cookie' in result_headers and cookies is not None:
            result_cookies = parse_cookie_header(result_headers['Set-Cookie'])
            for item in result_cookies:
                sess_cookies.set_cookie(item)
        else:
            result_cookies = None

    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(':')
            result_headers_dict[comps[0].strip()] = ':'.join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug('Response Status Code: %s', result_status_code)
    log.trace('Response Headers: %s', result_headers)
    log.trace('Response Cookies: %s', sess_cookies)
    # log.trace("Content: %s", result_text)

    coding = result_headers.get('Content-Encoding', "identity")

    # Requests will always decompress the content, and working around that is annoying.
    if backend != 'requests':
        result_text = __decompressContent(coding, result_text)

    try:
        log.trace('Response Text: %s', result_text)
    except UnicodeEncodeError as exc:
        log.trace(
            'Cannot Trace Log Response Text: %s. This may be due to '
            'incompatibilities between requests and logging.', exc)

    if text_out is not None:
        with salt.utils.files.fopen(text_out, 'w') as tof:
            tof.write(result_text)

    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.files.fopen(headers_out, 'w') as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and HAS_MSGPACK:
        # TODO: See persist_session above
        if 'set-cookie' in result_headers:
            with salt.utils.files.fopen(session_cookie_jar, 'wb') as fh_:
                session_cookies = result_headers.get('set-cookie', None)
                if session_cookies is not None:
                    msgpack.dump({'Cookie': session_cookies}, fh_)
                else:
                    msgpack.dump('', fh_)

    if status is True:
        ret['status'] = result_status_code

    if headers is True:
        ret['headers'] = result_headers

    if decode is True:
        if decode_type == 'auto':
            content_type = result_headers.get('content-type',
                                              'application/json')
            if 'xml' in content_type:
                decode_type = 'xml'
            elif 'json' in content_type:
                decode_type = 'json'
            elif 'yaml' in content_type:
                decode_type = 'yaml'
            else:
                decode_type = 'plain'

        valid_decodes = ('json', 'xml', 'yaml', 'plain')
        if decode_type not in valid_decodes:
            ret['error'] = ('Invalid decode_type specified. '
                            'Valid decode types are: {0}'.format(
                                pprint.pformat(valid_decodes)))
            log.error(ret['error'])
            return ret

        if decode_type == 'json':
            ret['dict'] = salt.utils.json.loads(result_text)
        elif decode_type == 'xml':
            ret['dict'] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret['dict'].append(xml.to_dict(item))
        elif decode_type == 'yaml':
            ret['dict'] = salt.utils.data.decode(
                salt.utils.yaml.safe_load(result_text))
        else:
            text = True

        if decode_out:
            with salt.utils.files.fopen(decode_out, 'w') as dof:
                dof.write(result_text)

    if text is True:
        ret['text'] = result_text

    return ret
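
A minimal usage sketch for the utility above; the URLs and field values are placeholders:

# GET with automatic decoding; the Content-Type header decides between
# json, xml, yaml and plain.
result = query(
    'https://api.example.com/v1/status',
    method='GET',
    decode=True,
    decode_type='auto',
    status=True,
    headers=True,
)
# result['status'], result['headers'] and result['dict'] are filled in
# according to the flags passed above.

# POST form data through the requests backend and keep the secret field
# out of the trace logs.
query(
    'https://api.example.com/v1/login',
    method='POST',
    data={'user': 'admin', 'password': 'hunter2'},
    backend='requests',
    hide_fields=['password'],
    status=True,
)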
Example #34
def query(
    params=None,
    setname=None,
    requesturl=None,
    location=None,
    return_url=False,
    return_root=False,
    opts=None,
    provider=None,
    endpoint=None,
    product="ec2",
    sigver="2",
):
    """
    Perform a query against AWS services using Signature Version 2 Signing
    Process. This is documented at:

    http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html

    Regions and endpoints are documented at:

    http://docs.aws.amazon.com/general/latest/gr/rande.html

    Default ``product`` is ``ec2``. Valid ``product`` names are:

    .. code-block:: yaml

        - autoscaling (Auto Scaling)
        - cloudformation (CloudFormation)
        - ec2 (Elastic Compute Cloud)
        - elasticache (ElastiCache)
        - elasticbeanstalk (Elastic BeanStalk)
        - elasticloadbalancing (Elastic Load Balancing)
        - elasticmapreduce (Elastic MapReduce)
        - iam (Identity and Access Management)
        - importexport (Import/Export)
        - monitoring (CloudWatch)
        - rds (Relational Database Service)
        - simpledb (SimpleDB)
        - sns (Simple Notification Service)
        - sqs (Simple Queue Service)
    """
    if params is None:
        params = {}

    if opts is None:
        opts = {}

    function = opts.get("function", (None, product))
    providers = opts.get("providers", {})

    if provider is None:
        prov_dict = providers.get(function[1], {}).get(product, {})
        if prov_dict:
            driver = list(list(prov_dict.keys()))[0]
            provider = providers.get(driver, product)
    else:
        prov_dict = providers.get(provider, {}).get(product, {})

    service_url = prov_dict.get("service_url", "amazonaws.com")

    if not location:
        location = get_location(opts, prov_dict)

    if endpoint is None:
        if not requesturl:
            endpoint = prov_dict.get(
                "endpoint", "{}.{}.{}".format(product, location, service_url))

            requesturl = "https://{}/".format(endpoint)
        else:
            endpoint = urllib.parse.urlparse(requesturl).netloc
            if endpoint == "":
                endpoint_err = (
                    "Could not find a valid endpoint in the "
                    "requesturl: {}. Looking for something "
                    "like https://some.aws.endpoint/?args").format(requesturl)
                log.error(endpoint_err)
                if return_url is True:
                    return {"error": endpoint_err}, requesturl
                return {"error": endpoint_err}

    log.debug("Using AWS endpoint: %s", endpoint)
    method = "GET"

    aws_api_version = prov_dict.get(
        "aws_api_version",
        prov_dict.get("{}_api_version".format(product),
                      DEFAULT_AWS_API_VERSION),
    )

    # Fallback to ec2's id & key if none is found, for this component
    if not prov_dict.get("id", None):
        prov_dict["id"] = providers.get(provider, {}).get("ec2",
                                                          {}).get("id", {})
        prov_dict["key"] = providers.get(provider, {}).get("ec2",
                                                           {}).get("key", {})

    if sigver == "4":
        headers, requesturl = sig4(
            method,
            endpoint,
            params,
            prov_dict,
            aws_api_version,
            location,
            product,
            requesturl=requesturl,
        )
        params_with_headers = {}
    else:
        params_with_headers = sig2(method, endpoint, params, prov_dict,
                                   aws_api_version)
        headers = {}

    attempts = 0
    while attempts < AWS_MAX_RETRIES:
        log.debug("AWS Request: %s", requesturl)
        log.trace("AWS Request Parameters: %s", params_with_headers)
        try:
            result = requests.get(requesturl,
                                  headers=headers,
                                  params=params_with_headers)
            log.debug("AWS Response Status Code: %s", result.status_code)
            log.trace("AWS Response Text: %s", result.text)
            result.raise_for_status()
            break
        except requests.exceptions.HTTPError as exc:
            root = ET.fromstring(exc.response.content)
            data = xml.to_dict(root)

            # check to see if we should retry the query
            err_code = data.get("Errors", {}).get("Error", {}).get("Code", "")
            if attempts < AWS_MAX_RETRIES and err_code and err_code in AWS_RETRY_CODES:
                attempts += 1
                log.error(
                    "AWS Response Status Code and Error: [%s %s] %s; "
                    "Attempts remaining: %s",
                    exc.response.status_code,
                    exc,
                    data,
                    attempts,
                )
                sleep_exponential_backoff(attempts)
                continue

            log.error(
                "AWS Response Status Code and Error: [%s %s] %s",
                exc.response.status_code,
                exc,
                data,
            )
            if return_url is True:
                return {"error": data}, requesturl
            return {"error": data}
    else:
        log.error(
            "AWS Response Status Code and Error: [%s %s] %s",
            exc.response.status_code,
            exc,
            data,
        )
        if return_url is True:
            return {"error": data}, requesturl
        return {"error": data}

    root = ET.fromstring(result.text)
    items = root[1]
    if return_root is True:
        items = root

    if setname:
        for idx, item in enumerate(root):
            comps = item.tag.split("}")
            if comps[1] == setname:
                items = root[idx]

    ret = []
    for item in items:
        ret.append(xml.to_dict(item))

    if return_url is True:
        return ret, requesturl

    return ret
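
The retry loop above leans on a sleep_exponential_backoff() helper that is not shown here; below is a minimal sketch of what such a helper might look like (the real salt.utils.aws implementation may differ):

import random
import time

def sleep_exponential_backoff(attempts):
    # Hypothetical sketch: wait a randomized interval that roughly doubles
    # with every failed attempt, so throttled AWS calls back off instead of
    # hammering the endpoint.
    time.sleep(random.uniform(0, 2 ** attempts))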