Example #1
def load_headers(url, **kwargs):
    """
    load the HTTP headers
    """
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    # literal_match = re.compile(r"\\(\X(\d+)?\w+)?", re.I)

    req, _, _, _ = get_page(url, agent=agent, proxy=proxy)
    if len(req.cookies) > 0:
        logger.info(set_color(
            "found a request cookie, saving to file...", level=25
        ))
        try:
            cookie_start = req.cookies.keys()
            cookie_value = req.cookies.values()
            write_to_log_file(
                "{}={}".format(''.join(cookie_start), ''.join(cookie_value)),
                COOKIE_LOG_PATH, COOKIE_FILENAME.format(replace_http(url))
            )
        except Exception:
            write_to_log_file(
                [c for c in req.cookies.itervalues()], COOKIE_LOG_PATH,
                COOKIE_FILENAME.format(replace_http(url))
            )
    retval = {}
    do_not_use = []
    http_headers = req.headers
    for header in http_headers:
        try:
            # test to see if there are any Unicode errors in the header value
            retval[header] = unicodedata.normalize(
                "NFKD", u"{}".format(http_headers[header])
            ).encode("ascii", errors="ignore")
        # just to be safe, we're going to put all the possible Unicode errors into a tuple
        except (UnicodeEncodeError, UnicodeDecodeError, UnicodeError, UnicodeTranslateError, UnicodeWarning):
            # if there are any errors, we're going to append them to a `do_not_use` list
            do_not_use.append(header)
    # clear the dict so we can re-add to it
    retval.clear()
    for head in http_headers:
        # if the header is in the list, we skip it
        if head not in do_not_use:
            retval[head] = http_headers[head]
    # return a dict of safe unicodeless HTTP headers
    return retval
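For reference, a minimal standalone sketch of the same header-sanitizing idea, independent of the project helpers (get_page, write_to_log_file and the logger are assumed to come from the surrounding project; the function name is illustrative). It simply drops any header whose value does not survive ASCII normalization:

import unicodedata


def strip_unsafe_headers(http_headers):
    """keep only the headers whose values normalize cleanly to ASCII"""
    safe = {}
    for name, value in http_headers.items():
        try:
            # NFKD-normalize the value and make sure it can be encoded as ASCII
            unicodedata.normalize("NFKD", u"{}".format(value)).encode("ascii", errors="ignore")
            safe[name] = value
        except UnicodeError:
            # UnicodeError is the common base class of the encode/decode errors
            continue
    return safe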
Example #2
def run_port_scan(host):
    """ Pointer to run a Port Scan on a given host """
    if re.search(IP_ADDRESS_REGEX, host) is not None:
        LOGGER.info("Starting port scan on IP: {}".format(host))
        PortScanner(host).connect_to_host()
    elif re.search(URL_REGEX,
                   host) is not None and re.search(QUERY_REGEX, host) is None:
        try:
            LOGGER.info("Fetching resolve IP...")
            ip_address = socket.gethostbyname(replace_http(host))
            LOGGER.info("Done! IP: {}".format(ip_address))
            LOGGER.info("Starting scan on URL: {} IP: {}".format(
                host, ip_address))
            PortScanner(ip_address).connect_to_host()
        except socket.gaierror:
            error_message = "Unable to resolve IP address from {}.".format(
                host)
            error_message += " You can manually get the IP address and try again,"
            error_message += " dropping the query parameter in the URL (IE php?id=),"
            error_message += " or dropping the http or https"
            error_message += " and adding www in place of it. IE www.google.com"
            error_message += " may fix this issue."
            LOGGER.fatal(error_message)
    else:
        error_message = "You need to provide a host to scan,"
        error_message += " this can be given in the form of a URL "
        error_message += "or a IP address. Dropping the query (GET) "
        error_message += "of the URL may resolve this problem, or "
        error_message += "verify that the IP is real"
        LOGGER.fatal(error_message)
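A minimal standalone sketch of just the hostname-to-IP resolution step used above (IP_ADDRESS_REGEX, replace_http and PortScanner are project-specific and assumed; the function name is illustrative):

import socket


def resolve_target(host):
    """return the IPv4 address for a hostname, or None if resolution fails"""
    try:
        return socket.gethostbyname(host)
    except socket.gaierror:
        return None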
Example #3
def check_for_robots(url, ext="/robots.txt", data_sep="-" * 30):
    """
    check if the URL has a robots.txt and collect `interesting` information
    from the page
    """
    url = replace_http(url)
    interesting = set()
    full_url = "{}{}{}".format("http://", url, ext)
    conn = requests.get(full_url)
    data = conn.content
    code = conn.status_code
    if code == 404:
        return False
    for line in data.split("\n"):
        if "Allow" in line:
            interesting.add(line.strip())
    if len(interesting) > 0:
        create_tree(full_url, list(interesting))
    else:
        to_display = prompt(
            "nothing interesting found in robots.txt would you like to display the entire page",
            opts="yN")
        if to_display.lower().startswith("y"):
            print("{}\n{}\n{}".format(data_sep, data, data_sep))
    logger.info(set_color("robots.txt page will be saved into a file..."))
    write_to_log_file(data, ROBOTS_PAGE_PATH, "robots-{}.log".format(url))
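A minimal standalone version of the same robots.txt check using only requests (the tree-building, prompt and logging helpers are project-specific and assumed; the function name is illustrative). Note that the substring test "Allow" in line also matches Disallow lines, which is usually what you want here:

import requests


def fetch_robots_rules(host, timeout=10):
    """fetch robots.txt over plain HTTP and return the rule lines, or None on a 404"""
    resp = requests.get("http://{}/robots.txt".format(host), timeout=timeout)
    if resp.status_code == 404:
        return None
    # "Allow" also matches "Disallow", so both rule types are collected
    return [line.strip() for line in resp.text.splitlines() if "Allow" in line]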
Example #4
def load_headers(url, **kwargs):
    """
    load the URL headers
    """
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    if proxy is not None:
        proxy = proxy_string_to_dict(proxy)
    if not xforward:
        header_value = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent
        }
    else:
        ip_list = create_random_ip(), create_random_ip(), create_random_ip()
        header_value = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent,
            HTTP_HEADER.X_FORWARDED_FOR: "{}, {}, {}".format(
                ip_list[0], ip_list[1], ip_list[2]
            )
        }
    req = requests.get(url, headers=header_value, proxies=proxy, timeout=10)
    if len(req.cookies) > 0:
        logger.info(set_color(
            "found a request cookie, saving to file...", level=25
        ))
        try:
            cookie_start = req.cookies.keys()
            cookie_value = req.cookies.values()
            write_to_log_file(
                "{}={}".format(''.join(cookie_start), ''.join(cookie_value)),
                COOKIE_LOG_PATH, COOKIE_FILENAME.format(replace_http(url))
            )
        except Exception:
            write_to_log_file(
                [c for c in req.cookies.itervalues()], COOKIE_LOG_PATH,
                COOKIE_FILENAME.format(replace_http(url))
            )
    return req.headers
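The interesting part of the example above is the spoofed X-Forwarded-For chain. A minimal sketch of that idea, with an illustrative stand-in for the project's create_random_ip helper (the real helper may generate addresses differently):

import random


def create_random_ip():
    """illustrative stand-in for the project's create_random_ip helper"""
    return ".".join(str(random.randint(1, 254)) for _ in range(4))


def forged_headers(agent):
    """build headers that spoof three proxy hops, mirroring the example above"""
    return {
        "Connection": "close",
        "User-Agent": agent,
        "X-Forwarded-For": ", ".join(create_random_ip() for _ in range(3)),
    }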
Example #5
def main_header_check(url, **kwargs):
    """
    main function
    """
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)
    identify = kwargs.get("identify", True)

    protection = {"hostname": url}
    definition = {
        "x-xss": ("protection against XSS attacks", "XSS"),
        "strict-transport": ("protection against unencrypted connections (force HTTPS connection)", "HTTPS"),
        "x-frame": ("protection against clickjacking vulnerabilities", "CLICKJACKING"),
        "x-content": ("protection against MIME type attacks", "MIME"),
        "x-csrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "x-xsrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "public-key": ("protection to reduce success rates of MITM attacks", "MITM"),
        "content-security": ("header protection against multiple attack types", "ALL")
    }

    try:
        if identify:
            logger.info(set_color(
                "checking if target URL is protected by some kind of WAF/IPS/IDS..."
            ))
            identified = detect_protection(url, proxy=proxy, agent=agent, verbose=verbose, xforward=xforward)

            if identified is None:
                logger.info(set_color(
                    "no WAF/IDS/IPS has been identified on target URL...", level=25
                ))
            else:
                logger.warning(set_color(
                    "the target URL WAF/IDS/IPS has been identified as '{}'...".format(identified), level=35
                ))

        if verbose:
            logger.debug(set_color(
                "loading XML data...", level=10
            ))
        comparable_headers = load_xml_data(HEADER_XML_DATA)
        logger.info(set_color(
            "attempting to get request headers for '{}'...".format(url.strip())
        ))
        found_headers = None
        try:
            found_headers = load_headers(url, proxy=proxy, agent=agent, xforward=xforward)
        except Exception as e:
            if "Read timed out." in str(e) or "Connection reset by peer" in str(e):
                found_headers = None
            else:
                logger.exception(set_color(
                    "Zeus has hit an unexpected error and cannot continue '{}'...".format(e), level=50
                ))
                request_issue_creation()

        if found_headers is not None:
            if verbose:
                logger.debug(set_color(
                    "fetched {}...".format(found_headers), level=10
                ))
            headers_established = [str(h) for h in compare_headers(found_headers, comparable_headers)]
            for key in definition.iterkeys():
                if any(key in h.lower() for h in headers_established):
                    logger.warning(set_color(
                        "provided target has {}...".format(definition[key][0]), level=30
                    ))
            for key in found_headers.iterkeys():
                protection[key] = found_headers[key]
            logger.info(set_color(
                "writing found headers to log file...", level=25
            ))
            return write_to_log_file(protection, HEADER_RESULT_PATH, HEADERS_FILENAME.format(replace_http(url)))
        else:
            logger.error(set_color(
                "unable to retrieve headers for site '{}'...".format(url.strip()), level=40
            ))
    except KeyboardInterrupt:
        if not pause():
            shutdown()
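The core of the header audit above is matching known security-header prefixes against the response headers. A minimal standalone version of that comparison (the definition table is the one defined in the example; the function name is illustrative):

def report_protections(found_headers, definition):
    """return the description of every protection whose header prefix appears in the response"""
    names = [str(h).lower() for h in found_headers]
    return [desc for key, (desc, _) in definition.items()
            if any(key in name for name in names)]

For example, report_protections(found_headers, definition) would list the protections a response advertises, much like the warning loop above.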
Example #6
def main_header_check(url, **kwargs):
    """
    main function
    """
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)
    identify_waf = kwargs.get("identify_waf", True)
    identify_plugins = kwargs.get("identify_plugins", True)
    show_description = kwargs.get("show_description", False)
    attempts = kwargs.get("attempts", 3)

    default_sleep_time = 5
    protection = {"hostname": url}
    definition = {
        "x-xss": ("protection against XSS attacks", "XSS"),
        "strict-transport": ("protection against unencrypted connections (force HTTPS connection)", "HTTPS"),
        "x-frame": ("protection against clickjacking vulnerabilities", "CLICKJACKING"),
        "x-content": ("protection against MIME type attacks", "MIME"),
        "x-csrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "x-xsrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "public-key": ("protection to reduce success rates of MITM attacks", "MITM"),
        "content-security": ("header protection against multiple attack types", "ALL")
    }

    try:
        req, status, html, headers = get_page(url, proxy=proxy, agent=agent, xforward=xforward)

        logger.info(set_color(
            "detecting target charset"
        ))
        charset = get_charset(html, headers)
        if charset is not None:
            logger.info(set_color(
                "target charset appears to be '{}'".format(charset), level=25
            ))
        else:
            logger.warning(set_color(
                "unable to detect target charset", level=30
            ))
        if identify_waf:
            waf_url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)
            _, waf_status, waf_html, waf_headers = get_page(waf_url, xforward=xforward, proxy=proxy, agent=agent)
            logger.info(set_color(
                "checking if target URL is protected by some kind of WAF/IPS/IDS"
            ))
            if verbose:
                logger.debug(set_color(
                    "attempting connection to '{}'".format(waf_url), level=10
                ))

            identified_waf = detect_protection(url, waf_status, waf_html, waf_headers, verbose=verbose)

            if identified_waf is None:
                logger.info(set_color(
                    "no WAF/IDS/IPS has been identified on target URL", level=25
                ))
            else:
                logger.warning(set_color(
                    "the target URL WAF/IDS/IPS has been identified as '{}'".format(identified_waf), level=35
                ))

        if identify_plugins:
            logger.info(set_color(
                "attempting to identify plugins"
            ))
            identified_plugin = detect_plugins(html, headers, verbose=verbose)
            if identified_plugin is not None:
                for plugin in identified_plugin:
                    if show_description:
                        logger.info(set_color(
                            "possible plugin identified as '{}' (description: '{}')".format(
                                plugin[0], plugin[1]
                            ), level=25
                        ))
                    else:
                        logger.info(set_color(
                            "possible plugin identified as '{}'".format(
                                plugin[0]
                            ), level=25
                        ))
            else:
                logger.warning(set_color(
                    "no known plugins identified on target", level=30
                ))

        if verbose:
            logger.debug(set_color(
                "loading XML data", level=10
            ))
        comparable_headers = load_xml_data(HEADER_XML_DATA)
        logger.info(set_color(
            "attempting to get request headers for '{}'".format(url.strip())
        ))
        found_headers = None
        try:
            found_headers = load_headers(url, req)
        except Exception as e:
            if "Read timed out." in str(e) or "Connection reset by peer" in str(e):
                found_headers = None
            else:
                logger.exception(set_color(
                    "Zeus has hit an unexpected error and cannot continue '{}'".format(e), level=50
                ))
                request_issue_creation()

        if found_headers is not None:
            if verbose:
                logger.debug(set_color(
                    "fetched {}".format(found_headers), level=10
                ))
            headers_established = [str(h) for h in compare_headers(found_headers, comparable_headers)]
            for key in definition.iterkeys():
                if any(key in h.lower() for h in headers_established):
                    logger.warning(set_color(
                        "provided target has {}".format(definition[key][0]), level=30
                    ))
            for key in found_headers.iterkeys():
                protection[key] = found_headers[key]
            logger.info(set_color(
                "writing found headers to log file", level=25
            ))
            return write_to_log_file(protection, HEADER_RESULT_PATH, HEADERS_FILENAME.format(replace_http(url)))
        else:
            logger.error(set_color(
                "unable to retrieve headers for site '{}'".format(url.strip()), level=40
            ))
    except ConnectionError:
        attempts = attempts - 1
        if attempts == 0:
            return False
        logger.warning(set_color(
            "target actively refused the connection, sleeping for {}s and retrying the request".format(
                default_sleep_time
            ), level=30
        ))
        time.sleep(default_sleep_time)
        return main_header_check(
            url, proxy=proxy, agent=agent, xforward=xforward, show_description=show_description,
            identify_plugins=identify_plugins, identify_waf=identify_waf, verbose=verbose,
            attempts=attempts
        )
    except ReadTimeout:
        logger.error(set_color(
            "meta-data retrieval failed due to target URL timing out, skipping", level=40
        ))
    except KeyboardInterrupt:
        if not pause():
            shutdown()
    except Exception as e:
        logger.exception(set_color(
            "meta-data retrieval failed with unexpected error '{}'".format(
                str(e)
            ), level=50
        ))
Example #7
def detect_protection(url, **kwargs):
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    if xforward:
        ip_list = (create_random_ip(), create_random_ip(), create_random_ip())
        headers = {
            HTTP_HEADER.CONNECTION:
            "close",
            HTTP_HEADER.USER_AGENT:
            agent,
            HTTP_HEADER.X_FORWARDED_FOR:
            "{}, {}, {}".format(ip_list[0], ip_list[1], ip_list[2])
        }
    else:
        headers = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent
        }

    url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)

    if verbose:
        logger.debug(
            set_color("attempting connection to '{}'...".format(url),
                      level=10))
    try:
        protection_check_req = requests.get(
            url,
            headers=headers,
            proxies=proxy_string_to_dict(proxy),
            timeout=20)

        html, status, headers = protection_check_req.content, protection_check_req.status_code, protection_check_req.headers

        for dbms in DBMS_ERRORS:  # make sure there are no DBMS errors in the HTML
            for regex in DBMS_ERRORS[dbms]:
                if re.compile(regex).search(html) is not None:
                    logger.info(
                        set_color(
                            "it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
                            "to SQL injection attacks. it appears the backend DBMS is '{}'..."
                            .format(dbms),
                            level=25))
                    return None

        retval = []
        if status != 200 and "not found" not in html.lower():
            file_list = [
                f for f in os.listdir(DETECT_FIREWALL_PATH)
                if not any(ex in f for ex in ["__init__", ".pyc"])
            ]
            for item in file_list:
                item = item[:-3]
                detection_name = "lib.firewall.{}"
                detection_name = detection_name.format(item)
                detection_name = importlib.import_module(detection_name)
                if detection_name.detect(html, headers=headers, status=status):
                    retval.append(detection_name.__item__)
            if len(retval) > 1:
                if "Generic (Unknown)" in retval:
                    item = retval.index("Generic (Unknown)")
                    del retval[item]
            elif len(retval) == 1:
                if retval[0] == "Generic (Unknown)":
                    logger.warning(
                        set_color(
                            "identified WAF/IDS/IPS is unknown to Zeus, if you know the firewall and the context "
                            "of the firewall, please create an issue ({}), fingerprint of the firewall will be "
                            "written to a log file...".format(ISSUE_LINK),
                            level=30))
                    full_finger_print = "HTTP/1.1 {}\n{}\n{}".format(
                        status, headers, html)
                    write_to_log_file(
                        full_finger_print, UNKNOWN_FIREWALL_FINGERPRINT_PATH,
                        UNKNOWN_FIREWALL_FILENAME.format(replace_http(url)))
        else:
            retval = None

        return ''.join(retval) if retval else None

    except Exception as e:
        if "Read timed out." or "Connection reset by peer" in str(e):
            logger.warning(
                set_color(
                    "detection request timed out, assuming no protection and continuing...",
                    level=30))
            return None
        else:
            logger.exception(
                set_color(
                    "Zeus ran into an unexpected error '{}'...".format(e),
                    level=50))
            request_issue_creation()
            return None
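The firewall-detection plugins are discovered and loaded dynamically. A minimal sketch of that importlib pattern, assuming the same lib/firewall/*.py layout in which every plugin exposes detect() and __item__ as in the example (the function name is illustrative):

import importlib
import os


def load_firewall_plugins(path="lib/firewall"):
    """import every plugin module under lib.firewall and return the loaded modules"""
    plugins = []
    for filename in os.listdir(path):
        if filename.endswith(".py") and not filename.startswith("__init__"):
            module = importlib.import_module("lib.firewall.{}".format(filename[:-3]))
            plugins.append(module)
    return plugins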
Example #8
def main_intel_amt(url, agent=None, proxy=None, **kwargs):
    """
    main attack method to be called
    """
    do_ip_address = kwargs.get("do_ip", False)
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    if do_ip_address:
        logger.warning(
            set_color(
                "running against IP addresses may result in the targets refusing the connection...",
                level=30))
        logger.info(
            set_color("will run against IP address instead of hostname..."))
        try:
            url = replace_http(url)
            url = socket.gethostbyname(url)
            logger.info(set_color("discovered IP address {}...".format(url)))
        except Exception as e:
            logger.error(
                set_color(
                    "failed to gather IP address from hostname '{}', received an error '{}'. "
                    "will just run against hostname...".format(url, e),
                    level=40))
            url = url
    logger.info(
        set_color(
            "attempting to connect to '{}' and get hardware info...".format(
                url)))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(
                set_color("unable to get any information, skipping...",
                          level=40))
            pass
        else:
            print("-" * 40)
            for key in json_data.keys():
                print("{}:".format(str(key).capitalize()))
                for item in json_data[key]:
                    print(" - {}: {}".format(item.capitalize(),
                                             json_data[key][item]))
            print("-" * 40)
    except requests.exceptions.ConnectionError as e:
        if "Max retries exceeded with url" in str(e):
            logger.error(
                set_color(
                    "failed connection, target machine is actively refusing the connection, skipping...",
                    level=40))
            pass
        else:
            logger.error(
                set_color("failed connection with '{}', skipping...",
                          level=40))
            pass
    except Exception as e:
        if "Temporary failure in name resolution" in str(e):
            logger.error(
                set_color("failed to connect on '{}', skipping...".format(url),
                          level=40))
            pass
        else:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue...".format(e)))
            request_issue_creation()
Example #9
    def __run_attacks(url, **kwargs):
        """
        run the attacks if any are requested
        """
        nmap = kwargs.get("nmap", False)
        sqlmap = kwargs.get("sqlmap", False)
        intel = kwargs.get("intel", False)
        xss = kwargs.get("xss", False)
        admin = kwargs.get("admin", False)
        verbose = kwargs.get("verbose", False)
        batch = kwargs.get("batch", False)
        auto_start = kwargs.get("auto_start", False)

        __enabled_attacks = {
            "sqlmap": opt.runSqliScan,
            "port": opt.runPortScan,
            "xss": opt.runXssScan,
            "admin": opt.adminPanelFinder,
            "intel": opt.intelCheck
        }

        enabled = set()
        for key in __enabled_attacks.keys():
            if __enabled_attacks[key] is True:
                enabled.add(key)
            if len(enabled) > 1:
                logger.error(set_color(
                    "it appears that you have enabled multiple attack types, "
                    "as of now only 1 attack is supported at a time, choose "
                    "your attack and try again. You can use the -f flag if "
                    "you do not want to complete an entire search again...", level=40
                ))
                shutdown()

        if not batch:
            question = prompt(
                "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
            )
        else:
            question = "y"

        if question.lower().startswith("y"):
            if sqlmap:
                return sqlmap_scan.sqlmap_scan_main(
                    url.strip(), verbose=verbose,
                    opts=__create_arguments(sqlmap=True), auto_start=auto_start)
            elif nmap:
                url_ip_address = replace_http(url.strip())
                return nmap_scan.perform_port_scan(
                    url_ip_address, verbose=verbose,
                    opts=__create_arguments(nmap=True)
                )
            elif intel:
                url = get_true_url(url)
                return intel_me.main_intel_amt(
                    url, agent=agent_to_use,
                    proxy=proxy_to_use, do_ip=opt.runAgainstIpAddress
                )
            elif admin:
                main(
                    url, show=opt.showAllConnections,
                    verbose=verbose, do_threading=opt.threadPanels
                )
            elif xss:
                main_xss(
                    url, verbose=verbose, proxy=proxy_to_use,
                    agent=agent_to_use, tamper=opt.tamperXssPayloads
                )
            else:
                pass
        else:
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
Example #10
def check_for_admin_page(url, exts, protocol="http://", **kwargs):
    verbose = kwargs.get("verbose", False)
    show_possibles = kwargs.get("show_possibles", False)
    possible_connections, connections = set(), set()
    stripped_url = replace_http(str(url).strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    logger.error(
                        set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                if "<urlopen error timed out>" or "timeout: timed out" in str(
                        e):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(
            set_color(
                "did not receive any successful connections to the admin page of "
                "{}...".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page",
                    level=50))
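A minimal standalone sketch of the same path brute-force loop using requests instead of urlopen (the extension handling and the 200-only success check are simplifications of the example above; the function name is illustrative):

import requests


def probe_admin_paths(host, extensions, timeout=5):
    """return every candidate admin path that answers with HTTP 200"""
    found = []
    for ext in extensions:
        candidate = "http://{}{}".format(host, ext.strip())
        try:
            if requests.get(candidate, timeout=timeout).status_code == 200:
                found.append(candidate)
        except requests.RequestException:
            # timeouts and refused connections are simply skipped
            continue
    return found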
Example #11
def load_headers(url, **kwargs):
    """
    load the HTTP headers
    """
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    # literal_match = re.compile(r"\\(\X(\d+)?\w+)?", re.I)

    if proxy is not None:
        proxy = proxy_string_to_dict(proxy)
    if not xforward:
        header_value = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent
        }
    else:
        ip_list = create_random_ip(), create_random_ip(), create_random_ip()
        header_value = {
            HTTP_HEADER.CONNECTION:
            "close",
            HTTP_HEADER.USER_AGENT:
            agent,
            HTTP_HEADER.X_FORWARDED_FOR:
            "{}, {}, {}".format(ip_list[0], ip_list[1], ip_list[2])
        }
    req = requests.get(url, headers=header_value, proxies=proxy, timeout=10)
    if len(req.cookies) > 0:
        logger.info(
            set_color("found a request cookie, saving to file...", level=25))
        try:
            cookie_start = req.cookies.keys()
            cookie_value = req.cookies.values()
            write_to_log_file(
                "{}={}".format(''.join(cookie_start), ''.join(cookie_value)),
                COOKIE_LOG_PATH, COOKIE_FILENAME.format(replace_http(url)))
        except Exception:
            write_to_log_file([c for c in req.cookies.itervalues()],
                              COOKIE_LOG_PATH,
                              COOKIE_FILENAME.format(replace_http(url)))
    retval = {}
    do_not_use = []
    http_headers = req.headers
    for header in http_headers:
        try:
            # test to see if there are any unicode errors in the string
            retval[header] = unicodedata.normalize(
                "NFKD",
                u"{}".format(http_headers[header])).encode("ascii",
                                                           errors="ignore")
        # just to be safe, we're going to put all the possible Unicode errors into a tuple
        except (UnicodeEncodeError, UnicodeDecodeError, UnicodeError,
                UnicodeTranslateError, UnicodeWarning):
            # if there are any errors, we're going to append them to a `do_not_use` list
            do_not_use.append(header)
    # clear the dict so we can re-add to it
    retval.clear()
    for head in http_headers:
        # if the header is in the list, we skip it
        if head not in do_not_use:
            retval[head] = http_headers[head]
    # return a dict of safe unicodeless HTTP headers
    return retval