    def test_reloading(self, steps, proxy, user, runs, host, pass_rate):
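        """Load the page and reload it `runs` times with the proxy off and then
        on; the proxied session must retain at least `pass_rate` of the requests
        and responses seen in the direct session."""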

        direct_stats = []
        with steps.start("Reloading: colecting stats with proxy off"):
            with Chrome(grid_server=user) as chrome:
                chrome.get(host)
                for _ in range(runs):
                    chrome.refresh()
                stats = chrome.get_stats()
            data = BrowserResponseAnalyzer(stats)
            direct_stats.append((data.get_requests_statistics(),
                                 data.get_response_statistics()))

        proxyied_stats = []
        with steps.start("Reloading: colecting stats with proxy on"):
            with Chrome(grid_server=user, proxy_server=proxy) as chrome:
                chrome.get(host)
                for _ in range(runs):
                    chrome.refresh()
                stats = chrome.get_stats()
            data = BrowserResponseAnalyzer(stats)
            proxyied_stats.append((data.get_requests_statistics(),
                                   data.get_response_statistics()))

        with steps.start("Anylizing results"):

            rates_req = [
                tup1[0] / tup2[0]
                for tup1, tup2 in zip(proxyied_stats, direct_stats)
                if tup2[0] != 0
            ]
            rates_rsp = [
                tup1[1] / tup2[1]
                for tup1, tup2 in zip(proxyied_stats, direct_stats)
                if tup2[1] != 0
            ]
            pass_condition = all(rate >= pass_rate for rate in rates_req) and all(
                rate >= pass_rate for rate in rates_rsp
            )

            console_log = log_table_resources(
                hosts=(host, ),
                runs=runs,
                direct_stats=direct_stats,
                proxyied_stats=proxyied_stats,
                request_avg_success=rates_req,
                response_avg_success=rates_rsp,
            )

            _log.info(console_log)
            if not pass_condition:
                self.failed("Too many resources were lost during reloading!")
Example #2
    def test_cache_cleaning(self, proxy, user, host, cleanings):
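        """Open `cleanings` consecutive browser sessions through the proxy and
        verify that the proxy server is still alive after each session."""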

        for i in range(1, cleanings + 1):
            with Chrome(
                grid_server=user, proxy_server=proxy, session_wide_proxy=False
            ) as chrome:
                chrome.get(host)
            if not self.proxy_connection.is_alive():
                self.failed(f"Proxy server shuted down after session `{i}`")
Example #3
    def test_socks_handshake(self, user, proxy, host):
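        """Load the host through the proxy with traffic dumping enabled and
        verify that a SOCKS5 handshake is present in the captured pcap."""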

        with Chrome(grid_server=user, proxy_server=proxy,
                    traffic_dump=True) as chrome:
            chrome.get(host)

        pcap_file = f"{user.name}_tshark.pcap"
        pcap_file = os.path.join(_temp_files_dir, pcap_file)
        pcap_obj = TsharkPcap(pcap_file)

        if pcap_obj.find_packets_in_stream(packet_type="socks")[0] is False:
            self.failed("Socks 5 handshake sequence not found")
Example #4
    def con_error_test(self, user, proxy, host):
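        """Load the host through the proxy and expect the browser log to contain
        exactly one ERR_CONNECTION_REFUSED error."""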

        with Chrome(grid_server=user, proxy_server=proxy) as chrome:
            chrome.get(host)
            stats = chrome.get_stats()

        data = BrowserResponseAnalyzer(stats)
        errors = data.get_browser_errors()
        pass_condition = len(errors) == 1 and "ERR_CONNECTION_REFUSED" in errors[0]
        if not pass_condition:
            _log.info(f"Web Brower logs:\n{pformat(stats)}")
            self.failed("Invalod response, no `ERR_CONNECTION_REFUSED` occured!")
Example #5
    def no_error_test(self, user, proxy, host):
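        """Load the host through the proxy and expect an HTTP 200 status code."""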

        with Chrome(grid_server=user, proxy_server=proxy) as chrome:
            chrome.get(host)
            stats = chrome.get_stats()

        data = BrowserResponseAnalyzer(stats)
        status_code = data.get_status_code()
        if status_code != 200:
            _log.info(f"Web Brower logs:\n{pformat(stats)}")
            self.failed(
                f"Invalid response, expected status code 200, got {status_code}!"
            )
Example #6
    def loading_time_test(self, steps, proxy, user, host, delay_rate, runs,
                          fails):
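        """Measure page loading time over `runs` runs with the proxy off and on;
        fail if more than `fails` proxied runs are slower than the direct runs
        by more than `delay_rate` times."""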

        time_proxy_off = []
        with steps.start("Collecting statistics with proxy off"):
            for _ in range(runs):
                with Chrome(grid_server=user) as chrome:
                    chrome.get(host)
                    time = chrome._get_page_loading_time()
                time_proxy_off.append(time)

        time_proxy_on = []
        with steps.start("Collecting statistics with proxy on"):
            for _ in range(runs):
                with Chrome(grid_server=user, proxy_server=proxy) as chrome:
                    chrome.get(host)
                    time = chrome._get_page_loading_time()
                time_proxy_on.append(time)

        with steps.start("Comparing results"):
            rates = [y / x for x, y in zip(time_proxy_off, time_proxy_on)]
            overtime_entries = [i for i in rates if i > delay_rate]

            console_log = log_table_time(
                host=host,
                runs=runs,
                proxyied_times=time_proxy_on,
                direct_times=time_proxy_off,
                avg_success=rates,
            )

            _log.info(console_log)
            if len(overtime_entries) > fails:
                self.failed(
                    f"{len(overtime_entries)} out of {runs} times page loading"
                    " time with proxy on exceeded normal loading time for more than"
                    f" {delay_rate} times",
                    goto=["next_tc"],
                )
Example #7
    def brocken_certs_test(self, user, proxy, host):
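        """Load a host with a broken certificate through the proxy and expect an
        ERR_CERT_AUTHORITY_INVALID error in the browser log."""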

        with Chrome(grid_server=user, proxy_server=proxy) as chrome:
            chrome.get(host)
            stats = chrome.get_stats()

        data = BrowserResponseAnalyzer(stats)
        errors = data.get_browser_errors()
        pass_condition = (
            len(errors) >= 1 and "ERR_CERT_AUTHORITY_INVALID" in errors[0]
        )
        if not pass_condition:
            _log.info(f"Web Browser logs:\n{pformat(stats)}")
            self.failed(
                "Invalid response, no `ERR_CERT_AUTHORITY_INVALID` occurred!")
Example #8
    def get_remote_ip(self, user, proxy, host):
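        """Load the host through the proxy and verify that the browser's remote
        address is the proxy's IP."""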
        proxy_net_ifs = proxy.interfaces.names.pop()
        proxy_ip = proxy.interfaces[proxy_net_ifs].ipv4.ip.compressed

        with Chrome(grid_server=user, proxy_server=proxy) as chrome:
            chrome.get(host)
            stats = chrome.get_stats()

        data = BrowserResponseAnalyzer(stats)
        remote_ip = data.get_remote_ip_port()[0]
        if remote_ip != proxy_ip:
            _log.info(f"Web Brower logs:\n{pformat(stats)}")
            self.failed(
                f"Invalid remote address, expected {proxy_ip}, got {remote_ip}!"
            )
Example #9
    def tls_1_2_handshake_test(self, user, proxy, host):
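        """Load the host with TLS capped at 1.2 and traffic dumping enabled, then
        verify that a TLS 1.2 handshake appears in the captured pcap."""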

        with Chrome(
                grid_server=user,
                proxy_server=proxy,
                chrome_arguments=[
                    "--ssl-version-max=tls1.2",
                ],
                traffic_dump=True,
        ) as chrome:
            chrome.get(host)

        pcap_file = f"{user.name}_tshark.pcap"
        pcap_file = os.path.join(_temp_files_dir, pcap_file)
        pcap_obj = TsharkPcap(pcap_file)

        if pcap_obj.find_packets_in_stream(packet_type="tls1.2")[0] is False:
            self.failed("TLS handshake sequence not found")
Example #10
    def obsolete_tls_test(self, user, proxy, host):
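        """Load a host that still uses TLS 1.0/1.1 through the proxy and expect
        the browser to log the deprecated-TLS warning."""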

        with Chrome(grid_server=user, proxy_server=proxy) as chrome:
            chrome.get(host)
            stats = chrome.get_stats()

        data = BrowserResponseAnalyzer(stats)
        errors = data.get_browser_errors()
        expected_message = (
            f"The connection used to load resources from {host}"
            " used TLS 1.0 or TLS 1.1, which are deprecated and will be disabled"
            " in the future.")
        pass_condition = any(expected_message in error for error in errors)
        if not pass_condition:
            _log.info(f"Web Browser logs:\n{pformat(stats)}")
            self.failed(
                "Invalid response, no `ERR_SSL_OBSOLETE_VERSION` occurred!")
Example #11
    def count_page_resources(self, steps, proxy, user, host, runs, pass_rate):
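        """Load the page `runs` times with the proxy off and on, average the
        request/response counts, and require the proxied averages to reach at
        least `pass_rate` of the direct ones."""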

        direct = []
        with steps.start("Loading page: colecting stats with proxy off"):
            for _ in range(runs):
                with Chrome(grid_server=user) as chrome:
                    chrome.get(host)
                    stats = chrome.get_stats()

                data = BrowserResponseAnalyzer(stats)
                direct.append(
                    [
                        (
                            data.get_requests_statistics(),
                            data.get_response_statistics(),
                        ),
                    ]
                )

        proxyied = []
        with steps.start("Loading page: colecting stats with proxy on"):
            for _ in range(runs):
                with Chrome(grid_server=user, proxy_server=proxy) as chrome:
                    chrome.get(host)
                    stats = chrome.get_stats()

                data = BrowserResponseAnalyzer(stats)
                proxyied.append(
                    [
                        (
                            data.get_requests_statistics(),
                            data.get_response_statistics(),
                        ),
                    ]
                )

        with steps.start("Anylizing results"):

            direct_stats = []
            for entry in zip(*direct):
                mean_req = statistics.mean(stat[0] for stat in entry)
                mean_rsp = statistics.mean(stat[1] for stat in entry)
                direct_stats.append((mean_req, mean_rsp))

            proxyied_stats = []
            for entry in zip(*proxyied):
                mean_req = statistics.mean(stat[0] for stat in entry)
                mean_rsp = statistics.mean(stat[1] for stat in entry)
                proxyied_stats.append((mean_req, mean_rsp))

            rates_req = [
                tup1[0] / tup2[0]
                for tup1, tup2 in zip(proxyied_stats, direct_stats)
                if tup2[0] != 0
            ]
            rates_rsp = [
                tup1[1] / tup2[1]
                for tup1, tup2 in zip(proxyied_stats, direct_stats)
                if tup2[1] != 0
            ]
            pass_condition = all(rate >= pass_rate for rate in rates_req) and all(
                rate >= pass_rate for rate in rates_rsp
            )

            console_log = log_table_resources(
                hosts=(host,),
                runs=runs,
                direct_stats=direct_stats,
                proxyied_stats=proxyied_stats,
                request_avg_success=rates_req,
                response_avg_success=rates_rsp,
            )

            _log.info(console_log)
            if not pass_condition:
                self.failed("To many resources were lost", goto=["next_tc"])