def test_server_exit_before_timeout(self) -> None:
    """TestServerProcess must raise TestServerProcessError when the
    requested server script cannot be started."""
    # Test starting a server whose script does not exist.
    with self.assertRaises(utils.TestServerProcessError):
        utils.TestServerProcess(logger, server="non_existing_server.py")

    # Test starting a server which immediately exits.
    with self.assertRaises(utils.TestServerProcessError):
        utils.TestServerProcess(logger, server="fast_server_exit.py")
def test_simple_https_server_startup(self):
    """Start simple_https_server.py with a valid, a missing, and a
    non-existent certificate file; the server must come up each time."""
    # Normal case: launch the HTTPS server with a valid certificate file.
    valid_cert = os.path.join('ssl_certs', 'ssl_cert.crt')
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[valid_cert])

    # The server must accept connections.
    self.assertTrue(self.can_connect())
    self.server_process_handler.clean()

    # No certificate file provided at all: the server should still start.
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py')

    self.assertTrue(self.can_connect())
    self.server_process_handler.clean()

    # A certificate path that does not exist on disk: the server should
    # still start.  This last handler is not cleaned inside this test.
    missing_cert = os.path.join('ssl_certs', 'non_existing.crt')
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[missing_cert])

    self.assertTrue(self.can_connect())
def test_proxy_server_startup(self):
    """Start proxy_server.py under several argument combinations; the
    proxy must accept connections in every case."""
    # Normal case: no extra arguments.
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='proxy_server.py')

    # The proxy must accept connections.
    self.assertTrue(self.can_connect())
    self.server_process_handler.clean()

    # Start proxy_server in intercept mode, using a valid certificate.
    intercept_cert = os.path.join('ssl_certs', 'ssl_cert.crt')
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='proxy_server.py',
        extra_cmd_args=['intercept', intercept_cert])

    self.assertTrue(self.can_connect())
    self.server_process_handler.clean()

    # Start the proxy with a certificate path that does not exist on
    # disk.  This last handler is not cleaned inside this test.
    missing_cert = os.path.join('ssl_certs', 'non_existing.crt')
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='proxy_server.py',
        extra_cmd_args=[missing_cert])

    self.assertTrue(self.can_connect())
def test_slow_retrieval_server_startup(self):
    """slow_retrieval_server.py must start and accept connections."""
    # Normal case: launch the slow retrieval server.
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server='slow_retrieval_server.py')

    # The server must accept connections.
    self.assertTrue(self.can_connect())
def setUp(self):
    """
    Create a temporary target file and launch a simple HTTP server in the
    current working directory.
    """
    unittest_toolbox.Modified_TestCase.setUp(self)

    # Create a temporary file; its contents are what the tests download.
    working_dir = os.getcwd()
    target_filepath = self.make_temp_data_file(directory=working_dir)
    self.target_fileobj = open(target_filepath, 'r')
    self.file_contents = self.target_fileobj.read()
    self.file_length = len(self.file_contents)

    # Launch a SimpleHTTPServer (serves files in the current dir).
    self.server_process_handler = utils.TestServerProcess(log=logger)

    # Build the URL under which the server exposes the target file.
    target_basename = os.path.basename(target_filepath)
    self.url = (
        'http://' + utils.TEST_HOST_ADDRESS + ':'
        + str(self.server_process_handler.port) + '/' + target_basename)

    # Temporary file that receives the target file chunks written during
    # fetching.
    self.temp_file = tempfile.TemporaryFile()

    # Fetcher under test.
    self.fetcher = tuf.requests_fetcher.RequestsFetcher()
def test_simple_server_startup(self) -> None:
    """The default simple server must start and accept connections."""
    # Normal case: no explicit server script, use the default.
    handler = utils.TestServerProcess(log=logger)

    # Make sure we can connect, then shut the server down.
    self.assertTrue(can_connect(handler.port))
    handler.clean()
def setUp(self):
    """
    Create a temporary target file and launch a simple HTTP server in the
    current working directory.
    """
    unittest_toolbox.Modified_TestCase.setUp(self)

    # Create a temporary file; its contents are what the tests download.
    working_dir = os.getcwd()
    target_filepath = self.make_temp_data_file(directory=working_dir)
    self.target_fileobj = open(target_filepath, 'r')
    self.target_data = self.target_fileobj.read()
    self.target_data_length = len(self.target_data)

    # Launch a SimpleHTTPServer (serves files in the current dir).
    self.server_process_handler = utils.TestServerProcess(log=logger)

    # Build the URL under which the server exposes the target file.
    target_basename = os.path.basename(target_filepath)
    self.url = (
        'http://' + utils.TEST_HOST_ADDRESS + ':'
        + str(self.server_process_handler.port) + '/' + target_basename)

    # Record the MD5 digest of the target data (test fixture only).
    hasher = hashlib.md5()
    hasher.update(self.target_data.encode('utf-8'))
    self.target_hash = {'md5': hasher.hexdigest()}

    # Initialize the default fetcher for the download.
    self.fetcher = tuf.requests_fetcher.RequestsFetcher()
def setUpClass(cls) -> None: cls.tmp_test_root_dir = tempfile.mkdtemp(dir=os.getcwd()) # Launch a SimpleHTTPServer # Test cases will request metadata and target files that have been # pre-generated in 'tuf/tests/repository_data', and are copied to # CWD/tmp_test_root_dir/* cls.server_process_handler = utils.TestServerProcess(log=logger)
def test_cleanup(self):
    """clean() must terminate the subprocess that TestServerProcess
    started."""
    # Normal case: start a simple server, then clean it up.
    handler = utils.TestServerProcess(
        log=logger, server='simple_server.py')
    self.server_process_handler = handler

    handler.clean()

    # Check if the process has successfully been killed.
    self.assertFalse(handler.is_process_running())
def test_read_timeout(self):
    """A server that trickles bytes must trigger SlowRetrievalError.

    Fix: the slow-retrieval server subprocess was only cleaned on the
    success path; if the assertion failed, the subprocess leaked.  The
    clean() call now runs in a finally block.
    """
    # Reduce the read socket timeout to speed up the test while keeping
    # the connect timeout.
    default_socket_timeout = self.fetcher.socket_timeout
    self.fetcher.socket_timeout = (default_socket_timeout, 0.1)

    # Launch a new "slow retrieval" server sending one byte each 40s.
    slow_server_process_handler = utils.TestServerProcess(
        log=logger, server='slow_retrieval_server.py')

    self.url = (f"http://{utils.TEST_HOST_ADDRESS}:"
                f"{slow_server_process_handler.port}/"
                f"{self.rel_target_filepath}")

    try:
        # The first chunk must time out on the 0.1s read timeout.
        with self.assertRaises(exceptions.SlowRetrievalError):
            next(self.fetcher.fetch(self.url))
    finally:
        # Always terminate the slow server, even if the assertion fails,
        # so the subprocess does not outlive the test.
        slow_server_process_handler.clean()
def setUpClass(cls):
    # Create a temporary directory to store the repository, metadata, and
    # target files.  'temporary_directory' must be deleted in
    # TearDownModule() so that temporary files are always removed, even
    # when exceptions occur.
    cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())

    # Launch a SimpleHTTPServer (serves files in the current directory).
    # Test cases will request metadata and target files that have been
    # pre-generated in 'tuf/tests/repository_data', which will be served
    # by the SimpleHTTPServer launched here.  The test cases of this unit
    # test assume the pre-generated metadata files have a specific
    # structure, such as a delegated role 'targets/role1', three target
    # files, five key files, etc.
    cls.server_process_handler = utils.TestServerProcess(log=logger)
def setUpClass(cls) -> None:
    """
    Create a temporary file and launch a simple server in the
    current working directory.
    """
    cls.server_process_handler = utils.TestServerProcess(log=logger)

    # Fixed payload the HTTP server will serve to the client.
    cls.file_contents = b"junk data"
    cls.file_length = len(cls.file_contents)

    # Write the payload to a named file in the current directory so the
    # server can serve it; delete=False keeps it on disk after closing.
    tmp_file = tempfile.NamedTemporaryFile(dir=os.getcwd(), delete=False)
    cls.target_file = tmp_file
    tmp_file.write(cls.file_contents)
    tmp_file.close()

    # URL pointing at the served payload.
    cls.url_prefix = (
        f"http://{utils.TEST_HOST_ADDRESS}:"
        f"{cls.server_process_handler.port}")
    target_filename = os.path.basename(cls.target_file.name)
    cls.url = f"{cls.url_prefix}/{target_filename}"
def setUpClass(cls):
    # Create a temporary directory to store the repository, metadata, and
    # target files.  'temporary_directory' must be deleted in
    # TearDownModule() so that temporary files are always removed, even
    # when exceptions occur.
    cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())

    # Needed because in some tests simple_server.py cannot be found.
    # The reason is that the current working directory
    # has been changed when executing a subprocess.
    cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), "simple_server.py")

    # Launch a SimpleHTTPServer (serves files in the current directory).
    # Test cases will request metadata and target files that have been
    # pre-generated in 'tuf/tests/repository_data', which will be served
    # by the SimpleHTTPServer launched here.
    cls.server_process_handler = utils.TestServerProcess(
        log=logger, server=cls.SIMPLE_SERVER_PATH)
def setUpClass(cls):
    """
    Setup performed before the first test function (TestWithProxies class
    method) runs.  Launch HTTP, HTTPS, and proxy servers in the current
    working directory.

    We'll set up four servers:
      - HTTP server (simple_server.py)
      - HTTPS server (simple_https_server.py)
      - HTTP proxy server (proxy_server.py)
        (that supports HTTP CONNECT to funnel HTTPS connections)
      - HTTPS proxy server (proxy_server.py)
        (trusted by the client to intercept and resign connections)
    """
    unittest_toolbox.Modified_TestCase.setUpClass()

    # proxy_server.py is Python2 only, so these tests cannot run
    # elsewhere.
    if not six.PY2:
        raise NotImplementedError(
            "TestWithProxies only works with Python 2"
            " (proxy_server.py is Python2 only)")

    # Launch a simple HTTP server (serves files in the current dir).
    cls.http_server_handler = utils.TestServerProcess(log=logger)

    # Launch an HTTPS server (serves files in the current dir).
    cls.https_server_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py')

    # Launch an HTTP proxy server derived from inaz2/proxy2.
    # This one is able to handle HTTP CONNECT requests, and so can pass
    # HTTPS requests on to the target server.
    cls.http_proxy_handler = utils.TestServerProcess(
        log=logger, server='proxy_server.py')

    # Note that the HTTP proxy server's address uses http://, regardless
    # of the type of connection used with the target server.
    cls.http_proxy_addr = 'http://127.0.0.1:' + str(
        cls.http_proxy_handler.port)

    # Launch an HTTPS proxy server, also derived from inaz2/proxy2.
    # (An HTTPS proxy performs its own TLS connection with the client and
    # must be trusted by it, and is capable of tampering.)
    # We instruct the proxy server to expect certain certificates from the
    # target server.
    #
    # proxy_server.py arguments:
    #   1st arg: port
    #   2nd arg: whether to intercept (HTTPS proxy) or relay (TCP tunnel
    #     using HTTP CONNECT verb, to facilitate an HTTPS connection
    #     between the client and server which the proxy cannot inspect)
    #   3rd arg: (optional) certificate file for telling the proxy what
    #     target server certs to accept in its HTTPS connection to the
    #     target server.  This is only relevant if the proxy is in
    #     intercept mode.
    good_cert_fpath = os.path.join('ssl_certs', 'ssl_cert.crt')

    cls.https_proxy_handler = utils.TestServerProcess(
        log=logger, server='proxy_server.py',
        extra_cmd_args=['intercept', good_cert_fpath])

    # Note that the HTTPS proxy server's address uses https://, regardless
    # of the type of connection used with the target server.
    cls.https_proxy_addr = 'https://localhost:' + str(
        cls.https_proxy_handler.port)
def setUp(self):
    # We are inheriting from custom class.
    unittest_toolbox.Modified_TestCase.setUp(self)
    self.repository_name = 'test_repository1'

    # Create a temporary directory to store the repository, metadata, and
    # target files.  'temporary_directory' must be deleted in
    # TearDownModule() so that temporary files are always removed, even
    # when exceptions occur.
    self.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())

    # Copy the original repository files provided in the test folder so
    # that any modifications made to repository files are restricted to
    # the copies.  The 'repository_data' directory is expected to exist in
    # 'tuf/tests/'.
    original_repository_files = os.path.join(os.getcwd(), 'repository_data')
    temporary_repository_root = \
        self.make_temp_directory(directory=self.temporary_directory)

    # The original repository, keystore, and client directories will be
    # copied for each test case.
    original_repository = os.path.join(original_repository_files,
        'repository')
    original_client = os.path.join(original_repository_files, 'client')
    original_keystore = os.path.join(original_repository_files, 'keystore')

    # Save references to the often-needed client repository directories.
    # Test cases need these references to access metadata and target
    # files.
    self.repository_directory = \
        os.path.join(temporary_repository_root, 'repository')
    self.client_directory = os.path.join(temporary_repository_root,
        'client')
    self.keystore_directory = os.path.join(temporary_repository_root,
        'keystore')

    # Copy the original 'repository', 'client', and 'keystore' directories
    # to the temporary repository the test cases can use.
    shutil.copytree(original_repository, self.repository_directory)
    shutil.copytree(original_client, self.client_directory)
    shutil.copytree(original_keystore, self.keystore_directory)

    # Produce a longer target file than exists in the other test
    # repository data, to provide for a long-duration slow attack.  Then
    # we'll write new top-level metadata that includes a hash over that
    # file, and provide that metadata to the client as well.

    # The slow retrieval server, in mode 2 (1 byte per second), will only
    # sleep for a total of (target file size) seconds.  Add a target file
    # that contains sufficient number of bytes to trigger a slow retrieval
    # error.  A transfer should not be permitted to take 1 second per byte
    # transferred.  Because this test is currently expected to fail, I'm
    # limiting the size to 10 bytes (10 seconds) to avoid expected testing
    # delays....  Consider increasing again after fix, to, e.g. 400.
    total_bytes = 10

    repository = repo_tool.load_repository(self.repository_directory)

    # Overwrite 'file1.txt' with the longer content described above.
    file1_filepath = os.path.join(self.repository_directory,
        'targets', 'file1.txt')
    with open(file1_filepath, 'wb') as file_object:
        data = 'a' * int(round(total_bytes))
        file_object.write(data.encode('utf-8'))

    # Re-sign the top-level metadata with the test keystore keys so the
    # new, longer target file is covered by valid signatures.
    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
        key_file, 'password')
    key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    snapshot_private = repo_tool.import_ed25519_privatekey_from_file(
        key_file, 'password')
    key_file = os.path.join(self.keystore_directory, 'targets_key')
    targets_private = repo_tool.import_ed25519_privatekey_from_file(
        key_file, 'password')

    repository.targets.load_signing_key(targets_private)
    repository.snapshot.load_signing_key(snapshot_private)
    repository.timestamp.load_signing_key(timestamp_private)

    repository.writeall()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(
        os.path.join(self.repository_directory, 'metadata.staged'),
        os.path.join(self.repository_directory, 'metadata'))

    # Since we've changed the repository metadata in this setup (by
    # lengthening a target file and then writing new metadata), we also
    # have to update the client metadata to get to the expected initial
    # state, where the client knows the right target info (and so expects
    # the right, longer target length).
    # We'll skip using updater.refresh since we don't have a server
    # running, and we'll update the metadata locally, manually.
    shutil.rmtree(os.path.join(
        self.client_directory, self.repository_name, 'metadata', 'current'))
    shutil.copytree(
        os.path.join(self.repository_directory, 'metadata'),
        os.path.join(self.client_directory, self.repository_name,
        'metadata', 'current'))

    # Set the url prefix required by the 'tuf/client/updater.py' updater.
    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
    repository_basepath = self.repository_directory[len(os.getcwd()):]

    self.server_process_handler = utils.TestServerProcess(log=logger,
        server='slow_retrieval_server.py')

    logger.info('Slow Retrieval Server process started.')

    url_prefix = 'http://localhost:' \
        + str(self.server_process_handler.port) + repository_basepath

    # Setting 'tuf.settings.repositories_directory' with the temporary
    # client directory copied from the original repository files.
    tuf.settings.repositories_directory = self.client_directory

    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
        'metadata_path': 'metadata',
        'targets_path': 'targets'}}

    # Create the repository instance.  The test cases will use this client
    # updater to refresh metadata, fetch target files, etc.
    self.repository_updater = updater.Updater(self.repository_name,
        self.repository_mirrors)
def test_simple_server_startup(self):
    """The default simple server must start and accept connections."""
    # Normal case: launch the default server and probe it.
    self.server_process_handler = utils.TestServerProcess(log=logger)

    # The server must accept connections.
    self.assertTrue(self.can_connect())
def setUp(self):
    # Modified_Testcase can handle temp dir removal.
    unittest_toolbox.Modified_TestCase.setUp(self)
    self.temporary_directory = self.make_temp_directory(
        directory=os.getcwd())

    # Copy the original repository files provided in the test folder so
    # that any modifications made to repository files are restricted to
    # the copies.  The 'repository_data' directory is expected to exist in
    # 'tuf/tests/'.
    original_repository_files = os.path.join(os.getcwd(), 'repository_data')

    self.temporary_repository_root = tempfile.mkdtemp(
        dir=self.temporary_directory)

    # The original repository, keystore, and client directories will be
    # copied for each test case.
    original_repository = os.path.join(original_repository_files,
        'repository')
    original_client = os.path.join(original_repository_files, 'client',
        'test_repository1')
    original_keystore = os.path.join(original_repository_files, 'keystore')
    original_map_file = os.path.join(original_repository_files, 'map.json')

    # Save references to the often-needed client repository directories.
    # Test cases need these references to access metadata and target
    # files.
    self.repository_directory = os.path.join(
        self.temporary_repository_root, 'repository_server1')
    self.repository_directory2 = os.path.join(
        self.temporary_repository_root, 'repository_server2')

    # Setting 'tuf.settings.repositories_directory' with the temporary
    # client directory copied from the original repository files.
    tuf.settings.repositories_directory = self.temporary_repository_root

    self.repository_name = 'test_repository1'
    self.repository_name2 = 'test_repository2'

    self.client_directory = os.path.join(self.temporary_repository_root,
        self.repository_name)
    self.client_directory2 = os.path.join(self.temporary_repository_root,
        self.repository_name2)

    self.keystore_directory = os.path.join(self.temporary_repository_root,
        'keystore')
    self.map_file = os.path.join(self.client_directory, 'map.json')
    self.map_file2 = os.path.join(self.client_directory2, 'map.json')

    # Copy the original 'repository', 'client', and 'keystore' directories
    # to the temporary repository the test cases can use.
    shutil.copytree(original_repository, self.repository_directory)
    shutil.copytree(original_repository, self.repository_directory2)
    shutil.copytree(original_client, self.client_directory)
    shutil.copytree(original_client, self.client_directory2)
    shutil.copyfile(original_map_file, self.map_file)
    shutil.copyfile(original_map_file, self.map_file2)
    shutil.copytree(original_keystore, self.keystore_directory)

    # Launch a SimpleHTTPServer (serves files in the current directory).
    # Test cases will request metadata and target files that have been
    # pre-generated in 'tuf/tests/repository_data', which will be served
    # by the SimpleHTTPServer launched here.  The test cases of this unit
    # test assume the pre-generated metadata files have a specific
    # structure, such as a delegated role 'targets/role1', three target
    # files, five key files, etc.

    # Needed because in some tests simple_server.py cannot be found.
    # The reason is that the current working directory
    # has been changed when executing a subprocess.
    SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py')

    # Creates a subprocess running a server.
    self.server_process_handler = utils.TestServerProcess(
        log=logger, server=SIMPLE_SERVER_PATH,
        popen_cwd=self.repository_directory)

    logger.debug('Server process started.')

    # Creates a subprocess running a second server.
    self.server_process_handler2 = utils.TestServerProcess(
        log=logger, server=SIMPLE_SERVER_PATH,
        popen_cwd=self.repository_directory2)

    logger.debug('Server process 2 started.')

    url_prefix = \
        'http://' + utils.TEST_HOST_ADDRESS + ':' + \
        str(self.server_process_handler.port)
    url_prefix2 = \
        'http://' + utils.TEST_HOST_ADDRESS + ':' + \
        str(self.server_process_handler2.port)

    # Mirror configurations handed to the two Updater instances below.
    self.repository_mirrors = {
        'mirror1': {
            'url_prefix': url_prefix,
            'metadata_path': 'metadata',
            'targets_path': 'targets'
        }
    }

    self.repository_mirrors2 = {
        'mirror1': {
            'url_prefix': url_prefix2,
            'metadata_path': 'metadata',
            'targets_path': 'targets'
        }
    }

    # Create the repository instances.  The test cases will use these
    # client updaters to refresh metadata, fetch target files, etc.
    self.repository_updater = updater.Updater(self.repository_name,
        self.repository_mirrors)
    self.repository_updater2 = updater.Updater(self.repository_name2,
        self.repository_mirrors2)
def test_https_connection(self):
    """
    Try various HTTPS downloads using trusted and untrusted certificates
    with and without the correct hostname listed in the SSL certificate.
    """
    # Make a temporary file to be served to the client.
    current_directory = os.getcwd()
    target_filepath = self.make_temp_data_file(directory=current_directory)

    with open(target_filepath, 'r') as target_file_object:
        target_data_length = len(target_file_object.read())

    # These cert files provide various test cases:
    # good:    A valid cert from an older generation of test_download.py
    #          tests.
    # good2:   A valid cert made simultaneous to the bad certs below, with
    #          the same settings otherwise, tested here in case the
    #          difference between the way the new bad certs and the old
    #          good cert were generated turns out to matter at some point.
    # bad:     An otherwise-valid cert with the wrong hostname.  The good
    #          certs list "localhost", but this lists "notmyhostname".
    # expired: An otherwise-valid cert but which is expired (no valid
    #          dates exist, fwiw: startdate > enddate).
    good_cert_fname = os.path.join('ssl_certs', 'ssl_cert.crt')
    good2_cert_fname = os.path.join('ssl_certs', 'ssl_cert_2.crt')
    bad_cert_fname = os.path.join('ssl_certs', 'ssl_cert_wronghost.crt')
    expired_cert_fname = os.path.join('ssl_certs', 'ssl_cert_expired.crt')

    # Launch four HTTPS servers (serve files in the current dir).
    # 1: we expect to operate correctly
    # 2: also good; uses a slightly different cert (controls for the cert
    #    generation method used for the next two, in case it comes to
    #    matter)
    # 3: run with an HTTPS certificate with an unexpected hostname
    # 4: run with an HTTPS certificate that is expired
    # Be sure to offset from the port used in setUp to avoid collision.
    good_https_server_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[good_cert_fname])

    good2_https_server_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[good2_cert_fname])

    bad_https_server_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[bad_cert_fname])

    expd_https_server_handler = utils.TestServerProcess(
        log=logger, server='simple_https_server.py',
        extra_cmd_args=[expired_cert_fname])

    suffix = '/' + os.path.basename(target_filepath)
    good_https_url = 'https://localhost:' \
        + str(good_https_server_handler.port) + suffix
    good2_https_url = 'https://localhost:' \
        + str(good2_https_server_handler.port) + suffix
    bad_https_url = 'https://localhost:' \
        + str(bad_https_server_handler.port) + suffix
    expired_https_url = 'https://localhost:' \
        + str(expd_https_server_handler.port) + suffix

    # Download the target file using an HTTPS connection.
    # Use try-finally solely to ensure that the server processes are
    # killed.
    try:
        # Trust the certfile that happens to use a different hostname than
        # we will expect.
        os.environ['REQUESTS_CA_BUNDLE'] = bad_cert_fname
        # Clear sessions to ensure that the certificate we just specified
        # is used.
        # TODO: Confirm necessity of this session clearing and lay out
        # mechanics.
        self.fetcher._sessions = {}

        # Try connecting to the server process with the bad cert while
        # trusting the bad cert.  Expect failure because even though we
        # trust it, the hostname we're connecting to does not match the
        # hostname in the cert.
        logger.info('Trying HTTPS download of target file: ' + bad_https_url)
        with warnings.catch_warnings():
            # We're ok with a slightly fishy localhost cert
            warnings.filterwarnings(
                'ignore', category=urllib3.exceptions.SubjectAltNameWarning)

            with self.assertRaises(requests.exceptions.SSLError):
                download.safe_download(bad_https_url, target_data_length,
                    self.fetcher)
            with self.assertRaises(requests.exceptions.SSLError):
                download.unsafe_download(bad_https_url, target_data_length,
                    self.fetcher)

        # Try connecting to the server processes with the good certs while
        # not trusting the good certs (trusting the bad cert instead).
        # Expect failure because even though the server's cert file is
        # otherwise OK, we don't trust it.
        logger.info('Trying HTTPS download of target file: ' + good_https_url)
        with self.assertRaises(requests.exceptions.SSLError):
            download.safe_download(good_https_url, target_data_length,
                self.fetcher)
        with self.assertRaises(requests.exceptions.SSLError):
            download.unsafe_download(good_https_url, target_data_length,
                self.fetcher)

        logger.info('Trying HTTPS download of target file: ' + good2_https_url)
        with self.assertRaises(requests.exceptions.SSLError):
            download.safe_download(good2_https_url, target_data_length,
                self.fetcher)
        with self.assertRaises(requests.exceptions.SSLError):
            download.unsafe_download(good2_https_url, target_data_length,
                self.fetcher)

        # Configure environment to now trust the certfile that is expired.
        os.environ['REQUESTS_CA_BUNDLE'] = expired_cert_fname
        # Clear sessions to ensure that the certificate we just specified
        # is used.
        # TODO: Confirm necessity of this session clearing and lay out
        # mechanics.
        self.fetcher._sessions = {}

        # Try connecting to the server process with the expired cert while
        # trusting the expired cert.  Expect failure because even though
        # we trust it, it is expired.
        logger.info('Trying HTTPS download of target file: '
            + expired_https_url)
        with self.assertRaises(requests.exceptions.SSLError):
            download.safe_download(expired_https_url, target_data_length,
                self.fetcher)
        with self.assertRaises(requests.exceptions.SSLError):
            download.unsafe_download(expired_https_url, target_data_length,
                self.fetcher)

        # Try connecting to the server processes with the good certs while
        # trusting the appropriate good certs.  Expect success.
        # TODO: expand testing to switch expected certificates back and
        # forth a bit more while clearing / not clearing sessions.
        os.environ['REQUESTS_CA_BUNDLE'] = good_cert_fname
        # Clear sessions to ensure that the certificate we just specified
        # is used.
        # TODO: Confirm necessity of this session clearing and lay out
        # mechanics.
        self.fetcher._sessions = {}
        logger.info('Trying HTTPS download of target file: ' + good_https_url)
        download.safe_download(good_https_url, target_data_length,
            self.fetcher).close()
        download.unsafe_download(good_https_url, target_data_length,
            self.fetcher).close()

        os.environ['REQUESTS_CA_BUNDLE'] = good2_cert_fname
        # Clear sessions to ensure that the certificate we just specified
        # is used.
        # TODO: Confirm necessity of this session clearing and lay out
        # mechanics.
        self.fetcher._sessions = {}
        logger.info('Trying HTTPS download of target file: ' + good2_https_url)
        download.safe_download(good2_https_url, target_data_length,
            self.fetcher).close()
        download.unsafe_download(good2_https_url, target_data_length,
            self.fetcher).close()

    finally:
        for proc_handler in [
            good_https_server_handler,
            good2_https_server_handler,
            bad_https_server_handler,
            expd_https_server_handler]:
            # Cleans the resources and flush the logged lines (if any).
            proc_handler.clean()
def setUpClass(cls):
    """Spawn one shared SimpleHTTPServer serving the current directory."""
    cls.server_process_handler = utils.TestServerProcess(log=logger)