def test_file_scheme(self):
     """
     In this test, we're making sure that file:// URLs work and is reported as succeeded
     when the path is valid.
     """
     # Test
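     # download a single file:// request with one concurrent worker; the listener
     # collects the success/failure reports asserted on below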
     config = DownloaderConfig(max_concurrent=1)
     downloader = HTTPCurlDownloader(config)
     request_list = self._file_download_requests()[:1]
     listener = AggregatingEventListener()
     downloader.event_listener = listener
     downloader.download(request_list)
     # Verify
     self.assertEqual(len(listener.succeeded_reports), 1)
     self.assertEqual(len(listener.failed_reports), 0)
     self.assertTrue(os.path.exists(request_list[0].destination))
     # verify the downloaded file matches the original
     path_in = urlparse.urlparse(request_list[0].url).path
     with open(path_in) as fp:
         original_content = fp.read()
     with open(request_list[0].destination) as fp:
         destination_content = fp.read()
     self.assertEqual(original_content, destination_content)

def test_file_scheme_with_invalid_path(self):
     """
     In this test, we're making sure that file:// URLs are reported as failed
     when the path is invalid.
     """
     # Test
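     # same single-request setup as test_file_scheme, but the request URL is made
     # to point at a path that does not exist, so the download must fail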
     config = DownloaderConfig(max_concurrent=1)
     downloader = HTTPCurlDownloader(config)
     request_list = self._file_download_requests()[:1]
     request_list[0].url += 'BADPATHBADPATHBADPATH'  # append junk so the path no longer exists
     listener = AggregatingEventListener()
     downloader.event_listener = listener
     downloader.download(request_list)
     # Verify
     self.assertEqual(len(listener.succeeded_reports), 0)
     self.assertEqual(len(listener.failed_reports), 1)
     report = listener.failed_reports[0]
     self.assertEqual(report.bytes_downloaded, 0)
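     # file:// transfers carry no HTTP status, so the reported response code is
     # expected to be 0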
     self.assertEqual(report.error_report['response_code'], 0)