async def on_request_successful(self, entry):
    """Validate a successful response against the expectations of its request.

    Checks, in order: non-empty body, expected hash, expected mime type
    and expected status code, all read from ``entry.arguments``.

    Raises:
        RejectRequest: if the body is empty, or the hash does not match
            and neither the mime-type nor the status-code expectation can
            vouch for the response.
    """
    # An empty body can never satisfy any expectation.
    # (Also rejects a None body instead of crashing on len(None).)
    if not entry.response.raw:
        raise RejectRequest(
            "Response received didn't match the expectation for the request.")
    if "expected_hash" not in entry.arguments:
        return
    if self._response_hash_matches_expected_hash(entry):
        return
    # Hash mismatch: fall back to the mime-type expectation, if any.
    if "expected_mime_type" in entry.arguments:
        expected_mime_type = entry.arguments["expected_mime_type"]
        mime_type_match = self._mime_type_match_response(
            expected_mime_type, entry.response)
        if mime_type_match and self._is_mime_type_significant(expected_mime_type):
            # A significant mime-type match is enough to accept the response.
            return
        elif not mime_type_match:
            raise RejectRequest(
                "Response received didn't match the expectation for the request.")
        # Mime type matched but is not significant enough on its own:
        # fall through to the status-code expectation.
    if "expected_status_code" in entry.arguments:
        if not self._status_code_match(entry):
            raise RejectRequest(
                "Response received didn't match the expectation for the request.")
async def before_request(self, entry):
    """Filter the request URL through the whitelist and blacklist.

    When a whitelist (``allowed_filters``) exists it takes precedence:
    the URL must match it, and the blacklist is not consulted.

    Raises:
        RejectRequest: if the URL fails the active filter.
    """
    url = entry.request.url
    if self.allowed_filters:
        if not self._match_found(url, self.allowed_filters):
            raise RejectRequest("Request URL %s is not in URL whitelist" % url)
        return
    if self.forbidden_filters and self._match_found(url, self.forbidden_filters):
        raise RejectRequest("Request URL %s is in URL blacklist" % url)
async def _follow_redirects(self, entry):
    """Follow the redirect chain of *entry*, mutating it in place.

    Each followed redirect is appended to ``entry.result.redirects`` and
    ``entry.response`` is replaced by the latest response.

    Raises:
        RejectRequest: if more than ``self.max_redirect`` redirects are
            followed, or a redirect response carries no location header.
    """
    status_code = entry.response.code
    redirect_count = 0
    while status_code in valid_redirects:
        if redirect_count > self.max_redirect:
            raise RejectRequest("Max redirect limit reached")
        # Keep the try narrow: only the header lookup should map KeyError
        # to "missing location". Previously the try also covered the
        # awaited _perform_request call, so a KeyError raised anywhere in
        # it was misreported as a missing location header.
        try:
            url = entry.response.headers["location"]
        except KeyError:
            raise RejectRequest(
                "Missing location field in header of redirect")
        last_url = entry.result.redirects[-1].request.url
        _entry = await self._perform_request(url, base_url=last_url)
        entry.result.redirects.append(_entry)
        entry.response = _entry.response
        status_code = entry.response.code
        redirect_count += 1
async def after_response(self, entry):
    """Reject or stop the request based on flags set on the entry result.

    A positive string match always wins; otherwise the error-behavior and
    soft-404 flags (absent flags count as False) abort the request.

    Raises:
        StopRequest: when error behavior was flagged on the result.
        RejectRequest: when the result was flagged as a soft 404.
    """
    result = entry.result
    if result.string_match:
        # We found what we were looking for, this entry has to be valid.
        return
    if getattr(result, "error_behavior", False):
        raise StopRequest("Error behavior detected.")
    if getattr(result, "soft404", False):
        raise RejectRequest("Soft 404 detected")
async def test_fetcher_awaiting_requests_dont_increase_timeout_count_on_reject_request(
        self, loop):
    """A rejected request must not be counted as a timeout by the fetcher."""
    self._setup_async_test(loop)
    # Make every performed request raise RejectRequest.
    rejecting_perform = make_mocked_coro(raise_exception=RejectRequest())
    self.hammertime.request_engine.perform = rejecting_perform
    pending_requests = self.fetcher.request_files(
        self.plugin_key, self.files_to_fetch)
    await asyncio.wait_for(pending_requests, None, loop=loop)
    self.assertEqual(self.fetcher.timeouts, 0)
async def test_fetch_paths_add_valid_path_to_database(self, output_result, loop):
    """Only paths whose requests were not rejected should be kept as valid."""
    valid = ["/a", "b", "/c", "/1", "/2", "/3"]
    invalid = ["/d", "/e", "/4", "/5"]
    paths = valid + invalid
    self.async_setup(loop)
    # Reject every request targeting an invalid path, so only the valid
    # ones survive the fetch.
    self.hammertime.heuristics.add(RaiseForPaths(invalid, RejectRequest("Invalid path")))
    await self.directory_fetcher.fetch_paths(create_json_data(paths))
    # NOTE(review): `valid_paths` is not defined in this method — it is
    # presumably a module-level fixture or collector populated during the
    # fetch; confirm its origin before modifying this test.
    self.assertEqual(len(valid), len(valid_paths))
    for path in valid_paths:
        self.assertIn(path["url"], valid)
        self.assertNotIn(path["url"], invalid)
async def test_fetch_paths_output_found_directory(self, output_result, loop):
    """Every found directory must be reported through the output, in any order."""
    found = ["/%d" % i for i in range(10)]
    not_found = ["/1%d" % i for i in range(10)]
    paths = found + not_found
    self.async_setup(loop)
    self.hammertime.heuristics.add(
        RaiseForPaths(not_found, RejectRequest("404 not found")))
    await self.directory_fetcher.fetch_paths(create_json_data(paths))
    # Build the expected output calls from the found paths only.
    expected_calls = [
        call(message, data=data)
        for message, data in (self.expected_output(path)
                              for path in create_json_data(found))
    ]
    output_result.assert_has_calls(expected_calls, any_order=True)
async def after_response(self, entry):
    """Reject responses containing the BIG-IP ASM block-page signature.

    Raises:
        RejectRequest: when ``self.bigip_asm`` appears in the raw body.
    """
    body = entry.response.raw
    if self.bigip_asm not in body:
        return
    raise RejectRequest("BIG-IP ASM Triggered")
async def after_response(self, entry):
    """Reject responses that look like junk answers to query-string requests.

    Requests without a query string are left untouched. Otherwise the
    response is compared (via simhash) against a sampled baseline for
    that URL.

    Raises:
        RejectRequest: when the response matches the junk sample.
    """
    parsed_url = urlparse(entry.request.url)
    if not parsed_url.query:
        return
    sample_simhash = await self._get_sample(parsed_url)
    if self._match(entry.response, sample_simhash):
        raise RejectRequest("Junk query response")