def test_parse_gethash(self):
    """parse_gethash should return the set of 4-byte prefixes in the body."""
    header = b"4:32\n"
    prefixes = (b"\xdd\x01J\xf5", b"\xedk8\xd9", b"\x13\x0e?F",
                b"o\x85\x0eF", b"\xd2\x1b\x95\x11", b"\x99\xd5:\x18",
                b"\xef)\xee\x93", b"AaN\xaf")
    payload = header + b"".join(prefixes)
    parsed = parse_gethash(dummy(payload, path="/gethash"))
    self.assertEqual(parsed, set(prefixes))
    # Make sure no repeats of issue #32 pop up: test with a single hash
    # prefix
    parsed = parse_gethash(dummy(b"4:4\n\xdd\x01J\xf5", path="/gethash"))
    self.assertEqual(parsed, {b"\xdd\x01J\xf5"})
def test_parse_gethash(self):
    """parse_gethash should return the set of 4-byte prefixes in the body.

    NOTE(review): this variant uses str payloads (pre-bytes port) —
    presumably the Python 2 edition of the bytes-based test; confirm
    which one the suite actually keeps.
    """
    header = "4:32\n"
    prefixes = ("\xdd\x01J\xf5", "\xedk8\xd9", "\x13\x0e?F",
                "o\x85\x0eF", "\xd2\x1b\x95\x11", "\x99\xd5:\x18",
                "\xef)\xee\x93", "AaN\xaf")
    payload = header + "".join(prefixes)
    parsed = parse_gethash(dummy(payload, path="/gethash"))
    self.assertEqual(parsed, set(prefixes))
    # Make sure no repeats of issue #32 pop up: test with a single hash
    # prefix
    parsed = parse_gethash(dummy("4:4\n\xdd\x01J\xf5", path="/gethash"))
    self.assertEqual(parsed, {"\xdd\x01J\xf5"})
def gethash_view(request):
    """Serve full-length hashes for the prefixes in a gethash request.

    Returns 400 on an unparseable request, 204 when no prefix matches,
    and otherwise a 200 whose body is, per list/chunk:
    "<list>:<chunk>:<byte length>\\n" followed by the raw hashes.
    """
    try:
        parsed = parse_gethash(request)
    except ParseError as e:
        annotate_request(request, "shavar.gethash.unknown.format", 1)
        raise HTTPBadRequest(str(e))

    full = lookup_prefixes(request, parsed)

    # Nothing found? Return a 204
    if not full:
        return HTTPNoContent()

    # FIXME MAC handling
    sections = []
    for lname, chunk_data in full.items():
        for chunk_num, hashes in sorted(chunk_data.items()):
            blob = b''.join(hashes)
            header = '{list_name}:{chunk_number}:{data_len}\n' \
                .format(list_name=lname, chunk_number=chunk_num,
                        data_len=len(blob)).encode()
            sections.append(header)
            sections.append(blob)
    return HTTPOk(content_type="application/octet-stream",
                  body=b''.join(sections))
def gethash_view(request):
    """Serve a gethash request, rejecting unparseable payloads with a 400.

    Raises:
        HTTPBadRequest: when the request body is not a valid gethash
            payload; the request is also annotated for metrics first.
    """
    try:
        parsed = parse_gethash(request)
    # Fixed: `except ParseError, e:` is Python-2-only syntax (SyntaxError
    # under Python 3); use the `as` form, matching the other gethash_view
    # variant in this file.
    except ParseError as e:
        annotate_request(request, "shavar.gethash.unknown.format", 1)
        raise HTTPBadRequest(str(e))
def test_parse_gethash_errors(self):
    """Every malformed gethash payload must raise ParseError with the
    exact message the client-facing code emits."""
    cases = [
        # Too short
        ("4:\n",
         "Improbably small or large gethash header size: 2"),
        # Too long
        ("4:" + "1" * 256 + "\n",
         "Improbably small or large gethash header size: 258"),
        # Invalid sizes
        ("steve:4\n",
         'Invalid prefix or payload size: "steve:4\n"'),
        ("4:steve\n",
         'Invalid prefix or payload size: "4:steve\n"'),
        # Improper payload length
        ("4:17\n",
         'Payload length invalid: "17"'),
        # Ditto but with a payload shorter than the prefix
        ("8:4\n",
         'Payload length invalid: "4"'),
        # It seems some clients are hitting the gethash endpoint with a
        # request intended for the downloads endpoint
        ("mozpub-track-digest256;a:1423242002",
         "Improbably small or large gethash header size: -1"),
        # See https://github.com/mozilla-services/shavar/issues/67
        ("1:10000000000\n",
         "Hash read mismatch: client claimed 10000000000, "
         "read 0"),
        # Stated length of payload is longer than actual payload. Only 7
        # bytes instead of 8 here.
        ("4:8\n\xdd\x01J\xf5\xedk8",
         "Hash read mismatch: client claimed 2, read 1"),
        # Extraneous trailing data
        ("4:8\n\xdd\x01J\xf5\xedk8\xd9\x13\x0e?F",
         "Oversized payload!"),
    ]
    for payload, expected in cases:
        with self.assertRaises(ParseError) as ecm:
            parse_gethash(dummy(payload))
        self.assertEqual(str(ecm.exception), expected)
def test_parse_gethash_errors(self):
    """Every malformed gethash payload must raise ParseError with the
    exact message the client-facing code emits."""
    cases = [
        # Too short
        ("4:\n",
         "Improbably small or large gethash header size: 2"),
        # Too long
        ("4:" + "1" * 256 + "\n",
         "Improbably small or large gethash header size: 258"),
        # Invalid sizes
        ("steve:4\n",
         'Invalid prefix or payload size: "steve:4\n"'),
        ("4:steve\n",
         'Invalid prefix or payload size: "4:steve\n"'),
        # Improper payload length
        ("4:17\n",
         'Payload length invalid: "17"'),
        # Ditto but with a payload shorter than the prefix
        ("8:4\n",
         'Payload length invalid: "4"'),
        # It seems some clients are hitting the gethash endpoint with a
        # request intended for the downloads endpoint
        ("mozpub-track-digest256;a:1423242002",
         "Improbably small or large gethash header size: -1"),
        # See https://github.com/mozilla-services/shavar/issues/67
        ("1:10000000000\n",
         "Hash read mismatch: client claimed 10000000000, "
         "read 0"),
        # Stated length of payload is longer than actual payload. Only 7
        # bytes instead of 8 here.
        (b"4:8\n\xdd\x01J\xf5\xedk8",
         "Hash read mismatch: client claimed 2, read 1"),
    ]
    for payload, expected in cases:
        with self.assertRaises(ParseError) as ecm:
            parse_gethash(dummy(payload))
        self.assertEqual(str(ecm.exception), expected)