Example #1
    def test_2_gethash_view(self):
        from shavar.views import gethash_view
        prefixes = (b"\xd0\xe1\x96\xa0"
                    b"\xfdm~\xb5"
                    b"v\x9c\xf8i"
                    b"\t\xa8\xb90")
        body = "4:{payload_len}\n".format(payload_len=len(prefixes))
        body = body.encode() + prefixes
        header_1 = b"moz-abp-shavar:1:64\n"
        chunk_1 = [hashes['moz'], hashes['goog']]
        header_2 = b"moz-abp-shavar:2:64\n"
        chunk_2 = [hashes['hub'], hashes['py']]
        request = dummy(body, path='/gethash')
        response = gethash_view(request)
        actual = response.body

        self.assertEqual(actual[:len(header_1)], header_1)
        actual = actual.replace(header_1, b'')
        chunk_len = len(chunk_1[0] + chunk_1[1])
        self.assertIn(chunk_1[0], actual[:chunk_len])
        self.assertIn(chunk_1[1], actual[:chunk_len])
        actual = actual[chunk_len:]
        chunk_len = len(chunk_2[0] + chunk_2[1])
        self.assertEqual(actual[:len(header_2)], header_2)
        actual = actual.replace(header_2, b'')
        self.assertIn(chunk_2[0], actual[:chunk_len])
        self.assertIn(chunk_2[1], actual[:chunk_len])
        # Make sure we return a 204 No Content for a prefix that doesn't map
        # to a hash we're serving
        request = dummy("4:4\n\x00\x00\x00\x00", path='/gethash')
        response = gethash_view(request)
        self.assertEqual(response.code, 204)
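
The assert-then-strip pattern above leans on bytes.replace, which removes every occurrence of the header rather than just the leading one. A small sketch of a hypothetical consume_prefix helper (not part of shavar) that only strips from the front:

    def consume_prefix(buf, expected):
        """Assert that buf starts with expected and return the remainder.

        Unlike bytes.replace(), this strips only the leading occurrence, so
        a header that happens to repeat later in the payload is left alone.
        """
        assert buf[:len(expected)] == expected
        return buf[len(expected):]

    # Usage against a fabricated two-chunk body:
    body = (b"moz-abp-shavar:1:64\n" + b"\x00" * 64 +
            b"moz-abp-shavar:2:64\n" + b"\x01" * 64)
    rest = consume_prefix(body, b"moz-abp-shavar:1:64\n")
    rest = consume_prefix(rest[64:], b"moz-abp-shavar:2:64\n")
    assert rest == b"\x01" * 64
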
Example #2
    def test_2_downloads_view(self):
        from shavar.views import downloads_view

        req = "mozpub-track-digest256;a:1-2,7,9-14,16:s:6"

        downloads_resp_header = (b"n:1800\n" b"i:mozpub-track-digest256\n")
        chunks_to_add = b"ad:1,2,7,9,10,11,12,13,14,16\n"
        list_header = b"a:17:32:64\n"
        hash_1 = (b"\xd0\xe1\x96\xa0\xc2]5\xdd\n\x84Y<\xba\xe0\xf3\x833\xaaXR"
                  b"\x996DN\xa2dS\xea\xb2\x8d\xfc\x86")
        hash_2 = (b"\xfdm~\xb5\xf82\x1f\x8a\xden)\\;RW\xcaK\xb0\x90V1Z"
                  b"\x0bz\xe3?\xf6\x00\x81g\xcd\x97")
        expected = downloads_resp_header + chunks_to_add + list_header

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(expected, response.body[:len(expected)])
        # In the Chunk class the hash attribute is a set of hashes. Since a
        # set is an unordered collection, the order of
        # `b''.join(chunk.hashes)` in format_downloads will vary.
        self.assertIn(hash_1, response.body[len(expected):])
        self.assertIn(hash_2, response.body[len(expected):])

        # New downloads request means there should be no adddel or subdel
        # entries in the response even if not_publishing_deltas is enabled
        # for the list.
        req = "mozpub-track-digest256;"
        expected = downloads_resp_header + list_header

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(expected, response.body[:len(expected)])
        self.assertIn(hash_1, response.body[len(expected):])
        self.assertIn(hash_2, response.body[len(expected):])
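
The chunk hashes are checked with assertIn rather than against a fixed concatenation because, as the comment above notes, Chunk.hashes is a set and its iteration order varies. A minimal sketch of an order-insensitive comparison, assuming 32-byte digest256 hashes as in the a:17:32:64 header above:

    HASH_SIZE = 32  # digest256 hashes, per the a:17:32:64 list header above

    def hash_blob_to_set(blob, size=HASH_SIZE):
        """Split a concatenated run of fixed-size hashes into a set."""
        assert len(blob) % size == 0, "truncated hash payload"
        return {blob[i:i + size] for i in range(0, len(blob), size)}

    # Concatenation order no longer matters:
    h1, h2 = b"\x01" * 32, b"\x02" * 32
    assert hash_blob_to_set(h1 + h2) == hash_blob_to_set(h2 + h1) == {h1, h2}
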
Example #3
    def test_2_downloads_view(self):
        from shavar.views import downloads_view

        req = "mozpub-track-digest256;a:1-2,7,9-14,16:s:6"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "ad:1,2,7,9,10,11,12,13,14,16\n" \
                   "a:17:32:64\n" \
                   "\xd0\xe1\x96\xa0\xc2]5\xdd\n\x84Y<\xba\xe0\xf3\x833\xaaX" \
                   "R\x996DN\xa2dS\xea\xb2\x8d\xfc\x86\xfdm~\xb5\xf82\x1f" \
                   "\x8a\xden)\\;RW\xcaK\xb0\x90V1Z\x0bz\xe3?\xf6\x00\x81g" \
                   "\xcd\x97"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)

        # New downloads request means there should be no adddel or subdel
        # entries in the response even if not_publishing_deltas is enabled
        # for the list.
        req = "mozpub-track-digest256;"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "a:17:32:64\n" \
                   "\xd0\xe1\x96\xa0\xc2]5\xdd\n\x84Y<\xba\xe0\xf3\x833\xaaX" \
                   "R\x996DN\xa2dS\xea\xb2\x8d\xfc\x86\xfdm~\xb5\xf82\x1f" \
                   "\x8a\xden)\\;RW\xcaK\xb0\x90V1Z\x0bz\xe3?\xf6\x00\x81g" \
                   "\xcd\x97"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)
Example #4
    def test_2_downloads_view(self):
        from shavar.views import downloads_view

        req = "mozpub-track-digest256;a:1-2,7,9-14,16:s:6"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "ad:1,2,7,9,10,11,12,13,14,16\n" \
                   "a:17:32:64\n" \
                   "\xd0\xe1\x96\xa0\xc2]5\xdd\n\x84Y<\xba\xe0\xf3\x833\xaaX" \
                   "R\x996DN\xa2dS\xea\xb2\x8d\xfc\x86\xfdm~\xb5\xf82\x1f" \
                   "\x8a\xden)\\;RW\xcaK\xb0\x90V1Z\x0bz\xe3?\xf6\x00\x81g" \
                   "\xcd\x97"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)

        # New downloads request means there should be no adddel or subdel
        # entries in the response even if not_publishing_deltas is enabled
        # for the list.
        req = "mozpub-track-digest256;"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "a:17:32:64\n" \
                   "\xd0\xe1\x96\xa0\xc2]5\xdd\n\x84Y<\xba\xe0\xf3\x833\xaaX" \
                   "R\x996DN\xa2dS\xea\xb2\x8d\xfc\x86\xfdm~\xb5\xf82\x1f" \
                   "\x8a\xden)\\;RW\xcaK\xb0\x90V1Z\x0bz\xe3?\xf6\x00\x81g" \
                   "\xcd\x97"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)
Example #5
    def test_parse_download_errors(self):
        self.assertRaises(LimitExceededError, parse_downloads,
                          dummy("mozpub-track-digest256;a:1-20000"))

        self.assertRaises(LimitExceededError, parse_downloads,
                          dummy("mozpub-track-digest256;a:1-1002"))

        self.assertRaises(ParseError, parse_downloads,
                          dummy("mozpub-track-digest256"))
Example #6
 def test_parse_gethash_errors(self):
     # Too short
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:\n"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: 2")
     # Too long
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:" + "1" * 256 + "\n"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: 258")
     # Invalid sizes
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("steve:4\n"))
     self.assertEqual(str(ecm.exception),
                      'Invalid prefix or payload size: "steve:4\n"')
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:steve\n"))
     self.assertEqual(str(ecm.exception),
                      'Invalid prefix or payload size: "4:steve\n"')
     # Improper payload length
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:17\n"))
     self.assertEqual(str(ecm.exception),
                      'Payload length invalid: "17"')
     # Ditto but with a payload shorter than the prefix
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("8:4\n"))
     self.assertEqual(str(ecm.exception),
                      'Payload length invalid: "4"')
     # It seems some clients are hitting the gethash endpoint with a
     # request intended for the downloads endpoint
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("mozpub-track-digest256;a:1423242002"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: -1")
     # See https://github.com/mozilla-services/shavar/issues/67
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("1:10000000000\n"))
     self.assertEqual(str(ecm.exception),
                      "Hash read mismatch: client claimed 10000000000, "
                      "read 0")
     # Stated length of payload is longer than actual payload.  Only 7
     # bytes instead of 8 here.
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:8\n\xdd\x01J\xf5\xedk8"))
     self.assertEqual(str(ecm.exception),
                      "Hash read mismatch: client claimed 2, read 1")
     # Extraneous trailing data
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:8\n\xdd\x01J\xf5\xedk8\xd9\x13\x0e?F"))
     self.assertEqual(str(ecm.exception), "Oversized payload!")
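
Most of the error cases above come down to validating the prefixsize:payloadlen header before any hashes are read; the last few compare the claimed payload length against what was actually read. The sketch below mirrors only the header checks with a stand-in ParseError; it is not shavar's parse_gethash, and the exact bounds are assumptions:

    class ParseError(Exception):
        """Stand-in for shavar's exception of the same name."""

    def check_gethash_header(body):
        """Validate a 'prefixsize:payloadlen' gethash header."""
        eol = body.find(b"\n")
        if eol < 3 or eol > 32:  # assumed bounds, not shavar's exact limits
            raise ParseError(
                "Improbably small or large gethash header size: %d" % eol)
        try:
            prefix_size, payload_len = map(int, body[:eol].split(b":", 1))
        except ValueError:
            raise ParseError('Invalid prefix or payload size: "%s"'
                             % body[:eol + 1].decode("latin-1"))
        if payload_len < prefix_size or payload_len % prefix_size:
            raise ParseError('Payload length invalid: "%d"' % payload_len)
        return prefix_size, payload_len

    assert check_gethash_header(b"4:8\n\xdd\x01J\xf5\xedk8\xd9") == (4, 8)
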
Example #7
 def test_0_list_view(self):
     from shavar.views import list_view
     request = dummy('', path='/list')
     response = list_view(request)
     self.assertEqual(response.text,
                      "moz-abp-shavar\nmozpub-track-digest256\n"
                      "testpub-bananas-digest256\n")
Example #8
 def test_3_s3_sources_in_list_instantiation(self):
     # Basically the same tests as in test_0_get_list and test_2_delta above
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     for list_ in ('mozpub-track-digest256', 'testpub-bananas-digest256'):
         sblist = get_list(dumdum, list_)
         self.assertIsInstance(sblist, Digest256)
         self.assertEqual(sblist.delta([1, 2], [3]), ([4, 5], [6]))
Example #9
 def test_3_s3_sources_in_list_instantiation(self):
     # Basically the same tests as in test_0_get_list and test_2_delta above
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     for list_ in ('mozpub-track-digest256', 'testpub-bananas-digest256'):
         sblist = get_list(dumdum, list_)
         self.assertIsInstance(sblist, Digest256)
         self.assertEqual(sblist.delta([1, 2], [3]), ([4, 5], [6]))
Example #10
 def test_0_list_view(self):
     from shavar.views import list_view
     request = dummy('', path='/list')
     response = list_view(request)
     self.assertEqual(
         response.text, "moz-abp-shavar\nmozpub-track-digest256\n"
         "testpub-bananas-digest256\n")
Example #11
 def test_parse_gethash(self):
     h = b"4:32\n"
     d = (b"\xdd\x01J\xf5", b"\xedk8\xd9", b"\x13\x0e?F", b"o\x85\x0eF",
          b"\xd2\x1b\x95\x11", b"\x99\xd5:\x18", b"\xef)\xee\x93",
          b"AaN\xaf")
     s = b''
     s += h
     for i in d:
         s += i
     p = parse_gethash(dummy(s, path="/gethash"))
     self.assertEqual(p, set(d))
     # Make sure no repeats of issue #32 pop up: test with a single hash
     # prefix
     s = b"4:4\n\xdd\x01J\xf5"
     p = parse_gethash(dummy(s, path="/gethash"))
     self.assertEqual(p, set([b"\xdd\x01J\xf5"]))
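
Once the header is accepted, the payload is just payload_len // prefix_size fixed-width prefixes, and the single-prefix case (the issue #32 regression above) should still come back as a one-element set. A minimal sketch, not shavar's implementation:

    def split_prefixes(payload, prefix_size=4):
        """Split a gethash payload into a set of fixed-width prefixes."""
        return {payload[i:i + prefix_size]
                for i in range(0, len(payload), prefix_size)}

    header, payload = b"4:8\n\xdd\x01J\xf5\xedk8\xd9".split(b"\n", 1)
    assert split_prefixes(payload) == {b"\xdd\x01J\xf5", b"\xedk8\xd9"}
    # A single prefix still comes back as a one-element set:
    assert split_prefixes(b"\xdd\x01J\xf5") == {b"\xdd\x01J\xf5"}
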
Example #12
 def test_1_test_version_view(self):
     from shavar.views.version import version_view
     request = dummy('', path="/__version__")
     response = version_view(request)
     # compare against version.json in the top level dir
     with open('version.json', 'rb') as f:
         self.assertEqual(response.body, f.read())
Example #13
 def test_1_test_version_view(self):
     from shavar.views.version import version_view
     request = dummy('', path="/__version__")
     response = version_view(request)
     # compare against version.json in the top level dir
     with open('version.json', 'rb') as f:
         self.assertEqual(response.body, f.read())
Example #14
 def test_5_data_refresh(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     d = dumdum.registry.settings.get('shavar.refresh_check_interval')
     self.assertEqual(d, 29)
     l = dumdum.registry['shavar.serving']['moz-abp-shavar']
     self.assertEqual(l._source.interval, 29)
     l = dumdum.registry['shavar.serving']['mozpub-track-digest256']
     self.assertEqual(l._source.interval, 23)
Example #15
 def test_5_data_refresh(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     d = dumdum.registry.settings.get('shavar.refresh_check_interval')
     self.assertEqual(d, 29)
     abp = dumdum.registry['shavar.serving']['moz-abp-shavar']
     self.assertEqual(abp._source.interval, 29)
     track = dumdum.registry['shavar.serving']['mozpub-track-digest256']
     self.assertEqual(track._source.interval, 23)
Example #16
    def test_1_downloads_view(self):
        from shavar.views import downloads_view
        req = "moz-abp-shavar;a:1-2,5:s:3\n"
        req += "mozpub-track-digest256;a:1-2:s:6"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "a:4:32:64\n" \
                   "\xd9\xa7\xffA\xe0\xd8\x92\xbe\x17\xb3\xc3\x04\xf3fA\xf4:" \
                   "\xc1\x1d$\xbe\x13\xa6\x19\xd2\x14\x02DW\xc8\x02\xf2" \
                   "\xdaw\xc4\xd1\xe3\xf8\x10\xbaz\x0b\x83&l\x7f\xaeI\xba" \
                   "\xcf\x0b\xe0\xd2\x86F>k68\xee\xe7\xea+\xeb" \
                   "a:5:32:64\n" \
                   "\x82\x7f2\x0e\x94\xc2\xaf,\xc9\xc7d\x9d\x9e\xc9\t\x06<J" \
                   "\xf5\xe7\xebsh\x86\n3\xfe\xe0\xab\xdc?\xb1" \
                   "%\x85\xf3\xc9\xc0?j\xf2\x9f\xeeC\x90_`\x10j\xc8\x1c\x9d" \
                   "\xe5\xea\xa5\xd1,\xf0\x92\xa0\x93\x17o\x82\x83" \
                   "s:3:32:32\n" \
                   "\t\xa8\xb90\xc8\xb7\x9e|1>^t\x1e\x1dY\xc3\x9a\xe9\x1b" \
                   "\xc1\xf1\x0c\xde\xfah\xb4{\xf7u\x19\xbeW" \
                   "i:moz-abp-shavar\n" \
                   "u:https://tracking.services.mozilla.com/moz-abp-shavar/4" \
                   "\n" \
                   "u:https://tracking.services.mozilla.com/moz-abp-shavar/6\n"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)

        # Make sure redirects on an empty list are working correctly
        baseurl = "tracking.services.mozilla.com/test-redir-digest256"
        req = "test-redir-digest256;"
        expected = "n:1800\n" \
                   "i:test-redir-digest256\n" \
                   "u:{baseurl}/1\n" \
                   "u:{baseurl}/2\n" \
                   "u:{baseurl}/4\n" \
                   "u:{baseurl}/5\n" \
                   "u:{baseurl}/3\n" \
                   "u:{baseurl}/6\n".format(baseurl=baseurl)

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)
Example #17
    def test_1_downloads_view(self):
        from shavar.views import downloads_view
        req = "moz-abp-shavar;a:1-2,5:s:3\n"
        req += "mozpub-track-digest256;a:1-2:s:6"
        expected = "n:1800\n" \
                   "i:mozpub-track-digest256\n" \
                   "a:4:32:64\n" \
                   "\xd9\xa7\xffA\xe0\xd8\x92\xbe\x17\xb3\xc3\x04\xf3fA\xf4:" \
                   "\xc1\x1d$\xbe\x13\xa6\x19\xd2\x14\x02DW\xc8\x02\xf2" \
                   "\xdaw\xc4\xd1\xe3\xf8\x10\xbaz\x0b\x83&l\x7f\xaeI\xba" \
                   "\xcf\x0b\xe0\xd2\x86F>k68\xee\xe7\xea+\xeb" \
                   "a:5:32:64\n" \
                   "\x82\x7f2\x0e\x94\xc2\xaf,\xc9\xc7d\x9d\x9e\xc9\t\x06<J" \
                   "\xf5\xe7\xebsh\x86\n3\xfe\xe0\xab\xdc?\xb1" \
                   "%\x85\xf3\xc9\xc0?j\xf2\x9f\xeeC\x90_`\x10j\xc8\x1c\x9d" \
                   "\xe5\xea\xa5\xd1,\xf0\x92\xa0\x93\x17o\x82\x83" \
                   "s:3:32:32\n" \
                   "\t\xa8\xb90\xc8\xb7\x9e|1>^t\x1e\x1dY\xc3\x9a\xe9\x1b" \
                   "\xc1\xf1\x0c\xde\xfah\xb4{\xf7u\x19\xbeW" \
                   "i:moz-abp-shavar\n" \
                   "u:https://tracking.services.mozilla.com/moz-abp-shavar/4" \
                   "\n" \
                   "u:https://tracking.services.mozilla.com/moz-abp-shavar/6\n"

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)

        # Make sure redirects on an empty list are working correctly
        baseurl = "tracking.services.mozilla.com/test-redir-digest256"
        req = "test-redir-digest256;"
        expected = "n:1800\n" \
                   "i:test-redir-digest256\n" \
                   "u:{baseurl}/1\n" \
                   "u:{baseurl}/2\n" \
                   "u:{baseurl}/4\n" \
                   "u:{baseurl}/5\n" \
                   "u:{baseurl}/3\n" \
                   "u:{baseurl}/6\n".format(baseurl=baseurl)

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected)
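
The second half of the test expects one u: line per chunk, built from the list's redirect URL base plus the chunk number. A sketch of that formatting step (the helper name and signature are invented for illustration):

    def format_redirects(base_url, chunk_numbers):
        """Emit one 'u:<base>/<chunk>' line per chunk, in the order given."""
        return "".join("u:%s/%d\n" % (base_url.rstrip("/"), n)
                       for n in chunk_numbers)

    baseurl = "tracking.services.mozilla.com/test-redir-digest256"
    assert format_redirects(baseurl, [1, 2]) == (
        "u:%s/1\nu:%s/2\n" % (baseurl, baseurl))
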
Example #18
 def test_parse_gethash_errors(self):
     # Too short
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:\n"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: 2")
     # Too long
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:" + "1" * 256 + "\n"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: 258")
     # Invalid sizes
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("steve:4\n"))
     self.assertEqual(str(ecm.exception),
                      'Invalid prefix or payload size: "steve:4\n"')
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:steve\n"))
     self.assertEqual(str(ecm.exception),
                      'Invalid prefix or payload size: "4:steve\n"')
     # Improper payload length
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("4:17\n"))
     self.assertEqual(str(ecm.exception), 'Payload length invalid: "17"')
     # Ditto but with a payload shorter than the prefix
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("8:4\n"))
     self.assertEqual(str(ecm.exception), 'Payload length invalid: "4"')
     # It seems some clients are hitting the gethash endpoint with a
     # request intended for the downloads endpoint
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("mozpub-track-digest256;a:1423242002"))
     self.assertEqual(str(ecm.exception),
                      "Improbably small or large gethash header size: -1")
     # See https://github.com/mozilla-services/shavar/issues/67
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy("1:10000000000\n"))
     self.assertEqual(
         str(ecm.exception),
         "Hash read mismatch: client claimed 10000000000, "
         "read 0")
     # Stated length of payload is longer than actual payload.  Only 7
     # bytes instead of 8 here.
     with self.assertRaises(ParseError) as ecm:
         parse_gethash(dummy(b"4:8\n\xdd\x01J\xf5\xedk8"))
     self.assertEqual(str(ecm.exception),
                      "Hash read mismatch: client claimed 2, read 1")
Example #19
 def test_2_gethash_view(self):
     from shavar.views import gethash_view
     prefixes = "\xd0\xe1\x96\xa0" \
                "\xfdm~\xb5" \
                "v\x9c\xf8i" \
                "\t\xa8\xb90"
     body = "4:{payload_len}\n{payload}".format(payload=prefixes,
                                                payload_len=len(prefixes))
     expected = "moz-abp-shavar:1:64\n{0}{1}" \
                "moz-abp-shavar:2:64\n{2}{3}".format(hashes['moz'],
                                                     hashes['goog'],
                                                     hashes['hub'],
                                                     hashes['py'])
     request = dummy(body, path='/gethash')
     response = gethash_view(request)
     self.assertEqual(response.body, expected)
     # Make sure we return a 204 No Content for a prefix that doesn't map
     # to a hash we're serving
     request = dummy("4:4\n\x00\x00\x00\x00", path='/gethash')
     response = gethash_view(request)
     self.assertEqual(response.code, 204)
Example #20
 def test_2_gethash_view(self):
     from shavar.views import gethash_view
     prefixes = "\xd0\xe1\x96\xa0" \
                "\xfdm~\xb5" \
                "v\x9c\xf8i" \
                "\t\xa8\xb90"
     body = "4:{payload_len}\n{payload}".format(payload=prefixes,
                                                payload_len=len(prefixes))
     expected = "moz-abp-shavar:1:64\n{0}{1}" \
                "moz-abp-shavar:2:64\n{2}{3}".format(hashes['moz'],
                                                     hashes['goog'],
                                                     hashes['hub'],
                                                     hashes['py'])
     request = dummy(body, path='/gethash')
     response = gethash_view(request)
     self.assertEqual(response.body, expected)
     # Make sure we return a 204 No Content for a prefix that doesn't map
     # to a hash we're serving
     request = dummy("4:4\n\x00\x00\x00\x00", path='/gethash')
     response = gethash_view(request)
     self.assertEqual(response.code, 204)
Example #21
 def test_parse_gethash(self):
     h = "4:32\n"
     d = ("\xdd\x01J\xf5",
          "\xedk8\xd9",
          "\x13\x0e?F",
          "o\x85\x0eF",
          "\xd2\x1b\x95\x11",
          "\x99\xd5:\x18",
          "\xef)\xee\x93",
          "AaN\xaf")
     s = ''
     s += h
     for i in d:
         s += i
     p = parse_gethash(dummy(s, path="/gethash"))
     self.assertEqual(p, set(d))
     # Make sure no repeats of issue #32 pop up: test with a single hash
     # prefix
     s = "4:4\n\xdd\x01J\xf5"
     p = parse_gethash(dummy(s, path="/gethash"))
     self.assertEqual(p, set(["\xdd\x01J\xf5"]))
Example #22
 def test_2_delta(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     sblist = get_list(dumdum, 'mozpub-track-digest256')
     # By way of explanation:
     #
     # In the data file.
     #   Chunks 1, 2, 4, and 5 are "add" chunks
     #   Chunks 3 and 6 are "sub" chunks
     #
     # So delta([1, 2], [3]) should return
     #    ([4, 5], [6])
     self.assertEqual(sblist.delta([1, 2], [3]), ([4, 5], [6]))
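
The comment spells out the contract: with add chunks 1, 2, 4, 5 and sub chunks 3, 6 in the data file, delta() returns whichever of those the client did not claim. A minimal sketch of that set difference, not the Digest256 implementation itself:

    def delta(server_adds, server_subs, claimed_adds, claimed_subs):
        """Return the (adds, subs) on the server but not claimed."""
        new_adds = sorted(set(server_adds) - set(claimed_adds))
        new_subs = sorted(set(server_subs) - set(claimed_subs))
        return new_adds, new_subs

    # Mirrors the fixture above: adds {1, 2, 4, 5}, subs {3, 6}, client
    # claims adds [1, 2] and subs [3].
    assert delta([1, 2, 4, 5], [3, 6], [1, 2], [3]) == ([4, 5], [6])
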
Example #23
 def test_2_delta(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     sblist = get_list(dumdum, 'mozpub-track-digest256')
     # By way of explanation:
     #
     # In the data file.
     #   Chunks 1, 2, 4, and 5 are "add" chunks
     #   Chunks 3 and 6 are "sub" chunks
     #
     # So delta([1, 2], [3]) should return
     #    ([4, 5], [6])
     self.assertEqual(sblist.delta([1, 2], [3]), ([4, 5], [6]))
Example #24
 def test_1_add_versioned_lists_to_registry(self):
     list_name = 'test-track-digest256'
     # sample settings from /tests/lists_served_s3/test-track-digest256
     settings = {
         'type': 'digest256',
         'source': 's3+file://tracking/delta_chunk_source',
         'redirect_url_base': 'https://tracking.services.mozilla.com/',
     }
     serving = {
         list_name: dummy(body='').registry['shavar.serving'][list_name],
     }
     ver_lists = {
         list_name: [],
     }
     type_ = 'digest256'
     shavar_prod_lists_branches = [{'name': '69.0'}]
     add_versioned_lists_to_registry(
         settings, serving, ver_lists, type_, list_name,
         shavar_prod_lists_branches
     )
     self.assertIn(list_name, serving)
     self.assertIn('69.0-' + list_name, serving)
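
The assertions check that, besides the plain list name, the registry gains one key per shavar_prod_lists branch of the form '<branch>-<list name>'. A sketch of just that key construction (the real add_versioned_lists_to_registry presumably does more than this):

    def versioned_keys(list_name, branches):
        """Plain registry key plus one '<branch>-<name>' key per branch."""
        return [list_name] + ["%s-%s" % (b["name"], list_name)
                              for b in branches]

    assert versioned_keys("test-track-digest256", [{"name": "69.0"}]) == [
        "test-track-digest256",
        "69.0-test-track-digest256",
    ]
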
Example #25
 def test_9_get_list_version_not_specified(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     sblist, list_ver = get_list(dumdum, 'mozpub-track-digest256')
     self.assertIsNone(list_ver)
Example #26
 def test_2_get_list_list_not_served(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     self.assertRaises(
         MissingListDataError, get_list, dumdum, 'this-list-dne'
     )
Example #27
    def test_parse_download(self):
        """
        Test bodies taken from
        https://developers.google.com/safe-browsing/developers_guide_v2
        """
        # empty list
        p = parse_downloads(dummy("acme-malware-shavar;"))
        d = Downloads()
        d.append(DownloadsListInfo("acme-malware-shavar"))
        self.assertEqual(p, d)

        # empty list w/ MAC
        p = parse_downloads(dummy("acme-malware-shavar;mac"))
        d = Downloads()
        d.append(DownloadsListInfo("acme-malware-shavar", wants_mac=True))
        self.assertEqual(p, d)

        # with size
        p = parse_downloads(dummy("s;200\nacme-malware-shavar;"))
        d = Downloads(200)
        d.append(DownloadsListInfo("acme-malware-shavar"))
        self.assertEqual(p, d)

        # with chunks
        p = parse_downloads(dummy("googpub-phish-shavar;a:1,2,3,4,5"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        self.assertEqual(p, d)

        # chunks w/ MAC
        p = parse_downloads(dummy("googpub-phish-shavar;a:1,2,3:mac"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar", wants_mac=True)
        d.append(dli)
        dli.add_range_claim('a', 1, 3)
        self.assertEqual(p, d)

        # chunks w/ ranges
        p = parse_downloads(dummy("googpub-phish-shavar;a:1-5,10,12"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        dli.add_claim('a', 10)
        dli.add_claim('a', 12)
        self.assertEqual(p, d)

        # with add & subtract chunks
        p = parse_downloads(dummy("googpub-phish-shavar;a:1-5,10:s:3-8"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        dli.add_claim('a', 10)
        dli.add_range_claim('s', 3, 8)
        self.assertEqual(p, d)

        # with add & subtract chunks out of order
        p = parse_downloads(dummy("googpub-phish-shavar;a:3-5,1,10"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 3, 5)
        dli.add_claim('a', 1)
        dli.add_claim('a', 10)
        self.assertEqual(p, d)

        # with multiple lists
        s = "googpub-phish-shavar;a:1-3,5:s:4-5\n"
        s += "acme-white-shavar;a:1-7:s:1-2"
        p = parse_downloads(dummy(s))

        d = Downloads()
        dli0 = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli0)
        dli0.add_range_claim('a', 1, 3)
        dli0.add_claim('a', 5)
        dli0.add_range_claim('s', 4, 5)

        dli1 = DownloadsListInfo("acme-white-shavar")
        d.append(dli1)
        dli1.add_range_claim('a', 1, 7)
        dli1.add_range_claim('s', 1, 2)
        self.assertEqual(p, d)

        # with multiple lists, at least one empty
        # See https://github.com/mozilla-services/shavar/issues/56
        s = "googpub-phish-shavar;\n"
        s += "acme-white-shavar;a:1-7:s:1-2"
        p = parse_downloads(dummy(s))

        d = Downloads()
        dli0 = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli0)

        dli1 = DownloadsListInfo("acme-white-shavar")
        d.append(dli1)
        dli1.add_range_claim('a', 1, 7)
        dli1.add_range_claim('s', 1, 2)
        self.assertEqual(p, d)
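
Each request line follows the Safe Browsing v2 grammar exercised above: a list name, optional a:/s: claims whose comma-separated items are single chunks or start-end ranges, and an optional mac flag. A small sketch that expands one claim string into explicit chunk numbers (for illustration only; parse_downloads builds Downloads/DownloadsListInfo objects instead):

    def expand_claims(claim):
        """Expand e.g. 'a:1-5,10:s:3-8' into {'a': [...], 's': [...]}."""
        chunks = {"a": [], "s": []}
        parts = claim.split(":")
        i = 0
        while i < len(parts):
            kind = parts[i]
            if kind in chunks and i + 1 < len(parts):
                for item in parts[i + 1].split(","):
                    if "-" in item:
                        low, high = map(int, item.split("-", 1))
                        chunks[kind].extend(range(low, high + 1))
                    else:
                        chunks[kind].append(int(item))
                i += 2
            else:
                i += 1  # skip flags such as 'mac'
        return chunks

    assert expand_claims("a:1-5,10:s:3-8") == {"a": [1, 2, 3, 4, 5, 10],
                                               "s": [3, 4, 5, 6, 7, 8]}
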
Example #28
 def test_1_lookup_prefixes(self):
     dumdum = dummy(body='4:4\n%s' % hashes['goog'][:4], path='/gethash')
     prefixes = lookup_prefixes(dumdum, [self.hg[:4]])
     self.assertEqual(prefixes, {'moz-abp-shavar': {17: [hashes['goog']]}})
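
lookup_prefixes maps each requested 4-byte prefix back to the full hashes being served, keyed by list name and then by the chunk that carries them (chunk 17 here). A sketch of that lookup over an invented in-memory structure:

    def lookup(serving, prefixes):
        """Map list name -> {chunk: [hashes whose 4-byte prefix matches]}."""
        found = {}
        for list_name, chunks in serving.items():
            for chunk_number, full_hashes in chunks.items():
                hits = [h for h in full_hashes if h[:4] in prefixes]
                if hits:
                    found.setdefault(list_name, {})[chunk_number] = hits
        return found

    goog = b"\xfdm~\xb5" + b"\x00" * 28  # stand-in 32-byte hash
    serving = {"moz-abp-shavar": {17: [goog]}}
    assert lookup(serving, {goog[:4]}) == {"moz-abp-shavar": {17: [goog]}}
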
Example #29
    def test_1_downloads_view(self):
        from shavar.views import downloads_view
        req = "moz-abp-shavar;a:1-2,5:s:3\n"
        req += "mozpub-track-digest256;a:1-2:s:6"

        # expected response
        n_header = b"n:1800\n"
        abp_i_header = b"i:moz-abp-shavar\n"
        abp_chunk_download_urls = [
            b"u:https://tracking.services.mozilla.com/moz-abp-shavar/4\n",
            b"u:https://tracking.services.mozilla.com/moz-abp-shavar/6\n"
        ]
        mozpub_i_header = b"i:mozpub-track-digest256\n"
        # chunk 4
        mozpub_chunk_4_list_header = b'a:4:32:64\n'
        mozpub_chunk_4_hashes = [
            (b'\xd9\xa7\xffA\xe0\xd8\x92\xbe\x17\xb3\xc3\x04\xf3fA\xf4:\xc1'
             b'\x1d$\xbe\x13\xa6\x19\xd2\x14\x02DW\xc8\x02\xf2'),
            (b'\xdaw\xc4\xd1\xe3\xf8\x10\xbaz\x0b\x83&l\x7f\xaeI\xba\xcf\x0b'
             b'\xe0\xd2\x86F>k68\xee\xe7\xea+\xeb')
        ]
        # chunk 5
        mozpub_chunk_5_list_header = b"a:5:32:64\n"
        mozpub_chunk_5_hashes = [
            (b'\x82\x7f2\x0e\x94\xc2\xaf,\xc9\xc7d\x9d\x9e\xc9\t\x06<J\xf5\xe7'
             b'\xebsh\x86\n3\xfe\xe0\xab\xdc?\xb1'),
            (b'%\x85\xf3\xc9\xc0?j\xf2\x9f\xeeC\x90_`\x10j\xc8\x1c\x9d\xe5\xea'
             b'\xa5\xd1,\xf0\x92\xa0\x93\x17o\x82\x83')
        ]
        # chunk 3
        mozpub_chunk_3_list_header = b"s:3:32:32\n"
        mozpub_sub_hash_1 = (
            b'\t\xa8\xb90\xc8\xb7\x9e|1>^t\x1e\x1dY\xc3\x9a\xe9\x1b\xc1\xf1'
            b'\x0c\xde\xfah\xb4{\xf7u\x19\xbeW'
        )

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        actual = response.body
        self.assertEqual(actual[:len(n_header)], n_header)
        actual = actual.replace(n_header, b'')
        self.assertEqual(actual[:len(abp_i_header)], abp_i_header)
        actual = actual.replace(abp_i_header, b'')
        urls_len = len(abp_chunk_download_urls[0] + abp_chunk_download_urls[1])
        self.assertIn(abp_chunk_download_urls[0], actual[:urls_len])
        self.assertIn(abp_chunk_download_urls[1], actual[:urls_len])
        actual = actual[urls_len:]

        self.assertEqual(actual[:len(mozpub_i_header)], mozpub_i_header)
        actual = actual.replace(mozpub_i_header, b'')
        self.assertEqual(actual[:len(mozpub_chunk_4_list_header)],
                         mozpub_chunk_4_list_header)
        actual = actual.replace(mozpub_chunk_4_list_header, b'')
        chunk_len = len(mozpub_chunk_4_hashes[0] + mozpub_chunk_4_hashes[1])
        self.assertIn(mozpub_chunk_4_hashes[0], actual[:chunk_len])
        self.assertIn(mozpub_chunk_4_hashes[1], actual[:chunk_len])
        actual = actual[chunk_len:]
        self.assertEqual(actual[:len(mozpub_chunk_5_list_header)],
                         mozpub_chunk_5_list_header)
        actual = actual.replace(mozpub_chunk_5_list_header, b'')
        chunk_len = len(mozpub_chunk_5_hashes[0] + mozpub_chunk_5_hashes[1])
        self.assertIn(mozpub_chunk_5_hashes[0], actual[:chunk_len])
        self.assertIn(mozpub_chunk_5_hashes[1], actual[:chunk_len])
        actual = actual[chunk_len:]
        self.assertEqual(actual,
                         mozpub_chunk_3_list_header + mozpub_sub_hash_1)

        # Make sure redirects on an empty list are working correctly
        baseurl = "tracking.services.mozilla.com/test-redir-digest256"
        req = "test-redir-digest256;"
        expected = "n:1800\n" \
                   "i:test-redir-digest256\n" \
                   "u:{baseurl}/1\n" \
                   "u:{baseurl}/2\n" \
                   "u:{baseurl}/4\n" \
                   "u:{baseurl}/5\n" \
                   "u:{baseurl}/3\n" \
                   "u:{baseurl}/6\n".format(baseurl=baseurl)

        request = dummy(req, path='/downloads')
        response = downloads_view(request)
        self.assertEqual(response.body, expected.encode())
Example #30
 def test_0_get_list(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     sblist = get_list(dumdum, 'mozpub-track-digest256')
     self.assertIsInstance(sblist, Digest256)
Example #31
 def test_1_lookup_prefixes(self):
     dumdum = dummy(body='4:4\n%s' % hashes['goog'][:4], path='/gethash')
     prefixes = lookup_prefixes(dumdum, [self.hg[:4]])
     self.assertEqual(prefixes, {'moz-abp-shavar': {17: [hashes['goog']]}})
Example #32
 def test_0_get_list(self):
     dumdum = dummy(body='4:4\n%s' % self.hg[:4], path='/gethash')
     sblist = get_list(dumdum, 'mozpub-track-digest256')
     self.assertIsInstance(sblist, Digest256)
Example #33
    def test_parse_download(self):
        """
        Test bodies taken from
        https://developers.google.com/safe-browsing/developers_guide_v2
        """
        # empty list
        p = parse_downloads(dummy("acme-malware-shavar;"))
        d = Downloads()
        d.append(DownloadsListInfo("acme-malware-shavar"))
        self.assertEqual(p, d)

        # empty list w/ MAC
        p = parse_downloads(dummy("acme-malware-shavar;mac"))
        d = Downloads()
        d.append(DownloadsListInfo("acme-malware-shavar", wants_mac=True))
        self.assertEqual(p, d)

        # with size
        p = parse_downloads(dummy("s;200\nacme-malware-shavar;"))
        d = Downloads(200)
        d.append(DownloadsListInfo("acme-malware-shavar"))
        self.assertEqual(p, d)

        # with chunks
        p = parse_downloads(dummy("googpub-phish-shavar;a:1,2,3,4,5"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        self.assertEqual(p, d)

        # chunks w/ MAC
        p = parse_downloads(dummy("googpub-phish-shavar;a:1,2,3:mac"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar", wants_mac=True)
        d.append(dli)
        dli.add_range_claim('a', 1, 3)
        self.assertEqual(p, d)

        # chunks w/ ranges
        p = parse_downloads(dummy("googpub-phish-shavar;a:1-5,10,12"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        dli.add_claim('a', 10)
        dli.add_claim('a', 12)
        self.assertEqual(p, d)

        # with add & subtract chunks
        p = parse_downloads(dummy("googpub-phish-shavar;a:1-5,10:s:3-8"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 1, 5)
        dli.add_claim('a', 10)
        dli.add_range_claim('s', 3, 8)
        self.assertEqual(p, d)

        # with add & subtract chunks out of order
        p = parse_downloads(dummy("googpub-phish-shavar;a:3-5,1,10"))
        d = Downloads()
        dli = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli)
        dli.add_range_claim('a', 3, 5)
        dli.add_claim('a', 1)
        dli.add_claim('a', 10)
        self.assertEqual(p, d)

        # with multiple lists
        s = "googpub-phish-shavar;a:1-3,5:s:4-5\n"
        s += "acme-white-shavar;a:1-7:s:1-2"
        p = parse_downloads(dummy(s))

        d = Downloads()
        dli0 = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli0)
        dli0.add_range_claim('a', 1, 3)
        dli0.add_claim('a', 5)
        dli0.add_range_claim('s', 4, 5)

        dli1 = DownloadsListInfo("acme-white-shavar")
        d.append(dli1)
        dli1.add_range_claim('a', 1, 7)
        dli1.add_range_claim('s', 1, 2)
        self.assertEqual(p, d)

        # with multiple lists, at least one empty
        # See https://github.com/mozilla-services/shavar/issues/56
        s = "googpub-phish-shavar;\n"
        s += "acme-white-shavar;a:1-7:s:1-2"
        p = parse_downloads(dummy(s))

        d = Downloads()
        dli0 = DownloadsListInfo("googpub-phish-shavar")
        d.append(dli0)

        dli1 = DownloadsListInfo("acme-white-shavar")
        d.append(dli1)
        dli1.add_range_claim('a', 1, 7)
        dli1.add_range_claim('s', 1, 2)
        self.assertEqual(p, d)
Example #34
    def test_parse_download_errors(self):
        self.assertRaises(LimitExceededError, parse_downloads,
                          dummy("mozpub-track-digest256;a:1-20000"))

        self.assertRaises(LimitExceededError, parse_downloads,
                          dummy("mozpub-track-digest256;a:1-1002"))