Esempio n. 1
0
    def setUp(self):
        """Start the S3 mock and seed it with the fixtures the tests read."""
        self.mock = mock_s3()
        self.mock.start()

        # Seed the mocked S3 service.
        conn = boto.connect_s3()

        # Single-object bucket backing the s3+file source.
        bucket = conn.create_bucket(self.bucket_name)
        key = Key(bucket)
        key.name = self.key_name
        with open(test_file(self.key_name), 'rb') as fh:
            key.set_contents_from_file(fh)

        # Directory-style bucket backing the s3+dir source.
        bucket = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            key = Key(bucket)
            key.name = posixpath.join(self.dir_list_name, fname)
            fixture = test_file(posixpath.join('delta_dir_source', fname))
            with open(fixture, 'rb') as fh:
                key.set_contents_from_file(fh)

        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Esempio n. 2
0
 def test_parse_dir_source(self):
     """Parse index.json both directly and via a basedir-relative path.

     Fix: both file objects were opened without ever being closed; use
     context managers so the handles are released deterministically.
     """
     with open(test_file('delta_dir_source/index.json'), 'rb') as f:
         p = parse_dir_source(f)
     self.assertEqual(p, DELTA_RESULT)
     # Test with the use of basedir
     with open(test_file('index.json')) as f:
         p = parse_dir_source(f)
     self.assertEqual(p, DELTA_RESULT)
Esempio n. 3
0
    def setUp(self):
        """Start the S3 mock, seed fixture buckets, and stub the GitHub API.

        Fix: the lists_served fixture files were opened with a bare
        ``open()`` and never closed, leaking a handle per file; they are
        now opened with a context manager in ``'rb'`` mode, consistent
        with every other fixture upload in this method.
        """
        self.mock = mock_s3()
        self.mock.start()

        #
        # Populate the data in mock S3
        #
        conn = boto.connect_s3()

        # s3+dir lists_served bucket first
        b = conn.create_bucket(self.lists_served_bucket_name)
        for fname in ['mozpub-track-digest256.ini',
                      'testpub-bananas-digest256.ini']:
            k = Key(b)
            k.name = fname
            ini_path = os.path.join(
                os.path.dirname(__file__), 'lists_served_s3', fname)
            with open(ini_path, 'rb') as f:
                k.set_contents_from_file(f)

        # s3+file contents
        b = conn.create_bucket(self.bucket_name)
        k = Key(b)
        k.name = self.key_name
        with open(test_file(self.key_name), 'rb') as f:
            k.set_contents_from_file(f)

        # s3+dir keys and contents
        b = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            k = Key(b)
            k.name = posixpath.join(self.dir_list_name, fname)
            with open(test_file(posixpath.join('delta_dir_source', fname)),
                      'rb') as f:
                k.set_contents_from_file(f)

        # Stub the GitHub branches API so list-version discovery works
        # without network access.
        responses.start()
        GITHUB_API_URL = 'https://api.github.com'
        SHAVAR_PROD_LISTS_BRANCHES_PATH = (
            '/repos/mozilla-services/shavar-prod-lists/branches'
        )
        resp_body = """
            [{
              "name": "69.0",
              "commit": {
                "sha": "35665559e9e4a85c12bb8211b5f9217fbb96062d",
                "url": "https://api.github.com/repos/mozilla-services/\
                    shavar-prod-lists/commits/\
                    35665559e9e4a85c12bb8211b5f9217fbb96062d"
              }
            }]
        """
        responses.add(
            responses.GET, GITHUB_API_URL + SHAVAR_PROD_LISTS_BRANCHES_PATH,
            body=resp_body
        )
        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Esempio n. 4
0
 def test_refresh(self):
     """A source directory whose mtime moves forward must need a refresh."""
     source = DirectorySource(
         "dir://{0}".format(test_file("delta_dir_source")), 1)
     source.load()
     stat_info = os.stat(source.url.path)
     # Push mtime into the future while leaving atime untouched.
     os.utime(source.url.path, (stat_info.st_atime, int(time.time()) + 2))
     self.assertTrue(source.needs_refresh())
Esempio n. 5
0
 def test_refresh(self):
     """Bumping the directory's mtime past load time triggers a refresh."""
     src = DirectorySource(
         "dir://{0}".format(test_file("delta_dir_source")), 1)
     src.load()
     st = os.stat(src.url.path)
     # Future-dated mtime; atime is preserved.
     os.utime(src.url.path, (st.st_atime, int(time.time()) + 2))
     self.assertTrue(src.needs_refresh())
Esempio n. 6
0
 def setUp(self):
     """Seed the mocked S3 bucket, then run the parent setUp inside the mock."""
     with mock_s3():
         conn = boto.connect_s3()
         bucket = conn.create_bucket(self.bucket_name)
         key = Key(bucket)
         key.name = self.key_name
         with open(test_file(self.key_name), 'rb') as fh:
             key.set_contents_from_file(fh)
         super(S3SourceListsTest, self).setUp()
Esempio n. 7
0
    def setUp(self):
        """Start the S3 mock and seed all fixture buckets.

        Fix: the lists_served ``.ini`` files were opened with a bare
        ``open()`` that was never closed; wrap the open in a context
        manager in ``'rb'`` mode, matching every other upload here.
        """
        self.mock = mock_s3()
        self.mock.start()

        #
        # Populate the data in mock S3
        #
        conn = boto.connect_s3()

        # s3+dir lists_served bucket first
        b = conn.create_bucket(self.lists_served_bucket_name)
        for fname in [
                'mozpub-track-digest256.ini', 'testpub-bananas-digest256.ini'
        ]:
            k = Key(b)
            k.name = fname
            ini_path = os.path.join(
                os.path.dirname(__file__), 'lists_served_s3', fname)
            with open(ini_path, 'rb') as f:
                k.set_contents_from_file(f)

        # s3+file contents
        b = conn.create_bucket(self.bucket_name)
        k = Key(b)
        k.name = self.key_name
        with open(test_file(self.key_name), 'rb') as f:
            k.set_contents_from_file(f)

        # s3+dir keys and contents
        b = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            k = Key(b)
            k.name = posixpath.join(self.dir_list_name, fname)
            with open(test_file(posixpath.join('delta_dir_source', fname)),
                      'rb') as f:
                k.set_contents_from_file(f)

        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Esempio n. 8
0
    def setUp(self):
        """Start the S3 mock and populate the fixture buckets.

        Fix: the ``.ini`` fixture files were opened without ever being
        closed, leaking one handle per file; use a ``with`` block in
        ``'rb'`` mode, consistent with the other uploads in this method.
        """
        self.mock = mock_s3()
        self.mock.start()

        #
        # Populate the data in mock S3
        #
        conn = boto.connect_s3()

        # s3+dir lists_served bucket first
        b = conn.create_bucket(self.lists_served_bucket_name)
        for fname in ['mozpub-track-digest256.ini',
                      'testpub-bananas-digest256.ini']:
            k = Key(b)
            k.name = fname
            ini_path = os.path.join(
                os.path.dirname(__file__), 'lists_served_s3', fname)
            with open(ini_path, 'rb') as f:
                k.set_contents_from_file(f)

        # s3+file contents
        b = conn.create_bucket(self.bucket_name)
        k = Key(b)
        k.name = self.key_name
        with open(test_file(self.key_name), 'rb') as f:
            k.set_contents_from_file(f)

        # s3+dir keys and contents
        b = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            k = Key(b)
            k.name = posixpath.join(self.dir_list_name, fname)
            with open(test_file(posixpath.join('delta_dir_source', fname)),
                      'rb') as f:
                k.set_contents_from_file(f)

        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Esempio n. 9
0
    def test_load(self):
        """An s3+dir source loads into the expected delta chunk list."""
        with mock_s3():
            conn = boto.connect_s3()
            bucket = conn.create_bucket(self.bucket_name)
            for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
                key = Key(bucket)
                key.name = posixpath.join(self.list_name, fname)
                fixture = test_file(posixpath.join('delta_dir_source', fname))
                with open(fixture, 'rb') as fh:
                    key.set_contents_from_file(fh)

            url = "s3+dir://{0}/{1}".format(self.bucket_name, self.list_name)
            source = S3DirectorySource(url, 1)
            source.load()
            self.assertEqual(source.chunks, DELTA_RESULT)
Esempio n. 10
0
    def test_load(self):
        """Loading the seeded s3+dir bucket yields DELTA_RESULT."""
        with mock_s3():
            conn = boto.connect_s3()
            bucket = conn.create_bucket(self.bucket_name)
            for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
                key = Key(bucket)
                key.name = posixpath.join(self.list_name, fname)
                fixture = test_file(posixpath.join('delta_dir_source', fname))
                with open(fixture, 'rb') as fh:
                    key.set_contents_from_file(fh)

            source = S3DirectorySource(
                "s3+dir://{0}/{1}".format(self.bucket_name, self.list_name), 1)
            source.load()
            self.assertEqual(source.chunks, DELTA_RESULT)
Esempio n. 11
0
    def test_parse_file_source_delta(self):
        """Parse a delta chunk fixture and compare against a hand-built list.

        Fix: the fixture file was opened without ever being closed; use a
        context manager so the handle is released deterministically.
        """
        def chunkit(n, typ, *urls):
            # Helper: a Chunk whose hashes are the SHA-256 digests of urls.
            return Chunk(number=n, chunk_type=typ,
                         hashes=[hashlib.sha256(u).digest() for u in urls])

        result = ChunkList(add_chunks=[chunkit(1, 'a',
                                               'https://www.mozilla.org/',
                                               'https://www.google.com/'),
                                       chunkit(2, 'a', 'https://github.com/',
                                               'http://www.python.org/'),
                                       chunkit(4, 'a',
                                               'http://www.haskell.org/',
                                               'https://www.mozilla.com/'),
                                       chunkit(5, 'a', 'http://www.erlang.org',
                                               'http://golang.org/')],
                           sub_chunks=[chunkit(3, 's',
                                               'https://github.com/'),
                                       chunkit(6, 's',
                                               'http://golang.org')])
        with open(test_file('delta_chunk_source')) as f:
            p = parse_file_source(f)
        self.assertEqual(p, result)
Esempio n. 12
0
 def test_load(self):
     """A dir:// source loads into the expected delta chunk list."""
     source = DirectorySource(
         "dir://{0}".format(test_file("delta_dir_source")), 1)
     source.load()
     self.assertEqual(source.chunks, DELTA_RESULT)
Esempio n. 13
0
 def test_load(self):
     """Loading the delta_dir_source fixture yields DELTA_RESULT."""
     src = DirectorySource(
         "dir://{0}".format(test_file("delta_dir_source")), 1)
     src.load()
     self.assertEqual(src.chunks, DELTA_RESULT)
Esempio n. 14
0
 def test_parse_file_source_delta(self):
     """Parsing the delta chunk fixture must yield DELTA_RESULT.

     Fix: the file object was opened without being closed; a context
     manager now guarantees the handle is released.
     """
     with open(test_file('delta_chunk_source'), 'rb') as f:
         p = parse_file_source(f)
     self.assertEqual(p, DELTA_RESULT)