# Imports assumed by these excerpts; the scraped snippets omit them. Module
# paths follow mozdownload's layout, and the urllib/urlparse split matches the
# Python 2-era calls used in the assertions below.
import os
import urllib
from urlparse import urljoin

import pytest

from mozdownload import DailyScraper, errors


def test_build_indices(httpd, tmpdir, args, build_index, builds):
    """Testing for correct build_index in DailyScraper"""

    scraper = DailyScraper(destination=tmpdir,
                           base_url=httpd.get_url(),
                           **args)
    assert scraper.build_index == build_index
    assert scraper.builds == builds
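The args, build_index and builds arguments are evidently injected by a
pytest.mark.parametrize decorator that this excerpt omits. A minimal sketch of
what it presumably looks like; the concrete values are illustrative, not taken
from the real suite:

@pytest.mark.parametrize('args,build_index,builds', [
    # 'build_number' selects among several builds of the same day (assumed)
    ({'platform': 'linux', 'branch': 'mozilla-central',
      'date': '2013-10-01', 'build_number': 1},
     0,
     ['2013-10-01-03-02-04-mozilla-central']),
])
def test_build_indices(httpd, tmpdir, args, build_index, builds):
    ...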
Example No. 2

def test_scraper(httpd, tmpdir, args, filename, url):
    """Testing various download scenarios for DailyScraper"""

    scraper = DailyScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args)
    expected_target = os.path.join(str(tmpdir), filename)
    assert scraper.filename == expected_target

    assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url)
Example No. 3
    def test_invalid_revision(self, query_builds_by_revision):
        query_builds_by_revision.return_value = []

        with self.assertRaises(errors.NotFoundError):
            DailyScraper(destination=self.temp_dir,
                         base_url=self.wdir,
                         logger=self.logger,
                         platform='linux',
                         revision='not_valid')
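The extra query_builds_by_revision parameter implies a mock.patch decorator
that the excerpt omits (with `import mock` or `from unittest import mock` at
module level). The patch target below is taken from the pytest variant of this
test in Example No. 7; the reconstruction is otherwise an assumption:

    @mock.patch('mozdownload.treeherder.Treeherder.query_builds_by_revision')
    def test_invalid_revision(self, query_builds_by_revision):
        ...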
Example No. 4
    def test_build_indices(self):
        """Testing for correct build_index in DailyScraper"""

        for entry in test_params:
            scraper = DailyScraper(directory=self.temp_dir, base_url=self.wdir,
                                   version=None, log_level='ERROR',
                                   **entry['args'])
            self.assertEqual(scraper.build_index, entry['build_index'])
            self.assertEqual(scraper.builds, entry['builds'])
Example No. 5
    def test_build_indices(self):
        """Testing for correct build_index in DailyScraper"""

        for entry in test_params:
            scraper = DailyScraper(destination=self.temp_dir,
                                   base_url=self.wdir,
                                   logger=self.logger,
                                   **entry['args'])
            self.assertEqual(scraper.build_index, entry['build_index'])
            self.assertEqual(scraper.builds, entry['builds'])
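test_params itself is defined elsewhere in the test module; judging by the
keys accessed above, each entry presumably has this shape (the values are
illustrative only, not taken from the real suite):

test_params = [
    {'args': {'platform': 'linux', 'branch': 'mozilla-central'},
     'build_index': 0,
     'builds': ['2013-10-01-03-02-04-mozilla-central']},
]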
Example No. 6
    def test_valid_revision(self, query_builds_by_revision):
        build_path = self.wdir + '/firefox/nightly/2013/10/2013-10-01-03-02-04-mozilla-central/'
        query_builds_by_revision.return_value = [build_path]

        scraper = DailyScraper(destination=self.temp_dir,
                               base_url=self.wdir,
                               logger=self.logger,
                               platform='linux',
                               revision='6b92cb377496')
        self.assertEqual(len(scraper.builds), 1)
        self.assertEqual(scraper.url, build_path + 'firefox-27.0a1.en-US.linux-i686.tar.bz2')
Example No. 7

def test_invalid_revision(httpd, tmpdir, mocker):
    """Testing invalid revision"""

    query_builds_by_revision = mocker.patch(
        'mozdownload.treeherder.Treeherder.query_builds_by_revision')
    query_builds_by_revision.return_value = []
    with pytest.raises(errors.NotFoundError):
        DailyScraper(destination=tmpdir,
                     base_url=httpd.get_url(),
                     platform='linux',
                     revision='not_valid')
Example No. 8
    def test_scraper(self):
        """Testing various download scenarios for DailyScraper"""

        for entry in tests:
            scraper = DailyScraper(destination=self.temp_dir,
                                   base_url=self.wdir,
                                   logger=self.logger,
                                   **entry['args'])

            expected_target = os.path.join(self.temp_dir, entry['filename'])
            self.assertEqual(scraper.filename, expected_target)
            self.assertEqual(urllib.unquote(scraper.url),
                             urljoin(self.wdir, entry['url']))
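Likewise, tests is defined elsewhere in the module; each entry presumably
bundles the scraper arguments with the expected file name and URL, roughly as
below (values modeled on the paths seen in the other examples, not verbatim):

tests = [
    {'args': {'platform': 'linux', 'branch': 'mozilla-central'},
     'filename': '2013-10-01-03-02-04-mozilla-central-firefox-27.0a1.en-US.linux-i686.tar.bz2',
     'url': 'firefox/nightly/2013/10/2013-10-01-03-02-04-mozilla-central/'
            'firefox-27.0a1.en-US.linux-i686.tar.bz2'},
]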
Example No. 9

    def test_scraper(self):
        """Testing various download scenarios for DailyScraper"""

        for entry in tests:
            scraper = DailyScraper(directory=self.temp_dir,
                                   base_url=self.wdir,
                                   version=None,
                                   log_level='ERROR',
                                   **entry['args'])
            expected_target = os.path.join(self.temp_dir, entry['target'])
            self.assertEqual(scraper.target, expected_target)
            self.assertEqual(urllib.unquote(scraper.final_url),
                             urljoin(self.wdir, entry['target_url']))
Example No. 10

def test_valid_revision(httpd, tmpdir, mocker):
    """Testing valid revision"""

    query_builds_by_revision = mocker.patch(
        'mozdownload.treeherder.Treeherder.query_builds_by_revision')
    build_path = (httpd.get_url() +
                  'firefox/nightly/2013/10/2013-10-01-03-02-04-mozilla-central/')
    query_builds_by_revision.return_value = [build_path]
    scraper = DailyScraper(destination=tmpdir,
                           base_url=httpd.get_url(),
                           platform='linux',
                           revision='6b92cb377496')
    assert len(scraper.builds) == 1
    assert scraper.url == (build_path +
                           'firefox-27.0a1.en-US.linux-i686.tar.bz2')
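Note that all of these tests only construct the scraper and inspect its
attributes; actually fetching the build is a separate step. Assuming
mozdownload's public Scraper API, a download would look roughly like:

scraper = DailyScraper(destination='downloads', platform='linux',
                       branch='mozilla-central')
scraper.download()  # retrieves the selected build into `destination`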
Example No. 11
def test_invalid_branch(httpd, tmpdir, args):
    """Testing download scenarios with invalid branch parameters for DailyScraper"""

    with pytest.raises(errors.NotFoundError):
        DailyScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args)
Example No. 12
def test_scraper(httpd, tmpdir, args):
    """Testing download scenarios with invalid parameters for DailyScraper"""

    with pytest.raises(ValueError):
        DailyScraper(destination=tmpdir, base_url=httpd.get_url(), **args)
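Here args presumably carries values that fail validation before any network
lookup happens, for example a malformed date string (an assumption, mirroring
how DailyScraper parses its date argument with datetime.strptime):

@pytest.mark.parametrize('args', [
    {'date': 'invalid'},  # not YYYY-MM-DD, so date parsing raises ValueError (assumed)
])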