def test_links_priority(self, server):
    """Download links from the PyPI simple index should be used before
    external download links.

    http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error

    Use case:
    - someone uploads a package on pypi, a md5 is generated
    - someone manually coindexes this link (with the md5 in the url) onto
      an external page accessible from the package page.
    - someone reuploads the package (with a different md5)
    - while easy_installing, an MD5 error occurs because the external
      link is used
    -> The index should use the link from pypi, not the external one.
    """
    # start an index server
    index_url = server.full_address + '/simple/'

    # scan a test index
    crawler = Crawler(index_url, follow_externals=True)
    releases = crawler.get_releases("foobar")
    server.stop()

    # we have only one link, because links are compared without md5
    self.assertEqual(1, len(releases))
    self.assertEqual(1, len(releases[0].dists))
    # the link should be from the index
    self.assertEqual(2, len(releases[0].dists['sdist'].urls))
    self.assertEqual('12345678901234567',
                     releases[0].dists['sdist'].url['hashval'])
    self.assertEqual('md5',
                     releases[0].dists['sdist'].url['hashname'])
def test_browse_local_files(self):
    """Test that we can browse local files."""
    index_url = "file://" + PYPI_DEFAULT_STATIC_PATH
    if sys.platform == 'win32':
        # under windows the correct syntax is:
        #   file:///C|\the\path\here
        # instead of
        #   file://C:\the\path\here
        # so rewrite the drive-letter colon into the pipe form
        fix = re.compile(r'^(file://)([A-Za-z])(:)')
        index_url = fix.sub('\\1/\\2|', index_url)

    # NOTE(review): joining URL components with os.sep yields backslashes
    # on Windows; presumably acceptable for file:// URLs here — confirm.
    index_path = os.sep.join([index_url, "test_found_links", "simple"])
    crawler = Crawler(index_path)
    dists = crawler.get_releases("foobar")
    self.assertEqual(4, len(dists))
def test_uses_mirrors(self):
    """When the main repository seems down, try using the given mirrors."""
    server = PyPIServer("foo_bar_baz")
    mirror = PyPIServer("foo_bar_baz")
    # only the mirror is started; the main server stays down on purpose
    mirror.start()

    try:
        # create the index using both servers
        crawler = Crawler(server.full_address + "/simple/",
                          hosts=('*',),
                          # set the timeout to 1s for the tests
                          timeout=1,
                          mirrors=[mirror.full_address])

        # this should not raise a timeout
        self.assertEqual(4, len(crawler.get_releases("foo")))
    finally:
        mirror.stop()
        server.stop()