Example #1
0
 def _cache_url(self, filepath):
     """Fetch self.url and persist the response to *filepath*.

     Cache file layout (all bytes, newline-joined): status line,
     header lines, one blank line, then the raw body.
     """
     response = urlopen_original(self.url)
     # when it uses file:// scheme, code is None and there is no msg attr
     # but it has been successfully opened
     status = b('%s %s' % (getattr(response, 'code', 200) or 200,
                           getattr(response, 'msg', 'OK')))
     headers = [b('%s: %s' % (key, value))
                for key, value in list(response.headers.items())]
     body = response.read()
     # context manager guarantees the cache file is closed even if write fails
     with open(filepath, 'wb') as fp:
         fp.write(b('\n').join([status] + headers + [b(''), body]))
Example #2
0
def test_unpack_http_url_bad_downloaded_checksum(mock_get_response, mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b('downloaded')
    download_hash = hashlib.new('sha1', contents)
    link = Link('%s#sha1=%s' % (base_url, download_hash.hexdigest()))

    # stub the network layer: the "server" hands back the good contents
    response = mock_get_response.return_value = MockResponse(contents)
    response.info = lambda: {'content-type': 'application/x-tar'}
    response.geturl = lambda: base_url

    scratch_dir = mkdtemp()
    try:
        # seed a pre-existing download whose hash will NOT match the link
        stale_path = os.path.join(scratch_dir, 'somepackage.tgz')
        _write_file(stale_path, 'some contents')

        unpack_http_url(link, 'location', download_cache=None, download_dir=scratch_dir)

        # despite existence of downloaded file with bad hash, downloaded again
        mock_get_response.assert_called_once_with(base_url, link)
        # cached file is replaced with newly downloaded file
        with open(stale_path) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(scratch_dir)
Example #3
0
def test_unpack_http_url_bad_cache_checksum(mock_get_response,
                                            mock_unpack_file):
    """
    If cached download has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b('downloaded')
    download_hash = hashlib.new('sha1', contents)
    link = Link('%s#sha1=%s' % (base_url, download_hash.hexdigest()))

    # stub the network layer so the re-download serves the good bytes
    response = mock_get_response.return_value = MockResponse(contents)
    response.info = lambda: {'content-type': 'application/x-tar'}
    response.geturl = lambda: base_url

    tmp_cache = mkdtemp()
    try:
        # plant a cache entry whose contents do NOT hash to the link's sha1
        stale_cache_file = os.path.join(tmp_cache, urllib.quote(base_url, ''))
        _write_file(stale_cache_file, 'some contents')
        _write_file(stale_cache_file + '.content-type', 'application/x-tar')

        unpack_http_url(link, 'location', download_cache=tmp_cache)

        # despite existence of cached file with bad hash, downloaded again
        mock_get_response.assert_called_once_with(base_url, link)
        # cached file is replaced with newly downloaded file
        with open(stale_cache_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(tmp_cache)
Example #4
0
def test_searching_through_Search_class():
    """
    Verify if ``pip.vcs.Search`` uses tests xmlrpclib.Transport class
    """
    # swap in a mock transport, remembering the real one for restoration
    saved_transport = pip.download.xmlrpclib_transport
    pip.download.xmlrpclib_transport = mock_transport = Mock()
    query = 'mylittlequerythatdoesnotexists'
    dumped_xmlrpc_request = b(
        xmlrpclib.dumps(({'name': query, 'summary': query}, 'or'), 'search'))
    expected = [
        {'_pypi_ordering': 100,
         'name': 'foo',
         'summary': 'foo summary',
         'version': '1.0'},
    ]
    mock_transport.request.return_value = (expected,)
    searcher = SearchCommand(create_main_parser())
    result = searcher.search(query, 'http://pypi.python.org/pypi')
    try:
        assert expected == result, result
        mock_transport.request.assert_called_with('pypi.python.org',
                                                  '/pypi',
                                                  dumped_xmlrpc_request,
                                                  verbose=VERBOSE_FALSE)
    finally:
        # undo the monkey-patch so later tests see the real transport
        pip.download.xmlrpclib_transport = saved_transport
Example #5
0
 def _set_all_fields(self, folder):
     """Populate self.code/msg/headers/_body from the cached copy of self.url.

     Downloads and caches the URL first when no cache file exists yet.
     Raises ValueError if the cached status line is malformed.
     """
     filename = os.path.join(folder, urllib.quote(self.url, ''))
     if not os.path.exists(filename):
         self._cache_url(filename)
     # with-block ensures the cache file is closed on any exception below
     with open(filename, 'rb') as fp:
         line = fp.readline().strip()
         # narrow try: only the split can raise the ValueError we translate
         try:
             self.code, self.msg = line.split(None, 1)
         except ValueError:
             raise ValueError('Bad field line: %r' % line)
         self.code = int(self.code)
         self.msg = u(self.msg)
         # headers run until the first blank line
         for line in fp:
             if line == b('\n'):
                 break
             key, value = line.split(b(': '), 1)
             self.headers[u(key)] = u(value.strip())
         # everything after the blank line is the body
         for line in fp:
             self._body += line
Example #6
0
 def remove(self):
     """Remove this installer's entries from the .pth file at self.file."""
     logger.info('Removing pth entries from %s:' % self.file)
     # binary mode keeps platform line endings intact for the rewrite below;
     # with-blocks close the file even if an exception occurs
     with open(self.file, 'rb') as fh:
         lines = fh.readlines()
     self._saved_lines = lines
     # windows uses '\r\n' with py3k, but uses '\n' with py2.x
     endline = '\r\n' if any(b('\r\n') in line for line in lines) else '\n'
     for entry in self.entries:
         try:
             logger.info('Removing entry: %s' % entry)
             lines.remove(b(entry + endline))
         except ValueError:
             # entry not present in the file; nothing to remove
             pass
     with open(self.file, 'wb') as fh:
         fh.writelines(lines)
 def remove(self):
     """Remove this installer's entries from the .pth file at self.file."""
     logger.info("Removing pth entries from %s:" % self.file)
     # binary mode keeps platform line endings intact for the rewrite below;
     # with-blocks close the file even if an exception occurs
     with open(self.file, "rb") as fh:
         lines = fh.readlines()
     self._saved_lines = lines
     # windows uses '\r\n' with py3k, but uses '\n' with py2.x
     endline = "\r\n" if any(b("\r\n") in line for line in lines) else "\n"
     for entry in self.entries:
         try:
             logger.info("Removing entry: %s" % entry)
             lines.remove(b(entry + endline))
         except ValueError:
             # entry not present in the file; nothing to remove
             pass
     with open(self.file, "wb") as fh:
         fh.writelines(lines)
Example #8
0
 def __init__(self, url, folder):
     """Build a cached-response object for *url*, then fill in real fields from *folder*."""
     # defaults until _set_all_fields reads the real values from the cache
     self.code = 500
     self.msg = 'Internal Server Error'
     self.headers = emailmessage.Message()
     self._body = b('')
     # url can be a simple string, or a urllib2.Request object
     if isinstance(url, string_types):
         self.url = url
     else:
         self.url = url.get_full_url()
         for header_name, header_value in url.headers.items():
             self.headers[header_name] = header_value
     self._set_all_fields(folder)
Example #9
0
def test_searching_through_Search_class():
    """
    Verify if ``pip.vcs.Search`` uses tests xmlrpclib.Transport class
    """
    # save the real transport so the monkey-patch below can be undone;
    # without this, the mock leaks into every test that runs afterwards
    original_transport = pip.download.xmlrpclib_transport
    pip.download.xmlrpclib_transport = fake_transport = Mock()
    query = 'mylittlequerythatdoesnotexists'
    dumped_xmlrpc_request = b(xmlrpclib.dumps(({'name': query, 'summary': query}, 'or'), 'search'))
    expected = [{'_pypi_ordering': 100, 'name': 'foo', 'summary': 'foo summary', 'version': '1.0'}]
    fake_transport.request.return_value = (expected,)
    try:
        pypi_searcher = SearchCommand()
        result = pypi_searcher.search(query, 'http://pypi.python.org/pypi')
        assert expected == result, result
        fake_transport.request.assert_called_with('pypi.python.org', '/pypi', dumped_xmlrpc_request, verbose=VERBOSE_FALSE)
    finally:
        # restore so later tests hit the real transport
        pip.download.xmlrpclib_transport = original_transport
Example #10
0
def test_unpack_http_url_bad_cache_checksum(mock_unpack_file):
    """
    If cached download has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b('downloaded')
    download_hash = hashlib.new('sha1', contents)
    link = Link('%s#sha1=%s' % (base_url, download_hash.hexdigest()))

    # fake requests session whose get() streams back the good contents
    session = Mock()
    session.get = Mock()
    response = session.get.return_value = MockResponse(contents)
    response.headers = {'content-type': 'application/x-tar'}
    response.url = base_url

    tmp_cache = mkdtemp()
    try:
        # plant a cache entry whose contents do NOT hash to the link's sha1
        stale_cache_file = os.path.join(tmp_cache, urllib.quote(base_url, ''))
        _write_file(stale_cache_file, 'some contents')
        _write_file(stale_cache_file + '.content-type', 'application/x-tar')

        unpack_http_url(link, 'location', download_cache=tmp_cache, session=session)

        # despite existence of cached file with bad hash, downloaded again
        session.get.assert_called_once_with(
            "http://www.example.com/somepackage.tgz", stream=True)
        # cached file is replaced with newly downloaded file
        with open(stale_cache_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(tmp_cache)
Example #11
0
def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b('downloaded')
    download_hash = hashlib.new('sha1', contents)
    link = Link('%s#sha1=%s' % (base_url, download_hash.hexdigest()))

    # fake requests session whose get() streams back the good contents
    session = Mock()
    session.get = Mock()
    response = session.get.return_value = MockResponse(contents)
    response.headers = {'content-type': 'application/x-tar'}
    response.url = base_url

    scratch_dir = mkdtemp()
    try:
        # seed a pre-existing download whose hash will NOT match the link
        stale_path = os.path.join(scratch_dir, 'somepackage.tgz')
        _write_file(stale_path, 'some contents')

        unpack_http_url(
            link, 'location',
            download_cache=None, download_dir=scratch_dir, session=session)

        # despite existence of downloaded file with bad hash, downloaded again
        session.get.assert_called_once_with(
            'http://www.example.com/somepackage.tgz', stream=True)
        # cached file is replaced with newly downloaded file
        with open(stale_path) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(scratch_dir)
Example #12
0
    def __init__(self, url, folder):
        """Set up default response fields for *url*, then load the real values from *folder*."""
        self.headers = emailmessage.Message()

        # patch due to setuptools>=0.7 header processing
        # easy_install fails w/o this on windows/py2
        # https://github.com/pypa/pip/issues/946#issuecomment-20860320
        if sys.version_info < (3,):
            self.headers.getheaders = (
                lambda key: self.headers.get_all(key))

        # placeholders until _set_all_fields reads the cached response
        self.code = 500
        self.msg = 'Internal Server Error'
        # url can be a simple string, or a urllib2.Request object
        if isinstance(url, string_types):
            self.url = url
        else:
            self.url = url.get_full_url()
            for header_name, header_value in url.headers.items():
                self.headers[header_name] = header_value
        self._body = b('')
        self._set_all_fields(folder)