def test_addpath(self):
    """addpath appends a segment whether or not the base path already
    ends with a slash; asdir=1 forces a trailing slash on the result."""
    for base in ("http://root.com/path", "http://root.com/path/"):
        url = URL(base)
        assert url.addpath("sub").url == "http://root.com/path/sub"
        assert url.addpath("sub", asdir=1).url == "http://root.com/path/sub/"
def _get_remote_projects(self): headers = {"Accept": "text/html"} # use a minimum of 30 seconds as timeout for remote server and # 60s when running as replica, because the list can be quite large # and the master might take a while to process it if self.xom.is_replica(): timeout = max(self.timeout, 60) else: timeout = max(self.timeout, 30) response = self.httpget( self.mirror_url, allow_redirects=True, extra_headers=headers, timeout=timeout) if response.status_code != 200: raise self.UpstreamError("URL %r returned %s %s", self.mirror_url, response.status_code, response.reason) page = HTMLPage(response.text, response.url) projects = set() baseurl = URL(response.url) basehost = baseurl.replace(path='') for link in page.links: newurl = URL(link.url) # remove trailing slashes, so basename works correctly newurl = newurl.asfile() if not newurl.is_valid_http_url(): continue if not newurl.path.startswith(baseurl.path): continue if basehost != newurl.replace(path=''): continue projects.add(newurl.basename) return projects
def test_relpath(self, url, path, expected):
    """relpath must return *expected* and be unaffected by a query string."""
    test_url = URL("http://example.com" + url)
    relpath = test_url.relpath(path)
    assert relpath == expected
    # Fix: the original appended "foo=bar" without the "?" separator,
    # which mutated the path instead of adding a query string.  Every
    # sibling test in this file uses "?foo=bar" for this purpose.
    test_url = URL("http://example.com" + url + "?foo=bar")
    relpath = test_url.relpath(path)
    assert relpath == expected
def parse_index(self, disturl, html, scrape=True):
    """Parse the simple-index *html* served at *disturl*.

    Collects egg links into self.egglinks, release-file links via
    self._mergelink_ifbetter, and (when *scrape* is true) rel-links
    into self.crawllinks for later crawling.
    """
    p = HTMLPage(html, disturl.url)
    seen = set()
    for link in p.links:
        newurl = URL(link.url)
        if not newurl.is_valid_http_url():
            continue
        eggfragment = newurl.eggfragment
        if scrape and eggfragment:
            if normalize_name(eggfragment).startswith(self.projectname):
                # XXX seems we have to maintain a particular
                # order to keep pip/easy_install happy with some
                # packages (e.g. nose)
                if newurl not in self.egglinks:
                    self.egglinks.insert(0, newurl)
            else:
                log.debug("skip egg link %s (projectname: %s)",
                          newurl, self.projectname)
            continue
        if is_archive_of_project(newurl, self.projectname):
            if not newurl.is_valid_http_url():
                log.warn("unparseable/unsupported url: %r", newurl)
            else:
                # remember the url so rel-link scraping below can
                # skip pages we already took a release file from
                seen.add(newurl.url)
                self._mergelink_ifbetter(newurl)
                continue
    if scrape:
        for link in p.rel_links():
            if link.url not in seen:
                disturl = URL(link.url)
                if disturl.is_valid_http_url():
                    self.crawllinks.add(disturl)
def test_hashtypes(self, hashtype, hash_value):
    """Fragment hashes are parsed with and without a query string."""
    fragment = "#%s=%s" % (hashtype, hash_value)
    expected_algo = getattr(hashlib, hashtype)

    plain = URL("py-1.4.12.zip" + fragment)
    assert plain.hash_algo == expected_algo
    assert plain.hash_value == hash_value

    with_query = URL("py-1.4.12.zip?foo=bar" + fragment)
    assert with_query.hash_algo == expected_algo
    assert with_query.hash_value == hash_value
    assert with_query.query == "foo=bar"
def test_parentbasename(self):
    """With a trailing slash, basename is empty and parentbasename is
    the last real segment; a query string changes neither."""
    plain = URL("http://codespeak.net/simple/basename/")
    assert plain.parentbasename == "basename"
    assert plain.basename == ""

    with_query = URL("http://codespeak.net/simple/basename/?foo=bar")
    assert with_query.parentbasename == "basename"
    assert with_query.basename == ""
    assert with_query.query == "foo=bar"
def test_maplink_replaced_release_not_cached_yet(self, filestore):
    """Remapping the same release file with a new md5 updates the hash."""
    first = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=123"),
        refresh=False)
    assert not first.iscached()
    assert first.md5 == "123"
    second = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=456"),
        refresh=False)
    assert second.md5 == "456"
def test_url_nofrag(self):
    """url_nofrag strips the fragment, keeps the query, returns str."""
    cases = (
        ("http://a/py.tar.gz#egg=py-dev", "http://a/py.tar.gz"),
        ("http://a/py.tar.gz?foo=bar#egg=py-dev",
         "http://a/py.tar.gz?foo=bar"),
    )
    for raw, expected in cases:
        res = URL(raw).url_nofrag
        assert not isinstance(res, URL)
        assert res == expected
def test_md5(self):
    """An md5 fragment is exposed via md5, hash_algo and hash_value."""
    def check(raw):
        url = URL(raw)
        assert url.md5 == "123123"
        assert url.hash_algo == hashlib.md5
        assert url.hash_value == "123123"
        return url

    check("http://a/py.tar.gz#md5=123123")
    # a query string must not disturb hash parsing
    with_query = check("http://a/py.tar.gz?foo=bar#md5=123123")
    assert with_query.query == "foo=bar"
def test_maplink_replaced_release_already_cached(self, filestore):
    """A changed md5 invalidates an already-cached release file."""
    entry1 = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=123"),
        refresh=False)
    # pseudo-write a release file so the entry counts as cached
    entry1.FILE.set(b"content")
    assert entry1.iscached()
    entry2 = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=456"),
        refresh=False)
    assert entry2.md5 == "456"
    assert not entry2.iscached()
def master_url(self):
    """Return the master URL from args or nodeinfo (cached), or None."""
    try:
        return self._master_url
    except AttributeError:
        pass
    result = None
    # command-line argument wins over the persisted nodeinfo entry
    if self.args.master_url:
        result = URL(self.args.master_url)
    elif self.nodeinfo.get("masterurl"):
        result = URL(self.nodeinfo["masterurl"])
    # cache even a None result so the lookup happens only once
    self._master_url = result
    return result
def test_basename(self):
    """basename is the last path segment; a query string is ignored."""
    assert URL("http://codespeak.net/basename").basename == "basename"
    assert not URL("http://codespeak.net").basename

    with_query = URL("http://codespeak.net/basename?foo=bar")
    assert with_query.basename == "basename"
    assert with_query.query == "foo=bar"

    no_path = URL("http://codespeak.net?foo=bar")
    assert not no_path.basename
    assert no_path.query == "foo=bar"
def test_joinpath_asdir(self):
    """joinpath(..., asdir=1) yields exactly one trailing slash,
    whether or not the segment already carries one."""
    plain = URL("http://heise.de")
    for segment in ("hello", "hello/"):
        assert plain.joinpath(segment, asdir=1).url == "http://heise.de/hello/"

    with_query = URL("http://heise.de?foo=bar")
    for segment in ("hello", "hello/"):
        joined = with_query.joinpath(segment, asdir=1)
        assert joined.url == "http://heise.de/hello/?foo=bar"
def test_joinpath(self, url, path, expected):
    """joinpath and the URL(*parts) constructor agree on path joining."""
    base = URL(url)
    assert base.joinpath(path).url == expected
    assert URL(url, path).url == expected

    tail = expected.rstrip('/') + "/end"
    assert base.joinpath(path, "end").url == tail
    assert URL(url, path, "end").url == tail

    # asdir forces a directory-style trailing slash
    assert base.joinpath(path, "end", asdir=1).url == tail + "/"
    assert URL(url, path, "end", asdir=1).url == tail + "/"
def write_trustedhost(self, indexserver):
    """Ensure a ``trusted-host`` entry for *indexserver*'s hostname
    exists in this config file, inserting it into the section named by
    self.section_name (appending the section if it is missing).

    Rewrites the file in place; a backup is created first.
    """
    self.ensure_backup_file()
    if not self.path.exists():
        return
    newlines = []
    found = False
    insection = False
    indexserver = URL(indexserver)
    trustedhost = "trusted-host = %s\n" % indexserver.hostname
    for line in self.path.readlines(cr=1):
        if insection:
            # leaving the section: inject the entry just before the
            # next section header if we never saw one
            if line.strip().startswith('['):
                if not found:
                    newlines.append(trustedhost)
                    found = True
                insection = False
        if not found and self.section_name in line.lower(
        ) and not insection:
            insection = True
        # an existing trusted-host line for this host counts as found
        if not found and insection and re.match(
                r'trusted-host\s*=\s*%s' % indexserver.hostname, line):
            found = True
        newlines.append(line)
    if not found:
        # section missing entirely (or file ended inside it): append
        # the section header and the trusted-host entry at the end
        newlines.append(self.section_name + "\n")
        newlines.append(trustedhost)
    self.path.write("".join(newlines))
def simpleindex_auth(self):
    """Return the simple index URL, embedding basic-auth credentials
    into the netloc when credentials are configured."""
    indexserver = URL(self.simpleindex)
    basic_auth = self.get_basic_auth(indexserver)
    if not basic_auth:
        return indexserver.url
    credentials = ':'.join(basic_auth)
    with_auth = indexserver.replace(
        netloc="%s@%s" % (credentials, indexserver.netloc))
    return with_auth.url
def test_canonical_url_path_mappings(url):
    """torelpath/fromrelpath must round-trip; the relative path is
    slash-free at the front and already normalized."""
    original = URL(url)
    relpath = original.torelpath()
    assert relpath[0] != "/"
    assert posixpath.normpath(relpath) == relpath
    assert URL.fromrelpath(relpath) == original
def runtox(self, link):
    """Run tox against the unpacked package behind *link* and upload
    the JSON report; returns 0 on success, 1 on tox failure."""
    # publishing some infos to the commands started by tox
    #setenv_devpi(self.hub, env, posturl=self.current.resultlog,
    #             packageurl=link.url,
    #             packagemd5=link.md5)
    jsonreport = link.pkg.rootdir.join("toxreport.json")
    path_archive = link.pkg.path_archive
    toxargs = [
        "--installpkg", str(path_archive),
        "-i ALL=%s" % str(self.current.simpleindex),
        "--result-json", str(jsonreport),
    ]
    unpack_path = link.pkg.path_unpacked
    toxargs.extend(self.get_tox_args(unpack_path=unpack_path))
    with link.pkg.path_unpacked.as_cwd():
        self.hub.info("%s$ tox %s" % (os.getcwd(), " ".join(toxargs)))
        try:
            ret = tox.cmdline(toxargs)
        except SystemExit as e:
            # tox exits via SystemExit; recover its exit code
            ret = e.args[0]
    # NOTE(review): ret == 2 appears to mean the report is unusable and
    # is therefore not uploaded — confirm against tox's exit codes
    if ret != 2:
        jsondata = json.load(jsonreport.open("r"))
        url = URL(link.url)
        post_tox_json_report(self.hub, url.url_nofrag, jsondata)
    if ret != 0:
        self.hub.error("tox command failed", ret)
        return 1
    return 0
def parse_index(disturl, html, scrape=True):
    """Parse a simple-index HTML page and return the filled IndexParser.

    *disturl* may be a string or a URL; the project name is derived
    from its last (or, with a trailing slash, second-to-last) segment.
    """
    if not isinstance(disturl, URL):
        disturl = URL(disturl)
    parser = IndexParser(disturl.basename or disturl.parentbasename)
    parser.parse_index(disturl, html, scrape=scrape)
    return parser
def test_iterfile(self, filestore, httpget):
    """iterfile fetches a remote file once, caches it with its headers,
    and then serves subsequent reads from the local cache."""
    link = URL("http://pypi.python.org/pkg/pytest-1.8.zip")
    entry = filestore.maplink(link, refresh=False)
    assert not entry.md5
    headers = {
        "content-length": "3",
        "last-modified": "Thu, 25 Nov 2010 20:00:27 GMT",
        "content-type": "application/zip",
    }
    httpget.url2response[link.url] = dict(
        status_code=200, headers=headers, raw=BytesIO(b"123"))
    rheaders, riter = filestore.iterfile(entry.relpath, httpget, chunksize=1)
    assert rheaders["content-length"] == "3"
    assert rheaders["content-type"] == "application/zip"
    assert rheaders["last-modified"] == headers["last-modified"]
    # fix: local was named "bytes", shadowing the builtin
    data = b"".join(riter)
    assert data == b"123"
    # reget entry and check about content
    entry = filestore.getentry(entry.relpath)
    assert entry.iscached()
    assert entry.md5 == getmd5(data)
    assert entry.size == "3"
    # second read with httpget=None must be served from the cache
    rheaders, riter = filestore.iterfile(entry.relpath, None, chunksize=1)
    assert rheaders == headers
    data = b"".join(riter)
    assert data == b"123"
def mirror_url(self):
    """Return the effective simple-index URL for this mirror index."""
    if self.xom.is_replica():
        # replicas go through the master's +simple view for this index
        base = self.xom.config.master_url
        return base.joinpath("%s/+simple/" % self.name).url
    # otherwise use the configured upstream, normalized to a directory
    return URL(self.ixconfig['mirror_url']).asdir().url
def test_package_with_version_specs(self, monkeypatch, specs, link):
    """getbestlink must pick the release matching *specs* from a mocked
    simple page listing many pkg versions; *link* is the expected URL."""
    indexurl = "http://my/simple/"
    current = Current()
    current.reconfigure(dict(simpleindex=indexurl))
    ri = RemoteIndex(current)
    def mockget(url):
        # the lookup must target the project page below the index root
        assert url.startswith(indexurl)
        assert url.endswith("pkg/")
        return url, """
        <a href="http://my/pkg-0.3.tar.gz"/>
        <a href="http://my/pkg-0.2.8.tar.gz"/>
        <a href="http://my/pkg-0.2.7.tar.gz"/>
        <a href="http://my/pkg-0.2.6.tar.gz"/>
        <a href="http://my/pkg-0.2.5.tar.gz"/>
        <a href="http://my/pkg-0.2.5a1.tar.gz"/>
        <a href="http://my/pkg-0.2.4.1.tar.gz"/>
        <a href="http://my/pkg-0.2.4.tar.gz"/>
        <a href="http://my/pkg-0.2.3.tar.gz"/>
        <a href="http://my/pkg-0.2.2.tar.gz"/>
        <a href="http://my/pkg-0.2.1.tar.gz"/>
        <a href="http://my/pkg-0.2.0.tar.gz"/>
        """
    monkeypatch.setattr(ri, "getcontent", mockget)
    lnk = ri.getbestlink(specs)
    assert URL(lnk.url).url_nofrag == link
def _simple_list_project(self, stage, project, result, embed_form, blocked_index):
    """Stream the simple-index HTML page for *project* on *stage*.

    Yields utf-8 encoded chunks.  *result* is an iterable of
    (key, href) pairs, one per release link; *embed_form* embeds the
    refresh form, *blocked_index* (if set) names a mirror whose
    releases were excluded by the whitelist.
    """
    response = self.request.response
    response.content_type = "text/html ; charset=utf-8"
    title = "%s: links for %s" % (stage.name, project)
    yield ("<html><head><title>%s</title></head><body><h1>%s</h1>\n" % (title, title)).encode("utf-8")
    if embed_form:
        yield self._index_refresh_form(stage, project).encode("utf-8")
    if blocked_index:
        yield ("<p><strong>INFO:</strong> Because this project isn't in "
               "the <code>mirror_whitelist</code>, no releases from "
               "<strong>%s</strong> are included.</p>"
               % blocked_index).encode('utf-8')
    url = URL(self.request.path_info)
    for key, href in result:
        # link text is prefixed with the first two path segments of the
        # href (user/index); the href itself is made relative to the
        # current request path
        yield ('%s <a href="%s">%s</a><br/>\n' % (
            "/".join(href.split("/", 2)[:2]),
            url.relpath("/" + href),
            key)).encode("utf-8")
    yield "</body></html>".encode("utf-8")
def test_iterfile_local_failing_will_retry_remote(self, httpget, filestore):
    """When the locally cached file does not match its metadata,
    iterfile must refetch the content from the remote location."""
    # fix: removed the unused local helper "raising" (dead code) and
    # renamed the local "bytes" which shadowed the builtin
    link = URL("http://pypi.python.org/pkg/pytest-2.8.zip")
    entry = filestore.maplink(link, refresh=False)
    # cache an empty file while the metadata claims two bytes,
    # making the local copy unusable
    entry.FILE.set(b"")
    testheaders = {
        "size": "2",
        "content_type": "application/zip",
        "last_modified": "Thu, 25 Nov 2010 20:00:27 GMT",
    }
    digest = getmd5(b"12")
    entry.set(md5=digest, **testheaders)
    assert entry.iscached()
    httpget.mockresponse(link.url, headers=entry.gethttpheaders(),
                         raw=BytesIO(b"12"))
    rheaders, riter = filestore.iterfile(entry.relpath, httpget, chunksize=1)
    assert rheaders["content-length"] == "2"
    assert rheaders["content-type"] == "application/zip"
    data = b"".join(riter)
    assert data == b"12"
def mock_extfile(self, path, content, **kw):
    """Register a mocked upstream response for *path* below mirror_url."""
    headers = {
        "content-length": len(content),
        "content-type": mimetypes.guess_type(path),
        "last-modified": "today",
    }
    target = URL(self.mirror_url).joinpath(path)
    return self.httpget.mockresponse(
        target.url, raw=py.io.BytesIO(content), headers=headers, **kw)
def test_maplink_file_there_but_no_entry(self, filestore, keyfs):
    """A cached file whose metadata entry was deleted must not be
    served from the local store."""
    entry = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=123"),
        refresh=False)
    entry.FILE.set(b"hello")
    entry.PATHENTRY.delete()
    headers, itercontent = filestore.iterfile_local(entry, 1)
    assert itercontent is None
def runtox(self, link, pkg, sdist_pkg=None, upload_tox_results=True):
    """Run tox against *pkg* (tests executed from *sdist_pkg*'s unpacked
    tree, defaulting to *pkg* itself) and optionally upload the JSON
    report; returns 0 on success, 1 on tox failure."""
    jsonreport = pkg.rootdir.join("toxreport.json")
    path_archive = pkg.path_archive
    toxargs = ["--installpkg", str(path_archive),
               "-i ALL=%s" % str(self.current.simpleindex),
               "--recreate",
               "--result-json", str(jsonreport),
               ]
    if sdist_pkg is None:
        sdist_pkg = pkg
    toxargs.extend(self.get_tox_args(unpack_path=sdist_pkg.path_unpacked))
    with sdist_pkg.path_unpacked.as_cwd():
        self.hub.info("%s$ tox %s" %(os.getcwd(), " ".join(toxargs)))
        toxrunner = self.get_tox_runner()
        try:
            ret = toxrunner(toxargs)
        except SystemExit as e:
            # tox exits via SystemExit; recover its exit code
            ret = e.args[0]
    # NOTE(review): ret == 2 appears to mean the report is unusable and
    # is therefore not uploaded — confirm against tox's exit codes
    if ret != 2 and upload_tox_results:
        jsondata = json.load(jsonreport.open("r"))
        url = URL(link.href)
        post_tox_json_report(self.hub, url.url_nofrag, jsondata)
    if ret != 0:
        self.hub.error("tox command failed", ret)
        return 1
    return 0
def test_invalidate_cache(self, filestore):
    """invalidate_cache drops a previously cached release file."""
    entry = filestore.maplink(
        URL("https://pypi.python.org/pkg/pytest-1.2.zip"), refresh=False)
    entry.FILE.set(b"")
    assert entry.iscached()
    entry.invalidate_cache()
    assert not entry.iscached()
def parse_index(disturl, html):
    """Parse a simple-index HTML page and return the filled IndexParser.

    *disturl* may be a string or a URL; the project name is derived
    from its last (or, with a trailing slash, second-to-last) segment.
    """
    if not isinstance(disturl, URL):
        disturl = URL(disturl)
    parser = IndexParser(disturl.basename or disturl.parentbasename)
    parser.parse_index(disturl, html)
    return parser
def test_maplink_deterministic(self, filestore):
    """maplink must map the same link to the same entry every time."""
    link = URL("https://pypi.python.org/pkg/pytest-1.2.zip#md5=123")
    first = filestore.maplink(link)
    second = filestore.maplink(link)
    assert first.relpath == second.relpath
    assert first.basename == "pytest-1.2.zip"
    assert py.builtin._istext(first.md5)