def test_content_view_text(client, archive_data, content):
    sha1_git = content["sha1_git"]

    url = reverse(
        "browse-content",
        url_args={"query_string": content["sha1"]},
        query_params={"path": content["path"]},
    )

    url_raw = reverse(
        "browse-content-raw", url_args={"query_string": content["sha1"]}
    )

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/content.html"
    )

    content_display = _process_content_for_display(archive_data, content)
    mimetype = content_display["mimetype"]

    if mimetype.startswith("text/"):
        assert_contains(resp, '<code class="%s">' % content_display["language"])
        assert_contains(resp, escape(content_display["content_data"]))
    assert_contains(resp, url_raw)

    swh_cnt_id = gen_swhid(CONTENT, sha1_git)
    swh_cnt_id_url = reverse("browse-swhid", url_args={"swhid": swh_cnt_id})
    assert_contains(resp, swh_cnt_id)
    assert_contains(resp, swh_cnt_id_url)

    assert_not_contains(resp, "swh-metadata-popover")

def test_pull_request_branches_filtering(client, origin):
    # check no pull request branches are displayed in the Branches / Releases dropdown
    url = reverse("browse-origin-directory", query_params={"origin_url": origin.url})
    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/directory.html"
    )
    assert_not_contains(resp, "refs/pull/")

    # check no pull request branches are displayed in the branches view
    url = reverse("browse-origin-branches", query_params={"origin_url": origin.url})
    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/branches.html"
    )
    assert_not_contains(resp, "refs/pull/")

def test_revision_invalid_path(client, archive_data, revision):
    path = "foo/bar"
    url = reverse(
        "browse-revision",
        url_args={"sha1_git": revision},
        query_params={"path": path},
    )
    resp = check_html_get_response(
        client, url, status_code=404, template_used="browse/revision.html"
    )

    directory = archive_data.revision_get(revision)["directory"]
    error_message = (
        f"Directory entry with path {path} from root directory {directory} not found"
    )
    assert_contains(resp, error_message, status_code=404)
    assert_not_contains(resp, "swh-metadata-popover", status_code=404)

def test_content_view_text_with_path(client, archive_data, content):
    path = content["path"]

    url = reverse(
        "browse-content",
        url_args={"query_string": content["sha1"]},
        query_params={"path": path},
    )

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/content.html"
    )

    assert_contains(resp, '<nav class="bread-crumbs')

    content_display = _process_content_for_display(archive_data, content)
    mimetype = content_display["mimetype"]

    if mimetype.startswith("text/"):
        hljs_language = content["hljs_language"]
        assert_contains(resp, '<code class="%s">' % hljs_language)
        assert_contains(resp, escape(content_display["content_data"]))

    split_path = path.split("/")

    root_dir_sha1 = split_path[0]
    filename = split_path[-1]

    path = path.replace(root_dir_sha1 + "/", "").replace(filename, "")

    swhid_context = {
        "anchor": gen_swhid(DIRECTORY, root_dir_sha1),
        "path": f"/{path}{filename}",
    }

    swh_cnt_id = gen_swhid(CONTENT, content["sha1_git"], metadata=swhid_context)
    swh_cnt_id_url = reverse("browse-swhid", url_args={"swhid": swh_cnt_id})
    assert_contains(resp, swh_cnt_id)
    assert_contains(resp, swh_cnt_id_url)

    path_info = gen_path_info(path)

    root_dir_url = reverse("browse-directory", url_args={"sha1_git": root_dir_sha1})

    assert_contains(resp, '<li class="swh-path">', count=len(path_info) + 1)

    assert_contains(
        resp, '<a href="' + root_dir_url + '">' + root_dir_sha1[:7] + "</a>"
    )

    for p in path_info:
        dir_url = reverse(
            "browse-directory",
            url_args={"sha1_git": root_dir_sha1},
            query_params={"path": p["path"]},
        )
        assert_contains(resp, '<a href="' + dir_url + '">' + p["name"] + "</a>")

    assert_contains(resp, "<li>" + filename + "</li>")

    url_raw = reverse(
        "browse-content-raw",
        url_args={"query_string": content["sha1"]},
        query_params={"filename": filename},
    )
    assert_contains(resp, url_raw)

    url = reverse(
        "browse-content",
        url_args={"query_string": content["sha1"]},
        query_params={"path": filename},
    )

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/content.html"
    )

    assert_not_contains(resp, '<nav class="bread-crumbs')

    invalid_path = "%s/foo/bar/baz" % root_dir_sha1
    url = reverse(
        "browse-content",
        url_args={"query_string": content["sha1"]},
        query_params={"path": invalid_path},
    )

    resp = check_html_get_response(
        client, url, status_code=404, template_used="error.html"
    )

def test_layout_without_staging_ribbon(client):
    url = reverse("swh-web-homepage")
    resp = check_http_get_response(client, url, status_code=200)
    assert_not_contains(resp, "swh-corner-ribbon")

def _origin_directory_view_test_helper(
    client,
    archive_data,
    origin_info,
    origin_visit,
    snapshot_sizes,
    origin_branches,
    origin_releases,
    root_directory_sha1,
    directory_entries,
    visit_id=None,
    timestamp=None,
    snapshot_id=None,
    path=None,
):
    dirs = [e for e in directory_entries if e["type"] in ("dir", "rev")]
    files = [e for e in directory_entries if e["type"] == "file"]

    if not visit_id and not snapshot_id:
        visit_id = origin_visit["visit"]

    query_params = {"origin_url": origin_info["url"]}

    if timestamp:
        query_params["timestamp"] = timestamp
    elif visit_id:
        query_params["visit_id"] = visit_id
    else:
        query_params["snapshot"] = snapshot_id

    if path:
        query_params["path"] = path

    url = reverse("browse-origin-directory", query_params=query_params)

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/directory.html"
    )

    # one table row per sub-directory / revision entry and one per file entry
    assert_contains(resp, '<td class="swh-directory">', count=len(dirs))
    assert_contains(resp, '<td class="swh-content">', count=len(files))

    if timestamp:
        query_params["timestamp"] = format_utc_iso_date(
            parse_iso8601_date_to_utc(timestamp).isoformat(), "%Y-%m-%dT%H:%M:%SZ"
        )

    for d in dirs:
        if d["type"] == "rev":
            dir_url = reverse("browse-revision", url_args={"sha1_git": d["target"]})
        else:
            dir_path = d["name"]
            if path:
                dir_path = "%s/%s" % (path, d["name"])
            query_params["path"] = dir_path
            dir_url = reverse("browse-origin-directory", query_params=query_params)

        assert_contains(resp, dir_url)

    for f in files:
        file_path = f["name"]
        if path:
            file_path = "%s/%s" % (path, f["name"])
        query_params["path"] = file_path
        file_url = reverse("browse-origin-content", query_params=query_params)

        assert_contains(resp, file_url)

    if "path" in query_params:
        del query_params["path"]

    # breadcrumbs
    root_dir_branch_url = reverse("browse-origin-directory", query_params=query_params)

    nb_bc_paths = 1
    if path:
        nb_bc_paths = len(path.split("/")) + 1

    assert_contains(resp, '<li class="swh-path">', count=nb_bc_paths)
    assert_contains(
        resp, '<a href="%s">%s</a>' % (root_dir_branch_url, root_directory_sha1[:7])
    )

    # branches and releases dropdowns
    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(resp, f'href="{escape(origin_branches_url)}"')
    assert_contains(resp, f"Branches ({snapshot_sizes['revision']})")

    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    nb_releases = len(origin_releases)
    if nb_releases > 0:
        assert_contains(resp, f'href="{escape(origin_releases_url)}"')
        assert_contains(resp, f"Releases ({snapshot_sizes['release']})")

    if path:
        query_params["path"] = path

    assert_contains(resp, '<li class="swh-branch">', count=len(origin_branches))

    for branch in origin_branches:
        query_params["branch"] = branch["name"]
        root_dir_branch_url = reverse(
            "browse-origin-directory", query_params=query_params
        )

        assert_contains(resp, '<a href="%s">' % root_dir_branch_url)

    assert_contains(resp, '<li class="swh-release">', count=len(origin_releases))

    query_params["branch"] = None
    for release in origin_releases:
        query_params["release"] = release["name"]
        root_dir_release_url = reverse(
            "browse-origin-directory", query_params=query_params
        )

        assert_contains(resp, 'href="%s"' % root_dir_release_url)

    assert_contains(resp, "vault-cook-directory")
    assert_contains(resp, "vault-cook-revision")

    # SWHID of the browsed directory, qualified with its browsing context
    snapshot = archive_data.snapshot_get(origin_visit["snapshot"])
    head_rev_id = archive_data.snapshot_get_head(snapshot)

    swhid_context = {
        "origin": origin_info["url"],
        "visit": gen_swhid(SNAPSHOT, snapshot["id"]),
        "anchor": gen_swhid(REVISION, head_rev_id),
        "path": f"/{path}" if path else None,
    }

    swh_dir_id = gen_swhid(
        DIRECTORY, directory_entries[0]["dir_id"], metadata=swhid_context
    )
    swh_dir_id_url = reverse("browse-swhid", url_args={"swhid": swh_dir_id})
    assert_contains(resp, swh_dir_id)
    assert_contains(resp, swh_dir_id_url)

    assert_contains(resp, "swh-take-new-snapshot")

    _check_origin_link(resp, origin_info["url"])

    assert_not_contains(resp, "swh-metadata-popover")

def _origin_content_view_test_helper(
    client,
    archive_data,
    origin_info,
    origin_visit,
    snapshot_sizes,
    origin_branches,
    origin_releases,
    root_dir_sha1,
    content,
    visit_id=None,
    timestamp=None,
    snapshot_id=None,
):
    content_path = "/".join(content["path"].split("/")[1:])

    if not visit_id and not snapshot_id:
        visit_id = origin_visit["visit"]

    query_params = {"origin_url": origin_info["url"], "path": content_path}

    if timestamp:
        query_params["timestamp"] = timestamp

    if visit_id:
        query_params["visit_id"] = visit_id
    elif snapshot_id:
        query_params["snapshot"] = snapshot_id

    url = reverse("browse-origin-content", query_params=query_params)

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/content.html"
    )

    assert isinstance(content["data"], str)

    # content is rendered as highlighted source code
    assert_contains(resp, '<code class="%s">' % content["hljs_language"])
    assert_contains(resp, escape(content["data"]))

    # breadcrumbs
    split_path = content_path.split("/")

    filename = split_path[-1]
    path = content_path.replace(filename, "")[:-1]

    path_info = gen_path_info(path)

    del query_params["path"]

    if timestamp:
        query_params["timestamp"] = format_utc_iso_date(
            parse_iso8601_date_to_utc(timestamp).isoformat(), "%Y-%m-%dT%H:%M:%SZ"
        )

    root_dir_url = reverse("browse-origin-directory", query_params=query_params)

    assert_contains(resp, '<li class="swh-path">', count=len(path_info) + 1)

    assert_contains(resp, '<a href="%s">%s</a>' % (root_dir_url, root_dir_sha1[:7]))

    for p in path_info:
        query_params["path"] = p["path"]
        dir_url = reverse("browse-origin-directory", query_params=query_params)
        assert_contains(resp, '<a href="%s">%s</a>' % (dir_url, p["name"]))

    assert_contains(resp, "<li>%s</li>" % filename)

    query_string = "sha1_git:" + content["sha1_git"]

    url_raw = reverse(
        "browse-content-raw",
        url_args={"query_string": query_string},
        query_params={"filename": filename},
    )
    assert_contains(resp, url_raw)

    if "path" in query_params:
        del query_params["path"]

    # branches and releases dropdowns
    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(resp, f'href="{escape(origin_branches_url)}"')
    assert_contains(resp, f"Branches ({snapshot_sizes['revision']})")

    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    assert_contains(resp, f'href="{escape(origin_releases_url)}">')
    assert_contains(resp, f"Releases ({snapshot_sizes['release']})")

    assert_contains(resp, '<li class="swh-branch">', count=len(origin_branches))

    query_params["path"] = content_path

    for branch in origin_branches:
        root_dir_branch_url = reverse(
            "browse-origin-content",
            query_params={"branch": branch["name"], **query_params},
        )

        assert_contains(resp, '<a href="%s">' % root_dir_branch_url)

    assert_contains(resp, '<li class="swh-release">', count=len(origin_releases))

    query_params["branch"] = None
    for release in origin_releases:
        root_dir_release_url = reverse(
            "browse-origin-content",
            query_params={"release": release["name"], **query_params},
        )

        assert_contains(resp, '<a href="%s">' % root_dir_release_url)

    url = reverse("browse-origin-content", query_params=query_params)

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/content.html"
    )

    # SWHID of the browsed content, qualified with its browsing context
    snapshot = archive_data.snapshot_get(origin_visit["snapshot"])
    head_rev_id = archive_data.snapshot_get_head(snapshot)

    swhid_context = {
        "origin": origin_info["url"],
        "visit": gen_swhid(SNAPSHOT, snapshot["id"]),
        "anchor": gen_swhid(REVISION, head_rev_id),
        "path": f"/{content_path}",
    }

    swh_cnt_id = gen_swhid(CONTENT, content["sha1_git"], metadata=swhid_context)
    swh_cnt_id_url = reverse("browse-swhid", url_args={"swhid": swh_cnt_id})
    assert_contains(resp, swh_cnt_id)
    assert_contains(resp, swh_cnt_id_url)

    assert_contains(resp, "swh-take-new-snapshot")

    _check_origin_link(resp, origin_info["url"])

    assert_not_contains(resp, "swh-metadata-popover")

def _directory_view_checks(
    client,
    root_directory_sha1,
    directory_entries,
    path=None,
    origin_url=None,
    snapshot_id=None,
    revision_id=None,
):
    dirs = [e for e in directory_entries if e["type"] in ("dir", "rev")]
    files = [e for e in directory_entries if e["type"] == "file"]

    url_args = {"sha1_git": root_directory_sha1}
    query_params = {"origin_url": origin_url, "snapshot": snapshot_id}

    url = reverse(
        "browse-directory",
        url_args=url_args,
        query_params={**query_params, "path": path},
    )

    root_dir_url = reverse(
        "browse-directory",
        url_args=url_args,
        query_params=query_params,
    )

    resp = check_html_get_response(
        client, url, status_code=200, template_used="browse/directory.html"
    )

    assert_contains(
        resp,
        '<a href="' + root_dir_url + '">' + root_directory_sha1[:7] + "</a>",
    )

    # one table row per sub-directory / revision entry and one per file entry
    assert_contains(resp, '<td class="swh-directory">', count=len(dirs))
    assert_contains(resp, '<td class="swh-content">', count=len(files))

    for d in dirs:
        if d["type"] == "rev":
            dir_url = reverse("browse-revision", url_args={"sha1_git": d["target"]})
        else:
            dir_path = d["name"]
            if path:
                dir_path = "%s/%s" % (path, d["name"])
            dir_url = reverse(
                "browse-directory",
                url_args={"sha1_git": root_directory_sha1},
                query_params={**query_params, "path": dir_path},
            )
        assert_contains(resp, dir_url)

    for f in files:
        file_path = "%s/%s" % (root_directory_sha1, f["name"])
        if path:
            file_path = "%s/%s/%s" % (root_directory_sha1, path, f["name"])
        query_string = "sha1_git:" + f["target"]
        file_url = reverse(
            "browse-content",
            url_args={"query_string": query_string},
            query_params={**query_params, "path": file_path},
        )
        assert_contains(resp, file_url)

    # breadcrumbs
    path_info = gen_path_info(path)

    assert_contains(resp, '<li class="swh-path">', count=len(path_info) + 1)
    assert_contains(
        resp, '<a href="%s">%s</a>' % (root_dir_url, root_directory_sha1[:7])
    )

    for p in path_info:
        dir_url = reverse(
            "browse-directory",
            url_args={"sha1_git": root_directory_sha1},
            query_params={**query_params, "path": p["path"]},
        )
        assert_contains(resp, '<a href="%s">%s</a>' % (dir_url, p["name"]))

    assert_contains(resp, "vault-cook-directory")

    # SWHID of the browsed directory, qualified with its browsing context
    swhid_context = {}
    if origin_url:
        swhid_context["origin"] = origin_url
    if snapshot_id:
        swhid_context["visit"] = gen_swhid(SNAPSHOT, snapshot_id)
    if root_directory_sha1 != directory_entries[0]["dir_id"]:
        swhid_context["anchor"] = gen_swhid(DIRECTORY, root_directory_sha1)
    if revision_id:
        swhid_context["anchor"] = gen_swhid(REVISION, revision_id)
    swhid_context["path"] = f"/{path}/" if path else None

    swh_dir_id = gen_swhid(
        DIRECTORY, directory_entries[0]["dir_id"], metadata=swhid_context
    )
    swh_dir_id_url = reverse("browse-swhid", url_args={"swhid": swh_dir_id})
    assert_contains(resp, swh_dir_id)
    assert_contains(resp, swh_dir_id_url)

    assert_not_contains(resp, "swh-metadata-popover")
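

# Minimal usage sketch (not part of the original module): a test exercising
# _directory_view_checks on a root directory. The "directory" fixture and the
# archive_data.directory_ls() helper are assumptions inferred from how the other
# helpers in this module consume archive_data; adjust to the fixtures actually
# available in the test suite.
def test_root_directory_view_sketch(client, archive_data, directory):
    _directory_view_checks(client, directory, archive_data.directory_ls(directory))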