def discover_webmention_endpoint(url: str) -> Optional[str]:
    """Discover the Webmention endpoint of a given URL, if any.

    Passes all the tests at https://webmention.rocks!

    Returns the endpoint URL, or ``None`` when no endpoint is advertised
    or the advertised endpoint fails validation (private IP, bad scheme, ...).
    """
    endpoint = _discover_webmention_endoint(url)
    # Reject both "no endpoint found" and "endpoint found but unsafe/invalid".
    if endpoint is None or not is_url_valid(endpoint):
        return None
    return endpoint
def links_from_note(note):
    """Extract outgoing HTTP(S) links from a note's HTML content.

    URLs already referenced in the note's ``tag`` list (mentions/hashtags)
    are excluded, as are URLs rejected by ``is_url_valid``.

    Returns a set of link URLs (strings).
    """
    tags_href = set()
    for t in note.get("tag", []):
        h = t.get("href")
        if h:
            tags_href.add(h)

    links = set()
    # Explicit parser: omitting it triggers a bs4 warning and makes the
    # result depend on which parsers happen to be installed.
    soup = BeautifulSoup(note["content"], "html5lib")
    for link in soup.find_all("a"):
        h = link.get("href")
        # An <a> without an href (e.g. a named anchor) yields None here;
        # guard before calling startswith to avoid an AttributeError.
        if h and h.startswith("http") and h not in tags_href and is_url_valid(h):
            links.add(h)

    return links
def links_from_note(note: Dict[str, Any]) -> Set[str]:
    """Extract outgoing HTTP(S) links from a note's HTML content.

    Links pointing back at the note's own host are skipped, as are URLs
    rejected by ``is_url_valid``.

    Returns a set of link URLs.
    """
    note_host = urlparse(ap._get_id(note["id"]) or "").netloc

    links = set()
    if "content" in note:
        soup = BeautifulSoup(note["content"], "html5lib")
        for link in soup.find_all("a"):
            h = link.get("href")
            if not h:
                # <a> without an href (e.g. a named anchor); urlparse(None)
                # would raise TypeError, so skip it.
                continue
            ph = urlparse(h)
            if (
                ph.scheme in {"http", "https"}
                and ph.netloc != note_host
                and is_url_valid(h)
            ):
                links.add(h)

    # FIXME(tsileo): support summary and name fields
    return links
def links_from_note(note):
    """Extract outgoing HTTP(S) links from a note's HTML content.

    URLs already referenced in the note's ``tag`` list (mentions/hashtags)
    are excluded, as are URLs rejected by ``is_url_valid``. Notes without
    a ``content`` field yield an empty set.

    Returns a set of link URLs (strings).
    """
    tags_href = set()
    for t in note.get("tag", []):
        h = t.get("href")
        if h:
            tags_href.add(h)

    links = set()
    if "content" in note:
        # Explicit parser: omitting it triggers a bs4 warning and makes the
        # result depend on which parsers happen to be installed.
        soup = BeautifulSoup(note["content"], "html5lib")
        for link in soup.find_all("a"):
            h = link.get("href")
            # An <a> without an href yields None here; guard before
            # calling startswith to avoid an AttributeError.
            if h and h.startswith("http") and h not in tags_href and is_url_valid(h):
                links.add(h)

    # FIXME(tsileo): support summary and name fields
    return links
def test_urlutils_reject_invalid_scheme():
    """Non-HTTP(S) schemes (here FTP) must be rejected."""
    ftp_url = "ftp://localhost:123"
    assert not urlutils.is_url_valid(ftp_url)
def test_urlutils_accept_valid_url(_):
    """A plain public HTTPS URL must be accepted."""
    public_url = "https://microblog.pub"
    assert urlutils.is_url_valid(public_url)
def test_urlutils_reject_domain_that_resolve_to_private_ip(_):
    """A hostname resolving to a private address must be rejected (SSRF guard)."""
    sneaky_url = "http://resolve-to-private.com"
    assert not urlutils.is_url_valid(sneaky_url)
def test_urlutils_reject_private_ip():
    """A literal RFC 1918 private address must be rejected (SSRF guard)."""
    private_url = "http://192.168.1.10:8000"
    assert not urlutils.is_url_valid(private_url)
def test_urlutils_reject_localhost():
    """Loopback hosts must be rejected (SSRF guard)."""
    loopback_url = "http://localhost:8000"
    assert not urlutils.is_url_valid(loopback_url)