import requests
from io import BytesIO

import matplotlib.pyplot as plt
import streamlit as st
from linkpreview import link_preview
from matplotlib.offsetbox import AnnotationBbox, OffsetImage
from PIL import Image


def get_display_image(liste_url, listenote, listenbnote):
    liste_image = []
    for url in liste_url:
        # scrape each page's preview image and collect the resized images in a list
        preview = link_preview(url)
        link_image = preview.image
        response = requests.get(link_image)
        image = Image.open(BytesIO(response.content))
        # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter
        image = image.resize((170, 290), Image.LANCZOS)
        liste_image.append(image)
    fig, ax = plt.subplots(1, 5, figsize=(25, 6))  # create a 1x5 figure
    for i in range(len(liste_image)):
        # place each image of the list in its own subplot
        imagebox = OffsetImage(liste_image[i], zoom=1)
        ab = AnnotationBbox(imagebox, (0.5, 0.5))
        ax[i].add_artist(ab)
        ax[i].axis('off')
        # title to insert in each subplot
        Letitre = ("Note: " + str(round(listenote[i], 1)) + "/5 pour "
                   + str(listenbnote[i]) + " note(s)")
        ax[i].set_title(Letitre, fontdict={'fontsize': 13, 'fontweight': 'bold'})
    return st.pyplot(fig)  # the result is rendered as a Streamlit figure

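# Usage sketch (inside a Streamlit app): the 1x5 grid above is hardcoded, so the
# function expects exactly five URLs plus matching rating lists. All values below
# are illustrative placeholders, not data from the original project.
get_display_image(
    liste_url=[
        "https://example.com/book-1",
        "https://example.com/book-2",
        "https://example.com/book-3",
        "https://example.com/book-4",
        "https://example.com/book-5",
    ],
    listenote=[4.2, 3.8, 4.9, 2.5, 3.0],
    listenbnote=[120, 45, 310, 12, 77],
)
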
def _fetch_preview(self):
    try:
        preview = linkpreview.link_preview(self.link)
    except Exception:
        logging.warning(f"could not get preview for {self.link}")
        return
    # hand the result back to the GLib main loop
    GLib.idle_add(self._display_preview, preview)

async def view(ctx, link, number):
    embed = discord.Embed(
        title=f'Gave {number} views to the {link}',
        color=discord.Color.from_rgb(68, 128, 123),
        timestamp=datetime.datetime.now(datetime.timezone.utc),
    )
    # embed image: the preview image extracted from the fetched page
    bot.foot = link_preview(link, requests.get(link).content.decode("utf-8")).image
    embed.set_image(url=bot.foot)
    bot.log_channel = bot.get_channel(log_channel_id)
    # hit the link `number` times
    for num in range(int(number)):
        requests.get(link)
        print(num)
    await bot.log_channel.send(embed=embed)

from linkpreview import link_preview


def link2md(url):
    """Render a URL as a Markdown blockquote, with preview metadata when available."""
    body = ""
    try:
        preview = link_preview(url)
        body += '> **' + preview.title + '**' + '\n'
        body += '> <img src="' + preview.image + '" height="64" style="float:left"/>'
        body += ' ' + preview.description + '\n'
    except Exception:
        pass
    body += '> ' + url + '\n'
    return body

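# Usage sketch for link2md (the URL is illustrative). When the preview lookup
# fails or the page lacks usable metadata, the function degrades to a bare
# "> <url>" line; otherwise the blockquote carries title, thumbnail and description.
print(link2md("https://example.com/"))
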
def main(article_url, is_colored=True):
    domain = urlparse(article_url).netloc
    page = requests.get(article_url).text
    for link in links_in_page(page, domain):
        try:
            preview = linkpreview.link_preview(link)
            if preview.title in titles_to_ignore:
                continue
            title = preview.title
        except requests.HTTPError:
            title = "Untitled"
        print_link(title, link, is_colored)

def linkpreview(url):
    preview = link_preview(url)
    return jsonify({
        'success': 'true',
        'title': preview.title,
        'description': preview.description,
        'imgurl': preview.image,
        'url': url
    })

def test_preview():
    data = get_top_stories()
    for (i, id) in enumerate(data):
        print('\n')
        print('########################################')
        if i == 5:
            break
        story = get_story_from_id(id)
        print("Title: {} - by: {} - score: {} - URL: {}".format(
            story['title'], story['by'], story['score'], story['url']))
        # print the same story's metadata via both link_preview and web_preview for comparison
        preview = link_preview(story['url'])
        print("title: {} - description: {} - image: {}".format(
            preview.title, preview.description, preview.image))
        title, description, image = web_preview(story['url'], parser='lxml')
        print("title: {} - description: {} - image: {}".format(
            title, description, image))

import requests
from bs4 import BeautifulSoup
from linkpreview import link_preview


def extract_meta_of_webpage(url: str):
    if not url.startswith("http://") and not url.startswith("https://"):
        url = f"https://{url.strip()}"
    r = requests.get(url)
    r.raise_for_status()
    preview_text = ""
    try:
        preview = link_preview(content=r.text.encode())
        preview_text = (
            f"{preview.force_title if preview.force_title else ''}\n"
            f"{preview.description if preview.description else ''}"
        )
    except Exception:
        pass
    # fall back to raw <meta> tags when linkpreview yields (almost) nothing
    if len(preview_text) < 8:
        soup = BeautifulSoup(r.text, features="lxml")
        metas = soup.find_all('meta')
        preview_text = "\n".join([
            meta.attrs['content'] for meta in metas
            if 'name' in meta.attrs and meta.attrs['name'] in
            ['description', 'twitter:description', 'twitter:title']
        ])
    return preview_text

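# Usage sketch for extract_meta_of_webpage (illustrative domain): the helper adds
# the https:// scheme itself, and raise_for_status() propagates HTTP errors to the
# caller, so a small wrapper like this is one reasonable way to call it.
try:
    print(extract_meta_of_webpage("example.com"))
except requests.HTTPError as exc:
    print(f"request failed: {exc}")
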
def url_reader(url):
    utils.debug("Getting preview")
    p = link_preview(url)
    return [f"{p.title}: ", p.description]

def test_link_preview(httpserver: HTTPServer):
    httpserver.expect_request("/preview1").respond_with_data(
        get_sample("twitter-card/with-image.html"),
        headers={"content-type": "text/html"},
    )
    httpserver.expect_request("/preview2").respond_with_data(
        get_sample("generic/h1-img.html"),
        headers={"content-type": "text/html"},
    )
    httpserver.expect_request("/preview-3.json").respond_with_data(
        '{}',
        headers={"content-type": "application/json"},
    )

    url = httpserver.url_for("/preview1")
    preview = link_preview(url)
    assert preview.title == "a title"
    assert preview.force_title == "a title"
    assert preview.description is None
    assert preview.image == "/img/heck.jpg"
    assert preview.absolute_image == "%s%s" % (url, preview.image)

    url = httpserver.url_for("/preview2")
    preview = link_preview(url)
    assert preview.title == "This title is from the first h1 tag."
    assert preview.description is None

    preview = link_preview("http://*****:*****@abc.com/the-bunny(720p)", content="OK")
    assert preview.force_title == "abc.com/the-bunny(720p)"

    preview = link_preview("https://192.168.1.1", content="OK")
    assert preview.force_title == "192.168.1.1"

    preview = link_preview("https://192.168.1.1:9696", content="OK")
    assert preview.force_title == "192.168.1.1:9696"

    preview = link_preview(httpserver.url_for('/preview-3.json'))
    assert preview.title is None
    assert preview.description is None
    assert preview.image is None
    assert preview.absolute_image is None
    assert preview.force_title == "Preview 3"

from linkpreview import link_preview


def get_preview(url):
    try:
        preview = link_preview(url)
    except Exception:
        return {'error': 'no preview available'}
    return {'title': preview.title, 'image': preview.absolute_image}

import os
import time

import django
from linkpreview import link_preview
# assumed import: gn.search() returning a dict with an "entries" key matches pygooglenews
from pygooglenews import GoogleNews

gn = GoogleNews(lang='ar')
keyword_results = gn.search('أندرويد')  # Arabic keyword: "Android"
data = keyword_results['entries']

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'news_feed.settings')
django.setup()
# the model can only be imported once Django is set up
from articles.models import article as classy

all_news = []
test_counter = 0
for i in data:
    try:
        preview = link_preview(i.link)
        components = {'title': i.title, 'link': i.link, 'image': preview.image}
        if str(components['image']) == 'None':
            # skip entries without a preview image
            pass
        else:
            new_data = classy(title=components['title'],
                              link=components['link'],
                              image=components['image'])
            new_data.save()
            print('\n')
            time.sleep(20)
    except Exception:
        pass

from linkpreview import link_preview

url = "http://localhost"
content = """
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width">
    <!-- ... -->
    <title>a title</title>
</head>
<body>
    <!-- ... -->
</body>
</html>
"""
preview = link_preview(url, content)
print("title:", preview.title)
print("description:", preview.description)
print("image:", preview.image)
print("force_title:", preview.force_title)
print("absolute_image:", preview.absolute_image)

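# A minimal follow-up sketch: when no `content` argument is given, link_preview
# fetches the page itself, which is how most of the snippets above use it.
# The URL is illustrative.
preview = link_preview("http://example.com/")
print("title:", preview.title)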